repo_name (stringlengths 5–114) | repo_url (stringlengths 24–133) | snapshot_id (stringlengths 40–40) | revision_id (stringlengths 40–40) | directory_id (stringlengths 40–40) | branch_name (stringclasses, 209 values) | visit_date (timestamp[ns]) | revision_date (timestamp[ns]) | committer_date (timestamp[ns]) | github_id (int64, 9.83k–683M, ⌀) | star_events_count (int64, 0–22.6k) | fork_events_count (int64, 0–4.15k) | gha_license_id (stringclasses, 17 values) | gha_created_at (timestamp[ns]) | gha_updated_at (timestamp[ns]) | gha_pushed_at (timestamp[ns]) | gha_language (stringclasses, 115 values) | files (listlengths 1–13.2k) | num_files (int64, 1–13.2k) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
nicole2kim/calculator | https://github.com/nicole2kim/calculator | 61f1b4b6ac5178a0353b594ae5a2a111eb5d6696 | 1211e85bc7aa2104e2c3ee0f461b96fefd26c241 | bfaf766f085594ea219054b2325d93d3dc4f4f92 | refs/heads/main | 2023-04-23T14:34:52.101013 | 2021-05-13T07:00:07 | 2021-05-13T07:00:07 | 366,958,917 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6310241222381592,
"alphanum_fraction": 0.6325300931930542,
"avg_line_length": 26.70833396911621,
"blob_id": "7ec4ccf359750560f86b7c0c6c6298ab4eb64f85",
"content_id": "138a261723ebf613d35717cd8bb68f250788d1e8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 664,
"license_type": "no_license",
"max_line_length": 62,
"num_lines": 24,
"path": "/calculator1/views.py",
"repo_name": "nicole2kim/calculator",
"src_encoding": "UTF-8",
"text": "from django.shortcuts import render\n\ndef calculate(request):\n return render(request, \"calculate.html\")\n\ndef result(request):\n firstNumber = int(request.GET['firstnumber'])\n secondNumber = int(request.GET['secondnumber'])\n select = request.GET['select']\n\n \n if select == '+':\n result=firstNumber+secondNumber\n if select == '-':\n result=firstNumber-secondNumber\n if select== '*':\n result=firstNumber*secondNumber\n if select== '/':\n if secondNumber==0:\n result=\"division by zero\"\n else:\n result=firstNumber/secondNumber\n\n return render(request, 'result.html', {'result' : result})"
}
] | 1 |
cami20/calculator-2 | https://github.com/cami20/calculator-2 | 56b9061486e15560d7d4c47639ece3a037c62db7 | dab05e26705db2b2a2ed4754f5e36cb26c3b9e71 | 9d762da912c0a9daf627727a17bdbf56b1f8e2c0 | refs/heads/master | 2021-01-19T04:25:37.919921 | 2017-04-06T19:56:04 | 2017-04-06T19:56:04 | 87,370,454 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6008037328720093,
"alphanum_fraction": 0.6255860924720764,
"avg_line_length": 20.955883026123047,
"blob_id": "4ee94f029ba710b684ff9e05721c2fc3382e19c4",
"content_id": "37df12066c893df3c75b244636f229c73bc5a16c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1493,
"license_type": "no_license",
"max_line_length": 72,
"num_lines": 68,
"path": "/arithmetic.py",
"repo_name": "cami20/calculator-2",
"src_encoding": "UTF-8",
"text": "def add(num1, num2):\n \"\"\"Return the sum of two numbers\"\"\"\n return num1 + num2\n\ndef add_list(num):\n \"\"\"Return the sum of a list of numbers\"\"\"\n total = 0\n for number in num:\n total += int(number)\n return total\n\ndef subtract(num1, num2):\n \"\"\"Return the difference of two numbers\"\"\"\n return num1 - num2\n\ndef subtract_list(num):\n \"\"\"Return the difference of a list of numbers\"\"\"\n total = int(num[0]) * 2\n for number in num:\n total -= int(number)\n return total\n\n\ndef multiply(num1, num2):\n \"\"\"Return the product of two numbers\"\"\"\n return num1 * num2\n\ndef multiply_list(num):\n \"\"\"Return the product of a list of numbers\"\"\"\n total = 1\n for number in num:\n total *= int(number)\n return total\n\n\ndef divide(num1, num2):\n \"\"\"Return the quotient of two numbers as a float\"\"\"\n return float(num1) / num2\n\n\ndef square(num):\n \"\"\"Return the square of a number\"\"\"\n return num * num\n\n\ndef cube(num):\n \"\"\"Return the cube of a number\"\"\"\n return num ** 3\n\n\ndef power(num, exponent):\n \"\"\"Return num raised to the power of exponent\"\"\"\n return num ** exponent\n\n\ndef mod(num1, num2):\n \"\"\"Return remainder of num1 divided by num2\"\"\"\n return num1 % num2\n\n\ndef add_mult(num1, num2, num3):\n \"\"\"Return sum of the first two numbers multiplied by third number\"\"\"\n return multiply(add(num1, num2), num3)\n\n\ndef add_cubes(num1, num2):\n \"\"\"Return sum of cube of both numbers\"\"\"\n return add(cube(num1), cube(num2))\n"
},
{
"alpha_fraction": 0.5381097793579102,
"alphanum_fraction": 0.5493902564048767,
"avg_line_length": 35.85393142700195,
"blob_id": "4129954ee66952f31c23bb5641d568d3f736daba",
"content_id": "8ec2a438fe95f611aba90588bb067ed767d90012",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3280,
"license_type": "no_license",
"max_line_length": 106,
"num_lines": 89,
"path": "/calculator_file.py",
"repo_name": "cami20/calculator-2",
"src_encoding": "UTF-8",
"text": "\"\"\"A prefix-notation calculator.\n\nUsing the arithmetic.py file from Calculator Part 1, create the\ncalculator program yourself in this file.\n\nTakes inputs from file and outputs to the same file\n\"\"\"\n\nfrom arithmetic import *\n\ninput_file = raw_input(\"What file would you like to open: \")\noutput_file = raw_input(\"What file would you like to write into: \")\n\nmath_data = open(input_file)\nanswers_data = open(output_file, 'r+')\n\n# Your code goes here\n# No setup\n# Iterates through each line of the text file:\nfor calc in math_data:\n # Splits each line of text in file into a list of its parts\n input_string = calc.split(\" \")\n\n # try:\n # decide which math function to call based on first token\n if input_string[0] == \"+\":\n if len(input_string) < 3:\n print \"I don't understand\"\n else:\n num_add = str(add_list(input_string[1:]))\n answers_data.write(num_add + \"\\n\")\n elif input_string[0] == \"-\":\n if len(input_string) < 3:\n print \"I don't understand\"\n else:\n num_substract = str(subtract_list(input_string[1:]))\n answers_data.write(num_substract + \"\\n\")\n elif input_string[0] == \"*\":\n if len(input_string) < 3:\n print \"I don't understand\"\n else:\n num_mult = str(multiply_list(input_string[1:]))\n answers_data.write(num_mult + \"\\n\")\n elif input_string[0] == \"/\":\n if len(input_string) > 3:\n print \"Too many inputs :(\"\n else:\n num_divide = str(divide(int(input_string[1]), int(input_string[2])))\n answers_data.write(num_divide + \"\\n\")\n elif input_string[0] == \"square\":\n if len(input_string) > 2:\n print \"Too many inputs :(\"\n else:\n num_square = str(square(int(input_string[1])))\n answers_data.write(num_square + \"\\n\")\n elif input_string[0] == \"cube\":\n if len(input_string) > 2:\n print \"Too many inputs\"\n else:\n num_cube = str(cube(int(input_string[1])))\n answers_data.write(num_cube + \"\\n\")\n elif input_string[0] == \"pow\":\n if len(input_string) > 3:\n print \"Too many inputs\"\n else:\n num_power = str(power(int(input_string[1]), int(input_string[2])))\n answers_data.write(num_power + \"\\n\")\n elif input_string[0] == \"mod\":\n if len(input_string) > 3:\n print \"Too many inputs\"\n else:\n num_mod = str(mod(int(input_string[1]), int(input_string[2])))\n answers_data.write(num_mod + \"\\n\")\n elif input_string[0] == \"x+\":\n if len(input_string) > 4:\n print \"Too many inputs\"\n else:\n num_add_mult = str(add_mult(int(input_string[1]), int(input_string[2]), int(input_string[3])))\n answers_data.write(num_add_mult + \"\\n\")\n elif input_string[0] == \"cubes+\":\n if len(input_string) > 3:\n print \"Too many inputs\"\n else:\n num_add_cubes = str(add_cubes(int(input_string[1]), int(input_string[2])))\n answers_data.write(num_add_cubes + \"\\n\")\n else:\n print \"I do not understand.\"\n # except:\n # print \"I do not understand\"\n"
},
{
"alpha_fraction": 0.43790850043296814,
"alphanum_fraction": 0.45098039507865906,
"avg_line_length": 36.269229888916016,
"blob_id": "4b263f6d53b6a0a58611c8ef8765f309144b2454",
"content_id": "29a4edf3c34a8cbf2f63a2968240e8ae0bdd348e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2907,
"license_type": "no_license",
"max_line_length": 100,
"num_lines": 78,
"path": "/calculator.py",
"repo_name": "cami20/calculator-2",
"src_encoding": "UTF-8",
"text": "\"\"\"A prefix-notation calculator.\n\nUsing the arithmetic.py file from Calculator Part 1, create the\ncalculator program yourself in this file.\n\"\"\"\n\nfrom arithmetic import *\n\n# Your code goes here\n# No setup\n# repeat forever:\nwhile True:\n # read input\n input = raw_input(\"> \")\n # tokenize input\n input_string = input.split(\" \")\n # if the first token is \"q\":\n if input_string[0] == \"q\":\n # quit\n break\n # else:\n else:\n try:\n # decide which math function to call based on first token\n if input_string[0] == \"+\":\n if len(input_string) < 3:\n print \"I don't understand\"\n else:\n print add_list(input_string[1:])\n elif input_string[0] == \"-\":\n if len(input_string) < 3:\n print \"I don't understand\"\n else:\n print subtract_list(input_string[1:])\n elif input_string[0] == \"*\":\n if len(input_string) < 3:\n print \"I don't understand\"\n else:\n print multiply_list(input_string[1:])\n elif input_string[0] == \"/\":\n if len(input_string) > 3:\n print \"Too many inputs :(\"\n else:\n print divide(int(input_string[1]), int(input_string[2]))\n elif input_string[0] == \"square\":\n if len(input_string) > 2:\n print \"Too many inputs :(\"\n else:\n print square(int(input_string[1]))\n elif input_string[0] == \"cube\":\n if len(input_string) > 2:\n print \"Too many inputs\"\n else:\n print cube(int(input_string[1]))\n elif input_string[0] == \"pow\":\n if len(input_string) > 3:\n print \"Too many inputs\"\n else:\n print power(int(input_string[1]), int(input_string[2]))\n elif input_string[0] == \"mod\":\n if len(input_string) > 3:\n print \"Too many inputs\"\n else:\n print mod(int(input_string[1]), int(input_string[2]))\n elif input_string[0] == \"x+\":\n if len(input_string) > 4:\n print \"Too many inputs\"\n else:\n print add_mult(int(input_string[1]), int(input_string[2]), int(input_string[3]))\n elif input_string[0] == \"cubes+\":\n if len(input_string) > 3:\n print \"Too many inputs\"\n else:\n print add_cubes(int(input_string[1]), int(input_string[2]))\n else:\n print \"I do not understand.\"\n except:\n print \"I do not understand\"\n"
}
] | 3 |
eugenio114/stock-market-data | https://github.com/eugenio114/stock-market-data | a9510c8b8f8b1730b8f317170e3e619aec93abbe | d40b3c3d6c57c8669d714511e8e8e3f70d932395 | d68f696ff93b35c3dcdcdf2187208d9ec2bfc753 | refs/heads/main | 2023-05-15T02:36:42.682156 | 2021-06-09T09:59:19 | 2021-06-09T09:59:19 | 373,855,176 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6391659379005432,
"alphanum_fraction": 0.659111499786377,
"avg_line_length": 27.052631378173828,
"blob_id": "4a89c03bdced49e0cd85f985027e2ac3d12fd0b6",
"content_id": "ae55bcd0268c42ec9b910739ae997df64417b092",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2206,
"license_type": "no_license",
"max_line_length": 103,
"num_lines": 76,
"path": "/backtesting_file.py",
"repo_name": "eugenio114/stock-market-data",
"src_encoding": "UTF-8",
"text": "import pandas as pd\r\nimport numpy as np\r\nimport datetime\r\nimport matplotlib.pyplot as plt\r\nfrom pandas_datareader import data as pdr\r\nimport yfinance\r\nimport datetime\r\nfrom scipy.stats import gaussian_kde\r\nfrom list_stocks import stocks_list, short_list, aapl\r\n\r\nticker = ['A']\r\n\r\nstock = pdr.get_data_yahoo(ticker, start=datetime.datetime(2017, 12, 1), end=datetime.date.today())\r\n\r\n# Initialize the short and long windows\r\nshort_window = 40\r\nlong_window = 100\r\n\r\n# Initialize the `signals` DataFrame with the `signal` column\r\nsignals = pd.DataFrame(index=stock.index)\r\n\r\n#signals['Code'] = stock['Ticker']\r\nsignals['signal'] = 0.0\r\n\r\n# Create short simple moving average over the short window\r\nsignals['short_mavg'] = stock['Close'].rolling(window=short_window, min_periods=1, center=False).mean()\r\n\r\n# Create long simple moving average over the long window\r\nsignals['long_mavg'] = stock['Close'].rolling(window=long_window, min_periods=1, center=False).mean()\r\n\r\n# Create signals\r\nsignals['signal'][short_window:] = np.where(signals['short_mavg'][short_window:]\r\n > signals['long_mavg'][short_window:], 1.0, 0.0)\r\n\r\n# Generate trading orders\r\nsignals['positions'] = signals['signal'].diff()\r\n\r\n\r\nprint(stock)\r\nprint(signals)\r\n\r\n# Initialize the plot figure\r\nfig = plt.figure()\r\n\r\n# Add a subplot and label for y-axis\r\nax1 = fig.add_subplot(111, ylabel='Price in $')\r\n\r\n# Plot the closing price\r\nstock['Close'].plot(ax=ax1, color='r', lw=2.)\r\n\r\n# Plot the short and long moving averages\r\nsignals[['short_mavg', 'long_mavg']].plot(ax=ax1, lw=2.)\r\n\r\n# Plot the buy signals\r\nax1.plot(signals.loc[signals.positions == 1.0].index,\r\n signals.short_mavg[signals.positions == 1.0],\r\n '^', markersize=10, color='b')\r\n\r\n# Plot the sell signals\r\nax1.plot(signals.loc[signals.positions == -1.0].index,\r\n signals.short_mavg[signals.positions == -1.0],\r\n 'v', markersize=10, color='y')\r\n\r\nstock['Diff_Open_Close'] = stock.Open - stock.Close\r\n\r\nfont = {'family': 'sanserif',\r\n 'color': 'black',\r\n 'weight': 'normal',\r\n 'size': 16,\r\n }\r\n\r\nplt.title(ticker, fontdict=font)\r\n\r\n# Show the plot\r\nplt.show()\r\nprint(stock)"
},
{
"alpha_fraction": 0.6278020143508911,
"alphanum_fraction": 0.6432129740715027,
"avg_line_length": 42.93706130981445,
"blob_id": "2a6e062113ac01ba271e99260518f0ea864bba8d",
"content_id": "535666392b0e35809f30cf79425acc1f8527d17c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 6424,
"license_type": "no_license",
"max_line_length": 165,
"num_lines": 143,
"path": "/stocktwits-data-extraction.py",
"repo_name": "eugenio114/stock-market-data",
"src_encoding": "UTF-8",
"text": "import time\r\nimport os\r\nimport pandas as pd\r\nfrom datetime import datetime\r\nfrom selenium import webdriver\r\nfrom selenium.webdriver.chrome.options import Options\r\nfrom selenium.webdriver.common.keys import Keys\r\n\r\nstartTime = datetime.now()\r\n\r\nchromedriver = '' #add chomedriver path here\r\n\r\nchrome_options = Options()\r\nchrome_options.add_argument(\"headless\")\r\n\r\n#runs the browser in the background\r\ndriver = webdriver.Chrome(chromedriver, options=chrome_options, keep_alive=True)\r\n\r\n\r\n\r\n#_______TRENDING STOCKS ON STOCKTWITS__https://stocktwits.com/______#\r\ndriver.get(\"https://stocktwits.com/rankings/trending\")\r\n\r\nt_def = []\r\nt_symbol_list = []\r\nt_name_list = []\r\nt_score_list = []\r\nt_price_list = []\r\nt_price_pc_change_list = []\r\n\r\nfor i in range (1, 10):\r\n\r\n t_def.append('Trending')\r\n\r\n t_symbol_list_path = f'''//*[@id=\"app\"]/div/div/div[2]/div/div/div/div[2]/div/div/div[2]/div/div/div/div[3]/div[1]/div/table/tbody/tr[{i}]/td[2]/span'''\r\n t_symbol = driver.find_element_by_xpath(t_symbol_list_path)\r\n t_symbol_list.append(t_symbol.text)\r\n\r\n t_name_list_path = f'''//*[@id=\"app\"]/div/div/div[2]/div/div/div/div[2]/div/div/div[2]/div/div/div/div[3]/div[1]/div/table/tbody/tr[{i}]/td[3]/span'''\r\n t_name = driver.find_element_by_xpath(t_name_list_path)\r\n t_name_list.append(t_name.text)\r\n\r\n t_score_list_path = f'''//*[@id=\"app\"]/div/div/div[2]/div/div/div/div[2]/div/div/div[2]/div/div/div/div[3]/div[1]/div/table/tbody/tr[{i}]/td[4]'''\r\n t_score = driver.find_element_by_xpath(t_score_list_path)\r\n t_score_list.append(t_score.text)\r\n\r\n t_price_list_path = f'''//*[@id=\"app\"]/div/div/div[2]/div/div/div/div[2]/div/div/div[2]/div/div/div/div[3]/div[1]/div/table/tbody/tr[{i}]/td[5]'''\r\n t_price = driver.find_element_by_xpath(t_price_list_path)\r\n t_price_list.append(t_price.text)\r\n\r\n t_price_pc_change_list_path = f'''//*[@id=\"app\"]/div/div/div[2]/div/div/div/div[2]/div/div/div[2]/div/div/div/div[3]/div[1]/div/table/tbody/tr[{i}]/td[6]/span'''\r\n t_price_pc_change = driver.find_element_by_xpath(t_price_pc_change_list_path)\r\n t_price_pc_change_list.append(t_price_pc_change.text)\r\n\r\ntrending = list(zip(t_symbol_list, t_name_list, t_score_list, t_price_list, t_price_pc_change_list))\r\n\r\n\r\ndf = pd.DataFrame(trending, columns= ['symbol', 'name', 'score', 'price', 'price_pc_change'])\r\ndf.to_csv('stocktwits_trending.csv', index=False)\r\nprint(f'''Execution Time: {datetime.now()-startTime}''')\r\nprint(trending)\r\n\r\n\r\n# _______MESSAGES STOCKS ON STOCKTWITS__https://stocktwits.com/______#\r\ndriver.get(\"https://stocktwits.com/rankings/messages\")\r\n\r\nm_def = []\r\nm_symbol_list = []\r\nm_name_list = []\r\nm_count_list = []\r\nm_price_list = []\r\nm_price_pc_change_list = []\r\n\r\nfor i in range(1, 10):\r\n t_def.append('Messages')\r\n\r\n m_symbol_list_path = f'''//*[@id=\"app\"]/div/div/div[2]/div/div/div/div[2]/div/div/div[2]/div/div/div/div[3]/div[1]/div/table/tbody/tr[{i}]/td[2]/span'''\r\n m_symbol = driver.find_element_by_xpath(m_symbol_list_path)\r\n m_symbol_list.append(m_symbol.text)\r\n\r\n m_name_list_path = f'''//*[@id=\"app\"]/div/div/div[2]/div/div/div/div[2]/div/div/div[2]/div/div/div/div[3]/div[1]/div/table/tbody/tr[{i}]/td[3]/span'''\r\n m_name = driver.find_element_by_xpath(m_name_list_path)\r\n m_name_list.append(m_name.text)\r\n\r\n m_count_list_path = 
f'''//*[@id=\"app\"]/div/div/div[2]/div/div/div/div[2]/div/div/div[2]/div/div/div/div[3]/div[1]/div/table/tbody/tr[{i}]/td[4]'''\r\n m_count = driver.find_element_by_xpath(m_count_list_path)\r\n m_count_list.append(m_count.text)\r\n\r\n m_price_list_path = f'''//*[@id=\"app\"]/div/div/div[2]/div/div/div/div[2]/div/div/div[2]/div/div/div/div[3]/div[1]/div/table/tbody/tr[{i}]/td[5]'''\r\n m_price = driver.find_element_by_xpath(m_price_list_path)\r\n m_price_list.append(m_price.text)\r\n\r\n m_price_pc_change_list_path = f'''//*[@id=\"app\"]/div/div/div[2]/div/div/div/div[2]/div/div/div[2]/div/div/div/div[3]/div[1]/div/table/tbody/tr[{i}]/td[6]/span'''\r\n m_price_pc_change = driver.find_element_by_xpath(m_price_pc_change_list_path)\r\n m_price_pc_change_list.append(m_price_pc_change.text)\r\n\r\nmessages = list(zip(m_symbol_list, m_name_list, m_count_list, m_price_list, m_price_pc_change_list))\r\n\r\ndf = pd.DataFrame(messages, columns=['symbol', 'name', 'count', 'price', 'price_pc_change'])\r\ndf.to_csv('stocktwits_messages.csv', index=False)\r\nprint(f'''Execution Time: {datetime.now() - startTime}''')\r\nprint(messages)\r\n\r\n\r\n#_______MESSAGES STOCKS ON STOCKTWITS__https://stocktwits.com/______#\r\ndriver.get(\"https://stocktwits.com/rankings/watchers\")\r\n\r\nw_def = []\r\nw_symbol_list = []\r\nw_name_list = []\r\nw_count_list = []\r\nw_price_list = []\r\nw_price_pc_change_list = []\r\n\r\nfor i in range(1, 10):\r\n w_def.append('Watchers')\r\n\r\n w_symbol_list_path = f'''//*[@id=\"app\"]/div/div/div[2]/div/div/div/div[2]/div/div/div[2]/div/div/div/div[3]/div[1]/div/table/tbody/tr[{i}]/td[2]/span'''\r\n w_symbol = driver.find_element_by_xpath(w_symbol_list_path)\r\n w_symbol_list.append(w_symbol.text)\r\n\r\n w_name_list_path = f'''//*[@id=\"app\"]/div/div/div[2]/div/div/div/div[2]/div/div/div[2]/div/div/div/div[3]/div[1]/div/table/tbody/tr[{i}]/td[3]/span'''\r\n w_name = driver.find_element_by_xpath(w_name_list_path)\r\n w_name_list.append(w_name.text)\r\n\r\n w_count_list_path = f'''//*[@id=\"app\"]/div/div/div[2]/div/div/div/div[2]/div/div/div[2]/div/div/div/div[3]/div[1]/div/table/tbody/tr[{i}]/td[4]'''\r\n w_count = driver.find_element_by_xpath(w_count_list_path)\r\n w_count_list.append(w_count.text)\r\n\r\n w_price_list_path = f'''//*[@id=\"app\"]/div/div/div[2]/div/div/div/div[2]/div/div/div[2]/div/div/div/div[3]/div[1]/div/table/tbody/tr[{i}]/td[5]'''\r\n w_price = driver.find_element_by_xpath(w_price_list_path)\r\n w_price_list.append(w_price.text)\r\n\r\n w_price_pc_change_list_path = f'''//*[@id=\"app\"]/div/div/div[2]/div/div/div/div[2]/div/div/div[2]/div/div/div/div[3]/div[1]/div/table/tbody/tr[{i}]/td[6]/span'''\r\n w_price_pc_change = driver.find_element_by_xpath(w_price_pc_change_list_path)\r\n w_price_pc_change_list.append(w_price_pc_change.text)\r\n\r\nwatchers = list(zip(w_symbol_list, w_name_list, w_count_list, w_price_list, w_price_pc_change_list))\r\n\r\ndf = pd.DataFrame(watchers, columns=['symbol', 'name', 'count', 'price', 'price_pc_change'])\r\ndf.to_csv('stocktwits_watchers.csv', index=False)\r\nprint(f'''Execution Time: {datetime.now() - startTime}''')\r\nprint(watchers)"
},
{
"alpha_fraction": 0.6665421724319458,
"alphanum_fraction": 0.6792382597923279,
"avg_line_length": 26.46808433532715,
"blob_id": "fd0ac4534480d51fdc5f22aa00726f89e42a653c",
"content_id": "84bbb080089dc46d439fa26dad80100ec19f2c3a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2678,
"license_type": "no_license",
"max_line_length": 101,
"num_lines": 94,
"path": "/data_prep.py",
"repo_name": "eugenio114/stock-market-data",
"src_encoding": "UTF-8",
"text": "import statsmodels.api as sm\r\nimport pandas as pd\r\nfrom pandas import tseries\r\nimport numpy as np\r\nimport datetime\r\nimport matplotlib.pyplot as plt\r\nfrom pandas_datareader import data as pdr\r\nimport yfinance\r\nimport datetime\r\nfrom scipy.stats import gaussian_kde\r\nfrom list_stocks import stocks_list, short_list, aapl\r\n\r\n\r\ntickers = stocks_list\r\n\r\ndef get(tickers, startdate, enddate):\r\n def data(tickers):\r\n return (pdr.get_data_yahoo(tickers, start=startdate, end=enddate))\r\n datas = map (data, tickers)\r\n return(pd.concat(datas, keys=tickers, names=['Ticker', 'Date']))\r\n\r\n\r\nstock = get(tickers, datetime.datetime(2018, 1, 1), datetime.date.today())\r\n\r\n#add Difference from open to close column in the data\r\nstock['Diff_Open_Close'] = stock.Open - stock.Close\r\n\r\ndaily_close_px = stock[['Adj Close']]\r\n\r\n\r\n# Calculate the daily percentage change for `daily_close_px`\r\ndaily_pct_change = daily_close_px.pct_change()\r\n\r\nstock['Daily_PC_Change'] = daily_pct_change\r\n\r\nmin_periods = 75\r\n\r\n# Calculate the volatility VIX indicator to exploit market's fear\r\n#https://www.forex.in.rs/volatility-75-index/\r\nvol = daily_pct_change.rolling(min_periods).std() * np.sqrt(min_periods)\r\n\r\nstock['75_Vol_VIX'] = vol\r\n\r\nshort_window = 40\r\nlong_window = 100\r\n\r\n# Initialize the `signals` DataFrame with the `signal` column\r\n\r\nstock['signal'] = 0.0\r\n\r\n# Create short simple moving average over the short window\r\nstock['short_mavg'] = stock['Close'].rolling(window=short_window, min_periods=1, center=False).mean()\r\n\r\n# Create long simple moving average over the long window\r\nstock['long_mavg'] = stock['Close'].rolling(window=long_window, min_periods=1, center=False).mean()\r\n\r\n# Create signals\r\nstock['signal'][short_window:] = np.where(stock['short_mavg'][short_window:]\r\n > stock['long_mavg'][short_window:], 1.0, 0.0)\r\n\r\n# Generate trading orders\r\nstock['positions'] = stock['signal'].diff()\r\n\r\n\r\n####stock.to_csv(\"/Users/carpanie/Desktop/Stocks/stock.csv\")\r\nprint(stock)\r\nprint(stock.columns)\r\n\r\n# Buy a 100 shares when signal turns positive\r\nstock['Pos'] = 1*stock['signal']\r\n\r\n#print(stock)\r\n\r\n# Initialize the portfolio with value owned\r\nstock['Portfolio'] = stock['Pos'].multiply(stock['Adj Close'], axis=0)\r\n\r\n# Store the difference in shares owned\r\nstock['Pos_diff'] = stock['Pos'].diff()\r\n\r\n# Add `holdings` to portfolio\r\n\r\nstock.loc[stock['positions'] == 1, 'Buy'] = stock['Portfolio']\r\nstock.loc[stock['positions'] == -1, 'Sell'] = stock['Portfolio'].shift(1)\r\n\r\nstock['Returns'] = (stock['Sell'].sum() + stock['Portfolio'][-1]) - stock['Buy'].sum()\r\n\r\n\r\n# Print CAGR\r\n\r\n\r\nprint(stock)\r\n\r\n\r\nstock.to_csv(\"apple.csv\")\r\n\r\n"
},
{
"alpha_fraction": 0.6578746438026428,
"alphanum_fraction": 0.6957186460494995,
"avg_line_length": 22.240739822387695,
"blob_id": "9e84a4b3e060d63145cc654233f9fe5345962558",
"content_id": "7ae55d2bcdf1b5dfde9401077b77f6e8cf2515bd",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2616,
"license_type": "no_license",
"max_line_length": 115,
"num_lines": 108,
"path": "/appl-ticker-metrics.py",
"repo_name": "eugenio114/stock-market-data",
"src_encoding": "UTF-8",
"text": "import pandas as pd\r\nimport numpy as np\r\nimport datetime\r\nimport matplotlib.pyplot as plt\r\nfrom pandas_datareader import data as pdr\r\nimport yfinance\r\nimport datetime\r\nfrom list_stocks import stocks_list\r\n\r\n#data = pdr.get_data_yahoo(stocks_list, start=datetime.datetime(2017, 12, 31), end=datetime.datetime(2020, 12, 31))\r\naapl = pdr.get_data_yahoo('AAPL', start=datetime.datetime(2017, 12, 1), end=datetime.date.today())\r\nprint(aapl.head())\r\n\r\n#inspect the index\r\nprint(aapl.index)\r\n# Inspect the columns\r\nprint(aapl.columns)\r\n\r\n# Select only the last 10 observations of `Close` and print them\r\nts = aapl['Close'][-10:]\r\nprint(ts)\r\n\r\n# Inspect the first rows of November-December 2020\r\nprint(aapl.loc[pd.Timestamp('2020-11-01'):pd.Timestamp('2020-12-31')].head())\r\n\r\n# Inspect the first rows of 2020\r\nprint(aapl.loc['2020'].head())\r\n\r\n# Inspect Feb 2018 using iloc\r\nprint(aapl.iloc[22:43])\r\n\r\n# Inspect the 'Open' and 'Close' values at 2018-02-02 and 2018-03-06\r\nprint(aapl.iloc[[22,43], [0, 3]])\r\n\r\n# Sample 20 rows\r\nsample = aapl.sample(20)\r\n\r\n# Print `sample`\r\nprint(sample)\r\n\r\n# Resample to monthly level\r\nmonthly_aapl = aapl.resample('M')\r\n\r\n# Print `monthly_aapl`\r\nprint(monthly_aapl)\r\n\r\n# Add a column `diff` to `aapl`\r\naapl['diff'] = aapl.Open - aapl.Close\r\n\r\n# Delete the new `diff` column\r\ndel aapl['diff']\r\n\r\n# Plot the closing prices for `aapl`\r\naapl['Close'].plot(grid=True)\r\n\r\n# Show the plot\r\nplt.show()\r\n\r\n# Assign `Adj Close` to `daily_close`\r\ndaily_close = aapl[['Adj Close']]\r\n\r\n# Daily returns\r\ndaily_pct_c = daily_close.pct_change()\r\n\r\n# Replace NA values with 0\r\ndaily_pct_c.fillna(0, inplace=True)\r\n\r\n# Inspect daily returns\r\nprint(daily_pct_c)\r\n\r\n# Daily log returns\r\ndaily_log_returns = np.log(daily_close.pct_change()+1)\r\n\r\n# Print daily log returns\r\nprint(daily_log_returns)\r\n\r\n# Resample `aapl` to business months, take last observation as value\r\nmonthly = aapl.resample('BM').apply(lambda x: x[-1])\r\n\r\n# Calculate the monthly percentage change\r\nprint(monthly.pct_change())\r\n\r\n# Resample `aapl` to quarters, take the mean as value per quarter\r\nquarter = aapl.resample(\"4M\").mean()\r\n\r\n# Calculate the quarterly percentage change\r\nprint(quarter.pct_change())\r\n\r\n# Daily returns\r\ndaily_pct_c = daily_close / daily_close.shift(1) - 1\r\n\r\n# Print `daily_pct_c`\r\nprint(daily_pct_c)\r\n\r\n# Plot the distribution of `daily_pct_c`\r\ndaily_pct_c.hist(bins=50)\r\n\r\n# Show the plot\r\nplt.show()\r\n\r\n# Pull up summary statistics\r\nprint(daily_pct_c.describe())\r\n\r\n# Calculate the cumulative daily returns\r\ncum_daily_return = (1 + daily_pct_c).cumprod()\r\n\r\n# Print `cum_daily_return`\r\nprint(cum_daily_return)"
},
{
"alpha_fraction": 0.6571335792541504,
"alphanum_fraction": 0.6779330372810364,
"avg_line_length": 27.605770111083984,
"blob_id": "e14862f8270f9fbdc227dda680fb0d56ed75dd46",
"content_id": "ab9a943ed730228d2fd176877288c2a06bda9e69",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3077,
"license_type": "no_license",
"max_line_length": 103,
"num_lines": 104,
"path": "/stocks_adv.py",
"repo_name": "eugenio114/stock-market-data",
"src_encoding": "UTF-8",
"text": "import pandas as pd\r\nimport numpy as np\r\nimport datetime\r\nimport matplotlib.pyplot as plt\r\nfrom pandas_datareader import data as pdr\r\nimport yfinance\r\nimport datetime\r\nfrom scipy.stats import gaussian_kde\r\nfrom list_stocks import stocks_list, short_list, aapl\r\n\r\n\r\n\r\n\r\napple = pdr.get_data_yahoo('AMZN', start=datetime.datetime(2017, 12, 1), end=datetime.date.today())\r\ndef get(tickers, startdate, enddate):\r\n def data(ticker):\r\n return (pdr.get_data_yahoo(ticker, start=startdate, end=enddate))\r\n datas = map (data, tickers)\r\n return(pd.concat(datas, keys=tickers, names=['Ticker', 'Date']))\r\n\r\ntickers = short_list\r\nall_data = get(tickers, datetime.datetime(2018, 1, 1), datetime.date.today())\r\nprint(all_data.head())\r\n\r\ndaily_close_px = all_data[['Adj Close']].reset_index().pivot('Date', 'Ticker', 'Adj Close')\r\n\r\n# Calculate the daily percentage change for `daily_close_px`\r\ndaily_pct_change = daily_close_px.pct_change()\r\n\r\n# Plot the distributions\r\ndaily_pct_change.hist(bins=50, sharex=True, figsize=(12,8))\r\n\r\n# Show the resulting plot\r\nplt.show()\r\n\r\n# Plot a scatter matrix with the `daily_pct_change` data\r\npd.plotting.scatter_matrix(daily_pct_change, diagonal='kde', alpha=0.1,figsize=(12,12))\r\n\r\n# Show the plot\r\nplt.show()\r\n\r\nmin_periods = 75\r\n\r\n# Calculate the volatility\r\nvol = daily_pct_change.rolling(min_periods).std() * np.sqrt(min_periods)\r\n\r\n# Plot the volatility\r\nvol.plot(figsize=(10, 8))\r\n\r\n# Show the plot\r\nplt.show()\r\n\r\n# Initialize the short and long windows\r\nshort_window = 60\r\nlong_window = 150\r\n\r\n# Initialize the `signals` DataFrame with the `signal` column\r\nsignals = pd.DataFrame(index=apple.index)\r\nsignals['signal'] = 0.0\r\n\r\n# Create short simple moving average over the short window\r\nsignals['short_mavg'] = apple['Close'].rolling(window=short_window, min_periods=1, center=False).mean()\r\n\r\n# Create long simple moving average over the long window\r\nsignals['long_mavg'] = apple['Close'].rolling(window=long_window, min_periods=1, center=False).mean()\r\n\r\n# Create signals\r\nsignals['signal'][short_window:] = np.where(signals['short_mavg'][short_window:]\r\n > signals['long_mavg'][short_window:], 1.0, 0.0)\r\n\r\n# Generate trading orders\r\nsignals['positions'] = signals['signal'].diff()\r\n\r\n\r\n\r\nprint(signals)\r\n\r\n# Initialize the plot figure\r\nfig = plt.figure()\r\n\r\n# Add a subplot and label for y-axis\r\nax1 = fig.add_subplot(111, ylabel='Price in $')\r\n\r\n# Plot the closing price\r\napple['Close'].plot(ax=ax1, color='r', lw=2.)\r\n\r\n# Plot the short and long moving averages\r\nsignals[['short_mavg', 'long_mavg']].plot(ax=ax1, lw=2.)\r\n\r\n# Plot the buy signals\r\nax1.plot(signals.loc[signals.positions == 1.0].index,\r\n signals.short_mavg[signals.positions == 1.0],\r\n '^', markersize=10, color='b')\r\n\r\n# Plot the sell signals\r\nax1.plot(signals.loc[signals.positions == -1.0].index,\r\n signals.short_mavg[signals.positions == -1.0],\r\n 'v', markersize=10, color='y')\r\n\r\napple['Diff_Open_Close'] = apple.Open - apple.Close\r\n\r\n# Show the plot\r\nplt.show()\r\nprint(apple)"
},
{
"alpha_fraction": 0.8104089498519897,
"alphanum_fraction": 0.8141263723373413,
"avg_line_length": 99.875,
"blob_id": "05e1aefb4bf74d58cac9754c6aab667a2ed10664",
"content_id": "6e13d4795010bfc85c3c669c7eb25bba8b9ef212",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 807,
"license_type": "no_license",
"max_line_length": 255,
"num_lines": 8,
"path": "/README.md",
"repo_name": "eugenio114/stock-market-data",
"src_encoding": "UTF-8",
"text": "# Stock Market Data\nin this reporsitory you can find scripts that analyse stock market data. data are pulled from yfinance library.\n\n- list-stocks: contains list of some S&P 500 tickers \n- aapl-ticker-metrics: it gets AAPL data from yfinance and calculates diff Open to Close price, it plots the closing price, calculates daily return in percentage, monthly percentage change and plots the distribution of daily percentage change in histogram\n- data-prep: it extracts data from some major company tickers, does some calculations and prepare them for data visualization \n- backtesting-file: it looks at historical records and estimate expected return based on moving average model\n- stocktwits-data extraction: web scraping script that extracts top trending, top watched and top messages from stocktwits.com\n"
}
] | 6 |
LightCC/OpenPegs | https://github.com/LightCC/OpenPegs | b9144cbb2ad1f343e20358b24b5b851fd574df14 | 9f3d90f7152c0fd98b06e07416d3e6a070a45808 | df61e15ee39afef0a694b609c18132b6f7e2a60b | refs/heads/master | 2022-01-27T08:39:26.107616 | 2020-03-17T03:37:25 | 2020-03-17T03:37:25 | 246,702,994 | 0 | 1 | null | 2020-03-11T23:46:55 | 2020-03-17T03:38:11 | 2022-01-21T20:15:31 | Python | [
{
"alpha_fraction": 0.6106155514717102,
"alphanum_fraction": 0.6132916808128357,
"avg_line_length": 45.226802825927734,
"blob_id": "685b6bbff8d1bbe6676b188cab78d893a4f10f62",
"content_id": "e306f32097e9aa39dc0575e1dc34d07e3f4a4267",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4484,
"license_type": "no_license",
"max_line_length": 197,
"num_lines": 97,
"path": "/src/PegBoard.py",
"repo_name": "LightCC/OpenPegs",
"src_encoding": "UTF-8",
"text": "try:\n from .PegNode import PegNode\nexcept ImportError:\n print(\"\\n{}: Try running `pegs` from the command line!!\\nor run with `python run_pegs.py` from root directory\\n\".format(__file__))\n \nclass PegException(Exception):\n pass\n\nclass PegBoard:\n '''PegBoard is a linked list of nodes on a Peg Board\n \n PegBoard includes information on the board geometry of each\n individual node where a peg can be, in addition to information\n about how moves can happen (i.e. which nodes are adjacent,\n and where a jump over that adjacent node will land)\n \n Arguments:\n node_ids as list:\n list of ids to use for each node that is created\n [node_ids_str] as list:\n list of id strings that matchs the node_ids. These will be returned when attempting to print a node. If left off or empty, the built-in string function for each id will be used instead. \n '''\n def __init__(self, node_ids, node_ids_str=None):\n # Ensure node_ids is a list\n if not isinstance(node_ids, list):\n raise ValueError('node_ids (arg 1) was type \"{}\", expected \"list\"'.format(type(node_ids)))\n \n # If not provided, set node_ids_str to a list of the default string output of the node_ids list items\n if node_ids_str == None:\n node_ids_str = [str(x) for x in node_ids]\n else:\n # if node_ids_str was given, check that it is a list\n if not isinstance(node_ids_str, list):\n raise ValueError('node_ids_str (arg 2) was type \"{}\", expected \"list\"'.format(type(node_ids)))\n # if it is a list, check if all items are strings\n if not all(isinstance(x, str) for x in node_ids_str):\n raise ValueError('if provided, all items in Arg 3, node_ids_str, \"{}\" list must be strings'.format(node_ids_str))\n \n # Ensure input args are the same length as lists\n if len(node_ids) != len(node_ids_str):\n raise ValueError('Length of node_ids (arg 1) [{}] does not equal length of node_ids_str (arg 2) [{}]'.format(len(node_ids), len(node_ids_str)))\n \n ## create the nodes list\n nodes = {}\n newnodes = {node_id: PegNode(nodes, node_id, node_ids_str[index]) for index, node_id in enumerate(node_ids)}\n nodes.update(newnodes)\n\n ## Assign all object properties \n self._node_ids = node_ids\n self._node_ids_str = node_ids_str\n self._nodes = nodes\n \n # Setup _format_str to None so it is initialized,\n # need child class to set this up!!\n self._format_str = None\n \n def node(self, node_id):\n return self._nodes[node_id]\n \n def nodes(self):\n return self._nodes\n\n ## Format Strings and functions for printing Board status and other info strings.\n # Note: Format string is set by the user/child class, the PegBoard class just fills in the information from the class object (i.e. 
filling in node ids, peg positions, etc.)\n def format_str(self):\n if self._format_str == None:\n raise ValueError('Child Class must create _format_str variable!!')\n return self._format_str\n\n def nodes_str(self, indent=0):\n outstr = self.format_str().format(x=self._node_ids_str)\n return self._indent_string(outstr, indent)\n \n def pegs_str(self, indent=0):\n pegs = [ self.node(node_id).peg_str() for node_id in self._node_ids ]\n outstr = self.format_str().format(x=pegs)\n return self._indent_string(outstr, indent)\n \n def full_str(self, indent=0):\n fullstr = [ '{}:{}'.format(self._nodes[node_id].node_id_str(), self._nodes[node_id].peg_str()) for node_id in self._node_ids ]\n outstr = self.format_str().format(x=fullstr)\n spaces = ' ' * 3\n outstr = outstr.replace(' ', spaces)\n return self._indent_string(outstr, indent)\n \n def node_and_pegs_str(self, indent=0, space_between=3):\n node = self.nodes_str()\n pegs = self.pegs_str()\n nodelines = node.splitlines()\n peglines = pegs.splitlines()\n outstr = '\\n'.join([ '{}{}{}'.format(nodelines[index], ' ' * space_between, peglines[index]) for index, _ in enumerate(nodelines) ])\n return self._indent_string(outstr, indent)\n \n def _indent_string(self, text, indent):\n spaces = ' ' * indent\n outstr = ''.join([spaces + line + '\\n' for line in text.splitlines()])\n return outstr[:-1]\n"
},
{
"alpha_fraction": 0.6907785534858704,
"alphanum_fraction": 0.706239640712738,
"avg_line_length": 36.72916793823242,
"blob_id": "c824a6e9f7d251e6b1f437133b622760c2a2dd67",
"content_id": "5b3e0ac53997bda73565500e2a94485ff353ef60",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1811,
"license_type": "no_license",
"max_line_length": 283,
"num_lines": 48,
"path": "/README.md",
"repo_name": "LightCC/OpenPegs",
"src_encoding": "UTF-8",
"text": "# OpenPegs - Peg Hopper Game and Analysis package\n\nPeg Hopper is a game where there is a geometric layout of peg holes. The game board is filled with pegs, generally allowing the player to choose one hole on the board to start with as empty (i.e. without a peg).\n\nPlay proceeds with the player jumping any peg over another peg, into an empty peg hole, removing the peg that was jumped from the board. The goal is to remove the pegs in a way that the remaining number of pegs is the least possible, generally to a single remaining peg on the board.\n\nA classic version of the game had a board with a triangle or pyramid pattern, with 1 hole in the top row, 2 in the next, increasing by one until there are 5 in the bottom row. It is possible for the game to reduce to a single peg for a winning game with this pattern.\n\nFor example, if you take the board with pegs identified as below:\n```\n 1\n 2 3\n 4 5 6\n 7 8 9 a\n b c d e f\n```\n\nand if the initial hole left open is hole 1, the board will look like the following, with `x` indicating a peg is present, and `o` indicating the peg is open:\n```\n o\n x x\n x x x\n x x x x\n x x x x x\n```\n\nThe the first move must be one of the following:\n1. Jump peg 4 over peg 2, removing peg 2 and filling hole 1, leaving holes 2 and 4 open\n```\n x\n o x\n o x x\n x x x x\n x x x x x\n```\n\n2. Jump peg 6 over peg 3, removing peg 3 and filling hole 1, leaving holes 3 and 6 open\n```\n x\n x o\n x x o\n x x x x\n x x x x x\n```\n\nAfter either of these moves, there will be 4 possible moves to jump one of the remaining pegs over another peg into one of the empty holes.\n\nPlay proceeds until there are no pegs adjacent to each other. The remaining pegs on the board are counted to create the final score, with a lower score being better.\n"
},
{
"alpha_fraction": 0.5429241061210632,
"alphanum_fraction": 0.5461471080780029,
"avg_line_length": 39.630950927734375,
"blob_id": "f3ddeb3188c51a827c2c609dcb921bd3b7835818",
"content_id": "7438bce3d78367e0ec8fc784d123557c5c9e7f18",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3413,
"license_type": "no_license",
"max_line_length": 182,
"num_lines": 84,
"path": "/src/pegs.py",
"repo_name": "LightCC/OpenPegs",
"src_encoding": "UTF-8",
"text": "try:\n from .PegPyramid import PegPyramid\nexcept ImportError:\n print(\"\\n{}: Try running `pegs` from the command line!!\\nor run with `python run_pegs.py` from root directory\\n\".format(__file__))\n\ndef main():\n indent = 3\n print(\"Running Pegs Game...\")\n pyramid = PegPyramid()\n print('\\n'\n 'Game board node names, but no pegs!!')\n print(pyramid.node_and_pegs_str(indent))\n \n ## Setup the game board\n valid_start_node = False\n while valid_start_node == False:\n start_node = input('\\nStart: Which node on left should be empty? ')\n valid_start_node = pyramid.setup_game_board(start_node)\n print('\\n'\n 'All Nodes but Start Node filled')\n print(pyramid.node_and_pegs_str(indent))\n \n ## Begin play\n remaining_moves = True\n \n ## Evaluate available moves\n while remaining_moves:\n remaining_moves = pyramid.valid_moves()\n ## If there are available moves, print them and have user select one\n if remaining_moves:\n print('\\nValid Remaining Moves:')\n for index, available_move in enumerate(remaining_moves):\n print(' Move #{}: {}'.format(index, available_move))\n print('')\n selected_move = None\n while selected_move == None:\n move_str = input('Which move will you make? ')\n try:\n move_index = int(move_str)\n if move_index < 0 or move_index > index:\n raise ValueError\n selected_move = remaining_moves[move_index]\n except:\n if len(remaining_moves) == 1:\n valid_range = '0'\n else:\n valid_range = '0 to {}'.format(len(remaining_moves) - 1)\n print('ERROR!! Invalid selection... must be {}!'.format(valid_range))\n continue\n # A valid move was picked, execute it\n pyramid.execute_jump_move(selected_move)\n print('\\n Peg in {} jumped to {}, removing {}'.format(selected_move.start_node().node_id(), selected_move.end_node().node_id(), selected_move.adjacent_node().node_id()))\n print('')\n print(pyramid.node_and_pegs_str(3))\n else:\n valid_moves_remain = False\n\n ## No more available moves, game is done!\n pegs = sum(node.peg() for node in pyramid.nodes().values())\n print('\\n'\n 'No moves available:')\n print('\\n'\n ' You finished the game with {} remaining pegs'.format(pegs))\n if pegs >= 4:\n print(' It takes someone special to leave that many pegs on the board!!')\n elif pegs == 3:\n print(' I can do that well with random moves!!')\n elif pegs == 2:\n print('\\n'\n ' You might be getting the hang of this!!\\n'\n ' But you can still do better...')\n elif pegs == 1:\n print('\\n'\n ' What? You solved it?!\\n'\n ' We worship the ground you walk on!!\\n'\n ' But can you do it again...')\n else:\n Exception('Not a possible outcome - someone cheated! (or someone didn\\'t program right...)')\n \n ## Pause for user to press enter, so that window will not disappear if run directly from *.exe\n input('\\n=== PRESS ENTER TO END GAME ===')\n \nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.5582278370857239,
"alphanum_fraction": 0.5590717196464539,
"avg_line_length": 34.92424392700195,
"blob_id": "fa41604d0fae1b45caec3bdfec96749a2e152ccd",
"content_id": "2e884bbe16374bb186adb2df7b65803507abed75",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2370,
"license_type": "no_license",
"max_line_length": 148,
"num_lines": 66,
"path": "/src/PegNode.py",
"repo_name": "LightCC/OpenPegs",
"src_encoding": "UTF-8",
"text": "try:\n from .PegNodeLink import PegNodeLink\nexcept ImportError:\n print(\"\\n{}: Try running `pegs` from the command line!!\\nor run with `python run_pegs.py` from root directory\\n\".format(__file__))\n\nclass PegNode:\n '''Create a new PegNode instance\n \n Arguments:\n parent: the parent that owns this node, a dict with {node_id: node} entries\n node_id: a unique key that identifies this PegNode\n node_id_str: a string that will be printed out for the node_id. This will be created from the default __str__() of the node_id if not provided\n '''\n def __init__(self, parent, node_id, node_id_str='', peg=False):\n self._node_id = node_id\n if node_id_str:\n self._node_id_str = node_id_str\n else:\n self._node_id_str = str(node_id)\n if not isinstance(self._node_id_str, str):\n raise ValueError('\"node_id_str\" (arg 3) must be a string, it was {}'.format(type(self._node_id_str)))\n self._parent = parent\n self._links = []\n # If peg arg evaluates to anything, set to True, else False\n self._peg = True if peg else False\n \n def peg(self):\n return self._peg\n \n def peg_str(self):\n return 'x' if self._peg else 'o'\n \n def set_peg(self):\n if self._peg:\n raise ValueError('Peg already present at Node {}, cannot add'.format(self.node_id()))\n else:\n self._peg = True\n \n def clear_peg(self):\n if self._peg:\n self._peg = False\n else:\n raise ValueError('No peg was present at Node {} to remove'.format(self.node_id()))\n \n def node_id(self):\n return self._node_id\n \n def node_id_str(self):\n return self._node_id_str\n \n def links(self):\n return self._links\n \n def add_link(self, adjacent_node, end_node):\n self._links.append(PegNodeLink(self, adjacent_node, end_node))\n \n def __str__(self):\n outstr = ('Node ID: {} (Type: {})\\n'\n 'Node ID String: \"{}\"\\n'\n 'Links:\\n'.format(self._node_id, type(self._node_id), self._node_id_str))\n if self._links:\n for index, link in enumerate(self._links):\n outstr += ' #{}: {}\\n'.format(index, link)\n else:\n outstr += ' None\\n'\n return outstr[:-1] # Strip last '\\n'"
},
{
"alpha_fraction": 0.5487581491470337,
"alphanum_fraction": 0.564260721206665,
"avg_line_length": 48.578514099121094,
"blob_id": "ab684dc3ae7a328670f4899c7ee6dd29de054951",
"content_id": "6c3ec42f4078b56127e0223515048d4d8467e64b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5999,
"license_type": "no_license",
"max_line_length": 250,
"num_lines": 121,
"path": "/tests/test_PegBoard.py",
"repo_name": "LightCC/OpenPegs",
"src_encoding": "UTF-8",
"text": "import pytest\nfrom src.PegBoard import PegBoard\nfrom src.PegNode import PegNode\nfrom src.PegNodeLink import PegNodeLink\n\ndef fake_function():\n pass\n\[email protected](params=[1, 1.1, {1, 2, 3}, 'string', {1: 1, 2: '2'}, (1, '1'), fake_function])\ndef not_a_list(request):\n '''A test fixture \"not_a_list\" that can be used as an argument to supply parameterized test cases that have separate objects of different types that are not of type <list>\n '''\n return request.param\n\[email protected](params=[1, 1.1, {'1', '2', '3'}, ['1', '2', '3'], {'1': '1', '2': '2'}, ('1', '1'), fake_function])\ndef not_a_string(request):\n '''A test fixture \"not_a_string\" that provides multiple parameterized test cases with separate objects that are not of type <str>\n '''\n return request.param\n\nclass TestPegBoard:\n def test_basic_PegBoard_object_creation(self):\n '''Ensure that basic PegBoard object creation is working correctly. i.e. Node objects are created that return the correct node ids, and different methods of accessing them through the .nodes() and .node(node_id) functions work.\n '''\n node_ids = [1, 2]\n board = PegBoard(node_ids)\n nodes = board.nodes()\n assert len(nodes) == 2\n assert board.node(1).node_id() == 1\n assert board.node(2).node_id() == 2\n assert nodes[1] is board.node(1)\n assert nodes[2] is board.node(2)\n \n def test_format_string_outputs(self):\n '''test the creation of a format_str\n \n Whether a format string is working is tested by the outputs of .node_str(), .pegs_str(), .full_str(), and .node_and_pegs_str().\n '''\n node_ids = [1, 2, 3]\n board = PegBoard(node_ids)\n # Setup a format string in a pyramid\n test_str = (' {x[0]} \\n'\n '{x[1]} {x[2]}')\n board._format_str = test_str\n assert board.format_str() == test_str\n assert board.nodes_str() == (' 1 \\n'\n '2 3')\n assert board.nodes_str(indent=2) == (' 1 \\n'\n ' 2 3')\n assert board.pegs_str() == (' o \\n'\n 'o o')\n \n ## Set a peg in every position (switch from o's to x's)\n for node in board.nodes().values():\n node.set_peg()\n assert board.pegs_str() == (' x \\n'\n 'x x')\n assert board.pegs_str(1) == (' x \\n'\n ' x x')\n assert board.full_str() == (' 1:x \\n'\n '2:x 3:x')\n assert board.full_str(indent=5) == (' 1:x \\n'\n ' 2:x 3:x')\n assert board.node_and_pegs_str() == (' 1 x \\n'\n '2 3 x x')\n assert board.node_and_pegs_str(indent=3, space_between=0) == (' 1 x \\n'\n ' 2 3x x')\n def test_raises_ValueError_if_format_string_is_not_set(self):\n '''Ensure a ValueError is raised if the .format_str() function is called before the ._format_str property is set by the parent\n '''\n node_ids = [1, 2]\n board = PegBoard(node_ids)\n with pytest.raises(ValueError):\n board.format_str()\n \n def test_raises_ValueError_if_node_ids_or_node_ids_str_are_not_a_list(self, not_a_list):\n '''Ensure a ValueError is raised when creating a PegBoard with either a node_ids or node_ids_str argument that are not a list\n '''\n ## Test an node_ids that is not a list raises ValueError)\n with pytest.raises(ValueError):\n PegBoard(not_a_list)\n # convert the invalid node_ids into a list that is valid, either by adding each item in the object to a list, or adding the object directly as the only item in a list\n try:\n valid_node_ids = [ x for x in not_a_list ]\n except TypeError: # TypeError is thrown if node_ids is not iterable\n valid_node_ids = [ not_a_list ]\n # Use the valid node_ids value with a node_ids_str that is not a list, and ensure a ValueError is raised \n with pytest.raises(ValueError):\n 
PegBoard(valid_node_ids, node_ids_str=not_a_list)\n # Now test with both invalid\n with pytest.raises(ValueError):\n PegBoard(not_a_list, not_a_list)\n \n def test_raises_ValueError_if_arg_lengths_are_not_equal(self):\n '''Ensure a ValueError is raised if the node_ids and node_ids_str are both provided, and are both lists, but are not the same length\n '''\n node_ids = [1, 2, 3]\n node_ids_str = ['1', '2']\n with pytest.raises(ValueError):\n PegBoard(node_ids, node_ids_str=node_ids_str)\n \n def test_raises_ValueError_if_node_ids_str_arg_items_are_not_strings(self, not_a_string):\n '''Ensure a ValueError is raised if any item in the node_ids_str argument are not a string, when node_ids_str is provided and is a list (i.e. is not the empty string, which is default and will auto-create a list of strings from the node_ids).\n '''\n node_ids = [1, 2, 3]\n node_ids_str = ['1', '2', '3']\n # No Exception should be raised\n PegBoard(node_ids, node_ids_str=node_ids_str)\n \n # Set up three test lists with the current non-string in each of the 3 positions in the list then ensure it generates a ValueError\n node_ids_str_test1 = [not_a_string, '2', '3']\n with pytest.raises(ValueError):\n PegBoard(node_ids, node_ids_str=node_ids_str_test1)\n \n node_ids_str_test2 = ['1', not_a_string, '3']\n with pytest.raises(ValueError):\n PegBoard(node_ids, node_ids_str=node_ids_str_test2)\n \n node_ids_str_test3 = ['1', '2', not_a_string]\n with pytest.raises(ValueError):\n PegBoard(node_ids, node_ids_str=node_ids_str_test3)\n"
},
{
"alpha_fraction": 0.5417276620864868,
"alphanum_fraction": 0.5763787031173706,
"avg_line_length": 37.9523811340332,
"blob_id": "9c928d350e531b9802baf8cda78839c438afd0ba",
"content_id": "ac2cdd2cc7c48fded7f33656263fd6f2bf5c0b66",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4098,
"license_type": "no_license",
"max_line_length": 86,
"num_lines": 105,
"path": "/tests/test_PegNode.py",
"repo_name": "LightCC/OpenPegs",
"src_encoding": "UTF-8",
"text": "import pytest\nfrom src.PegNode import PegNode\n\ndef fake_function():\n pass\n\n## supplies valid (node_id, node_id_str)\[email protected](params=[(1, '1'), ('string', 'string'), (1.1, '1.1')])\ndef valid_node_id_type(request):\n return request.param\n\n## supplies values that are not strings\[email protected](params=[1, 1.1, ['1.1'], {2, 1}, fake_function, ('abc', 'def')])\ndef not_a_string(request):\n return request.param\n\nclass TestPegNode:\n def test_init_sets_node_id_and_string(self, valid_node_id_type):\n (node_id, node_id_str) = valid_node_id_type\n node = PegNode(None, node_id, node_id_str)\n assert node.node_id() == node_id\n assert node.node_id_str() == node_id_str\n\n def test_generates_correct_node_id_string(self, valid_node_id_type):\n node_id, node_id_str = valid_node_id_type\n node = PegNode(None, node_id)\n assert node.node_id_str() == node_id_str\n \n def test_init_raises_valueerror_if_node_id_str_arg_is_not_str(self, not_a_string):\n with pytest.raises(ValueError):\n PegNode(None, 1, node_id_str=not_a_string)\n \n def test_init_peg_at_initialization(self):\n node_peg_false = PegNode(None, 1, peg=False)\n assert node_peg_false.peg() == False\n assert node_peg_false.peg_str() == 'o'\n node_peg_false2 = PegNode(None, 1, peg=0)\n assert node_peg_false2.peg() == False\n assert node_peg_false2.peg_str() == 'o'\n node_peg_true = PegNode(None, 2, peg=True)\n assert node_peg_true.peg() == True\n assert node_peg_true.peg_str() == 'x'\n node_peg_true2 = PegNode(None, 2, peg=9999)\n assert node_peg_true2.peg() == True\n assert node_peg_true2.peg_str() == 'x'\n \n def test_setting_and_removing_pegs(self):\n node = PegNode(None, 1)\n assert node.peg() == False\n # With peg not present, should have error clearing it\n with pytest.raises(ValueError):\n node.clear_peg()\n node.set_peg()\n assert node.peg() == True\n # With peg already present, should have error setting it\n with pytest.raises(ValueError):\n node.set_peg()\n node.clear_peg()\n assert node.peg() == False\n \n \n def test_add_links_to_node(self):\n nodes = {}\n ## Make 4 nodes that can be linked\n # will be in diamond pattern\n # 1 -> 2 -> 4\n # 1 -> 3 -> 4\n # and reverse from 4 to 1 on each path\n node1 = PegNode(nodes, 1)\n node2 = PegNode(nodes, 2)\n node3 = PegNode(nodes, 3)\n node4 = PegNode(nodes, 4)\n nodes.update({1: node1, 2: node2, 3: node3, 4: node4})\n node1.add_link(node2, node4)\n assert len(node1._links) == 1\n node1.add_link(node3, node4)\n assert len(node1._links) == 2\n node4.add_link(node2, node1)\n assert len(node4._links) == 1\n node4.add_link(node3, node1)\n assert len(node1._links) == 2\n assert len(node2._links) == 0\n assert len(node3._links) == 0\n assert len(node4._links) == 2\n assert node1._links[0]._start_node is node1\n assert node1._links[0]._adjacent_node is node2\n assert node1._links[0]._end_node is node4\n assert node1._links[1]._start_node is node1\n assert node1._links[1]._adjacent_node is node3\n assert node1._links[1]._end_node is node4\n assert node4._links[1]._start_node is node4\n assert node4._links[1]._adjacent_node is node3\n assert node4._links[1]._end_node is node1\n assert str(node1._links[0]) == '1->2->4'\n assert str(node1._links[1]) == '1->3->4'\n assert str(node4._links[0]) == '4->2->1' \n assert str(node4._links[1]) == '4->3->1'\n assert node1._links[0]._end_node._parent[1] is node1\n assert node1._links[0]._end_node._parent[4] is node4\n assert len(node4._parent) == 4\n assert len(node1._parent) == 4\n \n ## Test the .links() method\n links = node1.links()\n 
assert links is node1._links\n "
},
{
"alpha_fraction": 0.6337748169898987,
"alphanum_fraction": 0.6337748169898987,
"avg_line_length": 39.81081008911133,
"blob_id": "96f13c6319ef6fc88c36936222002c51ab946a07",
"content_id": "dabf07bb6d8707c3baf5642bc66b4548b2b97977",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1510,
"license_type": "no_license",
"max_line_length": 240,
"num_lines": 37,
"path": "/src/PegNodeLink.py",
"repo_name": "LightCC/OpenPegs",
"src_encoding": "UTF-8",
"text": "class PegNodeLink:\n '''PegNodeLink objects provide mapping of legal jumps from a PegNode\n \n When jumping, a PegNodeLink provides the start_node that a peg is currently located at, an adjacent_node that can be jumped over (if a peg is at that location), and an end_node (which must be empty) for the peg to land at after jumping.\n \n Arguments:\n start_node(PegNode): Beginning Node Position\n adjacent_node(PegNode): Adjacent Node that will be jumped over\n end_node(PegNode): Ending Node that will be jumped to\n '''\n def __init__(self, start_node, adjacent_node, end_node):\n from .PegNode import PegNode\n\n if isinstance(start_node, PegNode):\n self._start_node = start_node\n else:\n raise ValueError('start_node must be a PegNode instance')\n if isinstance(adjacent_node, PegNode):\n self._adjacent_node = adjacent_node\n else:\n raise ValueError('adjacent_node must be a PegNode instance')\n if isinstance(end_node, PegNode):\n self._end_node = end_node\n else:\n raise ValueError('end_node must be a PegNode instance')\n \n def start_node(self):\n return self._start_node\n \n def adjacent_node(self):\n return self._adjacent_node\n \n def end_node(self):\n return self._end_node\n \n def __str__(self):\n return '{}->{}->{}'.format(self._start_node.node_id_str(), self._adjacent_node.node_id_str(), self._end_node.node_id_str())\n"
},
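The `PegNodeLink` docstring above describes a jump as a (start, adjacent, end) triple. Here is a minimal sketch of building one link and printing it, assuming the `PegNode(parent, node_id)` constructor used elsewhere in this repository:

```python
from src.PegNode import PegNode
from src.PegNodeLink import PegNodeLink

start = PegNode(None, 1)     # the peg jumps from here...
adjacent = PegNode(None, 2)  # ...over this node...
end = PegNode(None, 3)       # ...and lands here

link = PegNodeLink(start, adjacent, end)
print(link)  # '1->2->3', via PegNodeLink.__str__
```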
{
"alpha_fraction": 0.5052316784858704,
"alphanum_fraction": 0.695067286491394,
"avg_line_length": 16.605262756347656,
"blob_id": "6e1c0fc6ab71e08ff1b423350a710ebd31134da0",
"content_id": "1b11f3d2bd028abd84fe3e833a2368a2c18cf4af",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 669,
"license_type": "no_license",
"max_line_length": 59,
"num_lines": 38,
"path": "/requirements.txt",
"repo_name": "LightCC/OpenPegs",
"src_encoding": "UTF-8",
"text": "altgraph==0.17\nastroid==2.3.3\natomicwrites==1.3.0\nattrs==19.3.0\nbackcall==0.1.0\ncolorama==0.4.3\ndecorator==4.4.2\nfuture==0.18.2\nimportlib-metadata==1.5.0\nipython==7.13.0\nipython-genutils==0.2.0\nisort==4.3.21\njedi==0.16.0\nlazy-object-proxy==1.4.3\nmccabe==0.6.1\nmore-itertools==8.2.0\n-e [email protected]:LightCC/OpenPegs.git@b6d2ced4ace7a19c035c82a3995d56873e73379d#egg=OpenPegs\npackaging==20.3\nparso==0.6.2\npefile==2019.4.18\npickleshare==0.7.5\npluggy==0.13.1\nprompt-toolkit==3.0.4\npy==1.8.1\nPygments==2.6.1\nPyInstaller==3.6\npylint==2.4.4\npyparsing==2.4.6\npytest==5.3.5\npywin32-ctypes==0.2.0\nrope==0.16.0\nsix==1.14.0\nsty==1.0.0b12\ntraitlets==4.3.3\ntyped-ast==1.4.1\nwcwidth==0.1.8\nwrapt==1.11.2\nzipp==3.1.0\n"
},
{
"alpha_fraction": 0.539880633354187,
"alphanum_fraction": 0.5691806674003601,
"avg_line_length": 33.14814758300781,
"blob_id": "0c20082a81fd2761df5e8b0bbfa88f3358d9da6c",
"content_id": "4cb98079883033fe398c1f86c168963a8438a426",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1843,
"license_type": "no_license",
"max_line_length": 74,
"num_lines": 54,
"path": "/tests/test_PegNodeLink.py",
"repo_name": "LightCC/OpenPegs",
"src_encoding": "UTF-8",
"text": "import pytest\nfrom src.PegNodeLink import PegNodeLink\nfrom src.PegNode import PegNode\n\ndef fake_function():\n pass\n\n## supply a node_arg for instantiating PegNodeLink objects\n# that is not a PegNode object to trigger a ValueError exception\[email protected](params=[None,\n 1,\n 'string',\n 1.1,\n ['list', 1],\n {1: 'dict'},\n {'set', 1, 2},\n fake_function\n ])\ndef node_arg(request):\n return request.param\n\nclass TestPegNodeLink:\n def test_init_raises_valueerror_if_arg_is_not_PegNode(self, node_arg):\n node1 = PegNode(None, 1)\n # No Exception should be raised\n PegNodeLink(node1, node1, node1)\n # Ensure bad first arg raises ValueError\n with pytest.raises(ValueError):\n PegNodeLink(node_arg, node1, node1)\n # Ensure bad second arg raises ValueError\n with pytest.raises(ValueError):\n PegNodeLink(node1, node_arg, node1)\n # Ensure bad third arg raises ValueError\n with pytest.raises(ValueError):\n PegNodeLink(node1, node1, node_arg)\n \n def test_returning_all_nodes(self):\n # Setup\n node1 = PegNode(None, 1)\n node2 = PegNode(None, 2)\n node3 = PegNode(None, 3)\n link = PegNodeLink(node1, node2, node3)\n \n # Test node access methods\n assert link.start_node() is node1\n assert link.adjacent_node() is node2\n assert link.end_node() is node3\n \n def test_PegNodeLink_string_output(self):\n node1 = PegNode(None, 1)\n node2 = PegNode(None, '2')\n node3 = PegNode(None, 3.14159)\n link1 = PegNodeLink(node1, node2, node3)\n assert str(link1) == '1->2->3.14159'"
},
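Both test modules above lean on the same pytest pattern: a fixture declared with `params=[...]` makes pytest run each dependent test once per entry, exposing the current entry as `request.param`. A stand-alone sketch of the pattern:

```python
import pytest

@pytest.fixture(params=[1, 'two', 3.0])
def sample_value(request):
    # pytest calls this once per entry in params
    return request.param

def test_runs_once_per_param(sample_value):
    assert sample_value in (1, 'two', 3.0)
```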
{
"alpha_fraction": 0.5081997513771057,
"alphanum_fraction": 0.5409986972808838,
"avg_line_length": 45.38461685180664,
"blob_id": "12a6679eb5ad046e93b8f5d1e3ee3c3307cf746c",
"content_id": "78e20e40669ef8a2c6b41895033c6bed381c8171",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5427,
"license_type": "no_license",
"max_line_length": 145,
"num_lines": 117,
"path": "/src/PegPyramid.py",
"repo_name": "LightCC/OpenPegs",
"src_encoding": "UTF-8",
"text": "try:\n from .PegNode import PegNode\n from .PegBoard import PegBoard\nexcept ImportError:\n print(\"\\n{}: Try running `pegs` from the command line!!\\nor run with `python run_pegs.py` from root directory\\n\".format(__file__))\n \nclass PegPyramid(PegBoard):\n def __init__(self):\n node_ids = list(range(1, 16))\n node_ids_str = [ '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f' ]\n nodes = {}\n node_dict = { node_id: PegNode(nodes, node_id, node_ids_str[index]) for index, node_id in enumerate(node_ids) }\n nodes.update(node_dict)\n super().__init__(node_ids, node_ids_str)\n self._rows = [\n [ nodes[1] ],\n [ nodes[2], nodes[3] ],\n [ nodes[4], nodes[5], nodes[6] ],\n [ nodes[7], nodes[8], nodes[9], nodes[10] ],\n [ nodes[11], nodes[12], nodes[13], nodes[14], nodes[15] ]\n ]\n self._setup_links()\n self._format_str = self._create_format_str()\n \n def _create_format_str(self):\n ## Create a dict of rows with their lengths\n row_lengths = [ len(row) for row in self._rows ]\n max_nodes_in_row = max(row_lengths)\n ## Now create a string for each row and combine them\n rows = []\n for row in self._rows:\n # Center each row by adding spaces\n row_center_spacing = ' ' * (max_nodes_in_row - len(row))\n rowstr = row_center_spacing\n for node in row:\n node_index = self._node_ids.index(node.node_id())\n rowstr += '{{x[{node_index}]}} '.format(node_index=node_index)\n rowstr += row_center_spacing\n # rowstr will have one extra space at the end from the loop, strip one off\n rows.append(rowstr[:-1])\n # Remove the final '\\n' from outstr\n return '\\n'.join(rows)\n \n def _setup_links(self):\n self._create_link_by_id(1, 2, 4)\n self._create_link_by_id(1, 3, 6)\n self._create_link_by_id(2, 4, 7)\n self._create_link_by_id(2, 5, 9)\n self._create_link_by_id(3, 5, 8)\n self._create_link_by_id(3, 6, 10)\n self._create_link_by_id(4, 2, 1)\n self._create_link_by_id(4, 5, 6)\n self._create_link_by_id(4, 7, 11)\n self._create_link_by_id(4, 8, 13)\n self._create_link_by_id(5, 8, 12)\n self._create_link_by_id(5, 9, 14)\n self._create_link_by_id(6, 3, 1)\n self._create_link_by_id(6, 5, 4)\n self._create_link_by_id(6, 9, 13)\n self._create_link_by_id(6, 10, 15)\n self._create_link_by_id(7, 4, 2)\n self._create_link_by_id(7, 8, 9)\n self._create_link_by_id(8, 5, 3)\n self._create_link_by_id(8, 9, 10)\n self._create_link_by_id(9, 5, 2)\n self._create_link_by_id(9, 8, 7)\n self._create_link_by_id(10, 6, 3)\n self._create_link_by_id(10, 9, 8)\n self._create_link_by_id(11, 7, 4)\n self._create_link_by_id(11, 12, 13)\n self._create_link_by_id(12, 8, 5)\n self._create_link_by_id(12, 13, 14)\n self._create_link_by_id(13, 8, 4)\n self._create_link_by_id(13, 9, 6)\n self._create_link_by_id(13, 12, 11)\n self._create_link_by_id(13, 14, 15)\n self._create_link_by_id(14, 9, 5)\n self._create_link_by_id(14, 13, 12)\n self._create_link_by_id(15, 10, 6)\n self._create_link_by_id(15, 14, 13)\n \n def _create_link_by_id(self, start_node_id, adjacent_node_id, end_node_id):\n self._nodes[start_node_id].add_link(self.node(adjacent_node_id), self.node(end_node_id))\n\n def setup_game_board(self, start_node_id_str):\n if start_node_id_str in self._node_ids_str:\n for node in self._nodes.values():\n if start_node_id_str != node.node_id_str():\n node.set_peg()\n return True\n else: # the node_id_str passed in was not found\n return False\n \n def valid_moves(self):\n moves = []\n for node in self._nodes.values():\n for link in node.links():\n if self.link_has_valid_jump(link):\n moves.append(link)\n 
return moves\n \n def link_has_valid_jump(self, link):\n # If start node has a peg, and adjacent node has a peg to jump, and end node is empty to land, then link is valid for a jump\n return all( [link.start_node().peg(), link.adjacent_node().peg(), not link.end_node().peg()] )\n \n def execute_jump_move(self, link):\n if self.link_has_valid_jump(link):\n link.adjacent_node().clear_peg() # Jump over here and remove peg from board\n link.start_node().clear_peg() # Jump from here, peg moves\n link.end_node().set_peg() # peg lands here and fills the spot \n else:\n if not link.start_node().peg():\n raise ValueError('Link {} is not valid - No peg to jump with in start node {}'.format(link, link.start_node().node_id_str))\n elif not link.adjacent_node().peg():\n raise ValueError('Link {} is not valid - No peg to jump over in adjacent node {}'.format(link, link.adjacent_node().node_id_str))\n if link.end_node().peg():\n raise ValueError('Link {} is not valid - Peg already present in end node {}'.format(link, link.end_node().node_id_str))\n"
}
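`PegPyramid.link_has_valid_jump` above encodes the whole legality rule for a jump in one expression. Restated on its own with plain booleans:

```python
# A jump along a link is legal only when the start and adjacent holes hold
# pegs and the end hole is empty, exactly the all([...]) test in PegPyramid.
def has_valid_jump(start_peg: bool, adjacent_peg: bool, end_peg: bool) -> bool:
    return all([start_peg, adjacent_peg, not end_peg])

assert has_valid_jump(True, True, False)        # peg jumps and lands
assert not has_valid_jump(True, True, True)     # landing hole occupied
assert not has_valid_jump(True, False, False)   # nothing to jump over
```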
] | 10 |
pujaarajan/InsightDataScience | https://github.com/pujaarajan/InsightDataScience | 4422da4586d7cd8ae8db663530320adc2134147b | 8b74ce751c9dbf30d192aec23addace29d2cec6d | 38f8251bdcf3dbe31be7ed1e7e9f3ada87e03703 | refs/heads/master | 2020-04-03T04:55:32.138186 | 2018-10-30T20:01:18 | 2018-10-30T20:01:18 | 155,028,318 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6372239589691162,
"alphanum_fraction": 0.6451104283332825,
"avg_line_length": 43.64788818359375,
"blob_id": "1c4c43705b0a751a716e60bb8baab4cd4fa42865",
"content_id": "679f69997f3b6dcb912c80dcd3abf2b2798bf863",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3171,
"license_type": "no_license",
"max_line_length": 135,
"num_lines": 71,
"path": "/src/data.py",
"repo_name": "pujaarajan/InsightDataScience",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\n\"\"\"Functions to read and write H1B data\"\"\"\n\nimport logging\nimport csv\nfrom collections import Counter, OrderedDict\n\n__author__ = \"Pujaa Rajan\"\n__email__ = \"[email protected]\"\n\n\ndef read_h1b_data(args):\n \"\"\"\n Read H1B data and count the number of certified applications for the input column group\n @param args:\n @return Dictionary of H1B data where the keys are the input column and the values are the counts. This dictionary is\n sorted by decreasing frequency and then alphabetically:\n \"\"\"\n log = logging.getLogger('H1B_data_analysis.data.read_h1b_data')\n log.info('Beginning to read H1B data from {args.input_file}')\n\n try:\n certified_count = Counter()\n\n with open(args.input_file, newline='') as input_file:\n log.info('Opened H1B data input file')\n reader = csv.DictReader(input_file, delimiter=args.delimiter, quoting=csv.QUOTE_MINIMAL, skipinitialspace=True,\n escapechar='\\\\')\n for line in reader:\n if line[args.status_column] == 'CERTIFIED':\n group_certified = line[args.input_column]\n # Increase count by one for each person in the column group who was certified\n certified_count[group_certified] += 1\n log.info('Finished reading in H1B data input file')\n log.info('Sorting the list firstÒ by decreasing frequency and then alphabetically')\n sorted_counts = OrderedDict(sorted(certified_count.items(), key=lambda item: (-item[1], item[0])))\n log.info('Finished counting certified H1B applications')\n return sorted_counts\n except Exception as error:\n log.exception(\n 'Error when reading H1B data and counting the number of certified applications for the input column group!\\nQuitting now.')\n quit()\n\n\ndef write_h1b_data(args, top_certified, total_certified):\n \"\"\"\n Write the H1B data analysis to output files\n @param args:\n @param top_certified:\n @param total_certified:\n @return Nothing is returned. You can find the H1B data analysis is in the output files.:\n \"\"\"\n\n try:\n log = logging.getLogger('H1B_data_analysis.data.write_h1b_data')\n log.info('Beginning to write H1B data output file to {args.output_file}')\n with open(args.output_file, 'w', newline='') as output_file:\n log.info('Opened H1B data output file')\n fieldnames = [args.output_column, 'NUMBER_CERTIFIED_APPLICATIONS', 'PERCENTAGE']\n writer = csv.DictWriter(output_file, fieldnames=fieldnames, delimiter=args.delimiter, skipinitialspace=True)\n writer.writeheader()\n for key, value in top_certified:\n writer.writerow({args.output_column: key, 'NUMBER_CERTIFIED_APPLICATIONS': value,\n 'PERCENTAGE': '{:.1%}'.format(value / total_certified)})\n log.info('Finished writing H1B data output file to {args.output_file}')\n except Exception as error:\n log.exception(\n 'Error when writing the H1B data analysis to output files!\\nQuitting now.')\n quit()\n"
},
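`read_h1b_data` above sorts its tallies first by decreasing count and then alphabetically via a single sort key. The idiom, isolated with made-up state names:

```python
from collections import Counter, OrderedDict

counts = Counter({'OHIO': 2, 'TEXAS': 5, 'ALABAMA': 2})
# Negating the count sorts high-to-low; the key itself breaks ties A-to-Z
ordered = OrderedDict(sorted(counts.items(), key=lambda item: (-item[1], item[0])))
print(list(ordered))  # ['TEXAS', 'ALABAMA', 'OHIO']
```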
{
"alpha_fraction": 0.7149631381034851,
"alphanum_fraction": 0.7260273694992065,
"avg_line_length": 37.73469543457031,
"blob_id": "359b6126a95902b0b27ea76eca800ce33a3f98c6",
"content_id": "8becd9bda2906a1adc56ea952c65db5ca2848e7c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1898,
"license_type": "no_license",
"max_line_length": 136,
"num_lines": 49,
"path": "/src/analysis.py",
"repo_name": "pujaarajan/InsightDataScience",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\n\"\"\"Functions to analyze H1B data\n\nCurrently, the following data analysis functions are supported:\n1. Extract the top N number of certified H1B applications for the input column group\n2. Counting the total number of certified H1B applications in the H1B data input file\n\n\"\"\"\n\nimport logging\nfrom collections import Counter, OrderedDict\n\n__author__ = \"Pujaa Rajan\"\n__email__ = \"[email protected]\"\n\n\ndef count_top_certified(args, certified_count):\n \"\"\"\n Extract the top N number of certified H1B applications for the input column group\n @param certified_count:\n @param arguments:\n @return the top N number of certified H1B applications for the input column group sorted by decreasing frequency and\n then alphabetically:\n \"\"\"\n log = logging.getLogger('H1B_data_analysis.analysis.count_top_certified')\n log.info('Counting the top N number of certified H1B applications for the input column group')\n try:\n return Counter(certified_count).most_common(args.top_n)\n except Exception as error:\n log.exception('Error when extracting the top N number of certified H1B applications for the input column group!\\nQuitting now.')\n quit()\n\n\ndef count_total_certified(certified_count):\n \"\"\"\n Count the total number of certified H1B applications in the H1B data input file\n @param certified_count:\n @return the total number of certified H1B applications in the H1B data input file as an integer:\n \"\"\"\n log = logging.getLogger('H1B_data_analysis.analysis.count_total_certified')\n log.info('Counting the total number of certified H1B applications in the H1B data input file')\n\n try:\n return sum(certified_count.values())\n except Exception as error:\n log.exception('Error when counting the total number of certified H1B applications in the H1B data input file!\\nQuitting now.')\n quit()\n"
},
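`count_top_certified` above takes the dict that `read_h1b_data` already ordered and trims it with `Counter.most_common(n)`. This works because, in CPython, `most_common` relies on a stable sort over the count alone, so the alphabetical tie-break baked into the input's insertion order survives. A small sketch:

```python
from collections import Counter, OrderedDict

# Pre-sorted by (-count, key), as read_h1b_data produces it
pre_sorted = OrderedDict([('TEXAS', 5), ('ALABAMA', 2), ('OHIO', 2)])
print(Counter(pre_sorted).most_common(2))
# [('TEXAS', 5), ('ALABAMA', 2)]: the alphabetically-first tie wins
```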
{
"alpha_fraction": 0.7391048073768616,
"alphanum_fraction": 0.7616523504257202,
"avg_line_length": 77.19078826904297,
"blob_id": "d02d2c5b47ba8aa06986fb5a1aa8ba3aba53d289",
"content_id": "f5c7e04346d06b23561d3343f647597986fd416e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 11888,
"license_type": "no_license",
"max_line_length": 772,
"num_lines": 152,
"path": "/README.md",
"repo_name": "pujaarajan/InsightDataScience",
"src_encoding": "UTF-8",
"text": "# Insight Data Engineering Fellows Assignment\n# H1B Data Analysis\n\n## Table of Contents\n1. [Problem](README.md#problem)\n2. [Approach](README.md#approach)\n3. [Run](README.md#Run)\n4. [Input Data](README.md#input-data)\n5. [Output Data](README.md#output-data)\n6. [Tests](README.md#tests)\n7. [Future Work](README.md#future-work)\n8. [Questions?](README.md#questions?)\n\n## Problem\n\nA newspaper editor was researching immigration data trends on H1B(H-1B, H-1B1, E-3) visa application processing over the past years, trying to identify the occupations and states with the most number of approved H1B visas. She has found statistics available from the US Department of Labor and its [Office of Foreign Labor Certification Performance Data](https://www.foreignlaborcert.doleta.gov/performancedata.cfm#dis). But while there are ready-made reports for [2018](https://www.foreignlaborcert.doleta.gov/pdf/PerformanceData/2018/H-1B_Selected_Statistics_FY2018_Q4.pdf) and [2017](https://www.foreignlaborcert.doleta.gov/pdf/PerformanceData/2017/H-1B_Selected_Statistics_FY2017.pdf), the site doesn’t have them for past years. \n\nThe problem I am trying to solve is:\n1. To create a mechanism to analyze past years data, specifically calculate two metrics: **Top 10 Occupations** and **Top 10 States** for **certified** visa applications.\n2. To reuse the mechanism to analyze data for the year 2019 without needing to change the code.\n\nYou can read more about my solution here in [Approach](README.md#approach).\n\nRequirements:\n2. The README.md should include Problem, Approach, and Run sections.\n4. Project follows specified directory structure.\n1. The code is modular and reusable for future.\n3. The code scales for a large amount of data.\n5. Code is clean, well-tested, well-documented, and well-commented.\n6. The output should only have 10 lines, even when there are ties in the count.\n7. The output should have less than 10 lines if there are fewer than 10 groups of data.\n\n## Approach\n\nWhile each of these steps are in their respective files noted below, the functions are called in this order in h1b_counting.py. i did this because it makes the code more modular, and easy to add to. Using this structure, if someone wanted to add the capability to read in additional data sets, they can add it to the data.py file. Or if they wanted to add more analyses, then they can add it to analysis.py. Here are the steps of my approach:\n\n**1. Set up logging and parse input arguments. See [utilities.py](https://github.com/pujaarajan/InsightDataScience/blob/master/src/analysis.py).**\n\nFirst, I set up useful utilities, including logging and an argument parser, for analyzing H1B data. It is my personal preference to log every step. You see the logging printed to the console and printed to the H1B_data_analysis.log in the main project directory.\n\nThe required and optional input arguments are listed below followed by their rationale.\n\n| Input Parameter | Description | Example | Required? | Default |\n| --- | --- | --- | --- | --- |\n| --input_file | Input file full path | ./h1b_input.csv | Yes | N/A |\n| --output_file | Output file full path | ./top_10_occupations.csv | Yes | N/A |\n| --input_column | Return the top n categories (e.g. 
occupations, states) with the most certified visa applications | ./top_10_occupations.csv | Yes | N/A |\n| --status_column | Input file status column to count number certified | STATUS or CASE_STATUS | Yes | N/A |\n| --output_column | Input file column to group by and analyze | LCA_CASE_SOC_NAME | Yes | N/A |\n| --top_n | Output file column header name | 100 | No | 10 |\n| --delimiter | Delimiter of the input and output file used when reading and writing files | , | No | ; |\n\nI added this because I don't believe it is good practice to hardcode integer values. With this additional argument, we can return any number of results. If you don't specify a value, then it defaults to 10.\ne. - In the future, if we want to read and write using different delimiters, we could add an additional parameter for this. I added this as an optional input argument in case future H1B data files use a different delimiter.\n\nThe column name inputs, top_n, and delimiter options allow the code to be reusable and used year after year even with different file formats. For example, status is not hardcoded. This column is crucial for the analysis because we need this column to know if a person was certified or not. This column was also named differently in the 2014 and the 2015 data sets. Based on that pattern, I extracted the name using regex to just grab the column that says status in it anywhere. All the columns are user inputs because column names can change year to year. There's evidence of this in the provided sample input files in the google drive above. I considered using regex to search for the correct column, but having these be user inputs, ensures the right column is selected.\n\n**2. Read input data and count the number of certified applications for the input column group. See [data.py](README_GIVEN.md#data.py)**\n\nI created a dictionary of H1B data where the keys are the input column and the values are the counts. This dictionary is sorted by decreasing frequency and then alphabetically.\n\n**3. Perform data analysis. See [analysis.py](https://github.com/pujaarajan/InsightDataScience/blob/master/src/analysis.py).**\n\nCurrently, the following data analysis functions are supported:\n1. count_top_certified - Extract the top N number of certified H1B applications for the input column group\n2. count_total_certified - Counting the total number of certified H1B applications in the H1B data input file\n\n**4. Write data. See [data.py](https://github.com/pujaarajan/InsightDataScience/blob/master/src/data.py).**\n\nWrite the H1B data analysis to output files.\n\n## Run\n\n1. Clone Github repo. `git clone https://github.com/pujaarajan/h1b_statistics.git`\n2. Change directory to project folder. `cd h1b_statistics`\n3. Run the help command to understand the input arguments: `python3 ./src/h1b_counting.py --help`\n4. Run this command after replacing variables. 
See below for example commands: `python3 ./src/h1b_counting.py --input_file <var> --output_file <var> --top_n <var> --input_column <var> --status_column <var> --output_column <var>`\n\n### Example Run Command using the data in the input folder\n```shell\npython3 ./src/h1b_counting.py --input_file ./input/h1b_input.csv --output_file ./output/top_10_states.txt --input_column WORKSITE_STATE --status_column CASE_STATUS --output_column TOP_STATES\npython3 ./src/h1b_counting.py --input_file ./input/h1b_input.csv --output_file ./output/top_10_occupations.txt --input_column SOC_NAME --status_column CASE_STATUS --output_column TOP_OCCUPATIONS\n```\n\n### Run Commands for 2014 Input Data\n```shell\npython3 ./src/h1b_counting.py --input_file ./input/H1B_FY_2014.csv --output_file ./output/top_10_occupations.txt --input_column LCA_CASE_SOC_NAME --status_column STATUS --output_column TOP_OCCUPATIONS\npython3 ./src/h1b_counting.py --input_file ./input/H1B_FY_2014.csv --output_file ./output/top_10_states.txt --input_column LCA_CASE_WORKLOC1_STATE --status_column STATUS --output_column TOP_STATES\n```\n\n### Run Commands for 2015 Input Data\n```shell\npython3 ./src/h1b_counting.py --input_file ./input/H1B_FY_2015.csv --output_file ./output/top_10_occupations.txt --input_column SOC_NAME --status_column CASE_STATUS --output_column TOP_OCCUPATIONS\npython3 ./src/h1b_counting.py --input_file ./input/H1B_FY_2015.csv --output_file ./output/top_10_states.txt --input_column WORKSITE_STATE --status_column CASE_STATUS --output_column TOP_STATES\n```\n\n### Run Commands for 2016 Input Data\n```shell\npython3 ./src/h1b_counting.py --input_file ./input/H1B_FY_2016.csv --output_file ./output/top_10_occupations.txt --input_column SOC_NAME --status_column CASE_STATUS --output_column TOP_OCCUPATIONS\npython3 ./src/h1b_counting.py --input_file ./input/H1B_FY_2016.csv --output_file ./output/top_10_states.txt --input_column WORKSITE_STATE --status_column CASE_STATUS --output_column TOP_STATES\n```\n\n## Input Data\n\nRaw data could be found [here](https://www.foreignlaborcert.doleta.gov/performancedata.cfm) under the __Disclosure Data__ tab (i.e., files listed in the __Disclosure File__ column with \".xlsx\" extension). I converted the Excel files into a semicolon-separated (\";\") format and placed them into this Google Drive [folder](https://drive.google.com/drive/folders/1Nti6ClUfibsXSQw5PUIWfVGSIrpuwyxf?usp=sharing). I tested my code on the files provided on the Google Drive.\n\nI also created 6 extra test data files you will see in the insight_testsuite. Each year of data can have different columns, so I made sure to test that my program could handle that in my test data. It's for this reason that column names are required as inputs to the program.\n\n## Output Data\n\nMy program currently creates 2 output files:\n* `top_10_occupations.txt`: Top 10 occupations for certified visa applications\n* `top_10_states.txt`: Top 10 states for certified visa applications\n\nEach line of the `top_10_occupations.txt` file contains fields in this order:\n1. __`TOP_OCCUPATIONS`__: Occupation name associated with an application's Standard Occupational Classification (SOC) code\n2. __`NUMBER_CERTIFIED_APPLICATIONS`__: Number of applications that have been certified for that occupation. An application is considered certified if it has a case status of `Certified`\n3. 
__`PERCENTAGE`__: % of applications that have been certified for that occupation compared to the total number of certified applications regardless of occupation. \n\nEach line of the `top_10_states.txt` file contains these fields in this order:\n1. __`TOP_STATES`__: State where the work will take place\n2. __`NUMBER_CERTIFIED_APPLICATIONS`__: Number of applications that have been certified for work in that state. An application is considered certified if it has a case status of `Certified`\n3. __`PERCENTAGE`__: % of applications that have been certified in that state compared to the total number of certified applications regardless of state.\n\nEach line holds one record and each field on each line is separated by a semicolon (;) by default, but you can change the delimiter used for reading and writing using the optional delimiter input argument.\nPercentages are rounded off to 1 decimal place. For instance, 1.05% should be rounded to 1.1% and 1.04% should be rounded to 1.0%. Also, 1% should be represented by 1.0%.\nThe records in both files are sorted by the __`NUMBER_CERTIFIED_APPLICATIONS`__ field, and in case of a tie, alphabetically by __`TOP_XXXXX`__. \nThere should, however, not be more than 10 lines in each file. In case of ties, only list the top 10 based on the sorting instructions given above. This is tested by test_1.\nDepending on the input, there may be fewer than 10 lines in each file. This is tested by test_2.\n\n## Tests\n\nI created 4 tests to test a variety of cases described below. I found a bug in run_tests.sh. See run_tests.sh lines 65-67.\n\n1. test_1 - Given\n2. your-own-test-1 - Tests if # output rows < 10 when # input data rows < 10\n3. your-own-test-2 - Tests what happens if the input file only has a header row with no data\n4. your-own-test-3 - Tests what happens if the input file is empty\n\nI also manually tested the code using the H1B_FY_2014.csv, H1B_FY_2015.csv, and H1B_FY_2016.csv from the Google Drive. See the Run section for commands.\n\n## Future Work\n\nIf I had more time, then I would work on the following:\n\n1. Add a debug mode. I currently log everything on all runs, which makes the console output lengthy and sometimes hard to follow. \n2. I would log the number of lines in each file that's read in and sample outputs in the console to help debug.\n3. I would create multiple run options so that you can run multiple analyses (e.g. occupations, states) at the same time.\n\n## Questions\n\nContact Pujaa Rajan at [email protected]. \n"
},
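The Output Data section above fixes the rounding rule at one decimal place. That is what the `'{:.1%}'` format code used in `write_h1b_data` produces; a quick check:

```python
# 2 certified applications in a group out of 11 certified overall
print('{:.1%}'.format(2 / 11))  # 18.2%
print('{:.1%}'.format(0.01))    # 1.0%, so whole percents still get one decimal
```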
{
"alpha_fraction": 0.692074716091156,
"alphanum_fraction": 0.7405350804328918,
"avg_line_length": 65.03333282470703,
"blob_id": "4ea8983cacd7da9cd8123dc3f74ad49c4e6f460a",
"content_id": "f61d60c0c895f90f3c97b3b15d081f7d2e77cda2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 1981,
"license_type": "no_license",
"max_line_length": 219,
"num_lines": 30,
"path": "/run.sh",
"repo_name": "pujaarajan/InsightDataScience",
"src_encoding": "UTF-8",
"text": "#!/bin/bash\n#\n# Use this shell script to compile (if necessary) your code and then execute it. Below is an example of what might be found in this file if your program was written in Python\n#\n\n#python3 ./src/h1b_counting.py --help\n\n# Test Data\n\npython3 ./src/h1b_counting.py --input_file ./input/h1b_input.csv --output_file ./output/top_10_occupations.txt --top_n 10 --input_column SOC_NAME --status_column CASE_STATUS --output_column TOP_OCCUPATIONS\npython3 ./src/h1b_counting.py --input_file ./input/h1b_input.csv --output_file ./output/top_10_states.txt --top_n 10 --input_column WORKSITE_STATE --status_column CASE_STATUS --output_column TOP_STATES\n\n# 2014 Input Data\n\n#python3 ./src/h1b_counting.py --input_file ./input/H1B_FY_2014.csv --output_file ./output/top_10_occupations.txt --top_n 10 --input_column LCA_CASE_SOC_NAME --status_column STATUS --output_column TOP_OCCUPATIONS\n\n#python3 ./src/h1b_counting.py --input_file ./input/H1B_FY_2014.csv --output_file ./output/top_10_states.txt --top_n 10 --input_column LCA_CASE_WORKLOC1_STATE --status_column STATUS --output_column TOP_STATES\n\n\n# 2015 Input Data\n\n#python3 ./src/h1b_counting.py --input_file ./input/H1B_FY_2015.csv --output_file ./output/top_10_occupations.txt --top_n 10 --input_column SOC_NAME --status_column CASE_STATUS --output_column TOP_OCCUPATIONS\n\n#python3 ./src/h1b_counting.py --input_file ./input/H1B_FY_2015.csv --output_file ./output/top_10_states.txt --top_n 10 --input_column WORKSITE_STATE --status_column CASE_STATUS --output_column TOP_STATES\n\n# 2016 Input Data\n\n#python3 ./src/h1b_counting.py --input_file ./input/H1B_FY_2016.csv --output_file ./output/top_10_occupations.txt --top_n 10 --input_column SOC_NAME --status_column CASE_STATUS --output_column TOP_OCCUPATIONS\n\n#python3 ./src/h1b_counting.py --input_file ./h1b_statistics/input/H1B_FY_2016.csv --output_file ./output/top_10_states.txt --top_n 10 --input_column WORKSITE_STATE --status_column CASE_STATUS --output_column TOP_STATES\n"
},
{
"alpha_fraction": 0.665847659111023,
"alphanum_fraction": 0.6793611645698547,
"avg_line_length": 25.25806427001953,
"blob_id": "6e98e277560bbdc5328c083328139994e901bc78",
"content_id": "13572886b7cbb41145c63ab921bc2887444e63b8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 814,
"license_type": "no_license",
"max_line_length": 72,
"num_lines": 31,
"path": "/src/h1b_counting.py",
"repo_name": "pujaarajan/InsightDataScience",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\n\"\"\"Main file for analyzing H1B data\"\"\"\n\nimport utilities\nimport logging\nimport data\nimport analysis\nimport os\n\n__author__ = \"Pujaa Rajan\"\n__email__ = \"[email protected]\"\n\nif __name__ == '__main__':\n utilities.logger()\n log = logging.getLogger('H1B_data_analysis.main')\n\n log.info('Beginning to run H1B data analysis code')\n input_args = utilities.parse_arguments()\n\n h1b_data = data.read_h1b_data(input_args)\n\n top_certified = analysis.count_top_certified(input_args, h1b_data)\n total_certified = analysis.count_total_certified(h1b_data)\n\n data.write_h1b_data(input_args, top_certified, total_certified)\n\n log.info('Finished running H1B data analysis code')\n #log.info(f'See log file here: {os.getcwd()}/H1B_data_analysis.log')\n log.info(\"Exiting\")\n"
},
{
"alpha_fraction": 0.661497175693512,
"alphanum_fraction": 0.6644795536994934,
"avg_line_length": 41.44303894042969,
"blob_id": "288ec4356f80feb0809190f2e3bd50d06f38aaec",
"content_id": "1272234964a4f40bf18707defc7096ef19f10ef6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3353,
"license_type": "no_license",
"max_line_length": 128,
"num_lines": 79,
"path": "/src/utilities.py",
"repo_name": "pujaarajan/InsightDataScience",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\n\"\"\"Utilities for analyzing H1B data\"\"\"\n\nimport argparse\nimport logging\nimport sys\n\n__author__ = \"Pujaa Rajan\"\n__email__ = \"[email protected]\"\n\n\ndef logger():\n \"\"\"\n Create and format logger that logs to file and console\n @return None:\n \"\"\"\n logger = logging.getLogger('H1B_data_analysis')\n logger.setLevel(logging.DEBUG)\n # create file handler which logs even debug messages\n fh = logging.FileHandler('H1B_data_analysis.log')\n fh.setLevel(logging.DEBUG)\n # create console handler with the same log level\n ch = logging.StreamHandler(sys.stdout)\n ch.setLevel(logging.DEBUG)\n # create formatter and add it to the handlers\n formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')\n fh.setFormatter(formatter)\n ch.setFormatter(formatter)\n # add the handlers to the logger\n logger.addHandler(fh)\n logger.addHandler(ch)\n logger.info('Finished creating logger')\n\n\ndef parse_arguments():\n \"\"\"\n Define and parse input arguments\n @return: Input arguments\n \"\"\"\n log = logging.getLogger('H1B_data_analysis.utilities.parse_arguments')\n log.info(\"Beginning to parse input arguments\")\n\n parser = argparse.ArgumentParser(\n description='Return the top 10 categories (e.g. occupations, states) with the most certified visa applications')\n\n parser.add_argument('--input_file', required=True, action=\"store\", default=None, help='Input file full path')\n parser.add_argument('--output_file', required=True, action='store', default=None, help='Output file full path')\n parser.add_argument('--input_column', required=True, action=\"store\", default=None,\n help='Input file column to group by and analyze (e.g. LCA_CASE_SOC_NAME, LCA_CASE_WORKLOC1_STATE)')\n parser.add_argument('--status_column', required=True, action=\"store\", default=None,\n help='Input file status column to count number certified (e.g. STATUS, CASE_STATUS)')\n parser.add_argument('--output_column', required=True, action=\"store\", default=None,\n help='Output file column header name (e.g. TOP_STATES, TOP_OCCUPATIONS')\n parser.add_argument('--top_n', required=False, action='store', default=10, type=int,\n help='Return the top n categories (e.g. occupations, states) with the most certified visa applications')\n parser.add_argument('--delimiter', required=False, action='store', default=';',\n help='Delimiter of the input and output file (used when reading and writing files)')\n\n try:\n input_arguments = parser.parse_args()\n\n log.info('Reading input arguments')\n log.info('Input file:{input_arguments.input_file}')\n log.info('Output file: {input_arguments.output_file}')\n log.info('Top N rows: {input_arguments.top_n}')\n log.info('Input column: {input_arguments.input_column}')\n log.info('Status column: {input_arguments.status_column}')\n log.info('Output column: {input_arguments.output_column}')\n log.info('Finished reading input arguments')\n\n return input_arguments\n\n except Exception as error:\n log.exception('Error when parsing input arguments!\\nQuitting now.')\n parser.error(str(error))\n log.error('Quitting')\n quit()\n"
}
] | 6 |
solucoeswebmarcio/textCrawler | https://github.com/solucoeswebmarcio/textCrawler | d6aa1c708e9e33477a708d87472c3c6de40032fa | b2025632f82e94fcf5626ec3d4208adf640579d8 | 4326ef52e2891889ae81ca2ed6240f2ca5d07838 | refs/heads/master | 2020-04-08T06:42:23.264624 | 2018-11-16T10:05:45 | 2018-11-16T10:05:45 | null | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.7637426853179932,
"alphanum_fraction": 0.7754386067390442,
"avg_line_length": 24.787878036499023,
"blob_id": "dcf8f816c740644def82e9e79e318caa429e6ebd",
"content_id": "c04c22682c160c89f5b29ace20f039fc76ffd170",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 855,
"license_type": "no_license",
"max_line_length": 176,
"num_lines": 33,
"path": "/README.md",
"repo_name": "solucoeswebmarcio/textCrawler",
"src_encoding": "UTF-8",
"text": "# textCrawler\n\nInstallation\n\n1. Download and install anaconda\nhttps://www.anaconda.com/download/\n\n2. Clone this repository\n$ git clone https://github.com/rvitorgomes/textCrawler folderName\n\n3. Go to the created folder and check if the requirements are installed\n$ cd folderName\n$ python --version\n$ conda --version\n\n4. Create and activate a new conda environment\n$ conda create -n crawler\n$ activate crawler\n\n4. Para linux\n$ source activate crawler\n\n5. Install the packages\n$ conda install scrapy\n\n6. Check if you have the latest WebDriver for Firefox (geckodriver.exe) inside the project root, otherwise you can download from https://github.com/mozilla/geckodriver/releases\n\n7. Check if you have a output file inside the project root (data.txt)\n\n8. Run some crawler and watch out the magic\n$ scrapy runspider tcc/italian.py\n\n9. Check the output file\n\n\n\n\n"
},
{
"alpha_fraction": 0.7291666865348816,
"alphanum_fraction": 0.75,
"avg_line_length": 31.33333396911621,
"blob_id": "79dd18c2d0290447e612cbab8b1e3c24b42fca0c",
"content_id": "5b58c3d3f119988e1e27de820087b4423faaa3d4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 96,
"license_type": "no_license",
"max_line_length": 56,
"num_lines": 3,
"path": "/tcc/notebooks/start.sh",
"repo_name": "solucoeswebmarcio/textCrawler",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n# encoding: utf-8\nPYTHONIOENCODING=\"utf-8\" PYTHONPATH=../ jupyter notebook"
}
] | 2 |
m0hamed-alaa/CarND-Behavioral-Cloning-P4 | https://github.com/m0hamed-alaa/CarND-Behavioral-Cloning-P4 | 0143007594076ea7c61714858035df3316414a7c | b8174e02eb71cb8bc12797d86a663c689b549cc1 | cd5022f83454e35d1d7d31fb403ecf70a8adb37e | refs/heads/master | 2020-04-02T08:37:58.149760 | 2018-10-27T03:22:29 | 2018-10-27T03:22:29 | 154,253,073 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.7652959823608398,
"alphanum_fraction": 0.7810885906219482,
"avg_line_length": 74.69924926757812,
"blob_id": "0f66bd4824064a97091f5441253c44756099ce1a",
"content_id": "6b14eb5351a36be86fbf0a4e5e33d34da048c607",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 10074,
"license_type": "permissive",
"max_line_length": 1203,
"num_lines": 133,
"path": "/writeup.md",
"repo_name": "m0hamed-alaa/CarND-Behavioral-Cloning-P4",
"src_encoding": "UTF-8",
"text": "# **Behavioral Cloning for self-driving cars** \n\nIn this project, A convolutional neural network is used to clone driving behavior. The model is trained, validated and tested using Keras. The model will output a steering angle to an autonomous vehicle.\n\n---\n\n**The goals / steps of this project are the following:**\n* Use the simulator to collect data of good driving behavior\n* Build, a convolution neural network in Keras that predicts steering angles from images\n* Train and validate the model with a training and validation set\n* Test that the model successfully drives around track one without leaving the road\n* Summarize the results with a written report\n\n\n[//]: # (Image References)\n\n[nvidia]: ./examples/cnn_architecture.png \"Model Visualization\"\n[model]: ./examples/model_summary.png \"Model layers\"\n[center]: ./examples/center_images.png \"center camera images\"\n[l_r]: ./examples/camera_images.png \"camera images\"\n[flip]: ./examples/flipped_images.png \"flipped images\"\n\n\n---\n\n## Rubric Points\n### Here I will consider the [rubric points](https://review.udacity.com/#!/rubrics/432/view) individually and describe how I addressed each point in my implementation. \n\n---\n### Files Submitted \n\n#### 1. Submission includes all required files and can be used to run the simulator in autonomous mode\n\n**This github repository includes the following files:**\n* `model.py` containing the script to create and train the model\n* `drive.py` for driving the car in autonomous mode\n* `model.h5` containing a trained convolution neural network \n* `writeup.md` summarizing the results\n* `autonomous_driving.mp4` which is a video recording of the vehicle driving autonomously around the track for one full lap\n\n\n#### 2. Submission includes functional code\nUsing the Udacity provided simulator and my drive.py file, the car can be driven autonomously around the track by executing \n```sh\npython drive.py model.h5\n```\n\n#### 3. Submission code is usable and readable\n\nThe model.py file contains the code for training and saving the convolution neural network. The file shows the pipeline I used for training and validating the model, and it contains comments to explain how the code works.\n\n### Model Architecture and Training Strategy\n\n#### 1. An appropriate model architecture has been employed\nThe data is cropped to focus on the road section because the car hood and the environment above the horizon may be a distraction for the model then normalized in the model using a Keras `Cropping2D` and `Lambda` layers (clone.py lines 76-77).\nThis is a regression problem as the steering command should be predicted from the road images captured by the front-facing camera.I implemented the nvidia architectire published in this paper [End to End Learning for Self-Driving Cars](https://arxiv.org/pdf/1604.07316.pdf). \nThe model consists of 3 convolution layers with 5x5 kernel sizes , strides of 2 and features maps of 24 , 36 , 48 and `valid` padding (clone.py lines 81-86) , followed by 3 convolutional layers with 3x3 kernel sizes , strides of 1 and features maps of 64 , 64 and `valid` padding (clone.py lines 88-91).The output of the last convolutional layer is flattened and fed into 3 fully-connected layers with hidden units 100 , 50 , 10 and finally a single output node (clone.py lines 95-99).The model includes RELU layers to introduce nonlinearity.\n\n![alt text][nvidia] \n\n#### 2. 
Attempts to reduce overfitting in the model\n\nThe model contains dropout layers in order to reduce overfitting (model.py line 96). \nThe model was trained and validated on different data sets to ensure that the model was not overfitting (model.py line 107). The model was tested by running it through the simulator and ensuring that the vehicle could stay on the track.\n\n#### 3. Model parameter tuning\n\nThe model used an adam optimizer with the default `learning rate = 0.001`. Dropout was used in order to avoid overfitting and, after some fine-tuning, it was set to 0.2, which achieved the best performance.\n\n#### 4. Appropriate training data\n\nTraining data was chosen to keep the vehicle driving on the road. I used a combination of center lane driving and recovering from the left and right sides of the road back to the center by making use of the multiple-camera approach mentioned in the paper. This is a clarification made by Udacity that helps understand it.\n\n>In the simulator, you can weave all over the road and turn recording on and off to record recovery driving. \n>In a real car, however, that’s not really possible. At least not legally.\n>So in a real car, we’ll have multiple cameras on the vehicle, and we’ll map recovery paths from each camera. For example, if you train the model to associate a given image from the center camera with a left turn, then you could also train the model to associate the corresponding image from the left camera with a somewhat softer left turn. And you could train the model to associate the corresponding image from the right camera with an even harder left turn.\n>In that way, you can simulate your vehicle being in different positions.\n>From the perspective of the left camera, the steering angle would be less than the steering angle from the center camera. From the right camera's perspective, the steering angle would be larger than the angle from the center camera.\n\n---\n\n### Model Architecture and Training Strategy\n\n#### 1. Solution Design Approach\n\nThe overall strategy for deriving a model architecture was to build a simple model to verify that it can be designed in Keras, be trained and validated on the camera road images, then used to drive the car in autonomous mode in the car simulator.\n\nMy first step was to use a LeNet5 architecture because it was used in an image recognition task. I thought this model might be a starting point.\n\nIn order to gauge how well the model was working, I split my image and steering angle data into a training and validation set as it helps measure how well the model generalizes to unseen samples. I found that my first model had a high mean squared error on the training set and a high mean squared error on the validation set. This implied that the model was underfitting. \n\nTo combat the underfitting, I modified the model so that it could achieve better performance on both the training and validation sets by preceding the convolutional layers with data processing layers. A cropping layer was added at the beginning of the model to crop the images from top and bottom and eliminate the portions of the scene that may distract the model, in order to focus on the road section which contains more beneficial features to predict the steering angle. After that, a normalization layer was added to normalize the image pixel values to `[-0.5, 0.5]` as it helps the training converge faster. The performance was better. 
I decided to use the center camera images as well as flipping them and reversing the associated angle to remove the bias in the training set towards left turns. Then I implemented the Nvidia architecture which is depicted in the above figure. The training loss was 0.0086 and the validation loss was 0.0094. There are different approaches to teach the car how to recover from the sides of the road back to the center. I decided to make use of the left and right camera images by adding a correction factor to the associated steering angle as follows: <br> \n`left_angle = associated center angle + correction factor` to train the network to steer a little harder to the right<br>\n`right_angle = associated center angle - correction factor` to train the network to steer a little harder to the left\n\nThose additional images are incorporated in the training data and the model was trained on it to achieve a training loss of 0.0162 and a validation loss of 0.0207 at a correction factor of `0.3`. The correction factor was fine-tuned until it reached `0.15`. To avoid overfitting, a dropout layer was used after the flatten layer with a dropout probability of `0.2`.\nThe model was compiled using the Adam optimizer and mean squared error as loss, then it was trained and validated on the training and validation sets using 5 epochs. The model was saved and the `model.h5` file was produced, which stores the network weights. \nThe final step was to run the simulator to see how well the car was driving around track one. It completed track one successfully without leaving the drivable portion of the track surface.\n\nAt the end of the process, the vehicle is able to drive autonomously around the track without leaving the road.\n\n#### 2. Final Model Architecture\n\nThe final model architecture is located in model.py (lines 70-100) and here is a visualization of the architecture.\n\n![alt text][model]\n\n**Total params: 981,819** \n**Trainable params: 981,819** \n**Non-trainable params: 0** \n\n#### 3. Creation of the Training Set & Training Process\n\nI used [the sample driving data](https://d17h27t6h515a5.cloudfront.net/topher/2016/December/584f6edd_data/data.zip) provided by Udacity. It records several driving laps through track one.\n\n![alt text][center]\n\nI also used left and right camera images along with center images to collect more training data and to make the model learn the recovery process.\n\n![alt text][l_r]\n\nTo augment the data set, I also flipped images and angles, thinking that this would remove the bias towards left turns because most turns in track one are left turns. \nFor example, here is an image that has been flipped:\n\n\n![alt text][flip]\n\n\nAfter the collection process, I had 30000 data points.\n\nI finally randomly shuffled the data set and put 20% of the data into a validation set.\nI used data generators for the training set and validation set to reduce memory consumption. \nI used this training data for training the model. The validation set helped determine if the model was over or under fitting. The number of epochs was 5. The batch size was 64. The loss function used was mean squared error because it suits the regression problem. I used an adam optimizer so that manually tuning the learning rate wasn't necessary.\n"
},
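The multiple-camera correction the writeup describes comes down to two lines of arithmetic, applied per frame inside model.py's generator. A sketch with the fine-tuned factor from the writeup:

```python
correction = 0.15                        # tuned value from the writeup

center_angle = -0.05                     # angle recorded with the center image
left_angle = center_angle + correction   # left camera: steer harder right
right_angle = center_angle - correction  # right camera: steer harder left
# left_angle is roughly 0.10, right_angle roughly -0.20
```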
{
"alpha_fraction": 0.6532411575317383,
"alphanum_fraction": 0.6762316226959229,
"avg_line_length": 29.13020896911621,
"blob_id": "28e92729bac703986f8c5c9b149a553f6fd74578",
"content_id": "15ae5b57bc99b2da18e83234504680b926926dbd",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5785,
"license_type": "permissive",
"max_line_length": 192,
"num_lines": 192,
"path": "/model.py",
"repo_name": "m0hamed-alaa/CarND-Behavioral-Cloning-P4",
"src_encoding": "UTF-8",
"text": "# import libraries\n\nfrom scipy import ndimage\nimport numpy as np\nimport csv \nimport matplotlib.pyplot as plt\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.utils import shuffle\n\nfrom keras.models import Sequential\nfrom keras.layers import Flatten , Dense , Conv2D , MaxPooling2D , Activation , Cropping2D , Lambda , Dropout\n\n\n# parse driving_log.csv \n\nsamples = []\nwith open(\"./data/driving_log.csv\") as csv_file :\n\treader = csv.reader(csv_file)\n\tfor line in reader :\n\t\t# exclude column labels\n\t\tif line == ['center', 'left', 'right', 'steering', 'throttle', 'brake', 'speed'] :\n\t\t\tcontinue\n\t\tsamples.append(line)\n\n# divide samples into train_samples and validation_samples\n\ntrain_samples , validation_samples = train_test_split(samples , test_size = 0.2)\n\n# plot sample images \n\ndef plot_samples(samples , label) :\n\t'''\n\tThis function plots sample images from the data set\n\tlabel argument determines whether 'center' or 'center and left and right' or 'center and flipped'\n\timages are visulaized\n\t''' \n\n\tif label == 'center' :\n\t\tf , axes = plt.subplots(3,3 , figsize=(10,6))\n\t\tfor i in range(9):\n\t\t\tidx = np.random.randint(len(samples))\n\t\t\taxes = axes.ravel()\n\t\t\tsource_path = './data/IMG/'\n\t\t\timage_path = source_path + train_samples[idx][0].split('/')[-1]\n\t\t\timage = mpimg.imread(image_path)\n\t\t\taxes[i].imshow(image)\n\t\t\taxes[i].axis('off')\n\t\t\taxes[i].set_title('center')\n\t\tf.subplots_adjust(hspace = 0.001 , wspace= 0.2)\n\t\tf.savefig('./examples/center_images.png')\n\n\telif label == 'all' :\n\t\tf , axes = plt.subplots(3,3 , figsize=(10,6))\n\t\tfor i in range(3):\n\t\t\tidx = np.random.randint(len(samples))\n\t\t\tsource_path = './data/IMG/'\n\t\t\ttitles = ['center' , 'left' , 'right']\n\t\t\tfor j in range(3) :\n\t\t\t\timage_path = source_path + samples[idx][j].split('/')[-1]\n\t\t\t\timage = mpimg.imread(image_path)\n\t\t\t\taxes[i][j].imshow(image)\n\t\t\t\taxes[i][j].axis('off')\n\t\t\t\taxes[i][j].set_title(titles[j])\n\t\tf.subplots_adjust(hspace = 0.001 , wspace= 0.2)\n\t\tf.savefig('./examples/camera_images.png')\n\n\telif label == 'flip' :\n\t\tf , axes = plt.subplots(3,2 , figsize=(10,6))\n\t\tfor i in range(3):\n\t\t\tidx = np.random.randint(len(samples))\n\t\t\tsource_path = './data/IMG/'\n\t\t\tfor j in range(2) :\n\t\t\t\timage_path = source_path + samples[idx][0].split('/')[-1]\n\t\t\t\timage = mpimg.imread(image_path)\n\t\t\t\tflipped_image = np.fliplr(image)\n\t\t\t\timg = flipped_image if (j+1)%2 == 0 else image\n\t\t\t\ttitle = 'flipped' if (j+1)%2 == 0 else 'original'\n\t\t\t\taxes[i][j].imshow(img)\n\t\t\t\taxes[i][j].axis('off')\n\t\t\t\taxes[i][j].set_title(title)\n\t\tf.subplots_adjust(hspace = 0.001 , wspace= 0.2)\n\t\tf.savefig('./examples/flipped_images.png')\n\n\t\t\n\n\tplt.show()\n\n\nplot_samples(train_samples , 'center')\nplot_samples(train_samples , 'all')\nplot_samples(train_samples , 'flip')\n\n# create data generator\n\ndef generator( samples , batch_size=32 ) :\n\tnum_samples = len(samples)\n\t\n\twhile 1: # Loop forever so the generator never terminates\n\t\tshuffle(samples)\n\t\tfor offset in range(0 , num_samples , batch_size) :\n\t\t\tbatch_samples = samples[offset : offset + batch_size]\n\n\t\t\tcorrection=0.15\n\t\t\t\n\t\t\timages = []\n\t\t\tangles = []\n\n\t\t\tfor batch_sample in batch_samples :\n\t\t\t\tsource_path = './data/IMG/'\n\t\t\t\tcenter_image = ndimage.imread(source_path+batch_sample[0].split('/')[-1])\n\t\t\t\tleft_image = 
ndimage.imread(source_path+batch_sample[1].split('/')[-1])\n\t\t\t\tright_image = ndimage.imread(source_path+batch_sample[2].split('/')[-1])\n\t\t\t\t\n\t\t\t\tcenter_angle = float(batch_sample[3])\n\t\t\t\tleft_angle = center_angle + correction\n\t\t\t\tright_angle = center_angle - correction\n\n\t\t\t\tflipped_image = np.fliplr(center_image)\n\t\t\t\tflipped_angle = -center_angle\n\t\t\t\t\n\t\t\t\timages.extend([ center_image , left_image , right_image , flipped_image ])\n\t\t\t\tangles.extend([ center_angle , left_angle , right_angle , flipped_angle ])\n\n\t\t\tX_data = np.array(images)\n\t\t\ty_data = np.array(angles)\n\n\t\t\tyield shuffle(X_data,y_data)\n\n\ntrain_generator = generator(train_samples , batch_size = 64)\nvalidation_generator = generator(validation_samples , batch_size = 64)\n\n# build model architecture\n\nmodel = Sequential()\n\n# data pre-processing layers\n\nmodel.add(Cropping2D(cropping=((50,20),(0,0)) , input_shape=(160,320,3))) # cropping to focus on the road section \nmodel.add(Lambda(lambda x : (x/255.0) - 0.5 )) # normalization\n\n# convolutional layers\n\nmodel.add(Conv2D(filters=24 , kernel_size=5 , strides=2 , padding='valid'))\nmodel.add(Activation('relu'))\nmodel.add(Conv2D(filters=36 , kernel_size=5 , strides=2 , padding='valid'))\nmodel.add(Activation('relu'))\nmodel.add(Conv2D(filters=48 , kernel_size=5 , strides=2 , padding='valid'))\nmodel.add(Activation('relu'))\n\nmodel.add(Conv2D(filters=64 , kernel_size=3 , strides=1 , padding='valid'))\nmodel.add(Activation('relu'))\nmodel.add(Conv2D(filters=64 , kernel_size=3 , strides=1 , padding='valid'))\nmodel.add(Activation('relu'))\n\n# fully_connected layers\n\nmodel.add(Flatten())\nmodel.add(Dropout(0.2))\nmodel.add(Dense(units=100 , activation='relu'))\nmodel.add(Dense(units=50 , activation='relu'))\nmodel.add(Dense(units=10 , activation='relu'))\nmodel.add(Dense(units=1))\n\n# configure the training process\n\nmodel.compile(loss='mse' , optimizer='adam')\n\n# summarize the model\n\nmodel.summary()\n\n# train the model\n\nhistory_object = model.fit_generator(train_generator , steps_per_epoch=len(train_samples)/64 , epochs=5 , validation_data = validation_generator , validation_steps=len(validation_samples)/64 )\n\n\n# loss visualization\n\nplt.plot(history_object.history['loss'])\nplt.plot(history_object.history['val_loss'])\nplt.xlabel('epoch')\nplt.ylabel('mean squared error loss')\nplt.title('loss during training')\nplt.legend(['train_loss' , 'valid_loss'] , loc='upper right')\nplt.savefig('visualizing_loss.png')\n\n# save the model\n\nmodel.save('model.h5')\n\nprint('model saved !')\n"
}
] | 2 |
ericandrewlewis/doe-meal-locator | https://github.com/ericandrewlewis/doe-meal-locator | 940bbbcb7db5e6a1942eea33bb3550c2da2ee2cc | b4853584d13f75055e68a4565436fa988fc5053b | 7a554f2ae8bb80385514a5cc92c662e84cfb82ea | refs/heads/master | 2021-05-25T22:11:34.380252 | 2020-04-08T00:08:46 | 2020-04-08T00:08:46 | 253,941,817 | 0 | 0 | null | 2020-04-08T00:11:00 | 2020-04-06T18:25:21 | 2020-04-06T18:25:19 | null | [
{
"alpha_fraction": 0.6291390657424927,
"alphanum_fraction": 0.6703792810440063,
"avg_line_length": 42.68421173095703,
"blob_id": "e2dd480c780161e3a26712432416480346f4126e",
"content_id": "a6f36a2eca384e4036563135f9600e2cbb508815",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3322,
"license_type": "no_license",
"max_line_length": 239,
"num_lines": 76,
"path": "/scrape.py",
"repo_name": "ericandrewlewis/doe-meal-locator",
"src_encoding": "UTF-8",
"text": "import requests\nfrom bs4 import BeautifulSoup\nimport sys\nimport csv\nimport time\n\nheaders = {\n 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',\n 'Accept-Encoding': 'gzip, deflate, br',\n 'Accept-Language': 'en-US,en;q=0.9',\n 'Cache-Control': 'max-age=0',\n 'Connection': 'keep-alive',\n 'Content-Length': '5426',\n 'Content-Type': 'application/x-www-form-urlencoded',\n 'Cookie': 'ASP.NET_SessionId=4xfa521x31weu1p0zlzibhzs; ASPSESSIONIDAGSDSBBC=KJLFCOEBFOBBFBALIFOCGLLO',\n 'Host': 'www.opt-osfns.org',\n 'Origin': 'https://www.opt-osfns.org',\n 'Referer': 'https://www.opt-osfns.org/schoolfoodny/meals/default.aspx',\n 'Sec-Fetch-Dest': 'document',\n 'Sec-Fetch-Mode': 'navigate',\n 'Sec-Fetch-Site': 'same-origin',\n 'Sec-Fetch-User': '?1',\n 'Upgrade-Insecure-Requests': '1',\n 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.149 Safari/537.36'\n}\n\ndef build_post_data(district_number):\n padded_district_number = str(district_number).zfill(2)\n data = {\n 'ToolkitScriptManager1_HiddenField': ';;AjaxControlToolkit, Version=3.5.40412.0, Culture=neutral, PublicKeyToken=28f01b0e84b6d53e:en-US:47d532b1-93b1-4f26-a107-54e5292e1525:de1feab2:f2c8e708:720a52bf:f9cec9bc:589eaa30:698129cf:7a92f56c',\n '__EVENTTARGET': ' ',\n '__EVENTARGUMENT': ' ',\n '__LASTFOCUS': ' ',\n '__VIEWSTATE': 'zHlmdYsFFFgcckEzQ3juthxS+12TnAKAlZ7jw/lKNADYSMxsQ8XQ76Jifc5w6ZXWWh6QR0Fd68DR67ZLfhGg955Nrq8dU+5hmLjwL65f9zAOc5p9iwqqkW2nU7Kd0uGeR02zkZeErRfIkyYGpkAb6vSnatbSm47JfPNCM6OCFGRRzS7XKDGFdmCAJEXq7y0Fuj4ZOvXFxsCsk7Bw767Hp0N5bRPaem+ML7qnsvFuinUstFTgdUoPVnm9LwdJVSfI7OkHbOw/eee1zdxk+rA9Q0IzKjTzFzpPdWRHeUqf3HecYEImnJZT2/Ku2nBLVg5IhA6utbmAC1KZnplTBIOUL2StCVpCTI/zQpijsfZ0ABhzHdMq8b37RpYDMq/kO/zq7fI7WEQHfnNZwzir4ulHIb9CjRMWomVVHBqE9bV54V4m8adISrxM+ipstDLUddkgvUcQ9hs2L/3bgIdYAXl4v3nuHIYHlqKyfm33dKrsWTR5r5LM8YRJj9OeFoFQ+W3rQeU1Wv9127yDLPO8M8dVqktND+ZGKXL2ejWJTWJN2ZXr+Gh/PM31KHu/aZGg7oSsGShJoAB0XsC58eQL7PreXI85CtF6V1ZbW2ZLT8lKAFBda+wY/wKl1ErpL5en6G63r2PcMVgcIvLzlAqCXPfx62hS1p66ipvm1ZePBcRndEo0quM++SEbRmpA0Ye2MnVWXKcEdLZ7eitnkss07FXS9IZAuT7+fDj7GYhg2gD1LX923wt5NLjU50X1P61GkxKgqm3ZVQ4HvWV0HGwyuTAextYRUxd4ENYWh4/hCCyzwUURte2xeIulUqUFy5+Uh5hJhrcsi8aAJV+x/K65FMVXPnx56PTYK3+8WqJvQlrZ/zVIif4BrV1+PX9y80JR2ovy11YmVI24YQJ5LCHodhl/4AT6M8KzbzazvWCj2EvOJk0MVOqK2V0atm5EC0YneFwC+94bh3tU89FN1VQVGMIm4esF+dSiEGsCC4F1TcaaP/c9VQ1PrDHRi7HUFTEPFBNNJyZ2tSCeDdnEIAgzIELGcY6ZJ4Lce+cQivQ2d3lTeYwPPp7p8OzQtfyJHiCr+Q6EjyrXyE+D8y9ENIiQ/uqhxoTv5RqiejVhXHbcL+y+scuRbURU+v4blLfightM8xE9rdE7caFdfslz2R1Lr+uW5F24ahGkYhoRQSFE31d3l88BigW/4H7i8WeON2JJrFg5uY719xHprkbLt9+tovfWAxeJ5vJn0Yn/genPE1lsZpSJYGpGI34DlGbMKCXu27j/ICUpa3chb2RV+ejtHzDoJ+gcyLs9kZKfVjCv8cXyFsSfFTL7RAzkBZwre6rRXNL8mVHbm5nQEkumW4x6Y6AXDcTebP5Kc/l7DT2YBDXKb1qVYMlYp6C5p5tJd1ZgmQJgtY2jwGNgAnyP18CjFhUC0Dl7EosfvIET5Rhvr68imM4kSSqRQDWBsDJr/aw8QKo/1ZTUZkMnSEX7jsw/KOLfokuAgAG0QtoRD4XKuQTJ1A1K2oXoFN+Q2FeEu6MNjvAEMwM4pF3VojeIvYCt9v7K4SYnrz7cT1DtzVmNmeQO8m6MbILWByPIr8WCrFY5xac/UsaAEtM/qMtzSzB2qhyOAbe+gAnx2liTrc8TzFpO9ZEE7iCPr8S+B59slmilWTwTlIbOUtTWFfMwyX+wRdWAQYusJ733C7gA+97EmrNQw+KcbHYTSBkUyMxt3hHS9myXFEqnmpSg3BwuR8WpIQvNqk/LJM6tJGTKOQpyp+Ojls4B/CKPPhF82L7b1sf1iczQgXFTaiai0f8ADrEQqH1Y+M1ocOg5U/4wjDKdqj4Zu0i3axBWet8R62wC/4+l+StDmtPB34lxUhRyc5yk7Yq4AUZaWkZHfXG1i/6yZTQ1OZB7jaIePSZRtN2nZqY1427ph9+pB2E0tk68dniAjZ18RKpbHeCeNFUbb5qx8rubCelvvgLDdCSh18WmlTPSB+5UugGp0NSp61P6B2bkdDkbNzlQxuH4IJDdseIXkszQjCCEEslrLEV9u2Ao5rMmau67+IEzdUnjkZ5jr9L9oqu0nGT36QvG+ZcTf6JRirj
l2P1UMMExTZdIC1PK4RtAKheAomHJ9wWtpCL0F3q/9c0rH9auo2d/PPQBNIh0LoHDEbqhFbuag1We2rpJBD7UsmOqYV3JQ9x68Pd3c82+DN7913hX+VUxx8cqc4hcA9mh+ipfPsdqxsduUTIqLQ7PqUa+8UaCKfxbeDj+ULD4jlZh9DczPK8oC+ovexAheMZZ39D3CBUtkbgg5JyRgEyC8wFZ1gMOAXh2+HsPX3ZjhinPY63dIrtns84nWriF3PZaE4hdf5Zn9TjJJo99U2Tr5NvvFDlya/j6krdwPHFWH9BGQ43dnjgRRY/Gt13bokSxGUzbwT6D/yU+3NrcPkErOalfo0u/3JPPP7YVZy6bMYAFosLdChd5xd/dtDcDCnHVczzpvXyChbsgFMw/T22fvg6hqrbt60n2xgxrNgoPUhHoMgd0PzmKpyG4AuIpeFYbhPoZYdw/LZkC+kZSHgg4BD7eUruLn/+rjN8nPmiry9y21QwgwScpB2Bwg1d8yUP1HM6hQj/FVTTEzJpz3Le/3p3tSDaP72vXdYMXeZ5KdPBw9dvgfjbft2ZJW/uFixW0PWme5QuKY//vCQuE31k8fZWeNF7xAnfsNSSZAkHJUbc6rJoln8qndSWJcVgQHQc1oDoaKYQmF+pTTqGpgaViLoFaGC4OXohXkxb5oa6Z+d5xJGBwwU7Pn2zuxwDVRDD9HwGAyPzPNkleVxEVHL04EVz+fGdGZTMPY/1vZXesLMUYIN/ax9Ue5xt0hKo0sq8Mp+Oa69npxyT2eKqh36KuLon5rMAOiPnMHg22rt+RP8u69njXcP5ArwfeiEHEe5NpLYGhbGqmJqzIlfCftcS+MTWekC8k1qH3LBuIAhstnVaJRudc/aJ+GQNBbhyXXM3oQXjLBSXbWZO+XnOXzOQ7VqUJh8xIuStXPhQWQqKsNFwDIWXRBFyYcgiRTeAfA+zUKgNPOn7DjIHCybCG5TmhuV8yq/QTxXmX1W+LbyEHtijQ4DxfGykQZIL9ZqjxTJQCPy0aBvN248SKQGIbdTNMeSs/REE4Tkx6A1a96rUKf7c0fD7eGlq4oi1TIJi1HtiFG90ZILzz7Xh/yAZoSa1jSNK++aCQffGJkRIrru9TpQMmCuJb1z0cGWXUuf5fQ3ONgxQOfKJk',\n '__VIEWSTATEGENERATOR': '025A2AC0',\n '__VIEWSTATEENCRYPTED': ' ',\n '__EVENTVALIDATION': 'akeMF/0JgNik+L2VSPmhcIxXQd6VfknGCktfR4sjH8dAhlHCZcHXwV89gkXd6XFZ0pZPBGhL2t4iI1ZV/efLrmIeP3MucSSxbu+RN3GgOy+eixU+4JVr2xeZzB7hQSvn9ev62s8nJlf51zwhhxDnh/8qZODQ5tqHP0odqCmlNSDXssdeldAvWwBFCGAMtHfxsocJnvjD1paB0Q8Lra2wEWW2S42FViXWYKqeiR35VxtifswNw9g85N36ZT/0AS/mR8aEYVAl6GrDQpjFHfiLCOJeA++JeEdjUivA7/U32yJkHF0sR8+3+O191Mzyt8VbvFfnb/mKPQZ+dl5/SdYT/A2OTVrNyLrQPzNCR29/rn6NRH62Mor3YFhUIMGGKFbb61+gGqW5u3JJED6sIIoRlnpGfCTzmprMXJi5sVpo+WoHMMG9KkaGDL0tEqZr+4Vt+zTjami3wI+RSgOzs92oonL08BfK5aRLTHhvhuJ1sASZXi0uofESRcfTWno/TpL+uJIfi6Os2wjfNfBXpwSEDmsMy0QPzy87S5pvP46jNUnPKguuBzHbiA0iLMV2p3XJmxy695CvRta5fuDNq8g11imbOfr5eK3VhB17z5pZyguX9NHCCmGzLH/2qoe8YDmWiQ5GVhD2BiYTza0zOXjQNQUB5T2zYf846YBt28qKS1CUDkCpUR8uqMOHjfQmNt1ZRyJAoCo4H9VGFEZY/2h7S/co7QFvWfVcsSCE4RdNjKTKdcmwnlpdDVFHpb3G3VgXYwC3XQ5oIWCWc+wQhzpl74nXAdiPK3R7+QXz6VpuzhMF07nri4o5nYQG7JPfZimmQMdNOCBK72PYPlrIJ62+2wpy+tLvapUQogatyWj7US1chPObU+tKaOwK8Zb4y/vJrwenglIiRqrBMPKUVs4dk6TX+w8WFur7NEk59wREb+BPBWhlM9a2nmaRm3F1/tkvQr4iyO3mGNyGpoG0sy77zmYbRCdzT1O0rrvIX7tU9ko1o1ujLK/pydTKUoGd7eogx1XDbGCSKdbgwo1IOckgddTV0btMmuVg7Fy8iYMGFcqwsuuIwDTwJZLE9lAt1oTCTuBnwumiRZD1COQNVncMPUtEnkq80ttowWDQtvGiRmhbBpsELssVCgVh+so/ubx5ar3P9KdloWs9TW0H8hlF8C6mQcv0kPJabuF94oPNeovOsfBDtugbDQHIikUUrJ2QGswgLiWaS4mxm9rS+WYiM32QnXGoM4S1i/3BqvJ5wzgfxgPQHY4udSuALZWAKWESLdaDkDcoR2zPl931TfyYSjKlfcQqH55lAEzFW3z+M4FOArZyRbqHfiD0tDk=',\n 'ddlBoro': '0',\n 'ddlDistrict': padded_district_number,\n 'txtSchool': '',\n 'ddlMile': '0',\n 'btnGo': 'Search',\n 'ddlRecords': '20',\n 'hiddenInputToUpdateATBuffer_CommonToolkitScripts': '1'\n }\n return data\n\nurl = \"https://www.opt-osfns.org/schoolfoodny/meals/default.aspx\"\n\n# An array of the addresses extracted from the DOE website.\naddresses = []\n\n# Loop through each school district number, and get the addresses of each location.\n# TODO: The DOE page displays the first 20 locations in the district on the webpage.\n# If there are more than 20, there are pagination links to view more locations.\n# Currently this code only scrapes the first 20 locations from each district.\n# We should update it to walk through the pages of each district.\nfor district_number in range(1, 33):\n # Build the HTTP post data for the request.\n post_data = build_post_data(district_number)\n # Get the response from the DOE website.\n response = 
requests.post(url, data=post_data)\n # Parse the HTML.\n soup = BeautifulSoup(response.text, 'html.parser')\n # Find the anchor tags which contain the meal pick-up location address.\n for tr in soup.find_all('tr', attrs={\"style\": \"background-color:White;\"}):\n anchor = tr.find('a')\n address_text = anchor.text.strip()\n addresses.append(address_text)\n time.sleep(1)\n\n# Write the addresses to a CSV file.\nwith open('addresses.csv', mode='w') as addresses_file:\n addresses_writer = csv.writer(addresses_file, delimiter=',', quotechar='\"', quoting=csv.QUOTE_MINIMAL)\n for address in addresses:\n addresses_writer.writerow([address])\n\n\n"
}
] | 1 |
niloo9876/marow | https://github.com/niloo9876/marow | 2aeb86a9fcbbeda4d25ddb58536d7fa3317c80db | 873ee6ab76a37219ded3ab2ce26ba537b6992468 | 1179a5e8ecf5e8eb92f035a404d5861f58c3e10c | refs/heads/master | 2023-04-18T06:22:20.313239 | 2021-05-01T17:10:31 | 2021-05-01T17:10:31 | null | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.567693293094635,
"alphanum_fraction": 0.5773727297782898,
"avg_line_length": 34.47321319580078,
"blob_id": "9bc30853de27edfc6bb9772ad88bec64be7d6042",
"content_id": "ef390107b5af2dae289add4fa44b7af3b2e7a316",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 7955,
"license_type": "permissive",
"max_line_length": 117,
"num_lines": 224,
"path": "/functions/marla_mapper_CH.py",
"repo_name": "niloo9876/marow",
"src_encoding": "UTF-8",
"text": "# MARLA - MApReduce on AWS Lambda\n# Copyright (C) GRyCAP - I3M - UPV \n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License. \n\nimport boto3\nimport os\nimport json\nimport math\nimport user_functions\nimport hashlib\n\ndef handler(event, context):\n #extract filename and the partition number\n FileName = event[\"FileName\"]\n NodeNumber = int(event[\"NodeNumber\"])\n TotalNodes = int(event[\"TotalNodes\"])\n ChunkSize = int(event[\"ChunkSize\"])\n FileSize = int(event[\"FileSize\"])\n KeyIn = event[\"KeyIn\"]\n\n #load environment variables\n BUCKET = str(os.environ['BUCKET'])\n BUCKETOUT = str(os.environ['BUCKETOUT'])\n PREFIX = str(os.environ['PREFIX'])\n NREDUCERS = int(os.environ['NREDUCERS'])\n\n if NodeNumber == 0:\n launcherNodes = 1 # Only this mapper is launching at this time\n else:\n launcherNodes = int(pow(2,int(math.log(NodeNumber,2))+1)) # Calculate the number of nodes launching mappers\n\n # The first node identifier to launch in this iteration is equal to \"launcherNodes\"\n # because index begins by 0. Each launcher node will launch his position beginning\n # in this position.\n myNextLaunch = launcherNodes + NodeNumber\n\n while myNextLaunch < TotalNodes:\n\n #launch lambda function mapper\n payload = {}\n payload[\"FileName\"]=str(FileName)\n payload[\"NodeNumber\"]=str(myNextLaunch)\n payload[\"TotalNodes\"]=str(TotalNodes)\n payload[\"ChunkSize\"]=str(ChunkSize)\n payload[\"FileSize\"]=str(FileSize)\n payload[\"KeyIn\"]=str(KeyIn)\n lambda_client = boto3.client('lambda')\n response_invoke = lambda_client.invoke(\n ClientContext='ClusterHD-'+BUCKET,\n FunctionName='HC-'+PREFIX+'-lambda-mapper',\n InvocationType='Event',\n LogType='Tail',\n Payload=json.dumps(payload),\n )\n # In each iteration, the number of launcher nodes\n # will be multiplied by 2\n launcherNodes = 2*launcherNodes\n myNextLaunch = launcherNodes + NodeNumber\n \n #download partition from data file\n bucketIn = BUCKET\n s3_client = boto3.client('s3')\n\n #calculate the partition range\n initRange = NodeNumber*ChunkSize\n limitRange = initRange + ChunkSize - 1\n if NodeNumber == TotalNodes-1:\n limitRange = FileSize\n \n chunkRange = 'bytes=' + str(initRange) + '-' + str(limitRange)\n obj = s3_client.get_object(Bucket=bucketIn, Key=KeyIn, Range=chunkRange)\n\n print(\"donwloaded partition {0} from {1}/{2}\".format(NodeNumber ,bucketIn, KeyIn))\n print(\"range {0}-{1}\".format(initRange ,limitRange))\n chunk = obj['Body'].read().decode('utf-8')\n del obj\n\n if NodeNumber > 0:\n #delete first line until '\\n' (inclusive)\n chunk=chunk.split('\\n', 1)[-1]\n \n if NodeNumber < TotalNodes-1:\n #download next text until '\\n'\n #calculate the size of extra text\n linelen = chunk.find('\\n')\n if linelen < 0:\n print(\"\\ n not found in mapper chunk\")\n return\n extraRange = 2*(linelen+20)\n initRange = limitRange + 1\n limitRange = limitRange + extraRange\n \n while limitRange < FileSize:\n chunkRange = 'bytes=' + str(initRange) + '-' + str(limitRange)\n obj = s3_client.get_object(Bucket=bucketIn, 
Key=KeyIn, Range=chunkRange)\n\n extraChunk = obj['Body'].read().decode('utf-8')\n posEndLine = extraChunk.find('\\n')\n #check if end of line is found\n if posEndLine != -1:\n #add extra text until '\\n' and exit from loop\n chunk = chunk + extraChunk[:posEndLine+1]\n break\n else:\n #save downloaded text and continue with next iteration\n chunk = chunk + extraChunk\n initRange = limitRange\n limitRange = limitRange + extraRange\n\n ##################\n ####### USER MAPPING #######\n ##################\n\n Pairs = user_functions.mapper(chunk)\n\n ############################\n\n del chunk\n # I'm not sure if this is necessary ...\n # Convert to string\n #Pairs = list(map(lambda pair:(str(pair[0]),str(pair[1])), Pairs))\n # Sort Pairs for name\n Pairs.sort()\n\n #################\n ####### USER REDUCE #######\n #################\n\n Results = user_functions.reducer(Pairs)\n del Pairs\n\n ###########################\n\n Results.sort()\n \n #upload results\n\n results = \"\"\n ASCIIinterval = (130-32)/NREDUCERS\n ASCIIlimit = ASCIIinterval+32\n ASCIInumInterval = 0 # Actual interval\n partialKey = PREFIX + \"/\" + FileName + \"/\"\n for name, value in Results:\n\n # take ASCII value of first \"name\" character\n ASCIIval = ord(str(name)[0])\n\n if ASCIIval >= 999:\n #Invalid data\n print(\"Name: \" + str(name) + \" out of range\")\n continue\n \n # Add pairs to results until ASCII limit has been reached\n if ASCIIval < ASCIIlimit:\n results += \"{0},{1}\\n\".format(name, value)\n else:\n while ASCIIval >= ASCIIlimit:\n print(\"ASCII group \" + str(ASCIInumInterval) + \" (\" + str(name) +\")\")\n # Upload results\n resultsKey = partialKey + str(ASCIInumInterval) + \"_\" + str(NodeNumber)\n\n # Add a prefix with his hash to maximize\n # S3 pefrormance.\n resultsKey = str(hashlib.md5(resultsKey.encode()).hexdigest()) + \"/\" + resultsKey\n s3_client.put_object(Body=results,Bucket=BUCKETOUT,Key=resultsKey)\n \n # Update ASCII interval\n ASCIIlimit +=ASCIIinterval\n\n # Last ASCII interval chunk will contain\n # all extended characters too\n if ASCIIlimit > 126:\n ASCIIlimit = 999\n \n # Clear results\n results = \"\"\n ASCIInumInterval += 1\n results += \"{0},{1}\\n\".format(name, value)\n \n\n # Create remaining interval files\n for i in range (ASCIInumInterval,NREDUCERS):\n print(\"ASCII group \" + str(i) + \" (remaining)\")\n # Upload results\n resultsKey = partialKey + str(i) + \"_\" + str(NodeNumber)\n\n # Add a prefix with his hash to maximize\n # S3 pefrormance.\n resultsKey = str(hashlib.md5(resultsKey.encode()).hexdigest()) + \"/\" + resultsKey\n \n s3_client.put_object(Body=results,Bucket=BUCKETOUT, Key=resultsKey)\n results = \"\"\n \n #check if this is the last partition.\n launcher = TotalNodes-1\n if TotalNodes > 1:\n launcher = TotalNodes-2\n if NodeNumber == launcher:\n #launch first lambda functions reducer\n print(\"launching tester reducer function\")\n lambda_client = boto3.client('lambda')\n payload = {}\n payload[\"Invocation\"] = '0'\n payload[\"ReducerNumber\"] = '-1'\n payload[\"FileName\"]=str(FileName)\n payload[\"TotalNodes\"]=str(TotalNodes)\n response_invoke = lambda_client.invoke(\n ClientContext='ClusterHD-'+BUCKETOUT,\n FunctionName='HC-'+PREFIX+'-lambda-reducer',\n InvocationType='Event',\n LogType='Tail',\n Payload=json.dumps(payload),\n )\n \n"
},
{
"alpha_fraction": 0.586899995803833,
"alphanum_fraction": 0.5949397087097168,
"avg_line_length": 36.75893020629883,
"blob_id": "32c58ce0822bcf94d93a9b7111f58183ee2922bc",
"content_id": "a5787fad615ecd38ea36b1a5092bfe56a07763e5",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4229,
"license_type": "permissive",
"max_line_length": 120,
"num_lines": 112,
"path": "/functions/coordinator.py",
"repo_name": "niloo9876/marow",
"src_encoding": "UTF-8",
"text": "# MARLA - MApReduce on AWS Lambda\n# Copyright (C) GRyCAP - I3M - UPV \n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License. \n\nimport boto3\nimport os\nimport json\n\ndef handler(event, context):\n for record in event['Records']:\n #extract bucket and key name\n bucket = record['s3']['bucket']['name']\n key = record['s3']['object']['key'] \n event = record['eventName']\n\n memoryLimit = 0.30\n \n #check if event type is \"ObjectCreated\"\n if event.find(\"ObjectCreated:\") != 0:\n print(\"not ObjectCreated event\")\n return\n\n print(\"Bucket = \" + bucket)\n print(\"Key = \" + key)\n \n #load environment variables\n BUCKET = str(os.environ['BUCKET'])\n BUCKETOUT = str(os.environ['BUCKETOUT'])\n PREFIX = str(os.environ['PREFIX'])\n MAPPERNUMBER = int(os.environ['MAPPERNUMBER'])\n MINBLOCKSIZE = int(os.environ['MINBLOCKSIZE'])\n MAXBLOCKSIZE = int(os.environ['MAXBLOCKSIZE'])\n MEMORY = float(os.environ['MEMORY'])*1048576.0\n \n #check bucket and prefix\n if bucket != BUCKET:\n print(\"wrong bucket\")\n return\n if key.find(PREFIX) != 0:\n print(\"wrong key\")\n return\n #check if the key have only 1 slash (/)\n if key.find('/') != key.rfind('/'):\n print(\"this file is in a folder\")\n return\n\n #Extract file name\n filename = os.path.splitext(os.path.basename(key))[0]\n \n #check file size\n lambda_client = boto3.client('lambda')\n s3_client = boto3.client('s3')\n response = s3_client.head_object(Bucket=BUCKET, Key=key)\n fileSize = response['ContentLength']\n print(\"FileSize = \" + str(fileSize))\n\n #Calculate the chunk size\n chunkSize = int(fileSize/MAPPERNUMBER)\n numberMappers = MAPPERNUMBER\n if chunkSize < MINBLOCKSIZE:\n print(\"chunk size to small (\" + str(chunkSize) + \" bytes), changing to \" + str(MINBLOCKSIZE) + \" bytes\")\n chunkSize = MINBLOCKSIZE\n numberMappers = int(fileSize/chunkSize)+1\n\n #Ensure that chunk size is smaller than lambda function memory\n secureMemorySize = int(MEMORY*memoryLimit)\n if chunkSize > secureMemorySize:\n print(\"chunk size to large (\" + str(chunkSize) + \" bytes), changing to \" + str(secureMemorySize) + \" bytes\")\n chunkSize = secureMemorySize\n numberMappers = int(fileSize/chunkSize)+1\n\n if MAXBLOCKSIZE > 0:\n if chunkSize > MAXBLOCKSIZE:\n print(\"chunk size to big (\" + str(chunkSize) + \" bytes), changing to \" + str(MAXBLOCKSIZE) + \" bytes\")\n chunkSize = MAXBLOCKSIZE\n numberMappers = int(fileSize/chunkSize)+1\n \n print(\"Using chunk size of \" + str(chunkSize) + \" bytes, and \" + str(numberMappers) + \" nodes\")\n\n\n #create a dummy file in output folder\n keyDummy= PREFIX + '/' + filename + '/dummy'\n s3_client.put_object(Body=str(numberMappers),Bucket=BUCKETOUT, Key=keyDummy) \n \n #launch first mapper\n payload = {}\n payload[\"FileName\"]=str(filename)\n payload[\"NodeNumber\"]=str(0)\n payload[\"TotalNodes\"]=str(numberMappers)\n payload[\"ChunkSize\"]=str(chunkSize)\n payload[\"FileSize\"]=str(fileSize)\n payload[\"KeyIn\"]=str(key)\n response_invoke = lambda_client.invoke(\n 
ClientContext='ClusterHD-'+BUCKET,\n FunctionName='HC-'+PREFIX+'-lambda-mapper',\n InvocationType='Event',\n LogType='Tail',\n Payload=json.dumps(payload),\n ) \n \n return\n"
},
{
"alpha_fraction": 0.5889186859130859,
"alphanum_fraction": 0.5969615578651428,
"avg_line_length": 32.878787994384766,
"blob_id": "7df715ae08107b1efdb6633a0010c466ccacaf73",
"content_id": "76869dacea58930984cb56ae16bac57ddebb33ea",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1119,
"license_type": "permissive",
"max_line_length": 82,
"num_lines": 33,
"path": "/example/example_functions.py",
"repo_name": "niloo9876/marow",
"src_encoding": "UTF-8",
"text": " \ndef mapper(chunk):\n \"\"\"\n The mapper function: process the raw text and returns the pairs name-value.\n Args:\n - chunk(str): the raw text from data file\n Return(list of tuples): a list of 2D tuples with the pairs name-value. \n \"\"\"\n Pairs = []\n for line in chunk.split('\\n'):\n data = line.strip().split(\",\")\n if len(data) == 6:\n zip_code, latitude, longitude, city, state, country = data\n Pairs.append((str(country), 1))\n return Pairs\n\ndef reducer(Pairs):\n \"\"\"\n The reducer function: reduces the Pairs.\n Args:\n - Pairs(list of tuples): a sorted list of 2D tuples with the pairs name-value.\n Return(list of tuples): a list of 2D tuples with the pairs name-value. \n \"\"\"\n Results = []\n actualName = None\n resultsIndex = -1\n for name, value in Pairs:\n if actualName != str(name):\n actualName = str(name)\n Results.append([str(name),int(value)])\n resultsIndex = resultsIndex + 1\n else:\n Results[resultsIndex][1] = int(Results[resultsIndex][1]) + int(value)\n return Results"
},
{
"alpha_fraction": 0.7526488304138184,
"alphanum_fraction": 0.7611250281333923,
"avg_line_length": 45.75675582885742,
"blob_id": "fcd8a5a3acd85e4e4b6008a9799126c03c5fb71c",
"content_id": "54a189568e88bf439c2b6342f7fa5014e8ee8197",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 5193,
"license_type": "permissive",
"max_line_length": 299,
"num_lines": 111,
"path": "/README.md",
"repo_name": "niloo9876/marow",
"src_encoding": "UTF-8",
"text": "# MARLA - MApReduce on AWS Lambda\n\nMARLA is a tool to create and configure a serverless MapReduce processor on AWS by means of a set of Lambda functions created on AWS Lambda. Files are uploaded to Amazon S3 and this triggers the execution of the functions using the user-supplied Mapper and Reduce functions.\n\n# Architecture\n\n\n# Installation\n\nMARLA requires:\n\n* An AWS account\n* AWS CLI (version 1.11.76+), used to create the Lambda functions and S3 buckets\n* An IAM Role on AWS with permissions to create, delete and list keys on the used S3 buckets and permissions to invoke Lambda functions. See an example of such an IAM role in the [examples/iam-role.json](examples/iam-role.json) file.\n\nThe code of the Lambda functions and user-defined Mapper and Reduce functions is written in Python. \n\nMARLA can be retrieved by issuing this command:\n\n `git clone https://github.com/grycap/marla`\n\n# Usage\n\nFirst you need to create your own Mapper and Reduce functions in the same file (as shown in the [example/example_functions.py](example/example_functions.py) file). \n\nThis functions must satisfy some constraints, explained below.\n\n## Mapper Function\n\nThe mapper function must adhere to the following signature:\n\n `def mapper(chunk):`\n \nwhere `chunk` is the raw text from the input file to be mapped..\n\nAfter executing the mapper function returns the name-value pairs respectively. That is, a list of 2D tuples with the pairs name-value (`Pair[i][0]` correspond to the name of the element `i`, `Pairs[i][1]` correspond to the value of the element `i`) extracted in the mapper function.\n \n \n## Reducer Function\n\nThe reducer function must adhere to the following signature:\n \n `def reducer(Pairs):`\n \nwhere `Pairs` is a list of 2D tuples with the pairs name-value (in the same format of the mapper function) extracted in the mapper function. `Pairs` is sorted alphabetically by names. \n \n After executing the reduce function returns a list of name-value pairs (`Results[i][0]` correspond to the name of the element `i`, `Results[i][1]` correspond to the value of the element `i`).\n \n \n## Configuration\n \n In addition to the aforementioned functions, the user must specify some parameters in a configuration file. This configuration file must follow the structure of the provided example [examples/config.in](examples/config.in). The order of the keys is not important and its meaning is explained here: \n \n * ClusterName: An identified for this \"Lambda cluster\".\n \n * FunctionsDir: The directory containing the file that defines the Mapper and Reduce functions.\n \n * FunctionsFile: The name of the file with the Mapper and Reduce functions.\n \n * Region: The AWS region where the AWS Lambda functions will be created.\n \n * BucketIn: The bucket for input files. It must exist.\n \n * BucketOut: The bucket for output files. We strongly recommend using different buckets for input and output to avoid unwanted recursions.\n \n * RoleARN: The ARN of the role under which the Lambda functions will be executed.\n \n * MapperNodes: The desired number of concurrent mapper functions.\n \n * MinBlockSize: The minimum size, in KB, of text that every mapper will process.\n \n * MaxBlockSize: Maximum size, in KB, of text that every mapper will process.\n \n * KMSKeyARN: The ARN of KMS key used to encript environment variables. (Optional)\n \n * MapperMemory: The memory of the mapper Lambda functions. 
The maximum text size to process by every Mapper will be restricted by this amount of memory.\n \n * ReducerMemory: The memory of the reduce Lambda functions.\n \n * TimeOut: The elapsed time for a Lambda function to run before terminating it.\n \n * ReducersNumber: Number of reducers to use\n \n \n## Creating and Processing the Data\n \n Once fulfilled the previous steps, assumming that you modified the `config.in` file in the `example` directory, issue:\n\n `$ sh marla_create.sh example/config.in`\n \n where `config.in` is the path to the configuration file. \n \n The script will create and configure the Lambda functions and add permissions to the S3 buckets. If the script finishes successfully, you will find a folder with the cluster name in the bucket specified in configuration file, such as this one: `BucketIn/ClusterName`\n \nEvery file you upload in this folder will be processed via MapReduce. The output of the MapReduce process will be stored in the `BucketOut` S3 bucket in the following path: `BucketOut/ClusterName/NameFile/results`\n\nwhere `NameFile` is the name of the uploaded input file without the extension (for example .txt) and \"results\" is the file with the MapReduce results.\n\n## Deleting\n\nTo remove a \"Lambda cluster\", use the script \"marla_remove.sh\" with the name of \"cluster\"\n\n`$ sh marla_remove.sh ClusterName`\n\nThis will remove all the created Lambda functions, but not the files in S3.\n\n## Acknowledgement \nPlease acknowledge the use of MARLA by citing the following publication:\n```\nGiménez-Alventosa, V., Moltó, G., Caballer, M., 2019. A framework and a performance assessment for serverless MapReduce on AWS Lambda. Futur. Gener. Comput. Syst. 97, 259–274. https://doi.org/10.1016/j.future.2019.02.057\n```\n\n"
},
{
"alpha_fraction": 0.6583407521247864,
"alphanum_fraction": 0.6724235415458679,
"avg_line_length": 37.48136520385742,
"blob_id": "b0c11d2e390876afd8254b994177a8d5747ad200",
"content_id": "034622acb4b2670e331403d3d6f128c21785d4ab",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 24782,
"license_type": "permissive",
"max_line_length": 274,
"num_lines": 644,
"path": "/marla_create.sh",
"repo_name": "niloo9876/marow",
"src_encoding": "UTF-8",
"text": "#!/bin/bash\n\n# MARLA - MApReduce on AWS Lambda\n# Copyright (C) GRyCAP - I3M - UPV \n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License. \n\n\nif [ $# -ne 1 ]; then\n echo \"Usage: $0 config.in\"\n exit -1\nfi\n\nCONFIGFILE=$1\n\nmissingParameter=\"false\"\n\n#check if configuration file exists\nif [ ! -f $CONFIGFILE ]; then\n echo -e \"\\e[31mConfiguration file not found!\\e[39m\"\n exit 1\nfi\n\nCLUSTERNAME=`sed -n 's/^ClusterName:[[:space:]]*//p' $CONFIGFILE`\nFUNCTIONSDIR=`sed -n 's/^FunctionsDir:[[:space:]]*//p' $CONFIGFILE`\nFUNCTIONSFILE=`sed -n 's/^FunctionsFile:[[:space:]]*//p' $CONFIGFILE`\nREGION=`sed -n 's/^Region:[[:space:]]*//p' $CONFIGFILE`\nBUCKETIN=`sed -n 's/^BucketIn:[[:space:]]*//p' $CONFIGFILE`\nBUCKETOUT=`sed -n 's/^BucketOut:[[:space:]]*//p' $CONFIGFILE`\nROLE=`sed -n 's/^RoleARN:[[:space:]]*//p' $CONFIGFILE`\nMAXMAPPERNODES=`sed -n 's/^MapperNodes:[[:space:]]*//p' $CONFIGFILE`\nMINBLOCKSIZE=`sed -n 's/^MinBlockSize:[[:space:]]*//p' $CONFIGFILE`\nMINBLOCKSIZE=$(($MINBLOCKSIZE*1024))\nMAXBLOCKSIZE=`sed -n 's/^MaxBlockSize:[[:space:]]*//p' $CONFIGFILE`\nMAXBLOCKSIZE=$(($MAXBLOCKSIZE*1024))\nKMSKEYARN=`sed -n 's/^KMSKeyARN:[[:space:]]*//p' $CONFIGFILE`\nMAPPERMEMORY=`sed -n 's/^MapperMemory:[[:space:]]*//p' $CONFIGFILE`\nREDUCERMEMORY=`sed -n 's/^ReducerMemory:[[:space:]]*//p' $CONFIGFILE`\nTIMEOUT=`sed -n 's/^TimeOut:[[:space:]]*//p' $CONFIGFILE`\nNREDUCERS=`sed -n 's/^ReducersNumber:[[:space:]]*//p' $CONFIGFILE`\n\nUSEKMSKEY=0\n\necho \"----Parameter list----\"\n#check if some parameter is missing\nif [[ $CLUSTERNAME = *[!\\ ]* ]]\nthen\n echo \"ClusterName: $CLUSTERNAME \"\nelse\n echo -e \"\\e[31mMissing 'ClusterName:' in configuration file \\e[39m\"\n missingParameter=\"true\"\nfi\n\nif [[ $FUNCTIONSDIR = *[!\\ ]* ]]\nthen\n echo \"FunctionsDir: $FUNCTIONSDIR \"\nelse\n echo -e \"\\e[31mMissing 'FunctionsDir:' in configuration file \\e[39m\"\n missingParameter=\"true\"\nfi\n\nif [[ $FUNCTIONSFILE = *[!\\ ]* ]]\nthen\n echo \"FunctionsFile: $FUNCTIONSFILE \"\nelse\n echo -e \"\\e[31mMissing 'FunctionsFile:' in configuration file \\e[39m\"\n missingParameter=\"true\"\nfi\n\n\nif [[ $REGION = *[!\\ ]* ]]\nthen\n echo \"Region: $REGION \"\nelse\n echo -e \"\\e[31mMissing 'Region:' in configuration file \\e[39m\"\n missingParameter=\"true\"\nfi\n\nif [[ $BUCKETIN = *[!\\ ]* ]]\nthen\n echo \"BucketIn: $BUCKETIN \"\nelse\n echo -e \"\\e[31mMissing 'BucketIn:' in configuration file \\e[39m\"\n missingParameter=\"true\"\nfi\n\nif [[ $BUCKETOUT = *[!\\ ]* ]]\nthen\n echo \"BucketOut: $BUCKETOUT \"\nelse\n echo -e \"\\e[31mMissing 'BucketOut:' in configuration file \\e[39m\"\n missingParameter=\"true\"\nfi\n\nif [[ $ROLE = *[!\\ ]* ]]\nthen\n echo \"Role: $ROLE \"\nelse\n echo -e \"\\e[31mMissing 'Role:' in configuration file \\e[39m\"\n missingParameter=\"true\"\nfi\n\nif [[ $MAXMAPPERNODES = *[!\\ ]* ]]\nthen\n echo \"MapperNodes: $MAXMAPPERNODES \"\nelse\n echo -e \"\\e[31mMissing 'MaxMapperNodes:' in configuration file \\e[39m\"\n 
missingParameter=\"true\"\nfi\n\nif [[ $MINBLOCKSIZE = *[!\\ ]* ]]\nthen\n echo \"MinBlockSize: $MINBLOCKSIZE Bytes\"\nelse\n echo -e \"\\e[31mMissing 'MinBlockSize:' in configuration file \\e[39m\"\n missingParameter=\"true\"\nfi\n\nif [[ $MAXBLOCKSIZE = *[!\\ ]* ]]\nthen\n echo \"MaxBlockSize: $MAXBLOCKSIZE Bytes\"\nelse\n echo -e \"\\e[31mMissing 'MaxBlockSize:' in configuration file \\e[39m\"\n missingParameter=\"true\"\nfi\n\nif [[ $KMSKEYARN = *[!\\ ]* ]]\nthen\n echo \"KMSKeyARN: $KMSKEYARN \"\n USEKMSKEY=1\nelse\n echo -e \"Missing 'KMSKeyARN:' in configuration file, default service key will be used.\"\n USEKMSKEY=0\nfi\n\nif [[ $MAPPERMEMORY = *[!\\ ]* ]]\nthen\n echo \"MapperMemory: $MAPPERMEMORY \"\nelse\n echo -e \"\\e[31mMissing 'MapperMemory:' in configuration file \\e[39m\"\n missingParameter=\"true\"\nfi\n\nif [[ $REDUCERMEMORY = *[!\\ ]* ]]\nthen\n echo \"ReducerMemory: $REDUCERMEMORY \"\nelse\n echo -e \"\\e[31mMissing 'ReducerMemory:' in configuration file \\e[39m\"\n missingParameter=\"true\"\nfi\n\nif [[ $TIMEOUT = *[!\\ ]* ]]\nthen\n echo \"TimeOut: $TIMEOUT \"\nelse\n echo -e \"\\e[31mMissing 'TimeOut:' in configuration file \\e[39m\"\n missingParameter=\"true\"\nfi\n\nif [[ $NREDUCERS = *[!\\ ]* ]]\nthen\n echo \"ReducersNumber: $NREDUCERS \"\nelse\n echo -e \"Missing 'ReducersNumber:' in configuration file. Only 1 reducer will be used\"\n NREDUCERS=1\nfi\n\nif [ $missingParameter == \"true\" ]\nthen\n exit 1\nfi\n\necho \"----------------------\"\n\nPWD=`pwd`\n\n#Check for secure bucket input/output configuration\nif [ \"$BUCKETIN\" == \"$BUCKETOUT\" ]\nthen\n echo -e \"\\e[31mYou are trying to use the same bucket as input and output\"\n echo \"this can be so dangerous, please reconsider your buckets choice.\"\n echo -e \"If you want to continue, remove this 'if' statment in the script by your own responsability.\\e[39m\"\n exit 1\nfi\n\n#Extract role name\nroleName=`echo \"$ROLE\" | cut -d \"/\" -f 2`\necho \"\"\necho -e \"\\e[36mMake sure that role '$roleName' has permissions to use\"\necho -e \"buckets '$BUCKETIN' and '$BUCKETOUT' and to execute 'lambda:InvokeFunction'\\e[39m\"\necho \"\"\n\necho \"#####################################\"\necho \"##CREATE AND UPLOAD PACKAGES SECTION#\"\necho \"#####################################\"\n\necho \"----Creating packages...----\"\n\n#remove (if exists) previous cluster lambda packages\naws s3 rm s3://$BUCKETIN/$CLUSTERNAME/coordinator.zip &> /dev/null\naws s3 rm s3://$BUCKETIN/$CLUSTERNAME/mapper.zip &> /dev/null\naws s3 rm s3://$BUCKETIN/$CLUSTERNAME/reducer.zip &> /dev/null\n\n#check if in FunctionsDir exists som reserved filename\nfileCount=$(find $FUNCTIONSDIR -name marla_mapper_CH.py | wc -l)\n\nif [[ $file_count -gt 0 ]]\nthen\n echo -e \"\\e[31mError: you are using reserved name 'marla_mapper_CH.py' in some of files in $FUNCTIONSDIR.\"\n echo -e \"Please, change this filename\\e[39m\"\nfi\n\nfileCount=$(find $FUNCTIONSDIR -name marla_reducer_CH.py | wc -l)\n\nif [[ $file_count -gt 0 ]]\nthen\n echo -e \"\\e[31mError: you are using reserved name 'marla_reducer_CH.py' in some of files in $FUNCTIONSDIR.\"\n echo -e \"Please, change this filename\\e[39m\"\nfi\n\n#Create a directory in $HOME/.marla/ to store created files\n\nif [ ! -d $HOME/.marla/ ]; then\n echo -e \"Creating $HOME/.marla directory.\"\n mkdir $HOME/.marla\nfi\n\nrm -r $HOME/.marla/$CLUSTERNAME/ &> /dev/null\n\nmkdir $HOME/.marla/$CLUSTERNAME &> /dev/null\n\n#Check existence of directory\nif [ ! 
-d $HOME/.marla/$CLUSTERNAME/ ]; then\n echo -e \"\\e[31mError: can't create '$HOME/.marla/$CLUSTERNAME/' directory.\"\n exit 1\nfi\n\nmkdir $HOME/.marla/$CLUSTERNAME/functions &> /dev/null\nmkdir $HOME/.marla/$CLUSTERNAME/functions/mapper &> /dev/null\nmkdir $HOME/.marla/$CLUSTERNAME/functions/reducer &> /dev/null\n\n##Mapper##\n\n#copy user functions to mapper directory\nif cp $FUNCTIONSDIR/* $HOME/.marla/$CLUSTERNAME/functions/mapper/\nthen\n echo -e \"\\e[32mUser functions copied to mapper package\\e[39m\"\nelse\n echo -e \"\\e[31mError coping user functions\\e[39m\"\n exit 1\nfi\n\n#change user functions filename\nif mv $HOME/.marla/$CLUSTERNAME/functions/mapper/$FUNCTIONSFILE $HOME/.marla/$CLUSTERNAME/functions/mapper/user_functions.py\nthen\n echo -e \"\\e[32mUser functions filename changed\\e[39m\"\nelse\n echo -e \"\\e[31mError changing filename of user functions\\e[39m\"\n exit 1\nfi\n\n#copy mapper cluster function to mapper directory\nif cp functions/marla_mapper_CH.py $HOME/.marla/$CLUSTERNAME/functions/mapper/marla_mapper_CH.py\nthen\n echo -e \"\\e[32mCluster mapper function copied to mapper package\\e[39m\"\nelse\n echo -e \"\\e[31mError coping cluster mapper function\\e[39m\"\n exit 1\nfi\n\n##Reducer##\n\n#copy user functions to reducer directory\nif cp $FUNCTIONSDIR/* $HOME/.marla/$CLUSTERNAME/functions/reducer/\nthen\n echo -e \"\\e[32mUser functions copied to reducer package\\e[39m\"\nelse\n echo -e \"\\e[31mError coping user functions\\e[39m\"\n exit 1\nfi\n\n#change user functions filename\nif mv $HOME/.marla/$CLUSTERNAME/functions/reducer/$FUNCTIONSFILE $HOME/.marla/$CLUSTERNAME/functions/reducer/user_functions.py\nthen\n echo -e \"\\e[32mUser functions filename changed\\e[39m\"\nelse\n echo -e \"\\e[31mError changing filename of user functions\\e[39m\"\n exit 1\nfi\n\n#copy reducer cluster function to reducer directory\nif cp functions/marla_reducer_CH.py $HOME/.marla/$CLUSTERNAME/functions/reducer/marla_reducer_CH.py\nthen\n echo -e \"\\e[32mCluster reducer function copied to reducer package\\e[39m\"\nelse\n echo -e \"\\e[31mError coping cluster reducer function\\e[39m\"\n exit 1\nfi\n\n#####Create packages#####\n\n#zip the coordinator code\nif zip -j9 $HOME/.marla/$CLUSTERNAME/coordinator.zip functions/coordinator.py &> /dev/null\nthen\n echo -e \"\\e[32mCoordinator package created\\e[39m\"\nelse\n echo -e \"\\e[31mError creating coordinator package\\e[39m\"\n exit 1\nfi\n\n#zip the mapper code\nif zip -j9 $HOME/.marla/$CLUSTERNAME/mapper.zip $HOME/.marla/$CLUSTERNAME/functions/mapper/* &> /dev/null\nthen\n echo -e \"\\e[32mMapper package created\\e[39m\"\nelse\n echo -e \"\\e[31mError creating mapper package\\e[39m\"\n exit 1\nfi\n\n#zip the reducer code\nif zip -j9 $HOME/.marla/$CLUSTERNAME/reducer.zip $HOME/.marla/$CLUSTERNAME/functions/reducer/* &> /dev/null\nthen\n echo -e \"\\e[32mReducer package created\\e[39m\"\nelse\n echo -e \"\\e[31mError creating reducer package\\e[39m\"\n exit 1\nfi\n\n\necho \"-------------------------\"\n\necho \"----Uploading packages...----\"\n\n#Upload code for cluster functions to the cluster bucket\nif aws s3 cp $HOME/.marla/$CLUSTERNAME/mapper.zip s3://$BUCKETIN/$CLUSTERNAME/mapper.zip &> $HOME/.marla/$CLUSTERNAME/stderr\nthen\n echo -e \"\\e[32mMapper code upload to $BUCKETIN/$CLUSTERNAME\\e[39m\"\nelse\n echo -e \"\\e[31mError uploading mapper code to $BUCKETIN/$CLUSTERNAME\\e[39m\"\n more $HOME/.marla/$CLUSTERNAME/stderr \n exit 1\nfi\n\nrm $HOME/.marla/$CLUSTERNAME/stderr &> /dev/null\nif aws s3 cp 
$HOME/.marla/$CLUSTERNAME/reducer.zip s3://$BUCKETIN/$CLUSTERNAME/reducer.zip &> $HOME/.marla/$CLUSTERNAME/stderr\nthen\n echo -e \"\\e[32mReducer code upload to $BUCKETIN/$CLUSTERNAME\\e[39m\"\nelse\n echo -e \"\\e[31mError uploading reducer code to $BUCKETIN/$CLUSTERNAME\\e[39m\"\n more $HOME/.marla/$CLUSTERNAME/stderr \n exit 1\nfi\n\nrm $HOME/.marla/$CLUSTERNAME/stderr &> /dev/null\nif aws s3 cp $HOME/.marla/$CLUSTERNAME/coordinator.zip s3://$BUCKETIN/$CLUSTERNAME/coordinator.zip &> $HOME/.marla/$CLUSTERNAME/stderr\nthen\n echo -e \"\\e[32mCoordinator code upload to $BUCKETIN/$CLUSTERNAME\\e[39m\"\nelse\n echo -e \"\\e[31mError uploading coordinator code to $BUCKETIN/$CLUSTERNAME\\e[39m\"\n more $HOME/.marla/$CLUSTERNAME/stderr\n exit 1\nfi\n\necho \"-------------------------\"\n\necho \"###############################\"\necho \"##COORDINATOR FUNCTION SECTION#\"\necho \"###############################\"\n\necho \"----Creating coordinator function...----\"\n\n#Generate cli json for coordinator function\necho '{' >> $HOME/.marla/$CLUSTERNAME/coordinator.json\necho ' \"FunctionName\": \"HC-'$CLUSTERNAME'-lambda-coordinator\",' >> $HOME/.marla/$CLUSTERNAME/coordinator.json\necho ' \"Runtime\": \"python3.6\",' >> $HOME/.marla/$CLUSTERNAME/coordinator.json\necho ' \"Role\": \"'$ROLE'\",' >> $HOME/.marla/$CLUSTERNAME/coordinator.json\necho ' \"Handler\": \"coordinator.handler\",' >> $HOME/.marla/$CLUSTERNAME/coordinator.json\necho ' \"Code\": {' >> $HOME/.marla/$CLUSTERNAME/coordinator.json\necho ' \"S3Bucket\": \"'$BUCKETIN'\",' >> $HOME/.marla/$CLUSTERNAME/coordinator.json\necho ' \"S3Key\": \"'$CLUSTERNAME'/coordinator.zip\"' >> $HOME/.marla/$CLUSTERNAME/coordinator.json\n#echo ' \"S3ObjectVersion\": \"0.1\"' >> $HOME/.marla/$CLUSTERNAME/coordinator.json\necho ' },' >> $HOME/.marla/$CLUSTERNAME/coordinator.json\necho ' \"Timeout\": '$TIMEOUT',' >> $HOME/.marla/$CLUSTERNAME/coordinator.json\necho ' \"MemorySize\": 128,' >> $HOME/.marla/$CLUSTERNAME/coordinator.json\n#echo ' \"Publish\": true,' >> $HOME/.marla/$CLUSTERNAME/coordinator.json\necho ' \"Environment\": {' >> $HOME/.marla/$CLUSTERNAME/coordinator.json\necho ' \"Variables\": {' >> $HOME/.marla/$CLUSTERNAME/coordinator.json\necho ' \"BUCKET\": \"'$BUCKETIN'\",' >> $HOME/.marla/$CLUSTERNAME/coordinator.json\necho ' \"BUCKETOUT\": \"'$BUCKETOUT'\",' >> $HOME/.marla/$CLUSTERNAME/coordinator.json\necho ' \"PREFIX\": \"'$CLUSTERNAME'\",' >> $HOME/.marla/$CLUSTERNAME/coordinator.json\necho ' \"MAPPERNUMBER\": \"'$MAXMAPPERNODES'\",' >> $HOME/.marla/$CLUSTERNAME/coordinator.json\necho ' \"MINBLOCKSIZE\": \"'$MINBLOCKSIZE'\",' >> $HOME/.marla/$CLUSTERNAME/coordinator.json\necho ' \"MAXBLOCKSIZE\": \"'$MAXBLOCKSIZE'\",' >> $HOME/.marla/$CLUSTERNAME/coordinator.json\necho ' \"MEMORY\": \"'$MAPPERMEMORY'\",' >> $HOME/.marla/$CLUSTERNAME/coordinator.json\necho ' \"NREDUCERS\": \"'$NREDUCERS'\"' >> $HOME/.marla/$CLUSTERNAME/coordinator.json\necho ' }' >> $HOME/.marla/$CLUSTERNAME/coordinator.json\nif [ $USEKMSKEY == 1 ]\nthen\n echo ' },' >> $HOME/.marla/$CLUSTERNAME/coordinator.json\n echo ' \"KMSKeyArn\": \"'$KMSKEYARN'\"' >> $HOME/.marla/$CLUSTERNAME/coordinator.json\nelse\n echo ' }' >> $HOME/.marla/$CLUSTERNAME/coordinator.json\nfi\necho '}' >> $HOME/.marla/$CLUSTERNAME/coordinator.json\n\n\n#Create lambda coordinator function\nrm $HOME/.marla/$CLUSTERNAME/stderr &> /dev/null\nif aws lambda create-function --region $REGION --cli-input-json file://$HOME/.marla/$CLUSTERNAME/coordinator.json &> $HOME/.marla/$CLUSTERNAME/stderr\nthen\n echo -e 
\"\\e[32mLambda coordinator function created on AWS.\\e[39m\"\nelse\n echo -e \"\\e[31mError creating lambda coordinator function.\\e[39m\"\n more $HOME/.marla/$CLUSTERNAME/stderr\n exit 1\nfi\n\necho \"-------------------------\"\n\necho \"----Adding permissions for coordinator function...----\"\n\n\n#Add permission to lambda coordinator function\nrm $HOME/.marla/$CLUSTERNAME/stderr &> /dev/null\nif aws lambda add-permission --function-name \"HC-\"$CLUSTERNAME\"-lambda-coordinator\" --statement-id \"HC-\"$CLUSTERNAME\"-coordinator-stateID\" --action \"lambda:InvokeFunction\" --principal s3.amazonaws.com --source-arn \"arn:aws:s3:::\"$BUCKETIN &> $HOME/.marla/$CLUSTERNAME/stderr\nthen\n echo -e \"\\e[32mPermission to S3 added for coordinator function\\e[39m\"\nelse\n echo -e \"\\e[31mError adding permission to S3 for coordinator function\\e[39m\"\n more $HOME/.marla/$CLUSTERNAME/stderr\n exit 1\nfi\n\n#Extract coordinator function arn\nrm $HOME/.marla/$CLUSTERNAME/stderr &> /dev/null\nif coordinatorARN=`aws lambda get-function --function-name \"HC-\"$CLUSTERNAME\"-lambda-coordinator\" | grep -o 'arn:aws:lambda:\\S*' | sed -e 's/\",//g'` &> $HOME/.marla/$CLUSTERNAME/stderr\nthen\n echo -e \"\\e[32mARN of coordinator function obtained\\e[39m\"\nelse\n echo -e \"\\e[31mError obtaining ARN of coordinator function\\e[39m\"\n more $HOME/.marla/$CLUSTERNAME/stderr\n exit 1\nfi\n\n#Add a bucket notification configuration to \"BUCKETIN\"\n\necho '{' >> $HOME/.marla/$CLUSTERNAME/coordinator_notification.json\necho '\t\"LambdaFunctionConfigurations\": [' >> $HOME/.marla/$CLUSTERNAME/coordinator_notification.json\necho ' {' >> $HOME/.marla/$CLUSTERNAME/coordinator_notification.json\necho ' \"Id\": \"'$BUCKETIN'-'$CLUSTERNAME'-TRIGGERID\",' >> $HOME/.marla/$CLUSTERNAME/coordinator_notification.json\necho ' \"LambdaFunctionArn\": \"'$coordinatorARN'\",' >> $HOME/.marla/$CLUSTERNAME/coordinator_notification.json\necho ' \"Events\": [' >> $HOME/.marla/$CLUSTERNAME/coordinator_notification.json\necho ' \"s3:ObjectCreated:*\"' >> $HOME/.marla/$CLUSTERNAME/coordinator_notification.json\necho ' ],' >> $HOME/.marla/$CLUSTERNAME/coordinator_notification.json\necho ' \"Filter\": {' >> $HOME/.marla/$CLUSTERNAME/coordinator_notification.json\necho ' \"Key\": {' >> $HOME/.marla/$CLUSTERNAME/coordinator_notification.json\necho ' \"FilterRules\": [' >> $HOME/.marla/$CLUSTERNAME/coordinator_notification.json\necho ' {' >> $HOME/.marla/$CLUSTERNAME/coordinator_notification.json\necho ' \"Name\": \"prefix\",' >> $HOME/.marla/$CLUSTERNAME/coordinator_notification.json\necho ' \"Value\": \"'$CLUSTERNAME'/\"' >> $HOME/.marla/$CLUSTERNAME/coordinator_notification.json\necho ' }' >> $HOME/.marla/$CLUSTERNAME/coordinator_notification.json\necho ' ]' >> $HOME/.marla/$CLUSTERNAME/coordinator_notification.json\necho ' }' >> $HOME/.marla/$CLUSTERNAME/coordinator_notification.json\necho ' }' >> $HOME/.marla/$CLUSTERNAME/coordinator_notification.json\necho ' }' >> $HOME/.marla/$CLUSTERNAME/coordinator_notification.json\necho ' ]' >> $HOME/.marla/$CLUSTERNAME/coordinator_notification.json\necho '}' >> $HOME/.marla/$CLUSTERNAME/coordinator_notification.json\n\nrm $HOME/.marla/$CLUSTERNAME/stderr &> /dev/null\nif aws s3api put-bucket-notification-configuration --bucket $BUCKETIN --notification-configuration file://$HOME/.marla/$CLUSTERNAME/coordinator_notification.json &> $HOME/.marla/$CLUSTERNAME/stderr\nthen\n echo -e \"\\e[32mBucket notification configuration added\\e[39m\"\nelse\n echo -e \"\\e[31mError adding bucket notification 
configuration\\e[39m\"\n more $HOME/.marla/$CLUSTERNAME/stderr\n exit 1\nfi\n\necho \"-------------------------\"\n\necho \"###############################\"\necho \"## MAPPER FUNCTION SECTION #\"\necho \"###############################\"\n\necho \"----Creating mapper function...----\"\n\n#Generate cli json for mapper function\necho '{' >> $HOME/.marla/$CLUSTERNAME/mapper.json\necho ' \"FunctionName\": \"HC-'$CLUSTERNAME'-lambda-mapper\",' >> $HOME/.marla/$CLUSTERNAME/mapper.json\necho ' \"Runtime\": \"python3.6\",' >> $HOME/.marla/$CLUSTERNAME/mapper.json\necho ' \"Role\": \"'$ROLE'\",' >> $HOME/.marla/$CLUSTERNAME/mapper.json\necho ' \"Handler\": \"marla_mapper_CH.handler\",' >> $HOME/.marla/$CLUSTERNAME/mapper.json\necho ' \"Code\": {' >> $HOME/.marla/$CLUSTERNAME/mapper.json\necho ' \"S3Bucket\": \"'$BUCKETIN'\",' >> $HOME/.marla/$CLUSTERNAME/mapper.json\necho ' \"S3Key\": \"'$CLUSTERNAME'/mapper.zip\"' >> $HOME/.marla/$CLUSTERNAME/mapper.json\n#echo ' \"S3ObjectVersion\": \"0.1\"' >> $HOME/.marla/$CLUSTERNAME/mapper.json\necho ' },' >> $HOME/.marla/$CLUSTERNAME/mapper.json\necho ' \"Timeout\": '$TIMEOUT',' >> $HOME/.marla/$CLUSTERNAME/mapper.json\necho ' \"MemorySize\": '$MAPPERMEMORY',' >> $HOME/.marla/$CLUSTERNAME/mapper.json\n#echo ' \"Publish\": true,' >> $HOME/.marla/$CLUSTERNAME/mapper.json\necho ' \"Environment\": {' >> $HOME/.marla/$CLUSTERNAME/mapper.json\necho ' \"Variables\": {' >> $HOME/.marla/$CLUSTERNAME/mapper.json\necho ' \"BUCKET\": \"'$BUCKETIN'\",' >> $HOME/.marla/$CLUSTERNAME/mapper.json\necho ' \"BUCKETOUT\": \"'$BUCKETOUT'\",' >> $HOME/.marla/$CLUSTERNAME/mapper.json\necho ' \"PREFIX\": \"'$CLUSTERNAME'\",' >> $HOME/.marla/$CLUSTERNAME/mapper.json\necho ' \"MEMORY\": \"'$MAPPERMEMORY'\",' >> $HOME/.marla/$CLUSTERNAME/mapper.json\necho ' \"NREDUCERS\": \"'$NREDUCERS'\"' >> $HOME/.marla/$CLUSTERNAME/mapper.json\necho ' }' >> $HOME/.marla/$CLUSTERNAME/mapper.json\nif [ $USEKMSKEY == 1 ]\nthen\n echo ' },' >> $HOME/.marla/$CLUSTERNAME/mapper.json\n echo ' \"KMSKeyArn\": \"'$KMSKEYARN'\"' >> $HOME/.marla/$CLUSTERNAME/mapper.json\nelse\n echo ' }' >> $HOME/.marla/$CLUSTERNAME/mapper.json\nfi \necho '}' >> $HOME/.marla/$CLUSTERNAME/mapper.json\n\n#Create lambda mapper function\nrm $HOME/.marla/$CLUSTERNAME/stderr &> /dev/null\nif aws lambda create-function --region $REGION --cli-input-json file://$HOME/.marla/$CLUSTERNAME/mapper.json &> $HOME/.marla/$CLUSTERNAME/stderr\nthen\n echo -e \"\\e[32mLambda mapper function created on AWS.\\e[39m\"\nelse\n echo -e \"\\e[31mError creating lambda mapper function.\\e[39m\"\n more $HOME/.marla/$CLUSTERNAME/stderr\n exit 1\nfi\n\necho \"-------------------------\"\n\necho \"----Adding permissions for mapper function...----\"\n\n#Add permission to lambda mapper function\nrm $HOME/.marla/$CLUSTERNAME/stderr &> /dev/null\nif aws lambda add-permission --function-name \"HC-\"$CLUSTERNAME\"-lambda-mapper\" --statement-id \"HC-\"$CLUSTERNAME\"-mapper-stateID\" --action \"lambda:InvokeFunction\" --principal lambda.amazonaws.com --source-arn $coordinatorARN &> $HOME/.marla/$CLUSTERNAME/stderr\nthen\n echo -e \"\\e[32mPermission to coordination function added for mapper function\\e[39m\"\nelse\n echo -e \"\\e[31mError adding permission to coordinator function for mapper function\\e[39m\"\n more $HOME/.marla/$CLUSTERNAME/stderr\n exit 1\nfi\n\n#Extract mapper function arn\nrm $HOME/.marla/$CLUSTERNAME/stderr &> /dev/null\nif mapperARN=`aws lambda get-function --function-name \"HC-\"$CLUSTERNAME\"-lambda-mapper\" | grep -o 
'arn:aws:lambda:\\S*'` &> $HOME/.marla/$CLUSTERNAME/stderr\nthen\n echo -e \"\\e[32mARN of mapper function obtained\\e[39m\"\nelse\n echo -e \"\\e[31mError obtaining ARN of mapper function\\e[39m\"\n more $HOME/.marla/$CLUSTERNAME/stderr\n exit 1\nfi\n\necho \"-------------------------\"\n\n\necho \"################################\"\necho \"## REDUCER FUNCTION SECTION #\"\necho \"################################\"\n\necho \"----Creating reducer function...----\"\n\n#Generate cli json for reducer function\necho '{' >> $HOME/.marla/$CLUSTERNAME/reducer.json\necho ' \"FunctionName\": \"HC-'$CLUSTERNAME'-lambda-reducer\",' >> $HOME/.marla/$CLUSTERNAME/reducer.json\necho ' \"Runtime\": \"python3.6\",' >> $HOME/.marla/$CLUSTERNAME/reducer.json\necho ' \"Role\": \"'$ROLE'\",' >> $HOME/.marla/$CLUSTERNAME/reducer.json\necho ' \"Handler\": \"marla_reducer_CH.handler\",' >> $HOME/.marla/$CLUSTERNAME/reducer.json\necho ' \"Code\": {' >> $HOME/.marla/$CLUSTERNAME/reducer.json\necho ' \"S3Bucket\": \"'$BUCKETIN'\",' >> $HOME/.marla/$CLUSTERNAME/reducer.json\necho ' \"S3Key\": \"'$CLUSTERNAME'/reducer.zip\"' >> $HOME/.marla/$CLUSTERNAME/reducer.json\n#echo ' \"S3ObjectVersion\": \"0.1\"' >> reducer.json\necho ' },' >> $HOME/.marla/$CLUSTERNAME/reducer.json\necho ' \"Timeout\": '$TIMEOUT',' >> $HOME/.marla/$CLUSTERNAME/reducer.json\necho ' \"MemorySize\": '$REDUCERMEMORY',' >> $HOME/.marla/$CLUSTERNAME/reducer.json\n#echo ' \"Publish\": true,' >> $HOME/.marla/$CLUSTERNAME/reducer.json\necho ' \"Environment\": {' >> $HOME/.marla/$CLUSTERNAME/reducer.json\necho ' \"Variables\": {' >> $HOME/.marla/$CLUSTERNAME/reducer.json\necho ' \"BUCKETOUT\": \"'$BUCKETOUT'\",' >> $HOME/.marla/$CLUSTERNAME/reducer.json\necho ' \"PREFIX\": \"'$CLUSTERNAME'\",' >> $HOME/.marla/$CLUSTERNAME/reducer.json\necho ' \"MEMORY\": \"'$REDUCERMEMORY'\",' >> $HOME/.marla/$CLUSTERNAME/reducer.json\necho ' \"NREDUCERS\": \"'$NREDUCERS'\"' >> $HOME/.marla/$CLUSTERNAME/reducer.json\necho ' }' >> $HOME/.marla/$CLUSTERNAME/reducer.json\nif [ $USEKMSKEY == 1 ]\nthen\n echo ' },' >> $HOME/.marla/$CLUSTERNAME/reducer.json\n echo ' \"KMSKeyArn\": \"'$KMSKEYARN'\"' >> $HOME/.marla/$CLUSTERNAME/reducer.json\nelse\n echo ' }' >> $HOME/.marla/$CLUSTERNAME/reducer.json \nfi\necho '}' >> $HOME/.marla/$CLUSTERNAME/reducer.json\n\n#Create lambda reducer function\nrm $HOME/.marla/$CLUSTERNAME/stderr &> /dev/null\nif aws lambda create-function --region $REGION --cli-input-json file://$HOME/.marla/$CLUSTERNAME/reducer.json &> $HOME/.marla/$CLUSTERNAME/stderr\nthen\n echo -e \"\\e[32mLambda reducer function created on AWS.\\e[39m\"\nelse\n echo -e \"\\e[31mError creating lambda reducer function.\\e[39m\"\n more $HOME/.marla/$CLUSTERNAME/stderr\n exit 1\nfi\n\necho \"-------------------------\"\n\necho \"----Adding permissions for reducer function...----\"\n\n#Add permission to lambda mapper function to invoke reducer function\nrm $HOME/.marla/$CLUSTERNAME/stderr &> /dev/null\nif aws lambda add-permission --function-name \"HC-\"$CLUSTERNAME\"-lambda-reducer\" --statement-id \"HC-\"$CLUSTERNAME\"-reducer-stateID\" --action \"lambda:InvokeFunction\" --principal lambda.amazonaws.com --source-arn $mapperARN &> $HOME/.marla/$CLUSTERNAME/stderr\nthen\n echo -e \"\\e[32mPermission to mapper function added for reducer function\\e[39m\"\nelse\n echo -e \"\\e[31mError adding permission to mapper function for reducer function\\e[39m\"\n more $HOME/.marla/$CLUSTERNAME/stderr\n exit 1\nfi\n\n#Extract reducer function arn\nrm $HOME/.marla/$CLUSTERNAME/stderr &> 
/dev/null\nif reducerARN=`aws lambda get-function --function-name \"HC-\"$CLUSTERNAME\"-lambda-reducer\" | grep -o 'arn:aws:lambda:\\S*'` &> $HOME/.marla/$CLUSTERNAME/stderr\nthen\n echo -e \"\\e[32mARN of reducer function obtained\\e[39m\"\nelse\n echo -e \"\\e[31mError obtaining ARN of reducer function\\e[39m\"\n more $HOME/.marla/$CLUSTERNAME/stderr\n exit 1\nfi\n\n\necho \"-------------------------\"\n\necho \"Cluster generated successfully!\"\n"
},
{
"alpha_fraction": 0.5695759057998657,
"alphanum_fraction": 0.5792732834815979,
"avg_line_length": 31.543725967407227,
"blob_id": "209f982651ab865ee57a88dd7a2078705aefffd3",
"content_id": "17130dc73e00e0255ce440b3dd8a59c91d61d076",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 8559,
"license_type": "permissive",
"max_line_length": 141,
"num_lines": 263,
"path": "/functions/marla_reducer_CH.py",
"repo_name": "niloo9876/marow",
"src_encoding": "UTF-8",
"text": "# MARLA - MApReduce on AWS Lambda\n# Copyright (C) GRyCAP - I3M - UPV \n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License. \n\nimport boto3\nimport os\nimport time\nimport json\nimport sys\nimport user_functions\nimport resource\nimport botocore\nimport hashlib\n\n\ndef downloadPairs(filesSize,usedMemory, maxUsedMemory, actualPartition, TotalNodes, BUCKETOUT, PREFIX, FileName, ReducerNumber, s3_client):\n\n chunk = \"\"\n init = actualPartition\n for j in range(init, TotalNodes):\n #download partition file\n bucket = BUCKETOUT\n key = PREFIX + \"/\" + FileName + \"/\" + str(ReducerNumber) + \"_\" + str(j)\n\n #Add hash prefix\n key = str(hashlib.md5(key.encode()).hexdigest()) + \"/\" + key \n \n #Get corresponding file size\n fileSize = filesSize[j]\n #check the used memory\n usedMemory = usedMemory + fileSize\n if usedMemory > maxUsedMemory and j != init:\n print(\"Using more memory than MaxMemory. Do not read more data.\")\n break\n\n # Try to download file a maximum of 5 times\n for i in range(5):\n try:\n obj = s3_client.get_object(Bucket=bucket, Key=key)\n break\n except botocore.exceptions.ClientError as e:\n print(\"Can't download \" + str(auxName) + \", try num \" + str(i))\n time.sleep(0.2)\n if i == 4:\n print(\"Unable to download + \" + str(auxName) + \" aborting reduce\")\n sys.exit()\n \n \n \n print(\"downloaded \" + bucket + \"/\" + key)\n print(\"used memory: {0}\".format(usedMemory))\n \n chunk = chunk + str(obj['Body'].read().decode('utf-8'))\n actualPartition +=1\n obj = None\n\n del obj\n\n #extract Names and values\n print(\"Spliting lines\")\n auxPairs = []\n chunkList = chunk.split('\\n')\n chunk = \"\"\n del chunk\n\n print(\"Extract columns\")\n for line in chunkList:\n #extract data\n data = line.strip().split(\",\")\n if len(data) == 2:\n auxPairs.append(data)\n else:\n print(\"Incorrect formatted line ignoring: {0}\".format(line))\n\n chunkList = \"\"\n del chunkList \n return auxPairs, actualPartition\n\ndef handler(event, context):\n #extract filename and the partition number\n FileName = event[\"FileName\"]\n TotalNodes = int(event[\"TotalNodes\"])\n ReducerNumber = int(event[\"ReducerNumber\"])\n memoryLimit = 0.03\n \n #load environment variables\n BUCKETOUT = str(os.environ['BUCKETOUT'])\n PREFIX = str(os.environ['PREFIX'])\n MEMORY = int(os.environ['MEMORY'])*1048576\n\n #Check invocation number\n Invocation = int(event[\"Invocation\"])\n\n print(\"Invocation number \" + str(Invocation) + \" of reduce function \" + str(ReducerNumber))\n \n if ReducerNumber >= 0 and Invocation > 20:\n print(\"Too many invocations. Abort reduce.\")\n return\n elif ReducerNumber < 0 and Invocation > 70:\n print(\"Too many invocations. 
Abort reduce.\")\n return \n \n tester=False\n if ReducerNumber < 0:\n tester = True\n NREDUCERS = int(os.environ['NREDUCERS'])\n ReducerNumber = NREDUCERS-1\n \n\n #Get boto3 s3 client\n s3_client = boto3.client('s3')\n\n \n #check if all partitions are mapped\n #this function will check that 5 times\n keyPrefix = PREFIX + \"/\" + FileName + \"/\" + str(ReducerNumber) + \"_\"\n filesSize = [] # Store mapped files size\n for i in range(5):\n allMapped = True\n for j in range(len(filesSize),TotalNodes):\n auxName = keyPrefix + str(j)\n\n #Add hash prefix\n auxName = str(hashlib.md5(auxName.encode()).hexdigest()) + \"/\" + auxName\n \n try:\n response = s3_client.head_object(Bucket=BUCKETOUT, Key=auxName)\n filesSize.append(int(response['ContentLength']))\n except botocore.exceptions.ClientError as e:\n print(\"mapping of \" + str(auxName) + \" not finished\")\n allMapped = False\n break\n if allMapped == True:\n break\n time.sleep( 0.5 )\n\n #if mapping is not finished, the function\n #invoke another reduce function and termines\n if allMapped == False:\n #lunch lambda function reducer\n if tester == True:\n ReducerNumber = -1\n time.sleep( 2 )\n lambda_client = boto3.client('lambda')\n payload = {}\n payload[\"Invocation\"] = str(Invocation+1)\n payload[\"ReducerNumber\"] = str(ReducerNumber)\n payload[\"FileName\"]=str(FileName)\n payload[\"TotalNodes\"]=str(TotalNodes)\n response_invoke = lambda_client.invoke(\n ClientContext='ClusterHD-'+BUCKETOUT,\n FunctionName='HC-'+PREFIX+'-lambda-reducer',\n InvocationType='Event',\n LogType='Tail',\n Payload=json.dumps(payload),\n )\n return\n\n if len(filesSize) != TotalNodes:\n print(\"Error: number of file sizes stored (\" + str(len(filesSize)) + \") not equal to total mapper nodes (\" + str(TotalNodes) + \")\")\n print(\"Reduce aborted\")\n return\n \n #if mapping is finished, begin the reduce.\n\n #If this reducer is the tester reduce, launch all reducer functions\n if tester == True:\n for i in range(NREDUCERS):\n lambda_client = boto3.client('lambda')\n payload = {}\n payload[\"Invocation\"] = '0'\n payload[\"ReducerNumber\"] = str(i)\n payload[\"FileName\"]=str(FileName)\n payload[\"TotalNodes\"]=str(TotalNodes)\n response_invoke = lambda_client.invoke(\n ClientContext='ClusterHD-'+BUCKETOUT,\n FunctionName='HC-'+PREFIX+'-lambda-reducer',\n InvocationType='Event',\n LogType='Tail',\n Payload=json.dumps(payload),\n )\n return\n \n #create lists to store results\n Pairs = []\n #iterate for all mapped partitions \n maxUsedMemory = int(MEMORY*memoryLimit)\n i=0\n print(\"Max memory to download data: {0} B\".format(maxUsedMemory))\n while (i < TotalNodes):\n\n #download and extract pairs\n usedMemory = sys.getsizeof(Pairs)\n auxPairs, i = downloadPairs(filesSize,usedMemory,maxUsedMemory, i, TotalNodes, BUCKETOUT, PREFIX, FileName, ReducerNumber, s3_client)\n \n #Merge with previous pairs and sort\n print(\"Sorting data\")\n auxPairs += Pairs\n\n Pairs = []\n auxPairs.sort()\n print(\"Reducing data\")\n #################\n ####### USER REDUCE #######\n #################\n #Save new results for the next iteration\n Pairs = user_functions.reducer(auxPairs)\n\n auxPairs = []\n\n #upload results\n print(\"Stringify data\")\n\n results = \"\"\n numPairs = len(Pairs)\n PairsSize = sys.getsizeof(Pairs)\n print(\"Results size {0}\".format(PairsSize))\n\n if sys.getsizeof(Pairs) > maxUsedMemory/2:\n\n print(\"Spliting huge results\")\n Pairs1 = Pairs[0:int(numPairs/2)]\n del Pairs[0:int(numPairs/2)]\n\n for x in Pairs1:\n results += 
\"{0},{1}\\n\".format(x[0], x[1])\n\n del Pairs1\n \n for x in Pairs:\n results += \"{0},{1}\\n\".format(x[0], x[1])\n \n else:\n i = 0\n while i < numPairs:\n results += \"{0},{1}\\n\".format(Pairs[i][0], Pairs[i][1])\n i=i+1\n \n del Pairs\n print(\"Uploading data\")\n resultsKey = os.path.join(PREFIX,FileName,str(ReducerNumber) + \"_results\")\n s3_client.put_object(Body=results,Bucket=BUCKETOUT, Key=resultsKey)\n\n #remove all partitions\n for i in range(TotalNodes):\n bucket = BUCKETOUT\n key = PREFIX + \"/\" + FileName + \"/\" + str(ReducerNumber) + \"_\" + str(i)\n\n #Add hash prefix\n key = str(hashlib.md5(key.encode()).hexdigest()) + \"/\" + key \n \n s3_client.delete_object(Bucket=BUCKETOUT, Key=key)\n"
},
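
The reducer above expects its Lambda event payload to carry `Invocation`, `ReducerNumber`, `FileName` and `TotalNodes`, all encoded as strings, and a negative `ReducerNumber` selects the special tester reducer. A minimal sketch of invoking it by hand with boto3; the cluster prefix and file name below are hypothetical placeholders, only the `HC-<prefix>-lambda-reducer` naming convention comes from the code:

```python
import json
import boto3

PREFIX = "mycluster"  # hypothetical cluster prefix, not part of MARLA itself

payload = {
    "Invocation": "0",        # retry counter; the reducer increments it on re-invocation
    "ReducerNumber": "-1",    # negative value selects the "tester" reducer
    "FileName": "input.txt",  # placeholder input name
    "TotalNodes": "4",        # number of mapper partitions to wait for
}

lambda_client = boto3.client("lambda")
# Fire-and-forget, matching the InvocationType='Event' calls inside the reducer.
lambda_client.invoke(
    FunctionName="HC-" + PREFIX + "-lambda-reducer",
    InvocationType="Event",
    Payload=json.dumps(payload),
)
```
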
{
"alpha_fraction": 0.7302197813987732,
"alphanum_fraction": 0.7373626232147217,
"avg_line_length": 30.36206817626953,
"blob_id": "a2bb17a94a0b11c3ca735a7d67d7df3e1a786551",
"content_id": "b3b5b3bf548a5e0438579dbd2b74e7c49c0cc2f5",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 1820,
"license_type": "permissive",
"max_line_length": 97,
"num_lines": 58,
"path": "/marla_remove.sh",
"repo_name": "niloo9876/marow",
"src_encoding": "UTF-8",
"text": " #!/bin/bash\n\n# MARLA - MApReduce on AWS Lambda\n# Copyright (C) GRyCAP - I3M - UPV \n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License. \n\n\nif [ $# -ne 2 ]; then\n echo \"Usage: $0 region cluster-name\"\n exit -1\nfi\n\nREGION=$1\nCLUSTERNAME=$2\n\nrm $HOME/.marla/$CLUSTERNAME/stderr &> /dev/null\nif aws lambda delete-function --region $REGION --function-name HC-$CLUSTERNAME-lambda-mapper\nthen\n echo \"Mapper function removed from cluster '$CLUSTERNAME'\"\nelse\n echo \"Error removing mapper function from cluster '$CLUSTERNAME'\"\n more $HOME/.marla/$CLUSTERNAME/stderr\n exit 1\nfi\n\nrm $HOME/.marla/$CLUSTERNAME/stderr &> /dev/null\nif aws lambda delete-function --region $REGION --function-name HC-$CLUSTERNAME-lambda-reducer\nthen\n echo \"Reducer function removed from cluster '$CLUSTERNAME'\"\nelse\n echo \"Error removing reducer function from cluster '$CLUSTERNAME'\"\n more $HOME/.marla/$CLUSTERNAME/stderr\n exit 1\nfi\n\n\nrm $HOME/.marla/$CLUSTERNAME/stderr &> /dev/null\nif aws lambda delete-function --region $REGION --function-name HC-$CLUSTERNAME-lambda-coordinator\nthen\n echo \"Coordinator function removed from cluster '$CLUSTERNAME'\"\nelse\n echo \"Error removing coordinator function from cluster '$CLUSTERNAME'\"\n more $HOME/.marla/$CLUSTERNAME/stderr\n exit 1\nfi\n\nrm -r $HOME/.marla/$CLUSTERNAME &> /dev/null\n"
}
] | 7 |
maayanch/ImageProcessing_ex1 | https://github.com/maayanch/ImageProcessing_ex1 | 0befc56fb6602e0aedaa650793dc55893b0e27c4 | 7c22f2a59c2ed76ff71115a8702982488b2f3250 | 935f05e99dfb0577ae6397f9f6e86852cd3fef3d | refs/heads/master | 2023-01-07T05:40:32.085106 | 2020-11-01T19:54:15 | 2020-11-01T19:54:15 | 309,175,782 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6253461241722107,
"alphanum_fraction": 0.639936089515686,
"avg_line_length": 32.06338119506836,
"blob_id": "6aae18a4b89d3ffca3f22712f3a0afac23a330af",
"content_id": "c7e044ffc08e4c9647b21b690f2f938ce5992fe7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 9394,
"license_type": "no_license",
"max_line_length": 99,
"num_lines": 284,
"path": "/sol1.py",
"repo_name": "maayanch/ImageProcessing_ex1",
"src_encoding": "UTF-8",
"text": "import numpy as np\nfrom imageio import imread\nfrom skimage.color import rgb2gray\nfrom matplotlib import pyplot as plt\nfrom sklearn.cluster import KMeans\nfrom sklearn.utils import shuffle\n\nBINS = 256\nMAX_COLOR_VAL = 255\nGREY_SCALE_IM = 1\nRGB_TO_YIQ = np.array([[0.299, 0.587, 0.114], [0.596, -0.275, -0.321], [0.212, -0.523, 0.311]]).T\nRGB_IMAGE = 2\n\n\ndef read_image(filename, representation):\n \"\"\"\n Function that reads an image file and converts it into a given representation.\n :param filename: the filename of an image on disk (could be grayscale or RGB).\n :param representation: representation code, either 1 or 2 defining whether the output should be\n a grayscale image (1) or an RGB image (2). If the input image is grayscale, we won’t call it\n with representation = 2.\n :return:This function returns an image\n \"\"\"\n check_representation(representation)\n im = imread(filename).astype(np.float64) / MAX_COLOR_VAL\n if representation == GREY_SCALE_IM:\n im = rgb2gray(im)\n return im\n\n\ndef check_representation(representation):\n if (representation != RGB_IMAGE and representation != GREY_SCALE_IM):\n print(\"for grayscle representation use 1, for RGB use 2\")\n exit(1)\n\n\ndef imdisplay(filename, representation):\n \"\"\"\n function that display an image in a given representation\n :param filename: the filename of an image on disk (could be grayscale or RGB).\n :param representation: representation code, either 1 or 2 defining whether the output should be\n a grayscale image (1) or an RGB image (2). If the input image is grayscale, we won’t call it\n with representation = 2.\n \"\"\"\n check_representation(representation)\n im = read_image(filename, representation)\n cmap = 'gray' if representation == GREY_SCALE_IM else None\n plt.imshow(im, cmap=cmap)\n plt.show()\n\n\ndef rgb2yiq(imRGB):\n \"\"\"\n Transforms the image from RGB space to YIQ space\n :param imRGB: Image to transform\n :return: Transformed image\n \"\"\"\n return np.dot(imRGB, RGB_TO_YIQ)\n\n\ndef yiq2rgb(imYIQ):\n \"\"\"\n Transforms the image from YIQ space to RGB space\n :param imRGB: Image to transform\n :return: Transformed image\n \"\"\"\n return np.dot(imYIQ, np.linalg.inv(RGB_TO_YIQ))\n\n\ndef histogram_equalize(im_orig):\n \"\"\"\n Performs histogram equalization of a given grayscale or RGB image.\n :param im_orig:The input grayscale or RGB float64 image with values in [0, 1].\n :return:a list [im_eq, hist_orig, hist_eq] where\n im_eq - is the equalized image. grayscale or RGB float64 image with values in [0, 1].\n hist_orig - is a 256 bin histogram of the original image (array with shape (256,) ).\n hist_eq - is a 256 bin histogram of the equalized image (array with shape (256,) ).\n \"\"\"\n img_greyscale = np.copy(im_orig)\n # check if the image in 3 or 1 dim. 
3 means RGB image, else means greyscale image\n rgb_image = (len(im_orig.shape) == 3)\n\n img_greyscale = get_greyscale_img(img_greyscale, rgb_image)\n cumsum_hist, hist = get_cumsum_hist(img_greyscale)\n cumsum_hist_normalized = cumsum_hist / np.max(cumsum_hist)\n min_non_zero_val = get_first_non_zero_bin(cumsum_hist_normalized)\n equalized_hist = calc_hist_formula(cumsum_hist_normalized, min_non_zero_val)\n eq_im = get_final_img(equalized_hist, im_orig, img_greyscale, rgb_image)\n\n eq_im = np.clip(eq_im, 0, 1)\n return eq_im, hist, equalized_hist\n\n\ndef get_cumsum_hist(img_greyscale):\n \"\"\"\n caculate histogram and cumsum histogram\n :param img_greyscale:\n :return:\n hist: is a 256 bin histogram of the original image\n cumsum hist\n \"\"\"\n hist, bin_edges = np.histogram(img_greyscale, BINS, [0, BINS])\n cumsum_hist = np.cumsum(hist)\n return cumsum_hist, hist\n\n\ndef get_final_img(hist, im_orig, img_greyscale, rgb_image):\n \"\"\"\n Map the intensity values of the image using the equlized hist\n :return: final image\n \"\"\"\n # move each pixel intensity in the old image with the new intensity\n new_image = hist[img_greyscale.astype(np.int8)]\n new_image = new_image.astype(np.float64) / MAX_COLOR_VAL\n if rgb_image:\n yiq_image = rgb2yiq(im_orig)\n yiq_image[:, :, 0] = new_image[:, :]\n result_image = yiq2rgb(yiq_image)\n else:\n result_image = new_image\n return result_image\n\n\ndef get_first_non_zero_bin(cumsum_hist_normalized):\n \"\"\"\n get the first value (bin) that not equal 0. The minimum val in the histogram\n :param cumsum_hist_normalized:\n :return: minimum value index\n \"\"\"\n hist_ignored_zeros = np.ma.masked_equal(cumsum_hist_normalized, 0)\n min_index_not_zero = np.argmin(hist_ignored_zeros)\n min_not_zero_val = cumsum_hist_normalized[min_index_not_zero]\n return min_not_zero_val\n\n\ndef get_greyscale_img(img_greyscale, rgb_image):\n \"\"\"\n transform only RGB image to grayscale image\n :param img_greyscale:\n :param rgb_image: boolean val, if RGB image equal True, else False\n :return: greyscale image\n \"\"\"\n if rgb_image:\n img_greyscale = rgb2yiq(img_greyscale)\n img_greyscale = img_greyscale[:, :, 0]\n img_greyscale *= MAX_COLOR_VAL\n return img_greyscale\n\n\ndef calc_hist_formula(cumsum_hist_normalized, min_not_zero_val):\n \"\"\"\n Calculate the formula of histogram\n :param cumsum_hist_normalized:\n :param min_not_zero_val:\n :return: equalized histogram by formula\n \"\"\"\n non_zero_cumsum_hist = np.ma.masked_equal(cumsum_hist_normalized, 0)\n equalized_hist = np.round(((non_zero_cumsum_hist - min_not_zero_val) / (\n non_zero_cumsum_hist[MAX_COLOR_VAL] - min_not_zero_val)) * MAX_COLOR_VAL)\n\n equalized_hist = np.ma.filled(equalized_hist, 0)\n return equalized_hist\n\n\ndef get_init_z(img_greyscale, n_quant):\n cumsum_hist, hist = get_cumsum_hist(img_greyscale)\n segment_size = cumsum_hist[-1] / n_quant\n z = [0]\n z += [np.argmax(cumsum_hist >= segment_size * i) for i in range(1, n_quant)]\n z.append(MAX_COLOR_VAL)\n return z\n\n\ndef get_new_q(hist, z, q):\n for i in range(len(q)):\n z_i, z_i_next = get_zi_zinext(i, z)\n g, h_g = get_g_hg(hist, z_i, z_i_next)\n q[i] = (np.sum(h_g * g)) / (np.sum(h_g))\n return q\n\n\ndef get_new_z(q, z_list):\n for i in range(1, len(q)):\n if np.isnan(q[i]) or np.isnan(q[i - 1]):\n z_list[i] = 0\n continue\n z_list[i] = np.ceil((q[i - 1] + q[i]) / 2)\n return z_list\n\n\ndef compute_error(hist, z_list, q):\n err = 0\n for i in range(len(q)):\n z_i, z_i_next = get_zi_zinext(i, z_list)\n g, h_g = get_g_hg(hist, 
z_i, z_i_next)\n err += np.sum((np.square(q[i] - g)) * h_g)\n return err\n\n\ndef get_g_hg(hist, z_i, z_i_next):\n g = np.arange(z_i, z_i_next)\n h_g = hist[z_i:z_i_next]\n return g, h_g\n\n\ndef get_zi_zinext(i, z_list):\n z_i = int(z_list[i])\n z_i_next = int(z_list[i + 1])\n return z_i, z_i_next\n\n\ndef quantize(im_orig, n_quant, n_iter):\n \"\"\"\n Performs optimal quantization of a given grayscale or RGB image\n :param im_orig:the input grayscale or RGB image to be quantized (float64 image with\n in [0, 1]).\n :param n_quant: the number of intensities your output im_quant image should have\n :param n_iter:s the maximum number of iterations of the optimization procedure.\n :return: a list [im_quant, error] where\n im_quant - is the quantized output image.\n error - is an array with shape (n_iter,) (or less) of the total intensities error\n for each iteration of the quantization procedure.\n \"\"\"\n\n rgb_image = (len(im_orig.shape) == 3)\n img_greyscale = np.copy(im_orig)\n img_greyscale = get_greyscale_img(img_greyscale, rgb_image)\n hist, bin_edges = np.histogram(img_greyscale, BINS, [0, BINS])\n\n z_list = get_init_z(img_greyscale, n_quant)\n err = []\n q = np.zeros(n_quant)\n\n q, z_list, err = improve_resualts(err, hist, n_iter, q, z_list)\n\n for i in range(n_quant):\n z_i, z_i_next = get_zi_zinext(i, z_list)\n hist[z_i:z_i_next] = np.ceil(q[i])\n\n result_image = get_final_img(hist, im_orig, img_greyscale, rgb_image)\n return result_image, err\n\n\ndef improve_resualts(err, hist, n_iter, q, z_list):\n i = 0\n while i < n_iter:\n prev_z = np.copy(z_list)\n q = get_new_q(hist, z_list, q)\n z_list = get_new_z(q, z_list)\n if np.array_equal(z_list, prev_z):\n break\n error = compute_error(hist, z_list, q)\n err.append(error)\n i += 1\n return q, z_list, err\n\n\ndef quantize_rgb(im_orig, n_quant):\n \"\"\"\n reduce color in rgb image\n :param im_orig:\n :param n_quant: num of colors\n :return: new image\n \"\"\"\n w, h, d = tuple(im_orig.shape)\n im_arr = np.reshape(im_orig, (w * h, d))\n im_arr_sample = shuffle(im_arr, random_state=0)[:1000]\n kmeans = KMeans(n_clusters=n_quant, random_state=0).fit(im_arr_sample)\n labels = kmeans.predict(im_arr)\n image = np.zeros((w, h, kmeans.cluster_centers_.shape[1]))\n lb = 0\n for i in range(w):\n for j in range(h):\n image[i][j] = kmeans.cluster_centers_[labels[lb]]\n lb += 1\n return image\n\n\nres = read_image(\"C:\\\\Users\\\\maayantz\\\\Desktop\\\\maytal\\\\yuv.jpeg\", 1)\nres, b,x = histogram_equalize(res)\nres, err = quantize(res,2,10)\n# res = quantize_rgb(im, 5)\nplt.imshow(res,'gray')\nplt.show()\n"
},
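
A short usage sketch for `sol1.py`: read an image as grayscale, equalize it, then quantize to a few intensity levels. The file path is a placeholder, and note that the demo lines at the bottom of `sol1.py` also run on import, so you may want to comment them out first.

```python
from matplotlib import pyplot as plt

import sol1

im = sol1.read_image("photo.jpg", 1)  # 1 = grayscale representation (placeholder path)

# Equalize, then quantize the equalized image to 4 levels with at most 10 iterations.
im_eq, hist_orig, hist_eq = sol1.histogram_equalize(im)
im_quant, errors = sol1.quantize(im_eq, 4, 10)

plt.imshow(im_quant, cmap="gray")
plt.show()
```
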
{
"alpha_fraction": 0.800000011920929,
"alphanum_fraction": 0.8444444537162781,
"avg_line_length": 10.5,
"blob_id": "80c3f111ce07a40b74ecc7b388e80a9cf26e18d2",
"content_id": "3f4c067acbe46ec320e0e656d8112c3bd8a2832f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 45,
"license_type": "no_license",
"max_line_length": 13,
"num_lines": 4,
"path": "/README.md",
"repo_name": "maayanch/ImageProcessing_ex1",
"src_encoding": "UTF-8",
"text": "maayanchetrit\nsol1.py\nanswer_q1.txt\nREADME.md"
}
] | 2 |
MichaelCarius/fantasia | https://github.com/MichaelCarius/fantasia | 49955be89b382bb700fadd95492294051c5cf3a9 | 0a53b6db864feacfd21ffeff99b2c762b86dc246 | a06a9d92a14a1af78b60d1d2c60445d3b06599a5 | refs/heads/master | 2016-09-08T12:06:51.234497 | 2013-02-02T05:53:34 | 2013-02-02T05:53:34 | null | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.5945350527763367,
"alphanum_fraction": 0.59828120470047,
"avg_line_length": 27.18633460998535,
"blob_id": "4e93f271cc783c3f828b33a28911421ff7ffe9da",
"content_id": "4012844d58d1b4b57be4dddcb2c4dfaca6f2356f",
"detected_licenses": [
"WTFPL"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4538,
"license_type": "permissive",
"max_line_length": 124,
"num_lines": 161,
"path": "/fantasia.py",
"repo_name": "MichaelCarius/fantasia",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python\n\nimport sys\nimport os\nimport random\nimport argparse\nimport importlib\n\nimport play\n\nclass Playlist:\n \"\"\"Provides a sequence of songs to play.\"\"\"\n\n def __init__(self, file):\n dir = os.path.dirname(file)\n mod = os.path.splitext(os.path.basename(file))[0]\n\n sys.path.insert(0, dir)\n module = importlib.import_module(mod)\n sys.path.pop(0)\n\n self.include = module.include\n self.exclude = module.exclude\n self.file = file\n self.buffers = ([], [])\n self.current_buffer = 0\n self.last_song = \"\"\n\n def get_next_song(self):\n \"\"\"Get the next song from the playlist.\n\n All details magically taken care of.\n \"\"\"\n\n self.check_for_new_songs()\n return self.pick_song()\n\n def check_for_new_songs(self):\n \"\"\"Re-scan disk based on includes/excludes.\"\"\"\n\n for inc in self.include:\n if os.path.isfile(inc):\n self.include_file(inc)\n elif os.path.isdir(inc):\n self.include_dir(inc)\n\n for ex in self.exclude:\n if os.path.isfile(ex):\n self.exclude_file(ex)\n elif os.path.isdir(ex):\n self.exclude_dir(ex)\n\n def include_file(self, file):\n \"\"\"If file meets some conditions, queue it.\"\"\"\n\n is_mp3 = os.path.splitext(file)[1] == \".mp3\"\n is_new = not (file in self.buffers[0] or file in self.buffers[1])\n if is_mp3 and is_new:\n self.buffers[self.current_buffer].append(file)\n\n def include_dir(self, dir):\n \"\"\"Queue all files in tree rooted at dir.\"\"\"\n\n for root, dirs, files in os.walk(dir):\n for file in files:\n self.include_file(os.path.join(root, file))\n\n def exclude_file(self, file):\n \"\"\"Completely remove file from the queue.\"\"\"\n\n # loops, just in case the file was mistakenly added more than once\n\n while file in self.buffers[0]:\n self.buffers[0].remove(file)\n\n while file in self.buffers[1]:\n self.buffers[1].remove(file)\n\n def exclude_dir(self, dir):\n \"\"\"Exclude all files in tree rooted at dir.\"\"\"\n\n for root, dirs, files in os.walk(dir):\n for file in files:\n self.exclude_file(os.path.join(root, file))\n\n def pick_song(self):\n \"\"\"Randomly select a song.\n\n Makes sure the song isn't a repeat, and moves it from the pull buffer\n to the push buffer.\n \"\"\"\n\n try:\n song = random.choice(self.buffers[self.current_buffer])\n while song == self.last_song:\n song = random.choice(self.buffers[self.current_buffer])\n self.buffers[self.current_buffer].remove(song)\n\n except IndexError:\n if len(self.include) == 0:\n # woops. empty playlist. 
don't infinitely recurse.\n raise Exception(\"empty playlist\")\n\n self.current_buffer = not self.current_buffer\n song = self.pick_song()\n\n self.last_song = song\n return song\n\ndef play_command(args):\n pl = Playlist(args.playlist)\n while True:\n play.external(pl.get_next_song())\n\ndef pause_command(args):\n pass\n\ndef stop_command(args):\n pass\n\ndef skip_command(args):\n pass\n\ndef include_command(args):\n pass\n\ndef exclude_command(args):\n pass\n\ndef parse_args():\n \"\"\"Parse command-line arguments.\"\"\"\n\n ap = argparse.ArgumentParser()\n subparsers = ap.add_subparsers()\n\n # utility so we don't have to store each subparser and set its callback\n def make_subparser(arg, command):\n parser = subparsers.add_parser(arg)\n parser.set_defaults(command=command)\n return parser\n\n make_subparser(\"play\", play_command).add_argument(\"playlist\")\n make_subparser(\"pause\", pause_command)\n make_subparser(\"stop\", stop_command)\n make_subparser(\"skip\", skip_command)\n make_subparser(\"include\", include_command).add_argument(\"target\")\n make_subparser(\"exclude\", exclude_command).add_argument(\"target\")\n\n # TODO: make_parser(\"query\", query)\n\n return ap.parse_args()\n\nif len(sys.argv) == 1:\n # argparse doesn't have a \"you have to specify a subcommand\" hook, so we\n # have to do it manually\n print(\"usage: fantasia.py [-h] {play,pause,stop,skip,include,exclude} ...\")\n print(\"fantasia.py: error: no subcommand specified (choose from 'play', 'pause', 'stop', 'skip', 'include', 'exclude')\")\n sys.exit(1)\n\nargs = parse_args()\nargs.command(args)\n"
},
{
"alpha_fraction": 0.7294151186943054,
"alphanum_fraction": 0.7365133166313171,
"avg_line_length": 41.43373489379883,
"blob_id": "0f5d426e82ec1d70b0243e5e8a0b7a36ef97e01c",
"content_id": "5ced85e0e13cae4cfd9b9e24382a60dda8527698",
"detected_licenses": [
"WTFPL"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 3523,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 83,
"path": "/README.markdown",
"repo_name": "MichaelCarius/fantasia",
"src_encoding": "UTF-8",
"text": "# Fantasia: A Music Player\n\nWhen I'm coding, I like to have some music playing. I don't want it in a\nwindow. I don't want to have another interface cluttering up my desktop. All I\nwant is to be able to pick some songs and play them on shuffle, in a loop,\nwith no repeats.\n\nThat's Fantasia.\n\n## Features\n\nFantasia is an organically growing Python script that I add features to\nwhenever I dream them up. Right now, it can do these things:\n\n* Using `fantasia.py play <playlist>`, it will play all the songs in the\n specified playlist on loop and shuffle with no repeats.\n\nAnd these are the things I'm working on right now:\n\n* Being able to issue commands like `fantasia.py pause` and `fantasia.py skip`\n* Command-line playlist modification (see below for how it works right now)\n\n## Setting Up\n\n`fantasia.py` is the main script, and depends only the other `.py` files in\nthe directory, `mpg123`, and of course Python itself (Python 3, by the way).\nPut them in whatever directory you like. Fantasia will never create any temp\nfiles, config files, or default playlists, so directory permissions shouldn't\nbe an issue.\n\n## Playing MP3s\n\nFantasia works with things called *playlists*. A playlist is a simple Python\nmodule, which has to define two variables: `include`, the files to include in\nthe playlist, and `exclude`, the ones to leave out. For example, here's my\n`working.py` playlist, with some hopefully illuminating comments:\n\n # The include list specifies all the files and directories to include in\n # the playlist (duh). Including a directory means to search it and all its\n # subdirectories for more MP3 files, unless a particular file or\n # subdirectory is named in the exclude list (see below).\n include = [\"/shared/music\"]\n\n # Now, I like Jonathan Coulton as much as the next guy, but songs with\n # lyrics in them distract me when I'm coding. Rather than specifying each\n # other subdirectory of /shared/music, I just have to say that I don't\n # want this one.\n # Also note that you don't have to escape shell metacharacters or spaces.\n exclude = [\"/shared/music/Jonathan Coulton's Greatest Hit (Plus 13 Other Songs)\"]\n\nOnce your playlist is written, all you have to do to play it is run\n\n $ fantasia.py play my_playlist.py\n\nYou can add a `&`, put it in your `.bashrc`, whatever you like. A couple handy\ncommands for managing the script (until I can add their functionality in) are:\n\n* `pgrep 'fantasia|mpg123' | xargs kill` - Stop Fantasia.\n* `pgrep mpg123 | xargs kill` - Skip to the next song.\n* `pgrep mpg123 | xargs kill -STOP` - Pause the current song.\n* `pgrep mpg123 | xargs kill -CONT` - Resume the current song.\n* `pgrep fantasia | xargs kill` - Stop Fantasia after the current song has finished.\n* `ps -Af | grep -v grep | grep -q fantasia` - Find out if Fantasia is running\n (useful in .bashrc).\n\n## Reading the Source\n\nFantasia is currently organized into two Python modules:\n\n* `fantasia.py` is the main script, and contains the `Playlist` class.\n* `play.py` defines \"player\" functions which play a song and block until it's\n finished.\n\nThe `Playlist` class affects the order of the songs. Look for the\n`get_next_song` method if you want to add a different ordering.\n\n## License and Copyright\n\nCopyright © 2013 Michael Carius <[email protected]>\n\nThis work is free. You can redistribute it and/or modify it under the terms of\nthe Do What The Fuck You Want To Public License, Version 2, as published by\nSam Hocevar. 
See http://www.wtfpl.net/ or the license.txt file for more details.\n"
},
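
The README describes a playlist as a plain Python module that defines `include` and `exclude`. A minimal hypothetical playlist in that shape (the paths are placeholders):

```python
# instrumental.py - a hypothetical Fantasia playlist module.

# Directories are walked recursively for .mp3 files.
include = ["/shared/music"]

# Anything under these paths is skipped even if it matches an include.
exclude = ["/shared/music/podcasts"]
```

It would then be played with `fantasia.py play instrumental.py`.
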
{
"alpha_fraction": 0.6974790096282959,
"alphanum_fraction": 0.7100840210914612,
"avg_line_length": 22.799999237060547,
"blob_id": "23455c8e9d199bfa5169c6fc4ecf4a95130d1896",
"content_id": "67334212c792d97b29560ba212b3829ced6dfd22",
"detected_licenses": [
"WTFPL"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 238,
"license_type": "permissive",
"max_line_length": 91,
"num_lines": 10,
"path": "/play.py",
"repo_name": "MichaelCarius/fantasia",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python\n\n\"\"\"Functions for playing songs.\"\"\"\n\nimport subprocess\n\ndef external(song):\n \"\"\"Call an external program to play a song.\"\"\"\n\n subprocess.call([\"mpg123\", song], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)\n"
}
] | 3 |
shravani1510desai/FaceDetection | https://github.com/shravani1510desai/FaceDetection | 77c79b45cdd22c46c748dcbf8c550e09083316bc | 3bd40619e5f69504ba259bef9568439a475c7bb8 | 4d334a61c43e03d571dcdf4b7eddd25a4fbe0678 | refs/heads/main | 2023-05-30T04:53:20.995399 | 2021-06-07T08:33:53 | 2021-06-07T08:33:53 | 374,590,321 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.5179542899131775,
"alphanum_fraction": 0.5778019428253174,
"avg_line_length": 27.75,
"blob_id": "fa678c860136d17a7808c1f07b4a541df39550fe",
"content_id": "793e82e73139be97cc0d68fc16ba1b907020c21d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 919,
"license_type": "no_license",
"max_line_length": 98,
"num_lines": 32,
"path": "/fd_app/imgrender.py",
"repo_name": "shravani1510desai/FaceDetection",
"src_encoding": "UTF-8",
"text": "import cv2\nimport mtcnn\n\ndef render_image(fileloc):\n face_detector = mtcnn.MTCNN()\n img = cv2.imread(fileloc)\n conf_t = 0.99\n\n img_rgb = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)\n results = face_detector.detect_faces(img_rgb)\n\n print(results)\n number_of_faces = 0\n for res in results:\n x1, y1, width, height = res['box']\n x1, y1 = abs(x1), abs(y1)\n x2, y2 = x1 + width, y1 + height\n\n confidence = res['confidence']\n if confidence < conf_t:\n continue\n key_points = res['keypoints'].values()\n\n cv2.rectangle(img, (x1, y1), (x2, y2), (255, 0, 0), thickness=1)\n cv2.putText(img, f'conf: {confidence:3f}', (x1, y1), cv2.FONT_ITALIC, 0.7, (0, 0, 255), 2)\n\n for point in key_points:\n cv2.circle(img, point, 1, (255, 255, 0), thickness=2)\n number_of_faces+=1\n\n cv2.imwrite(fileloc, img)\n return number_of_faces"
},
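
`render_image` can also be called outside the Flask app; it draws the boxes, confidences and facial key points directly into the file and returns how many faces cleared the 0.99 confidence threshold. A sketch with a placeholder path:

```python
from imgrender import render_image

# Overwrites "group.jpg" in place with the annotated version.
faces = render_image("group.jpg")
print(f"{faces} face(s) detected")
```
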
{
"alpha_fraction": 0.7958833575248718,
"alphanum_fraction": 0.7958833575248718,
"avg_line_length": 290.5,
"blob_id": "25536699303ebc308a3b22ad219572cf213c7de7",
"content_id": "5c14cb7f787a8a796fc4460f9e55d1593b1fdc1a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 583,
"license_type": "no_license",
"max_line_length": 566,
"num_lines": 2,
"path": "/README.md",
"repo_name": "shravani1510desai/FaceDetection",
"src_encoding": "UTF-8",
"text": "# FaceDetection\nFace Detection is a simple computer vison project which focuses of detecting face from the provided image by the user and displays a new image show bounding box around the faces detected along with confidence i.e., how accurately the face is detected, key points facial mark such as left eye, right, eye, nose, mouth and providing this detail in json format. I used flask for the frontend to make it user friendly in which I created simple html form to receive the image from user and a submit button to apply face detection pre trained model used at backend.\n"
},
{
"alpha_fraction": 0.5468540787696838,
"alphanum_fraction": 0.5736278295516968,
"avg_line_length": 19.68115997314453,
"blob_id": "672ce0f18f286b1428a6068a4fa25b30559f221a",
"content_id": "16161202de1ebc31edf8f140ef39466faf1de07b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "HTML",
"length_bytes": 1494,
"license_type": "no_license",
"max_line_length": 94,
"num_lines": 69,
"path": "/fd_app/templates/index.html",
"repo_name": "shravani1510desai/FaceDetection",
"src_encoding": "UTF-8",
"text": "<!DOCTYPE html>\r\n<html>\r\n<head>\r\n\t<title>Face Detection</title>\r\n</head>\r\n<style>\r\n\r\nbody {\r\n\tfont-size: 40px;\r\n\tpadding: 10px;\r\n\tmargin: 20px;\r\n background-color :#034f84;\r\n\r\n text-align: center;\r\n}\r\ndiv{\r\n\tbackground-color :#92a8d1;\r\n\ttext-align: center;\r\n\tborder: 3px solid black;\r\n border-radius: 10px;\r\n\tpadding: 20px;\r\n\tmargin: 35px; \r\n}\r\n#upload{\r\n\tbackground-color: white;\r\n border: 3px solid black;\r\n border-radius: 10px;\r\n padding: 10px;\r\n }\r\n\r\n#submit{\r\n\tbackground-color: white;\r\n border: 3px solid black;\r\n border-radius: 10px;\r\n padding: 10px;\r\n}\r\n</style>\r\n\r\n<body>\r\n {% if error %}\r\n <h3>You might have not passed a file or it might not be an image file containing faces</h3>\r\n {% endif %}\r\n \r\n \r\n \r\n <form method=\"post\" enctype=\"multipart/form-data\" action=\"renderimage\">\r\n \r\n \r\n <div class=\"col-md-4 col-sm-4\"><h1>FACE DETECTION</h1>\r\n \t<h>Upload your photo here</h><br><br>\r\n \t<input id=\"upload\" type=\"file\" name=\"imagetorender\" hidden/>\r\n <label id=\"upload\" for=\"upload\">Choose file</label><br><br>\r\n\r\n \r\n<input id=\"submit\" type=\"submit\" value=\"Submit\" hidden/>\r\n <label id=\"submit\" for=\"submit\">Submit</label><br></div>\r\n <div >\r\n \t{% if (fileloc) and (fileloc!='error') %}\r\n <h2>Total number of faces detected: {{numfaces}}</h2>\r\n <img src={{fileloc}} alt='renderedimage'>\r\n <!--{{fileloc}}-->\r\n {% endif %}\r\n\r\n </div>\r\n\r\n</form>\r\n<br>\r\n</body>\r\n</html>"
},
{
"alpha_fraction": 0.6181818246841431,
"alphanum_fraction": 0.6213438510894775,
"avg_line_length": 29.878047943115234,
"blob_id": "322e289384016f82568430a62796f13214f963c1",
"content_id": "64c01740502aed62f7a749517824f182236c2d19",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1265,
"license_type": "no_license",
"max_line_length": 88,
"num_lines": 41,
"path": "/fd_app/app.py",
"repo_name": "shravani1510desai/FaceDetection",
"src_encoding": "UTF-8",
"text": "from flask import *\nfrom imgrender import *\nimport os\napp = Flask(__name__,static_url_path='/static')\n\n\[email protected]('/',methods=['GET','POST'])\ndef show_index():\n fileloc = request.args.get('renderedimage',default=None,type=str)\n numfaces = request.args.get('numfaces',default=None,type=int)\n if fileloc == 'error':\n error = 'You did not pass any file'\n else:\n error = False\n #print(fileloc)\n return render_template('index.html',fileloc = fileloc,error=error,numfaces=numfaces)\n\[email protected]('/renderimage',methods=['POST'])\ndef render_image_now():\n files = request.files\n #print(files)\n #print(dir(files))\n cwd = os.getcwd()\n try:\n file = files['imagetorender']\n name = file.filename\n image_loc = f\"static/images/{name}\"\n file.save(image_loc)\n try:\n rendered = render_image(image_loc)\n except:\n rendered = False\n if rendered != False:\n return redirect(f'/?renderedimage={image_loc}&numfaces={rendered}')\n #return redirect(url_for(show_index,renderedimage=image_loc))\n else:\n return redirect(f'/?renderedimage=error')\n except:\n return redirect(f'/?renderedimage=error')\n\napp.run(debug=True,port=8000)"
}
] | 4 |
mkihr-ojisan/yakudobot | https://github.com/mkihr-ojisan/yakudobot | 42ba1288ac1f5b9070f1179ebc1562e50f06d481 | c3190a97826090618adfe7c3ebd0d9e04395e73e | b8434b437279627db86cfcdc84594c3daf00fd6c | refs/heads/master | 2023-05-07T01:29:10.664952 | 2021-05-31T10:46:32 | 2021-05-31T10:46:32 | 361,839,234 | 1 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.577151358127594,
"alphanum_fraction": 0.5908011794090271,
"avg_line_length": 41.6708869934082,
"blob_id": "68649022ff380634aed0801c72f1d332062ed589",
"content_id": "4d978f108b51d5dd310c22dc6b7b2775c4c1d8f5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3468,
"license_type": "no_license",
"max_line_length": 162,
"num_lines": 79,
"path": "/scheduler.py",
"repo_name": "mkihr-ojisan/yakudobot",
"src_encoding": "UTF-8",
"text": "import tweepy,os,datetime,time\nfrom apscheduler.schedulers.blocking import BlockingScheduler\nfrom main import db\nfrom database.models import YakudoScore\n\ntwische = BlockingScheduler()\n\nauth = tweepy.OAuthHandler(os.environ.get('CONSUMER_KEY'),os.environ.get('CONSUMER_SECRET'))\nauth.set_access_token(os.environ.get('ACCESS_TOKEN_KEY'), os.environ.get('ACCESS_TOKEN_SECRET'))\napi = tweepy.API(auth)\nuserID = \"mis1yakudo334\"\n\ndef getalltweets():\n all_tweets = []\n Current_Date = datetime.datetime.today()\n starttime = Current_Date.strftime('%Y-%m-%d_00:00:00_JST')\n endtime = Current_Date.strftime('%Y-%m-%d_23:59:59_JST')\n tweets = api.user_timeline(screen_name=userID,since = starttime, until = endtime, count=200,include_rts=False, tweet_mode='extended')\n all_tweets.extend([t for t in tweets if \"Score:\" in t.full_text])\n oldest_id = tweets[-1].id\n while True:\n tweets = api.user_timeline(screen_name=userID, since = starttime, until = endtime, count=200,include_rts=False,max_id=oldest_id - 1,tweet_mode='extended')\n if len(tweets) == 0:\n break\n oldest_id = tweets[-1].id\n all_tweets.extend([t for t in tweets if \"Score:\" in t.full_text])\n return all_tweets\n\[email protected]_job('interval',minutes=1)\ndef timed_job():\n now = datetime.datetime.now()\n if now.minute == 0:\n yakudos = YakudoScore.query.filter(YakudoScore.date==datetime.datetime.now().strftime('%Y-%m-%d')).all()\n if len(yakudos) == 0:\n api.update_status(\"おいお前ら!早くyakudoしろ!(\" + datetime.datetime.now().strftime('%Y-%m-%d %H:%M') + \")\")\n else:\n api.update_status(\"本日のyakudo:\" + str(len(yakudos)) + \"件(\" + datetime.datetime.now().strftime('%Y-%m-%d %H:%M') + \")\")\n print(\"ScheduledTask Complete\")\n elif now.minute == 59 and now.hour == 23:\n yakudos = YakudoScore.query.filter(YakudoScore.date == datetime.datetime.now().strftime('%Y-%m-%d')).all()\n maxscore = 0\n maxuser = \"\"\n maxtweetid = \"\"\n if len(yakudos) == 0:\n api.update_status(\"本日のyakudoは...何一つ...出ませんでした...\")\n else:\n for yakudo in yakudos:\n if yakudo.score > maxscore:\n maxscore = yakudo.score\n maxtweetid = yakudo.tweetid\n maxuser = yakudo.username\n if maxscore > 0:\n msg = \"Highest Score:{:.3f}\\n優勝おめでとう!\\n\".format(maxscore)\n url = \"https://twitter.com/\" + maxuser + \"/status/\" + maxtweetid\n api.update_status(msg + url)\n else:\n api.update_status(\"おい待てや...今日のyakudo...-inf点しか無いやん...\")\n elif now.minute == 50:\n print(\"Checking Database\")\n yakudos = YakudoScore.query.filter(YakudoScore.date == datetime.datetime.now().strftime('%Y-%m-%d')).all()\n count = 0\n for yakudo in yakudos:\n try:\n tweet = api.get_status(yakudo.tweetid)\n except:\n print(\"Tweet Not Found\")\n time.sleep(1)\n api.destroy_status(yakudo.retweetid)\n YakudoScore.query.filter(YakudoScore.tweetid == yakudo.tweetid).delete()\n db.session.commit()\n count+=1\n time.sleep(1)\n if count >= 200:\n break\n\n\n\nif __name__ == \"__main__\":\n twische.start()"
},
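
The scheduler above registers a single interval job and branches on the wall-clock minute inside it. The underlying APScheduler pattern, reduced to a minimal sketch:

```python
from apscheduler.schedulers.blocking import BlockingScheduler

sched = BlockingScheduler()

@sched.scheduled_job('interval', minutes=1)
def tick():
    # Runs once a minute; scheduler.py checks datetime.now() in here to pick
    # between the hourly status tweet, the 23:59 daily winner announcement,
    # and the :50 cleanup of deleted tweets.
    print("tick")

sched.start()  # blocks the main thread
```
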
{
"alpha_fraction": 0.6666666865348816,
"alphanum_fraction": 0.6700336933135986,
"avg_line_length": 20,
"blob_id": "a9da988b4375e6209d5be69af3a90650ee52bdb7",
"content_id": "3b85b910f77de927f43fbe35aae1c544c3e760bc",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 339,
"license_type": "no_license",
"max_line_length": 46,
"num_lines": 14,
"path": "/main.py",
"repo_name": "mkihr-ojisan/yakudobot",
"src_encoding": "UTF-8",
"text": "\nfrom flask import Flask\nfrom flask_sqlalchemy import SQLAlchemy\n\napp = Flask(__name__)\napp.config.from_object('database.config') # 追加\ndb = SQLAlchemy(app) # 追加\n\n#herokuサーバーをスリープさせない為の対策\[email protected](\"/\")\ndef index():\n return \"This is mis1yakudo_bot!\"\n\nif __name__ == \"__main__\":\n app.run()\n\n\n"
},
{
"alpha_fraction": 0.4861111044883728,
"alphanum_fraction": 0.699999988079071,
"avg_line_length": 15.744186401367188,
"blob_id": "127c0adfae1a3341cb5f24efe7e2cd42c8dafe58",
"content_id": "45c26601095c762d821c3f29e30ed3d461f54ae9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 720,
"license_type": "no_license",
"max_line_length": 31,
"num_lines": 43,
"path": "/requirements.txt",
"repo_name": "mkihr-ojisan/yakudobot",
"src_encoding": "UTF-8",
"text": "appdirs==1.4.3\nAPScheduler==3.7.0\nCacheControl==0.12.6\ncertifi==2019.11.28\nchardet==3.0.4\nclick==7.1.2\ncolorama==0.4.3\ncontextlib2==0.6.0\ndistlib==0.3.0\ndistro==1.4.0\nFlask==1.1.2\nFlask-SQLAlchemy==2.5.1\ngreenlet==1.0.0\ngunicorn==20.1.0\nhtml5lib==1.0.1\nidna==2.8\nipaddr==2.2.0\nitsdangerous==1.1.0\nJinja2==2.11.3\nlockfile==0.12.2\nMarkupSafe==1.1.1\nmsgpack==0.6.2\nnumpy==1.20.2\noauthlib==3.1.0\nopencv-contrib-python==4.5.1.48\npackaging==20.3\npep517==0.8.2\nprogress==1.5\npsycopg2-binary==2.8.6\npyparsing==2.4.6\nPySocks==1.7.1\npytoml==0.1.21\npytz==2021.1\nrequests==2.22.0\nrequests-oauthlib==1.3.0\nretrying==1.3.3\nsix==1.14.0\nSQLAlchemy==1.4.11\ntweepy==3.10.0\ntzlocal==2.1\nurllib3==1.25.8\nwebencodings==0.5.1\nWerkzeug==1.0.1\n"
},
{
"alpha_fraction": 0.6698113083839417,
"alphanum_fraction": 0.6698113083839417,
"avg_line_length": 21.36842155456543,
"blob_id": "d9f6b0d2f45413fd077f9b45ab43d29cb932652d",
"content_id": "41bbc19f3af9371bf5c80c6a26782ad9e76c069b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 424,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 19,
"path": "/database/models.py",
"repo_name": "mkihr-ojisan/yakudobot",
"src_encoding": "UTF-8",
"text": "from main import db\nfrom flask_sqlalchemy import SQLAlchemy\nimport datetime\n\n\nclass YakudoScore(db.Model):\n id = db.Column(db.Integer, primary_key=True)\n username = db.Column(db.Text)\n tweetid = db.Column(db.Text)\n retweetid = db.Column(db.Text)\n score = db.Column(db.Float)\n date = db.Column(db.Text, nullable=False)\n\n def __repr__(self):\n return \"YakudoScore!\"\n\n\ndef init():\n db.create_all()"
},
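
A sketch of how the model above is used elsewhere in the bot: create a row, commit it, and query today's scores the way `scheduler.py` does. The values are placeholders; `db` comes from `main` as in the other modules:

```python
import datetime

from main import db
from database.models import YakudoScore

# Record a hypothetical score for one tweet.
entry = YakudoScore(username="example_user", tweetid="123", retweetid="456",
                    score=151.2, date=datetime.datetime.now().strftime('%Y-%m-%d'))
db.session.add(entry)
db.session.commit()

# Fetch everything scored today.
today = YakudoScore.query.filter(
    YakudoScore.date == datetime.datetime.now().strftime('%Y-%m-%d')).all()
```
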
{
"alpha_fraction": 0.7011494040489197,
"alphanum_fraction": 0.7049808502197266,
"avg_line_length": 42.5,
"blob_id": "a863630d8b8505c60e32bbe7855f17827e0fec9e",
"content_id": "87a02562b42da4a889701254469184a0bcb694b2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 261,
"license_type": "no_license",
"max_line_length": 106,
"num_lines": 6,
"path": "/database/config.py",
"repo_name": "mkihr-ojisan/yakudobot",
"src_encoding": "UTF-8",
"text": "import os\n\n#SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL') or \"sqlite:///test.db\"\nSQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL').replace(\"://\", \"ql://\", 1) or \"sqlite:///test.db\"\nSQLALCHEMY_TRACK_MODIFICATIONS = True\nSECRET_KEY=\"secret key\"\n"
},
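
The `replace("://", "ql://", 1)` hack above rewrites the scheme of Heroku's `DATABASE_URL`, which is exported as `postgres://...` but must read `postgresql://...` for newer SQLAlchemy releases. A quick illustration with a made-up URL:

```python
url = "postgres://user:pass@host:5432/dbname"  # what Heroku exports
fixed = url.replace("://", "ql://", 1)
print(fixed)  # postgresql://user:pass@host:5432/dbname
```
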
{
"alpha_fraction": 0.7392592430114746,
"alphanum_fraction": 0.7599999904632568,
"avg_line_length": 19.454545974731445,
"blob_id": "868af3c4a13d84969908eb1df324b7547eb0e545",
"content_id": "6a3d0d6d464773a62c511aa25c94990a4ad3e101",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1165,
"license_type": "no_license",
"max_line_length": 66,
"num_lines": 33,
"path": "/README.md",
"repo_name": "mkihr-ojisan/yakudobot",
"src_encoding": "UTF-8",
"text": "# yakudobot\n\n## これは何?\n- ハッシュタグ「#mis1yakudo」がついてるツイートを自動的にリツイートしたり、yakudo写真を評価したりするbotです。\n時間あったらアプリをDocker化するかもしれません。\n\n## 使い方\n### 環境構築\nまず、```~/.bashrc```を編集して、APIのTokenを環境変数として登録してください。\n```\nexport ACCESS_TOKEN_KEY=\"xxxxxxx\"\n...\n```\n次に、必要なライブラリーを導入します。\npython3がインストールされていない方は、まずpython3をインストールしてください。\n```\nsudo apt install python3 python3-pip\n```\npython3とpipが導入できたら、下記のコマンドを実行します。\n```\ngit clone https://github.com/HarrisonKawagoe3960X/yakudobot.git\ncd yakudobot\npip3 install -r requirements.txt\n```\nこれで、必要なライブラリーの導入が終了しました。\n### アプリの構成\n- ```scheduler.py```定期ツイート&ツイ消しの探知\n- ```monitor.py```「#mis1yakudo」を含むツイートの探知&引用リツイート\n### アプリの起動\n```\npython3 scheduler.py\npython3 monitor.py\n```\n"
},
{
"alpha_fraction": 0.5585733652114868,
"alphanum_fraction": 0.5668038129806519,
"avg_line_length": 32.44036865234375,
"blob_id": "505ad0e30a706690f4532cd4e74a61e9022d7410",
"content_id": "cd933131b04ce922fc4bc4b618b378e17a9dcb1a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3729,
"license_type": "no_license",
"max_line_length": 134,
"num_lines": 109,
"path": "/monitor.py",
"repo_name": "mkihr-ojisan/yakudobot",
"src_encoding": "UTF-8",
"text": "import datetime\nimport tempfile\nimport requests\nimport os\nimport tweepy\nfrom threading import Thread\nimport cv2\nfrom main import db\nfrom database.models import YakudoScore\nimport traceback\n\nauth = tweepy.OAuthHandler(os.environ.get('CONSUMER_KEY'),os.environ.get('CONSUMER_SECRET'))\nauth.set_access_token(os.environ.get('ACCESS_TOKEN_KEY'), os.environ.get('ACCESS_TOKEN_SECRET'))\napi = tweepy.API(auth)\nkeyword= ['#mis1yakudo']\n\nbotname = \"nishinomiya443\"\n\nyakudo = None\nmsg = \"\"\nurl = \"\"\nuserid = None\n\nclass MyStreamListener(tweepy.StreamListener):\n\n def on_status(self, status):\n t = Thread(runtask(status))\n t.start()\n\ndef checkyakudo(url):\n # load img from url\n res = requests.get(url)\n img = None\n with tempfile.NamedTemporaryFile(dir='./') as fp:\n fp.write(res.content)\n fp.file.seek(0)\n img = cv2.imread(fp.name)\n result = (1/cv2.Laplacian(img, cv2.CV_64F).var())*10000 # yakudoスコアの計算\n return result\n\n isquotetweet = False\n if not keyword[0] in tweet and \"twitter.com/\" in tweet and \"/status\" in tweet:\n isquotetweet = True\n\ndef runtask(status):\n print(status.text)\n if status.user.screen_name != botname and not status.text.startswith(\"RT @\") and keyword[0] in status.text:\n url = \"https://twitter.com/\" + status.user.screen_name + \"/status/\" + status.id_str\n msg = datetime.datetime.now().strftime('%Y-%m-%d %H:%M')+\"\\n\"\n msg += \"User:@\"+status.user.screen_name + \"\\n\"\n # yakudo_check_block\n yakudo = YakudoScore(username=status.user.screen_name,tweetid=status.id_str,date=datetime.datetime.now().strftime('%Y-%m-%d'))\n if hasattr(status, 'extended_entities'):\n finalscore = 0\n count = 0\n isphoto = True\n for image in status.extended_entities[\"media\"]:\n if image[\"type\"] == \"video\":\n msg += \"やめろ!クソ動画を投稿するんじゃない!\\n\"\n msg += \"Score:-inf\\n\"\n yakudo.score = 0\n isphoto = False\n break\n score = checkyakudo(image['media_url_https'])\n finalscore += score\n count += 1\n childtext = \"{:.0f}枚目:{:.3f}\\n\"\n msg += childtext.format(count, score)\n yakudo.score = score\n if isphoto:\n finalscore /= count\n msg += \"GoodYakudo!\\n\" if finalscore >= 150 else \"もっとyakudoしろ!\\n\"\n finaltext = \"Score:{:.3f}\\n\"\n msg += finaltext.format(finalscore)\n else:\n msg += \"画像が入ってないやん!\\n\"\n msg += \"Score:-inf\\n\"\n yakudo.score = 0\n userid = status.user.id\n new_tweet = api.update_status(msg + url)\n api.create_friendship(status.user.id)\n yakudo.retweetid = new_tweet.id_str\n db.session.add(yakudo)\n db.session.commit()\n yakudo = None\n msg = \"\"\n url = \"\"\n userid = None\n\n\ndef start_monitoring():\n print(\"start monitoring\")\n while True:\n try:\n if yakudo is not None and msg != \"\" and url != \"\":\n new_tweet = api.update_status(msg + url)\n api.create_friendship(userid)\n yakudo.retweetid = new_tweet.id_str\n db.session.add(yakudo)\n db.session.commit()\n print(\"start streaming\")\n myStream = tweepy.Stream(auth=api.auth, listener=MyStreamListener())\n myStream.filter(track=keyword)\n except:\n traceback.print_exc()\n continue\n\nif __name__ == \"__main__\":\n start_monitoring()\n"
}
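
The score computed in `checkyakudo` is the inverse variance of the Laplacian, scaled by 10000: blurrier (more yakudo) photos have less high-frequency detail, hence a smaller Laplacian variance and a higher score. The same metric on a local file, as a sketch with a placeholder path:

```python
import cv2

img = cv2.imread("photo.jpg")
score = (1 / cv2.Laplacian(img, cv2.CV_64F).var()) * 10000
# monitor.py treats an average score of 150 or more as "GoodYakudo!".
print(f"yakudo score: {score:.3f}")
```
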
] | 7 |
alipsho/lab-6 | https://github.com/alipsho/lab-6 | c9bbbf51967be5af9b9055c15e18735c9bca7d92 | 525b5d8e890a03865807c94b4caa4745f62c3e35 | ba7ffded561a880f20f5b3273e587f24d741a9d6 | refs/heads/main | 2023-05-23T13:52:54.523151 | 2021-06-18T14:10:58 | 2021-06-18T14:10:58 | 378,154,953 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.4609600901603699,
"alphanum_fraction": 0.47021400928497314,
"avg_line_length": 25.600000381469727,
"blob_id": "d46bb2a5a16c47cdd8262ac77e3376c1e2b6313a",
"content_id": "99a0637beada40f0cb76b023ee7f0fd206b82e94",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1729,
"license_type": "no_license",
"max_line_length": 65,
"num_lines": 65,
"path": "/mserver.py",
"repo_name": "alipsho/lab-6",
"src_encoding": "UTF-8",
"text": "import math\nimport socket\nimport sys\nimport time\nimport errno\nfrom multiprocessing import Process\n\ndef process_start(s_sock):\n s_sock.send(str.encode('CALCULATOR'))\n while True:\n data = s_sock.recv(2048)\n data = data.decode(\"utf-8\")\n\n try:\n operation, val = data.split(\":\")\n opt = str(operation)\n n = float(val)\n\n if opt[0] == 'a':\n opt = 'Logarithm'\n ans = math.log10(n)\n elif opt[0] == 'b':\n opt = 'Square Root'\n ans = math.sqrt(n)\n elif opt[0] == 'c':\n opt = 'Exponential'\n ans = math.exp(n)\n else:\n answer = ('syntax error')\n\n sendAns = (str(opt)+ '['+ str(n) + ']= ' + str(ans))\n print ('\\nanswer!!!')\n except:\n print ('Connection with client was ended...')\n sendAns = ('Connection with client was ended...')\n\n if not data:\n break\n\n s_sock.send(str.encode(str(sendAns)))\n s_sock.close()\n\nif __name__ == '__main__':\n s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n s.bind((\"\",4444))\n print(\"Server is listening...\")\n s.listen(3)\n try:\n while True:\n try:\n s_sock, s_addr = s.accept()\n p = Process(target=process_start, args=(s_sock,))\n print(\"Connected to the client...\")\n p.start()\n\n except socket.error:\n\n print('There error in your socket connection')\n\n except Exception as e:\n print(\"An error occurred!\")\n print(e)\n sys.exit(1)\n finally:\n \t s.close()\n"
},
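
The server speaks a small line protocol: it greets the client with `CALCULATOR`, then each request is `<code>:<number>` where the code is `a` (logarithm), `b` (square root) or `c` (exponential). A minimal raw-socket client sketch, assuming the server is reachable on localhost port 4444:

```python
import socket

s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(("127.0.0.1", 4444))

print(s.recv(2048).decode())  # "CALCULATOR" greeting

s.send(b"b:144")              # request the square root of 144
print(s.recv(2048).decode())  # e.g. "Square Root[144.0]= 12.0"

s.close()
```
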
{
"alpha_fraction": 0.5645833611488342,
"alphanum_fraction": 0.5958333611488342,
"avg_line_length": 22.975000381469727,
"blob_id": "114a566dd4c2317c9d87dbdd026cdfae97ececc5",
"content_id": "fcd4f2b654c12c832431a8d30d279f3522eddcaf",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 960,
"license_type": "no_license",
"max_line_length": 95,
"num_lines": 40,
"path": "/mclient2.py",
"repo_name": "alipsho/lab-6",
"src_encoding": "UTF-8",
"text": "\nimport socket\nimport signal\nimport sys\n\nc = socket.socket()\nhost = '192.168.56.102'\nport =4444\n\nprint('Waiting for connection...')\ntry:\n c.connect((host, port))\nexcept socket.error as e:\n print(str(e))\n\nResponse = c.recv(1024)\nprint(Response.decode(\"utf-8\"))\nwhile True:\n print(\"What mathematical function Do you want to use? :\")\n print(\"Logarithm(a)\")\n print(\"Square Root(b)\")\n print(\"Exponential(c)\")\n Input = input('\\nEnter code here: ')\n\n if Input == 'a' or Input == 'b' or Input == 'c':\n n = input(\"Enter the number: \")\n Input = Input + \":\" + n\n c.send(str.encode(Input))\n Response = c.recv(1024)\n print(Response.decode(\"utf-8\"))\n\n elif Input == 'exit':\n break\n\n else:\n print(\"your functional code not available because the code is l,s,e and exit only....\")\n c.send(str.encode(Input))\n Response = c.recv(1024)\n print(Response.decode(\"utf-8\"))\n\nc.close()\n"
}
] | 2 |
YouriSchuur/gevpro-week3 | https://github.com/YouriSchuur/gevpro-week3 | f8def0681c7239657c314fe3d012e1e2b0e14bfc | a00b29a3f47c8e2f77d46af55b9aed5e882c5db5 | e9b85474952b5851dafcd363fdbe6ca8c59a56c5 | refs/heads/master | 2021-01-22T11:55:14.057325 | 2015-03-09T08:34:00 | 2015-03-09T08:34:00 | 31,764,837 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6574585437774658,
"alphanum_fraction": 0.6643646359443665,
"avg_line_length": 26.846153259277344,
"blob_id": "26ba74bfab3062d784e8ae48f7d2eafa234ad3db",
"content_id": "b6f69024ec76486b0d56ae0af322692e9fcb241d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 724,
"license_type": "no_license",
"max_line_length": 107,
"num_lines": 26,
"path": "/blooddie.py",
"repo_name": "YouriSchuur/gevpro-week3",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python3\n\nimport sys\nimport json\nfrom collections import namedtuple\n\n\"\"\" create a namedtuple\"\"\"\nLanguage = namedtuple(\"language\", \"name, classification\")\n\ndef blooddie(language):\n blood = language[2].split()\n die = language[3].split()\n return [True for word in blood if word in die]\n\ndef main():\n \"\"\" open file\"\"\"\n open_file = open(\"blood-die.json\", \"r\")\n filedata = json.load(open_file)\n \n \"\"\" create resultslist and append to resultslist\"\"\"\n resultslist = []\n [resultslist.append(Language(language[0], language[1])) for language in filedata if blooddie(language)]\n [print(result.name, \"|\", result.classification) for result in resultslist]\n\nif __name__ == '__main__':\n main()\n"
},
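
`blooddie` indexes each record positionally: name, classification, then space-separated word lists for "blood" and "die"; it flags a language when a "blood" word also occurs among the "die" words. The real `blood-die.json` is not shown here, so this record shape is a hypothetical reconstruction:

```python
# One hypothetical entry in the shape blooddie() expects:
# [name, classification, blood-words, die-words]
language = ["Examplish", "Isolate (hypothetical)", "blut blod", "blod dau"]
# Here "blod" appears in both lists, so blooddie(language) returns a
# non-empty, truthy list and the language would be printed.
```
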
{
"alpha_fraction": 0.5433168411254883,
"alphanum_fraction": 0.5569307208061218,
"avg_line_length": 26.89285659790039,
"blob_id": "f8dc873b8c3554e2236bcec098e89bbbbf3bcb68",
"content_id": "f2671012a1a5e5d8f622e61468016d8e5341eb6d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 808,
"license_type": "no_license",
"max_line_length": 82,
"num_lines": 28,
"path": "/spontal_filter.py",
"repo_name": "YouriSchuur/gevpro-week3",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python3\n\nimport xml.etree.ElementTree as ET\nimport sys\n\ndef main(argv):\n \"\"\" import filedata\"\"\"\n tree = ET.parse(argv[1])\n root = tree.getroot()\n \n \"\"\" loop through the filedata and compare start and end with top and bottom\"\"\"\n for POINT in root.findall('POINT'):\n topHZ = float(POINT.find('TOP_HZ').text)\n bottomHZ = float(POINT.find('BOTTOM_HZ').text)\n startF0 = float(POINT.find('F0_START').text)\n endF0 = float(POINT.find('F0_END').text)\n\t \n \n if startF0 > topHZ or startF0 < bottomHZ:\n root.remove(POINT)\n elif endF0 > topHZ or endF0 < bottomHZ:\n root.remove(POINT)\n\t\n \"\"\" write to file\"\"\"\t \n tree.write(argv[2])\n \nif __name__ == \"__main__\":\n main(sys.argv)\n\t\t\t\n\t\t\t\n\t \n\t \n\t \n\n"
}
] | 2 |
rishaungreen/dynamic-map | https://github.com/rishaungreen/dynamic-map | ffeca53dcc795c02aef9c99af2ea53013ae8464d | 99e806703d10f28274413c61701ca4e6d5405ca3 | 64ad7a8ab5a3105d5baeb3b166934804e7bda34d | refs/heads/main | 2023-03-22T01:23:23.346504 | 2021-03-18T14:25:37 | 2021-03-18T14:25:37 | 347,029,580 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.5006915926933289,
"alphanum_fraction": 0.7095435857772827,
"avg_line_length": 15.813953399658203,
"blob_id": "eea6d08a6eae44c330af715e328c7b21b176636c",
"content_id": "189edc709f92e9ef66b9e6148230ef079e8ba739",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 723,
"license_type": "no_license",
"max_line_length": 30,
"num_lines": 43,
"path": "/requirements.txt",
"repo_name": "rishaungreen/dynamic-map",
"src_encoding": "UTF-8",
"text": "autopep8==1.4.4\nClick==7.0\nFlask==1.1.1\nFlask-SQLAlchemy==2.4.0\ngunicorn==19.9.0\nitsdangerous==1.1.0\nJinja2==2.10.1\nMarkupSafe==1.1.1\npsycopg2==2.8.3\npsycopg2-binary==2.8.3\npycodestyle==2.5.0\nSQLAlchemy==1.3.7\nWerkzeug==0.15.5\nbeautifulsoup4==4.9.3\nbs4==0.0.1\ncertifi==2020.12.5\nchardet==4.0.0\ncolorama==0.4.4\ncycler==0.10.0\ndatefinder==0.7.1\net-xmlfile==1.0.1\nfuture==0.18.2\nidna==2.10\nkiwisolver==1.3.1\nmatplotlib==3.3.4\nmwparserfromhell==0.6\nnumpy==1.18.5\nopenpyxl==3.0.7\npandas==1.1.5\nPillow==8.0.1\npyparsing==2.4.7\npython-dateutil==2.8.1\npytz==2021.1\nregex==2020.11.13\nrequests==2.25.1\nselenium==3.141.0\nsix==1.15.0\nsoupsieve==2.2\nurllib3==1.26.3\nxlrd==2.0.1\nopencv-contrib-python-headless\nboto3\npython-dotenv==0.15.0\n"
},
{
"alpha_fraction": 0.5603502988815308,
"alphanum_fraction": 0.5850318670272827,
"avg_line_length": 33.50732421875,
"blob_id": "9a753bcb27dfe10a246ccb02004fff1d76881b51",
"content_id": "e7ff2b05f4615e75c0c74f752bd802b72a99dd52",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 18840,
"license_type": "no_license",
"max_line_length": 593,
"num_lines": 546,
"path": "/app.py",
"repo_name": "rishaungreen/dynamic-map",
"src_encoding": "UTF-8",
"text": "from flask import Flask, render_template, request\nimport os\nimport sqlite3\nimport copy\nimport requests\nimport re\nfrom bs4 import BeautifulSoup\nimport cv2\nimport datetime\nimport json\nimport threading\nimport random\nimport string\nimport time\n\nimport boto3\nfrom dotenv import load_dotenv\n\nload_dotenv()\n\nBUCKET_NAME = \"dynamic-map\"\n\nkey = os.getenv('AWS_KEY_ID')\nsecret = os.getenv('AWS_SECRET_KEY')\n\ns3 = boto3.client('s3',\n aws_access_key_id=key,\n aws_secret_access_key=secret)\n\nupdate_files = [\"corona info.json\", \"travel info.json\", \"pop info.json\"]\n\napp = Flask(__name__)\n\ndatabase = \"static/countries.db\"\n\nlegend_fields = {\"Level 1\": \"#00FF21\", \"Level 2\": \"#FFFF00\", \"Level 3\": \"#FF8C00\", \"Level 4\": \"#FF0000\", \"No Information\": \"#000000\"}\n\nglobal_indicator = 0\n\ncolour_dict = {\n \"#FF0000\": [0, 0, 255],\n \"#00FF21\": [33, 255, 0],\n \"#0000FF\": [255, 0, 0],\n \"#FFFF00\": [0, 255, 255],\n \"#FF8C00\": [0, 140, 255],\n \"#000000\": [0, 0, 0],\n \"#00C455\": [85, 196, 0],\n \"#B0C400\": [0, 196, 176],\n \"#B08C00\": [0, 140, 176],\n \"#C44100\": [0, 65, 196],\n \"#9B1600\": [0, 22, 155],\n \"#500055\": [85, 0, 80],\n \"#78005A\": [90, 0, 120],\n \"#C800AA\": [170, 0, 200],\n \"#E674C8\": [200, 116, 230],\n \"#FFBEFF\": [255, 190, 255]\n}\n\n\[email protected](\"/\", methods=[\"GET\", \"POST\"])\ndef index():\n\n if 'list_of_all_countries' not in globals() or 'list_of_visible_countries' not in globals():\n global list_of_all_countries\n global list_of_visible_countries\n global pixel_dict2\n global pixels\n\n list_of_visible_countries = []\n list_of_all_countries = []\n\n with open('static/outline.json', 'r') as fp:\n pixel_dict2 = json.load(fp)\n\n with open('static/area.json', 'r') as fp:\n pixels = json.load(fp)\n\n get_aliases()\n create_list_of_countries()\n\n threading.Thread(target=crawl_in_background).start()\n time.sleep(1)\n\n if request.method == \"POST\":\n parameter = request.form.get(\"name\", \"travel\")\n dict_travel_info = load_info(parameter)\n full_filename = execute_operation(pixels, parameter)\n time.sleep(1)\n return render_template(\"map.html\", legend_fields=legend_fields, parameter=parameter, pixel_dict=pixel_dict2, temp_map=full_filename, country_info=dict_travel_info, aka2=alias_dictionary2, aka=alias_dictionary, true_name=realname_dictionary)\n # Automatically run travel restrictions\n dict_travel_info = load_info(\"travel\")\n full_filename = execute_operation(pixels)\n time.sleep(1)\n return render_template(\"map.html\", legend_fields=legend_fields, parameter=\"travel\", pixel_dict=pixel_dict2, temp_map=full_filename, country_info=dict_travel_info, aka2=alias_dictionary2, aka=alias_dictionary, true_name=realname_dictionary)\n\n\n# Clear existing colours\ndef reinitialise_map():\n global colour_var\n global clone_img\n global global_indicator\n\n clear_pics()\n global_indicator = 0\n\n original_img = cv2.imread(\"static/World Map 3.png\")\n clone_img = copy.copy(original_img)\n\n\n# Clear other pics\ndef clear_pics():\n dir_name = \"static\"\n pics = os.listdir(dir_name)\n\n for item in pics:\n if item.endswith(\".png\") and not item == \"World Map 3.png\" and not item == \"tmp.png\":\n os.remove(os.path.join(dir_name, item))\n\n\n# Access SQLDatabase\ndef create_connection(db_file):\n \"\"\" create a database connection to the SQLite database\n specified by the db_file\n :param db_file: database file\n :return: Connection object or None\n \"\"\"\n conn = None\n try:\n conn = 
sqlite3.connect(db_file)\n    except sqlite3.Error as e:\n        print(e)\n\n    return conn\n\n\ndef get_aliases():\n    global alias_dictionary2\n    global realname_dictionary\n    global alias_dictionary\n\n    conn = create_connection(database)\n\n    mycursor = conn.cursor()\n\n    mycursor.execute(\"SELECT search_name, other_names, country_name FROM countries\")\n\n    data = mycursor.fetchall()\n\n    realname_dictionary = {}\n    alias_dictionary = {}\n    alias_dictionary2 = {}\n    for x in data:\n        names = x[0] + \", \" + x[1] + \", \" + x[2]\n        names = names.replace(\"[\", \"\").replace(\"]\", \"\").replace(\"'s\", \"#\").replace(\"'\", \"\").replace('\"','').replace(\"#\", \"'s\").split(\", \")\n        alias_dictionary[x[0]] = names\n        alias_dictionary2[x[2]] = names\n        realname_dictionary[x[0]] = x[2]\n\n\ndef load_info(parameter):\n    for file in update_files:\n        file = \"static/\" + file\n        download_file_bucket = BUCKET_NAME\n        download_file_key = str(file)\n        s3.download_file(download_file_bucket, file, download_file_key)\n        print(str(file) + \" has been downloaded\")\n\n    if parameter == \"travel\":\n        # Separate into function with ifs depending on parameter\n        with open('static/travel info.json', 'r') as fp:\n            dict_travel_info2 = json.load(fp)\n    elif parameter == \"population\":\n        with open('static/pop info.json', 'r') as fp:\n            dict_travel_info2 = json.load(fp)\n    else:\n        with open('static/corona info.json', 'r') as fp:\n            dict_travel_info2 = json.load(fp)\n\n    dict_travel_info = {}\n    for i in dict_travel_info2:\n        dict_travel_info[i] = dict_travel_info2[i].replace(\"\\n\", \"<br>\")\n\n    return dict_travel_info\n\n\ndef random_imgname():\n    chars = string.ascii_lowercase\n    img = ''.join(random.choice(chars) for i in range(15))\n\n    return img\n\n\n# Loop through database to get a list of countries\ndef create_list_of_countries():\n    # create a database connection\n    conn = create_connection(database)\n\n    mycursor = conn.cursor()\n\n    mycursor.execute(\"SELECT search_name, pixel_coordinates FROM countries\")\n\n    data = mycursor.fetchall()\n\n    for x in data:\n        list_of_all_countries.append(x[0])\n        if x[1] != \"n/a\":\n            list_of_visible_countries.append(x[0])\n\n\n# Change colour by pixel using list of pixels\ndef colour_pixels(new_coords, colour):\n    global clone_img\n    global colour_dict\n\n    try:\n        colour = colour_dict[colour]\n    except KeyError:\n        colour = [0, 0, 0]\n    for coord in new_coords:\n        clone_img[coord[1], coord[0]] = colour\n\n\ndef crawl_in_background():\n    # crawl for all countries travel information as \"Level XX - blah blah\"\n    travel_info_dict = {}\n    US_travel_info = \"https://travel.state.gov/content/travel/en/international-travel/International-Travel-Country-Information-Pages/\"\n\n    for standard_country_name in list_of_all_countries:\n        if standard_country_name == \"The United States of America\":\n            travel_information = \"This is the home country\"\n        else:\n            country_info = US_travel_info + standard_country_name.replace(\" \", \"\") + \".html\"\n            response = requests.get(country_info)\n            soup = BeautifulSoup(response.text, 'html.parser')\n            page_title = str(soup.find_all('title'))\n            if page_title == \"[<title>404 - Page Not Found</title>]\":\n                travel_information = \"Could not find country information\"\n            else:\n                links = str(soup.find_all('a'))\n                linksf = links.find(\"Level \")\n                travel_status = links[linksf:linksf + 7]\n                links2 = str(soup.find_all('div', {\"class\": \"tsg-rwd-alert-teaser\"}))\n                travel_information = travel_status + \"\\n\" + \"\\n\" + cleanhtml(links2).replace(\"\\n\", \"\").replace(\"[\", \"\").replace(\"]\", \"\")\n        travel_info_dict[standard_country_name] = travel_information\n\n    for standard_country_name in travel_info_dict:\n        if standard_country_name == \"Greenland\":\n            travel_info_dict[standard_country_name] = travel_info_dict[\"Denmark\"]\n\n    # crawl for all countries population / coronavirus information\n    population_dict = {}\n    coronavirus_dict = {}\n    country_list = []\n    population_list = []\n    coronavirus_list = []\n    pop_info = \"https://www.worldometers.info/coronavirus/#countries\"\n\n    response = requests.get(pop_info)\n    soup = BeautifulSoup(response.text, 'html.parser')\n    page_title = str(soup.find_all('title'))\n    if page_title == \"[<title>404 - Page Not Found</title>]\":\n        raise Exception(\"An error has occurred\")\n    else:\n        table = soup.find(\"table\")\n        tbody = table.find('tbody')\n        trs = tbody.find_all('tr', {\"class\": \"\"})\n\n        for tr in trs:\n            tds = tr.find_all('td')\n            country_list.append(tds[1].text)\n            population_list.append(tds[14].text)\n            coronavirus_list.append(tds[2].text)\n\n    for i in range(len(country_list)):\n        if country_list[i] not in list_of_all_countries:\n            for x in alias_dictionary:\n                if country_list[i].strip() in alias_dictionary[x]:\n                    country_list[i] = x\n\n    for z in range(len(country_list)):\n        population_dict[country_list[z]] = population_list[z]\n        coronavirus_dict[country_list[z]] = coronavirus_list[z]\n\n    other_countries = {\"Turkmenistan\": \"turkmenistan\", \"KoreaDemocraticPeoplesRepublicof\": \"north-korea\"}\n    for i in other_countries:\n        pop_info2 = \"https://www.worldometers.info/world-population/\" + other_countries[i] + \"-population/\"\n\n        response = requests.get(pop_info2)\n        soup = BeautifulSoup(response.text, 'html.parser')\n        page_title = str(soup.find_all('title'))\n        if page_title == \"[<title>404 - Page Not Found</title>]\":\n            raise Exception(\"An error has occurred\")\n        else:\n            div_ = soup.find('div', {\"class\": \"col-md-8 country-pop-description\"}).findChildren()\n            population_dict[i] = div_[0].find_all('strong')[1].get_text()\n\n    date_now = f\"As of {datetime.datetime.now():%H:%M} \" + f\"on {datetime.datetime.now():%d %B %Y}\"\n    coronavirus_dict[\"accuracy date\"] = date_now\n    population_dict[\"accuracy date\"] = date_now\n    travel_info_dict[\"accuracy date\"] = date_now\n    coronavirus_dict[\"unknown\"] = \"Could not find country information\"\n    population_dict[\"unknown\"] = \"Could not find country information\"\n    travel_info_dict[\"unknown\"] = \"Could not find country information\"\n\n    travel_lock = threading.Lock()\n    with travel_lock:\n        try:\n            os.remove('static/pop info.json')\n        except FileNotFoundError:\n            pass\n        except PermissionError:\n            pass\n        try:\n            os.remove('static/corona info.json')\n        except FileNotFoundError:\n            pass\n        except PermissionError:\n            pass\n        try:\n            os.remove('static/travel info.json')\n        except FileNotFoundError:\n            pass\n        except PermissionError:\n            pass\n        with open('static/pop info.json', 'w') as fp:\n            json.dump(population_dict, fp)\n        with open('static/corona info.json', 'w') as fp:\n            json.dump(coronavirus_dict, fp)\n        with open('static/travel info.json', 'w') as fp:\n            json.dump(travel_info_dict, fp)\n\n    for file in os.listdir(\"static/\"):\n        if \"info.json\" in file:\n            file = \"static/\" + file\n            upload_file_bucket = BUCKET_NAME\n            upload_file_key = str(file)\n            s3.upload_file(file, upload_file_bucket, upload_file_key)\n            print(str(file) + \" has been uploaded\")\n    return\n\n\ndef index_dict2(dictionary, n=0):\n    if n < 0:\n        n += len(dictionary)\n    for i, (key, value) in enumerate(dictionary.items()):\n        if i == n:\n            return value\n    raise 
IndexError(\"dictionary index out of range\")\n\n\ndef index_dict(dictionary, n=\"Level 1\"):\n for i, key in enumerate(dictionary.keys()):\n if key == n:\n return i\n\n\n# Based on the parameter, assign colours to countries in the database and call other functions in the appropriate order\ndef assign_colours(standard_country_name, parameter=\"travel\"):\n global legend_fields\n global sorted_dict\n global sorted_split\n global global_indicator\n\n if parameter == \"travel\":\n legend_fields = {\"Level 1\": \"#00FF21\", \"Level 2\": \"#FFFF00\", \"Level 3\": \"#FF8C00\", \"Level 4\": \"#FF0000\", \"No Information\": \"#000000\"}\n if standard_country_name in list_of_visible_countries:\n if \"Level 1\" in info_dict[standard_country_name]:\n return \"#00FF21\"\n elif \"Level 2\" in info_dict[standard_country_name]:\n return \"#FFFF00\"\n elif \"Level 3\" in info_dict[standard_country_name]:\n return \"#FF8C00\"\n elif \"Level 4\" in info_dict[standard_country_name]:\n return \"#FF0000\"\n else:\n return \"#000000\"\n elif parameter == \"population\" or parameter == \"coronavirus\":\n if global_indicator == 0:\n global_indicator = 1\n converted_dict = info_dict\n del converted_dict[\"accuracy date\"]\n for x in converted_dict:\n try:\n converted_dict[x] = int(converted_dict[x].replace(\",\", \"\").strip())\n except ValueError:\n converted_dict[x] = 0\n sorted_dict = dict(sorted(converted_dict.items(), key=lambda item: item[1]))\n sorted_split = int(len(sorted_dict) / 5)\n categories = [f\"0 - {insert_commas(index_dict2(sorted_dict, sorted_split))}\", f\"{insert_commas(index_dict2(sorted_dict, sorted_split))} - {insert_commas(index_dict2(sorted_dict, sorted_split * 2))}\", f\"{insert_commas(index_dict2(sorted_dict, sorted_split * 2))} - {insert_commas(index_dict2(sorted_dict, sorted_split * 3))}\", f\"{insert_commas(index_dict2(sorted_dict, sorted_split * 3))} - {insert_commas(index_dict2(sorted_dict, sorted_split * 4))}\", f\"{insert_commas(index_dict2(sorted_dict, sorted_split * 4))} - {insert_commas(index_dict2(sorted_dict, sorted_split * 5 - 1))}\"]\n if parameter == \"coronavirus\":\n legend_fields = {f\"{categories[4]}\": \"#9B1600\", f\"{categories[3]}\": \"#C44100\", f\"{categories[2]}\": \"#B08C00\", f\"{categories[1]}\": \"#B0C400\", f\"{categories[0]}\": \"#00C455\",\n \"No Information\": \"#000000\"}\n else:\n legend_fields = {f\"{categories[4]}\": \"#500055\", f\"{categories[3]}\": \"#78005A\", f\"{categories[2]}\": \"#C800AA\", f\"{categories[1]}\": \"#E674C8\", f\"{categories[0]}\": \"#FFBEFF\",\n \"No Information\": \"#000000\"}\n try:\n category_no = int(index_dict(sorted_dict, standard_country_name) / sorted_split)\n if category_no == 5 or category_no == 4:\n category_no = 0\n elif category_no == 3:\n category_no = 1\n elif category_no == 2:\n pass\n elif category_no == 1:\n category_no = 3\n else:\n category_no = 4\n except TypeError:\n return \"#000000\"\n return index_dict2(legend_fields, category_no)\n else:\n return \"#000000\"\n\n\ndef insert_commas(string1):\n string1 = str(string1)\n string2 = \"\"\n if len(string1) % 3 == 1:\n counter = 1\n for i in string1:\n string2 += i\n counter -= 1\n if counter == 0:\n string2 += \",\"\n counter = 3\n elif len(string1) % 3 == 2:\n counter = 2\n for i in string1:\n string2 += i\n counter -= 1\n if counter == 0:\n string2 += \",\"\n counter = 3\n else:\n counter = 3\n for i in string1:\n string2 += i\n counter -= 1\n if counter == 0:\n string2 += \",\"\n counter = 3\n\n return string2[:-1]\n\n\n# Parsing html\ndef 
cleanhtml(raw_html):\n cleanr = re.compile('<.*?>')\n cleantext = re.sub(cleanr, '', raw_html)\n return cleantext\n\n\ndef execute_operation(pixel_dictionary, parameter=\"travel\"):\n #return threading.Thread(target = execute_colouring, args=(pixel_dictionary, parameter)).start()\n return execute_colouring(pixel_dictionary, parameter)\n\n\ndef execute_colouring(pixel_dictionary, parameter):\n global info_dict\n global accuracy_statement\n global final_img_data\n\n reinitialise_map()\n if parameter == \"travel\":\n travel_lock = threading.Lock()\n with travel_lock:\n with open('static/travel info.json', 'r') as fp:\n info_dict = json.load(fp)\n accuracy_statement = info_dict[\"accuracy date\"]\n for i in pixel_dictionary:\n colour_pixels(pixel_dictionary[i], assign_colours(i))\n elif parameter == \"population\":\n travel_lock = threading.Lock()\n with travel_lock:\n with open('static/pop info.json', 'r') as fp:\n info_dict = json.load(fp)\n accuracy_statement = info_dict[\"accuracy date\"]\n for i in pixel_dictionary:\n colour_pixels(pixel_dictionary[i], assign_colours(i, parameter))\n try:\n del sorted_dict\n except NameError:\n pass\n try:\n del sorted_split\n except NameError:\n pass\n else:\n travel_lock = threading.Lock()\n with travel_lock:\n with open('static/corona info.json', 'r') as fp:\n info_dict = json.load(fp)\n accuracy_statement = info_dict[\"accuracy date\"]\n for i in pixel_dictionary:\n colour_pixels(pixel_dictionary[i], assign_colours(i, parameter))\n try:\n del sorted_dict\n except NameError:\n pass\n try:\n del sorted_split\n except NameError:\n pass\n\n cv2.imwrite(\"static/tmp.png\", clone_img)\n img = random_imgname()\n\n original_img2 = cv2.imread(\"static/tmp.png\")\n\n clone_img2 = copy.copy(original_img2)\n\n cv2.imwrite(f\"static/{img}.png\", clone_img2)\n\n full_filename = os.path.join('static', img + '.png')\n\n return full_filename\n\n\ndef convert_infos():\n travel_lock = threading.Lock()\n with travel_lock:\n try:\n with open('static/pop info2.json', 'r') as fp:\n po_dict = json.load(fp)\n with open('static/corona info2.json', 'r') as fp:\n co_dict = json.load(fp)\n with open('static/travel info2.json', 'r') as fp:\n tr_dict = json.load(fp)\n\n os.remove('static/pop info2.json')\n os.remove('static/corona info2.json')\n os.remove('static/travel info2.json')\n\n with open('static/pop info.json', 'w') as fp:\n json.dump(po_dict, fp)\n with open('static/corona info.json', 'w') as fp:\n json.dump(co_dict, fp)\n with open('static/travel info.json', 'w') as fp:\n json.dump(tr_dict, fp)\n\n except FileNotFoundError:\n pass\n\n\nif __name__ == \"__main__\":\n app.run()"
},
{
"alpha_fraction": 0.803147554397583,
"alphanum_fraction": 0.8036635518074036,
"avg_line_length": 73.51923370361328,
"blob_id": "a92738a1ca67402e78e6fe275cd2a0dcc7f501c0",
"content_id": "66f9577b6e0da1f0ebbcee283483d8f57a99a930",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 3876,
"license_type": "no_license",
"max_line_length": 911,
"num_lines": 52,
"path": "/README.md",
"repo_name": "rishaungreen/dynamic-map",
"src_encoding": "UTF-8",
"text": "# Dynamic World Map\n(By Rishaun Green)\n\n## Video Demo: https://www.youtube.com/watch?v=ehW_yjhBni0\n\n## Description:\nThe idea behind this project was to develop a flask application that would display a map of the world with each country coloured according to certain criteria. The particular criteria I chose were US travel advisory level, total population and total coronavirus cases.\n\nThis project was coded primarily in Python, making use of the flask framework to retrieve, analyse and render information on the countries on the server-side. I made use of a SQL database and json files to store this information. Also, I have included some user-interactable features coded in JavaScript, such as the search function and the image map, to provide access to more information than can be readily displayed on the map.\n\nThe application works by declaring and generating some global variables including a list of all of the countries in the world and a separate list of all the countries visible on the world map template. At this stage, the application is designed to generate multiple dictionaries each containing static information about the listed countries to which other functions need to refer. Subsequently, the dynamic (or changeable) information on each country is read from an Amazon Simple Storage bucket and used to determine the appropriate colour for each country, the correct search response and the colour/parameters of the legend. The result is displayed on the webpage. Additionally, the application has a threaded function tasked with crawling travel.state and worldometers websites for up-to-date information which, once acquired, is stored in the Amazon Simple Storage bucket overwriting the older information.\n\nAs mentioned, the application has some added features; firstly, the buttons on the webpage allow the user to determine the parameters by which the map is colour-coded, this also dynamically adapts the legend and the information given by the search function. The search function allows users to type in a country and get the appropriate information in response, it supports an autocomplete/correct dropdown feature to capture alternative country names. The map also supports clicking on countries to perform this same search. Finally, the application has a imagemap which allows users to hover over countries to find out their names.\n\n\n### File-By-File Breakdown\n#### static/area.json\nHouses the co-ordinates for each pixel that needs to be coloured for a specific country\n#### static/countries.db\nHouses additional information about each country such as its alternative names\n#### static/outline.json\nHouses the co-ordinates for each pixel on the border of a country. 
Used to generate the imagemap.\n#### static/area.json\nHouses the co-ordinates for each pixel that needs to be coloured for a specific country \n#### static/tmp.png\nAn image that is overwritten with a new appropriately coloured map and is subsequently to generate the display image\n#### static/World Map 3.png\nA blank World Map image template from which the new tmp.png can be created\n#### templates/layout.html\nHouses CSS and JavaScript for the webpage\n#### templates/map.html\nHouses the html code for the webpage\n#### (.env)\nHouses the information needed to access the Amazon Simple Storage bucket\n#### app.py\nThe flask application file\n#### Procfile\nA file used to ensure that git and Heroku can parse the flask application correctly\n#### requirements.txt\nA list of all the python modules needed for this application to work correctly\n\n\nThe web application is hosted on heroku.com\n\nThe dynamic storage system uses AWS Amazon\n\nThe original world map was acquired from the following source and subsequently edited:\nhttp://www.freeusandworldmaps.com/html/World_Projections/WorldPrint.html\n\nSources for country information:\n-\thttps://travel.state.gov/\n-\thttps://www.worldometers.info/\n\n"
}
] | 3 |
FSU-ACM/Programming-Contest-Suite-v2 | https://github.com/FSU-ACM/Programming-Contest-Suite-v2 | 785f36eb32344ba1fab08c6d27fabb4643568782 | ffc96f48a9272827aa80fa462bfb04f4501ee499 | b22c876f1c049bc8001d28adc62631d359b2f3ff | refs/heads/master | 2021-11-19T11:47:36.824162 | 2021-11-06T00:22:50 | 2021-11-06T00:22:50 | 241,192,222 | 1 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6481927633285522,
"alphanum_fraction": 0.6481927633285522,
"avg_line_length": 30.923076629638672,
"blob_id": "02204392e9d1c1594c4e436f819c7c2ccf4adfaa",
"content_id": "02fccc46ed1a459b6ad1b495289d0f4255b1276b",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 830,
"license_type": "permissive",
"max_line_length": 90,
"num_lines": 26,
"path": "/utils/faculty_csv_gen.py",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "import csv\n\ntry:\n\twith open(\"faculty.csv\", \"w\") as f:\n\t\tfacultywriter = csv.writer(f, delimiter=',', quoting=csv.QUOTE_MINIMAL)\n\t\tfacultywriter.writerow(['email', 'first_name', 'last_name'])\n\n\t\tcontinue_input = 'Y'\n\n\t\twhile continue_input == 'Y' or continue_input == 'y':\n\t\t\tinstructor_id = input(\"\\nInstructor CS email [excluding @cs.fsu.edu]: \")\n\t\t\tfirst_name = input(\"Faculty first name: \")\n\t\t\tlast_name = input(\"Faculty last name: \")\n\t\t\t\n\t\t\tcorrect_entry = input(\n\t\t\t\t'\\n'+instructor_id+'\\t'+first_name+'\\t'+last_name+\"\\n\\nIs the entry correct [y/n]?: \")\n\n\t\t\tif correct_entry == 'N' or correct_entry == 'n':\n\t\t\t\tcontinue\n\n\t\t\tfacultywriter.writerow(\n\t\t\t\t[instructor_id+'@cs.fsu.edu', first_name, last_name])\n\n\t\t\tcontinue_input = input(\"Enter another [y/n]: \")\nexcept IOError:\n\tprint('Error creating output file. Please retry.\\n')\n"
},
{
"alpha_fraction": 0.8072429895401001,
"alphanum_fraction": 0.8072429895401001,
"avg_line_length": 25.75,
"blob_id": "5a6f32d8178f492f842a36b82addcdde8f250d24",
"content_id": "4b89ea5048e20f4b49a5fb37da37bdd82d52d7df",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 856,
"license_type": "permissive",
"max_line_length": 115,
"num_lines": 32,
"path": "/src/manager/admin.py",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "from django.contrib import admin\nfrom import_export import fields, resources\nfrom import_export.admin import ImportExportModelAdmin\nfrom import_export.widgets import ForeignKeyWidget\n\nfrom . import models\n\n# Register your models here.\n\nclass FacultyResource(resources.ModelResource):\n\tclass Meta:\n\t\tmodel = models.Faculty\n\t\timport_id_fields = ('email',)\n\n\nclass FacultyAdmin(ImportExportModelAdmin):\n resource_class = FacultyResource\n\n\nclass CourseResource(resources.ModelResource):\n\tinstructor = fields.Field(column_name='instructor',attribute='instructor',widget=ForeignKeyWidget(models.Faculty))\n\tclass Meta:\n\t\tmodel = models.Course\n\n\nclass CourseAdmin(ImportExportModelAdmin):\n resource_class = CourseResource\n\n\nadmin.site.register(models.Profile)\nadmin.site.register(models.Faculty, FacultyAdmin)\nadmin.site.register(models.Course, CourseAdmin)\n"
},
{
"alpha_fraction": 0.5470737814903259,
"alphanum_fraction": 0.5631891489028931,
"avg_line_length": 41.87272644042969,
"blob_id": "e4f91d110ccd269fc6f2aa462035294a5bb1aadf",
"content_id": "5ca1c3f5dad46b973d8815522bcde3bc29b0d8ad",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2358,
"license_type": "permissive",
"max_line_length": 162,
"num_lines": 55,
"path": "/src/manager/migrations/0001_initial.py",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.1 on 2021-10-10 16:36\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n ('register', '0001_initial'),\n ('auth', '0012_alter_user_first_name_max_length'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='Course',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('code', models.CharField(max_length=8)),\n ('name', models.CharField(max_length=50)),\n ],\n options={\n 'ordering': ['code'],\n },\n ),\n migrations.CreateModel(\n name='Faculty',\n fields=[\n ('email', models.EmailField(max_length=50, primary_key=True, serialize=False)),\n ('first_name', models.CharField(max_length=50)),\n ('last_name', models.CharField(max_length=50)),\n ],\n ),\n migrations.CreateModel(\n name='Profile',\n fields=[\n ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True, serialize=False, to='auth.user')),\n ('role', models.PositiveSmallIntegerField(choices=[(1, 'Contestant'), (2, 'Proctor'), (3, 'Question Writer'), (4, 'Organizer')], default=1)),\n ('team_admin', models.BooleanField(default=False)),\n ('fsu_id', models.CharField(blank=True, max_length=8, null=True, unique=True)),\n ('fsu_num', models.CharField(blank=True, max_length=8, null=True, unique=True)),\n ('checked_in', models.BooleanField(default=False)),\n ('email_confirmed', models.BooleanField(default=False)),\n ('courses', models.ManyToManyField(blank=True, to='manager.Course')),\n ('team', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='profile_team', to='register.team')),\n ],\n ),\n migrations.AddField(\n model_name='course',\n name='instructor',\n field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='manager.faculty'),\n ),\n ]\n"
},
{
"alpha_fraction": 0.5726495981216431,
"alphanum_fraction": 0.5944918990135193,
"avg_line_length": 38,
"blob_id": "bd66e889f1c725a0e74dcfc9e56df1be4c4ce4dd",
"content_id": "b74fdc15549c02bece373135d8f0fdef0fc6ad26",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1053,
"license_type": "permissive",
"max_line_length": 119,
"num_lines": 27,
"path": "/src/register/migrations/0001_initial.py",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.1 on 2021-10-10 16:36\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n ]\n\n operations = [\n migrations.CreateModel(\n name='Team',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('name', models.CharField(max_length=30, unique=True)),\n ('division', models.PositiveSmallIntegerField(choices=[(1, 'Upper Division'), (2, 'Lower Division')])),\n ('pin', models.CharField(max_length=6, unique=True)),\n ('contest_id', models.CharField(blank=True, max_length=7, null=True, unique=True)),\n ('contest_password', models.CharField(blank=True, max_length=6, null=True, unique=True)),\n ('questions_answered', models.PositiveSmallIntegerField(default=0)),\n ('num_members', models.PositiveSmallIntegerField(default=0)),\n ],\n ),\n ]\n"
},
{
"alpha_fraction": 0.7138263583183289,
"alphanum_fraction": 0.7556270360946655,
"avg_line_length": 30.100000381469727,
"blob_id": "99afa4fda532129dd4e81f0f42fb1ad4d5c3438d",
"content_id": "9172e03f8435b7a7c95e7f53e9da3907e684ea21",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 311,
"license_type": "permissive",
"max_line_length": 144,
"num_lines": 10,
"path": "/deploy/prod/contestsuite/scripts/django/start.sh",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "#!/bin/bash\n\nset -o errexit\nset -o pipefail\nset -o nounset\n\npython3 manage.py migrate --noinput\npython3 manage.py collectstatic --noinput\npython3 manage.py initadmin\ngunicorn contestsuite.wsgi:application --bind 0.0.0.0:8000 --workers=4 --threads=4 --worker-class=gthread --worker-tmp-dir /dev/shm --log-file=-\n"
},
{
"alpha_fraction": 0.7777777910232544,
"alphanum_fraction": 0.7777777910232544,
"avg_line_length": 18.799999237060547,
"blob_id": "67457b8db3046015e5b25220157427253280c2c3",
"content_id": "a40d55f5e604c26e75e9f4561f3697c84109fb42",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 99,
"license_type": "permissive",
"max_line_length": 36,
"num_lines": 5,
"path": "/src/contestadmin/apps.py",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "from django.apps import AppConfig\n\n\nclass ContestAdminConfig(AppConfig):\n name = 'contestadmin'\n"
},
{
"alpha_fraction": 0.772243857383728,
"alphanum_fraction": 0.772243857383728,
"avg_line_length": 47.1875,
"blob_id": "4218b7e7a29b525148434f5c30377687753e38ba",
"content_id": "af4592a438464cac7a772bc8618493baa0535d63",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 3855,
"license_type": "permissive",
"max_line_length": 423,
"num_lines": 80,
"path": "/README.md",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "# ACM at FSU Programming Contest Suite\n\nThe Programming Contest Suite is a set of tools for running [ICPC](https://icpc.global) style programming competitions hosted by the [Association for Computing Machinery Florida State University Student Chapter](https://fsu.acm.org). The PCS is a [Django](https://www.djangoproject.com/) powered contest account registration & management application working alongside the [DOMJudge](https://www.domjudge.org/) jury system.\n\n## Installation\nEnsure the target machine has a functioning installation of [Docker](https://www.docker.com) with `docker-compose` and `docker-swarm`. Obtain a copy of the project by cloning the reposity, or downloading one of our [official releases](https://github.com/FSU-ACM/Programming-Contest-Suite/releases). \n\n#### Clone \n\tgit clone https://github.com/FSU-ACM/Programming-Contest-Suite.git \n\n## Deployment \n### Development \nFrom the project root, navigate to the development folder: \n\n\tcd deploy/dev\n\nIf you are running the development deployment for the first time, or have made any changes to the project's Celery tasks run this: \n\n\tdocker-compose build \n\nLaunch the project in dev mode: \n\n\tdocker-compose up\n\nNOTE: In order to monitor the debug logs, as well as view any emails the suite sends while in debug, it is suggested to NOT use the `-d` flag with the `docker-compose up` command.\n### Production \nThe following steps are intended for deploying the suite on the Chapter's server, Agon. \n\nInitialize the swarm: \n\n\tdocker swarm init\n\t\nFrom the project root, navigate to the production folder: \n\n\tcd deploy/prod\n\n\nDeploy the services needed to run both registration (contestsuite) and judging (DOMjudge) platforms. Deployment of services uses the Docker stacks, and will utilize the following format: \n\n\tdocker stack deploy -c path/to/compose-file.yaml service_name\n\nNOTE: The `service_name` above is the stack service name, and does not need to match the name of the app being deployed. Picking good service names aids in monitoring running stacks in the swarm.\n\nDeploy nginx-proxy, which routes traffic between the registration platform and the judging platform: \n\n\tdocker stack deploy -c ./nginx-proxy/docker-compose.yaml nginxproxy\n\nNext, deploy the registration platform. \n\n\tdocker stack deploy -c ./contestsuite/docker-compose.yaml contestsuite\n\n* The `MAIL` environment variables in the Compose file should be updated in order to connect to a valid smtp email account. \n* The `SECRET_KEY` environment variable in the Compose file should be updated as well. Django secret key generators are easily found with a Google search. \n* The `MARIADB` and `SQL` environment variables in the Compose file should be updated to properly secure the service. The credentils should match, as Django uses them to connect to MariaDB. \n* If the suite it being initialized (i.e. an empty database with no users), the default Django superuser's password should be updated from it's default of `seminoles1!` This should be performed in Django Admin. \n\nThen deploy DOMjudge: \n\n\tdocker stack deploy -c ./domjudge/docker-compose.yaml domjudge\n\n* Similar to the registration platform, the `MARIADB` and `MYSQL` in the Compose file should be updated to secure the service. \n* The default admin password should be updated from `adminpw` This can be done by navigating to the DOMjudge site, logging in as admin, and nagivating to the Users section. 
\n* The Judgehost user's password should also be updated, as this is randomly initialized.\n\nLastly, deploy the Judgehosts:\n\n\tcd ./judgehosts\n\n\tdocker-compose up -d --scale judgehost=<an_integer>\n\n* The `JUDGEDAEMON_PASSWORD` environment variable in the Compose file should be updated to what was set in the step above. \n\n#### Teardown \nDocker stacks: \n\n\tdocker stack rm service_name\n\nJudgehosts: \n\n\tdocker-compose down\n"
},
{
"alpha_fraction": 0.6749271154403687,
"alphanum_fraction": 0.6749271154403687,
"avg_line_length": 41.875,
"blob_id": "c32cd51fa8998832c29e5de53d551f2f40c4ef73",
"content_id": "765342bbb92399fee8ab38e4e3a29504fd314119",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 686,
"license_type": "permissive",
"max_line_length": 82,
"num_lines": 16,
"path": "/src/manager/urls.py",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "from django.urls import path\n\nfrom . import views\n\nurlpatterns = [\n # contest suite homepage\n path('', views.dashboard, name='manage_dashboard'),\n path('courses/', views.manage_courses, name='manage_courses'),\n path('courses/clear/', views.clear_courses, name='clear_courses'),\n path('profile/', views.manage_profile, name='manage_profile'),\n path('team/', views.manage_team, name='manage_team'),\n path('team/delete/', views.delete_team, name='delete_team'),\n path('team/join/', views.join_team, name='join_team'),\n path('team/leave/', views.leave_team, name='leave_team'),\n path('team/remove/<str:username>', views.remove_member, name='remove_member'),\n]\n"
},
{
"alpha_fraction": 0.5833333134651184,
"alphanum_fraction": 0.6527777910232544,
"avg_line_length": 23,
"blob_id": "c0c7a26eceebd48e40ab17ca375c8eba6fcfdd76",
"content_id": "571203cacca59ab2b2a08c3b087401fb110a1370",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 72,
"license_type": "permissive",
"max_line_length": 58,
"num_lines": 3,
"path": "/deploy/dev/scripts/django/runserver.sh",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "#!/bin/bash\n\npython3 ../../../../src/manage.py runserver localhost:8000\n"
},
{
"alpha_fraction": 0.6723768711090088,
"alphanum_fraction": 0.6745182275772095,
"avg_line_length": 41.54545593261719,
"blob_id": "2616690dd15329b383e89427a04065cd75e4b693",
"content_id": "b5c563a76b5e29b5e4c20ee201aab337723d8288",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 467,
"license_type": "permissive",
"max_line_length": 143,
"num_lines": 11,
"path": "/src/core/management/commands/initadmin.py",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "from django.contrib.auth.models import User\nfrom django.core.management.base import BaseCommand\n\nclass Command(BaseCommand):\n\n def handle(self, *args, **options):\n if User.objects.count() == 0:\n admin = User.objects.create_superuser(username='contestadmin', email='[email protected]', password='seminoles1!', first_name='Administrator')\n admin.save()\n else:\n print('Admin accounts can only be initialized if no Accounts exist')"
},
{
"alpha_fraction": 0.4997788667678833,
"alphanum_fraction": 0.50597083568573,
"avg_line_length": 31.768115997314453,
"blob_id": "ad88956ecfd66dd405450c68e6e070068ecab90a",
"content_id": "dbdec261c8c8972bd0435eabf0366a3140bf47bc",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2261,
"license_type": "permissive",
"max_line_length": 106,
"num_lines": 69,
"path": "/utils/course_csv_gen.py",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "import csv, random\n\nwhile True:\n write_csv = input(\"[W]rite to CSV or [P]rint to console?: \")\n\n if write_csv == 'W' or write_csv == 'w' or write_csv == 'P' or write_csv == 'p':\n break\n else:\n print('Invalid input. Please enter W or P [case insensitive].\\n')\n \n\ncontinue_input = 'Y'\n\n# Print Course IDs to the terminal\nif write_csv == 'P' or write_csv == 'p':\n while continue_input == 'Y' or continue_input == 'y':\n course_code = input(\"\\nCourse code [ex. COP3014]: \")\n instructor = input(\"Instructor CS email [excluding @cs.fsu.edu]: \")\n\n to_hash = course_code + instructor\n\n id = hash(to_hash)\n if id < 0:\n id *= -1\n\n print('\\n' + 'Course ID: ' + str(id)[:8] + '\\n')\n\n continue_input = input(\"Enter another [y/n]: \")\n\n# Write Course Data to CSV file for Django import\nelse:\n try:\n with open(\"courses.csv\", \"w\") as f:\n coursewriter = csv.writer(f, delimiter=',', quoting=csv.QUOTE_MINIMAL)\n coursewriter.writerow(['id', 'code', 'name', 'instructor'])\n used_ids = []\n \n while continue_input == 'Y' or continue_input == 'y':\n course_code = input(\"\\nCourse code [ex. COP3014]: \")\n course_name = input(\"Course name [ex. Programming I]: \")\n instructor = input(\"Instructor CS email [excluding @cs.fsu.edu]: \")\n\n correct_entry = input(\n '\\n'+course_code+'\\t'+course_name+'\\t'+instructor+\"\\n\\nIs the entry correct [y/n]?: \")\n\n if correct_entry == 'N' or correct_entry =='n':\n continue\n\n to_hash = course_code + instructor\n\n id = hash(to_hash)\n if id < 0:\n id *= -1\n\n id = str(id)[:8]\n\n while id in used_ids:\n id_list = list(id)\n random.shuffle(id_list)\n id = ''.join(id_list)\n\n used_ids.append(id)\n\n\n coursewriter.writerow([id, course_code, course_name, instructor+'@cs.fsu.edu'])\n\n continue_input = input(\"Enter another [y/n]: \")\n except IOError:\n print('Error creating output file. Please retry.\\n')\n"
},
{
"alpha_fraction": 0.700824499130249,
"alphanum_fraction": 0.7020023465156555,
"avg_line_length": 25.53125,
"blob_id": "75a02022f1891d4b79b1677aad7db6de11c61e63",
"content_id": "ede4301251accb5367d4f66b8549a9b82c2bd74f",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 849,
"license_type": "permissive",
"max_line_length": 52,
"num_lines": 32,
"path": "/src/announcements/views.py",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "from django.shortcuts import render\nfrom django.utils import timezone\nfrom django.views.generic.detail import DetailView\nfrom django.views.generic.list import ListView\n\nfrom .models import Announcement\nfrom contestsuite.settings import CACHE_TIMEOUT\n\n# Create your views here.\n\n\nclass AnnouncementListView(ListView):\n\n model = Announcement\n paginate_by = 5\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n context['now'] = timezone.now()\n context['cache_timeout'] = CACHE_TIMEOUT\n return context\n\n\nclass AnnouncementDetailView(DetailView):\n\n model = Announcement\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n context['now'] = timezone.now()\n context['cache_timeout'] = CACHE_TIMEOUT\n return context\n"
},
{
"alpha_fraction": 0.6321428418159485,
"alphanum_fraction": 0.6321428418159485,
"avg_line_length": 24.454545974731445,
"blob_id": "3ea1748148f88582ea4f6f99c8bbe5d1cc68fa67",
"content_id": "cf102a5f95db39b8edc7aad734e0c9b325b7dd0c",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 280,
"license_type": "permissive",
"max_line_length": 52,
"num_lines": 11,
"path": "/src/core/urls.py",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "from django.urls import path\n\nfrom . import views\n\nurlpatterns = [\n # contest suite homepage\n path('', views.index, name='index'),\n path('contact/', views.contact, name='contact'),\n path('faq/', views.faq, name='faq'),\n path('teams/', views.teams, name='teams'),\n]\n"
},
{
"alpha_fraction": 0.6703600883483887,
"alphanum_fraction": 0.6759002804756165,
"avg_line_length": 31.81818199157715,
"blob_id": "be828e87e85efcb32a1c192ef7466e9b71af4739",
"content_id": "bad4d5088b74db425edd2e980c99cd1351a4805a",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 361,
"license_type": "permissive",
"max_line_length": 89,
"num_lines": 11,
"path": "/src/register/urls.py",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "from django.urls import path\n\nfrom . import views\n\nurlpatterns = [\n # contest suite registeration page\n path('', views.base, name='register_base'),\n path('account/', views.account, name='register_account'),\n path('activate/<uidb64>/<token>/', views.ActivateAccount.as_view(), name='activate'),\n path('team/', views.team, name='register_team'),\n]\n"
},
{
"alpha_fraction": 0.5534999966621399,
"alphanum_fraction": 0.5609999895095825,
"avg_line_length": 28.8358211517334,
"blob_id": "0b835e27c58203bbe635a2deef4981b96344278a",
"content_id": "43c3f90bacff1efe9d9980246404ef295b24ce1f",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2000,
"license_type": "permissive",
"max_line_length": 95,
"num_lines": 67,
"path": "/src/manager/forms.py",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "from django import forms\nfrom django.contrib.auth.models import User\nfrom django.contrib.admin.widgets import FilteredSelectMultiple\n\n\nfrom .models import Profile, Course\nfrom register.models import Team\n\n\nclass UserForm(forms.ModelForm):\n class Meta:\n model = User\n fields = ('first_name', 'last_name', 'email')\n help_texts = {\n 'first_name': '30 Characters max',\n 'last_name': '150 Characters max',\n 'email': 'Does NOT need to be your FSU email',\n }\n\n\nclass ProfileForm(forms.ModelForm):\n class Meta:\n model = Profile\n fields = ('fsu_id', 'fsu_num')\n labels = {\n 'fsu_id': 'FSU ID',\n 'fsu_num': 'FSU number',\n }\n help_texts = {\n 'fsu_id': 'Excluding @my.fsu.edu ex: [email protected] -> ab12c',\n 'fsu_num': 'Last 8 numbers on FSUCard. Exclude spaces.',\n }\n error_messages = {\n 'fsu_id': {\n 'max_length': \"The id entered is too long.\",\n },\n 'fsu_num': {\n 'max_length': \"The number entered is too long.\",\n },\n }\n\n\nclass CourseForm(forms.ModelForm):\n class Meta:\n model = Profile\n fields = ('courses',)\n widgets = {'courses': forms.CheckboxSelectMultiple()}\n help_texts = {\n 'courses': 'Select any course above in which you are currently registered.',\n }\n\n\nclass TeamForm(forms.ModelForm):\n class Meta:\n model = Team\n fields = ('name', 'division')\n help_texts = {\n 'name': '30 characters max. Keep it PG-13 please!',\n 'division': 'The division in which your team will compete.',\n }\n\n\nclass JoinForm(forms.Form):\n team = forms.ModelChoiceField(queryset=Team.objects.all(\n ), label='Registered Teams', help_text='Teamname : Division where UPPER = 1 and LOWER = 2')\n pin = forms.CharField(\n max_length=6, label='PIN', help_text='Ask team admin for PIN')\n\n"
},
{
"alpha_fraction": 0.6217936873435974,
"alphanum_fraction": 0.6272233724594116,
"avg_line_length": 35.08783721923828,
"blob_id": "5acbc4355bedb0fc6202de55eee891e97b746983",
"content_id": "ef3b1f3d3095f28f3286aea1eaa262b133d25f78",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5341,
"license_type": "permissive",
"max_line_length": 120,
"num_lines": 148,
"path": "/src/register/views.py",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "from django.contrib import messages\nfrom django.contrib.auth import login\nfrom django.contrib.auth.models import User\nfrom django.contrib.auth.decorators import login_required, user_passes_test\nfrom django.contrib.sites.shortcuts import get_current_site\nfrom django.db import transaction\n# from django.forms import formset_factory\nfrom django.shortcuts import redirect, render\nfrom django.utils.encoding import force_text\nfrom django.utils.http import urlsafe_base64_decode\nfrom django.views import View\n\nfrom . import forms\nfrom . import models\nfrom . import tasks\nfrom .tokens import account_activation_token\nfrom manager.utils import has_no_team, not_registered\n\n# Create your views here.\n\n\nclass ActivateAccount(View):\n\n def get(self, request, uidb64, token):\n try:\n uid = force_text(urlsafe_base64_decode(uidb64))\n user = User.objects.get(pk=uid)\n except (TypeError, ValueError, OverflowError, User.DoesNotExist):\n user = None\n\n if user is not None and account_activation_token.check_token(user, token):\n user.is_active = True\n user.profile.email_confirmed = True\n user.save()\n \n login(request, user)\n messages.success(request, ('Your registration has been confirmed!'))\n return redirect('manage_dashboard')\n else:\n messages.warning(request, ('The confirmation link was invalid, possibly because it has already been used.'))\n return redirect('index')\n\n\ndef base(request):\n return render(request, 'register/register.html')\n\n\n# Limit view to those who are not logged in. Others redirected to manage.\n@user_passes_test(not_registered, login_url='/manage/')\[email protected]\ndef account(request):\n context = {}\n\n if request.method == 'POST':\n form = forms.ExtendedUserCreationForm(request.POST)\n if form.is_valid():\n user = form.save(commit=False)\n user.is_active = False # Deactivate account until it is validated\n user.save()\n\n current_site = get_current_site(request)\n tasks.send_validation_email.delay(current_site.domain, user.username)\n\n messages.success(\n request, 'Account registered! 
Please check your inbox for an account confirmation email.')\n            return redirect('login')\n\n        messages.error(\n            request, 'Please correct the error(s) below.', fail_silently=True)\n    else:\n        form = forms.ExtendedUserCreationForm()\n\n    context['page_title'] = 'Contest Registration'\n    context['heading'] = 'Contest'\n    context['form'] = form\n    return render(request, 'register/register_form.html', context)\n\n\n# key error on password1, more research required\n# https://stackoverflow.com/questions/34962398/keyerror-at-registration-value-password1/34963664\n\n'''def group(request):\n    context = {}\n    UserFormSet = formset_factory(forms.ExtendedUserCreationForm, extra=3)\n    \n    if request.method == 'POST':\n        formset = UserFormSet(request.POST)\n        if formset.is_valid():\n            for form in formset:\n                if form.is_valid():\n                    new_user = User(\n                        first_name=form.cleaned_data['first_name'], \n                        last_name=form.cleaned_data['last_name'],\n                        username=form.cleaned_data['username'],\n                        email=form.cleaned_data['email'],\n                        password=form.cleaned_data['password1']\n                    )\n                    # new_user = form.save(commit=False)\n                    # new_user.password = form.cleaned_data['password1']\n                    new_user.save()\n            \n            messages.success(\n                request, 'Accounts registered!')\n            return redirect('index')\n        \n        messages.error(\n            request, 'Please correct the error(s) below.', fail_silently=True)\n    else:\n        formset = UserFormSet()\n\n    context['formset'] = formset\n    return render(request, 'register/group_register_form.html', {'formset': formset})'''\n\n\n# Limit view to those who are not on a team. Others redirected to manage.\n@login_required\n@user_passes_test(has_no_team, login_url='/manage/')\[email protected]\ndef team(request):\n    context = {}\n\n    if request.method == 'POST':\n        form = forms.TeamForm(request.POST)\n        if form.is_valid():\n            # Create a temporary object, add additional attribute data, then save to DB\n            newTeam = form.save(commit=False)\n            newTeam.pin = User.objects.make_random_password(length=6)\n            newTeam.num_members += 1\n            newTeam.save()\n\n            # Update user profile with new team\n            request.user.profile.team = newTeam\n            request.user.profile.team_admin = True\n            request.user.profile.save()\n            \n            messages.success(\n                request, 'Team registered!', fail_silently=True)\n            return redirect('manage_dashboard')\n\n        messages.error(\n            request, 'Please correct the error(s) below.', fail_silently=True)\n    else:\n        form = forms.TeamForm()\n\n    context['page_title'] = 'Team Registration'\n    context['heading'] = 'Team'\n    context['form'] = form\n    return render(request, 'register/register_form.html', context)\n"
},
{
"alpha_fraction": 0.644957959651947,
"alphanum_fraction": 0.654411792755127,
"avg_line_length": 32.20930099487305,
"blob_id": "28aab67f4292605a646934f8da0b68b9d705c2cc",
"content_id": "6b1db9e48d72e2ec92baa5caa4dac82ad35b9248",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2856,
"license_type": "permissive",
"max_line_length": 141,
"num_lines": 86,
"path": "/src/manager/models.py",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "from django.db import models\nfrom django.contrib.auth.models import User\n\nfrom register.models import Team\n\n# Create your models here.\n\nclass Faculty(models.Model):\n \"\"\"\n Faculty Model\n - Email address used as primary key due to guaranteed uniqueness\n - Imported by contest organizers before registration opens \n \"\"\"\n\n email = models.EmailField(max_length=50, primary_key=True)\n first_name = models.CharField(max_length=50)\n last_name = models.CharField(max_length=50)\n \n def __str__(self):\n return (str(self.first_name) + ' ' + str(self.last_name))\n\n\nclass Course(models.Model):\n \"\"\"\n Course Model\n - Courses added manually at the this point in time\n - code = 'COP3014'\n - name = 'Programming I'\n - sections = [1, 2, 3, 4, 5]\n \"\"\"\n\n code = models.CharField(max_length=8)\n name = models.CharField(max_length=50)\n instructor = models.ForeignKey(Faculty, on_delete=models.SET_NULL, null=True)\n\n class Meta:\n ordering = ['code']\n\n def __str__(self):\n return (str(self.code) + ' : ' + str(self.name) + ' - ' + str(self.instructor.last_name) + ', ' + str(self.instructor.first_name)[0])\n\n def num_registered(self):\n return Profile.objects.filter(courses=self).count()\n\n\nclass Profile(models.Model):\n \"\"\"\n Profile Model\n - Extends built-in User model\n - team attaches user to a contest team, if null user listed as 'unnamed team' in contest \n - team_admin gives user ability to delete team, permission transferrable to other teammate\n - fsu_id used for tracking extra credit participation\n - fsu_num used for swipe checkin\n - courses used for extra credit tracking\n - checked_in used to ensure only active participants get extra credit\n \"\"\"\n\n ROLES = (\n (1, 'Contestant'),\n (2, 'Proctor'),\n (3, 'Question Writer'),\n (4, 'Organizer')\n )\n\n user = models.OneToOneField(User, on_delete=models.CASCADE, primary_key=True)\n role = models.PositiveSmallIntegerField(choices=ROLES, default=1)\n team = models.ForeignKey(Team, related_name='profile_team', on_delete=models.SET_NULL, blank=True, null=True)\n team_admin = models.BooleanField(default=False)\n fsu_id = models.CharField(max_length=8, unique=True, blank=True, null=True)\n fsu_num = models.CharField(max_length=8, unique=True, blank=True, null=True)\n courses = models.ManyToManyField(Course, blank=True)\n checked_in = models.BooleanField(default=False)\n email_confirmed = models.BooleanField(default=False)\n \n def __str__(self):\n return (str(self.user.first_name) + ' ' + str(self.user.last_name))\n\n def has_team(self):\n if self.team is None:\n return False\n return True\n\n def has_courses(self):\n if self.courses.count() == 0:\n return False\n return True\n"
},
{
"alpha_fraction": 0.69753497838974,
"alphanum_fraction": 0.6982011795043945,
"avg_line_length": 27.884614944458008,
"blob_id": "00202e7c92ed748448660e8d1b3baef5c24a7f8b",
"content_id": "c6c00ce82de7022de43e399b356aef2f38e15b40",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 1501,
"license_type": "permissive",
"max_line_length": 59,
"num_lines": 52,
"path": "/src/checkin/static/checkin/js/common.js",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "// Toggles between email and swipe checkin elements\nfunction swap_form() {\n\tvar email = document.getElementById(\"emailDiv\");\n\tvar swipe = document.getElementById(\"swipeDiv\");\n\tvar button = document.getElementById(\"formSwap\");\n\n\tif (email.style.display === \"none\") {\n\t\tswipe.style.display = \"none\";\n\t\temail.style.display = \"block\";\n\t\tdocument.getElementById(\"id_email\").focus();\n\t\tbutton.innerHTML = \"Swipe checkin\";\n\t} \n\telse {\n\t\tswipe.style.display = \"block\";\n\t\temail.style.display = \"none\";\n\t\tdocument.getElementById(\"id_fsu_num\").focus();\n\t\tbutton.innerHTML = \"Email checkin\";\n\t}\n}\n\n// Displays walkin contestant prompt followed by swipe form\nfunction walkin_prompt(is_walkin) {\n\tvar walkin = document.getElementById(\"walkinDiv\");\n\tvar checkin = document.getElementById(\"checkin_form\");\n\tvar prompt = document.getElementById(\"walkin_prompt\");\n\n\tif (is_walkin) {\n\t\twalkin.style.display = \"block\";\n\t}\n\n\tprompt.style.display = \"none\";\n\tcheckin.style.display = \"block\";\n\tdocument.getElementById(\"id_fsu_num\").focus();\n}\n\n// Looks for ? sentinal character or press of the enter key\n// Submits swipe form input on hit\nfunction read_swipe() {\n\tvar inputarea = document.getElementById(\"id_fsu_num\");\n\n\tinputarea.addEventListener(\"input\", function () {\n\t\tif (inputarea.value.slice(-1) == \"?\") {\n\t\t\tdocument.getElementById(\"swipeForm\").submit();\n\t\t}\n\t});\n\n\tinputarea.addEventListener(\"keyup\", function(event) {\n\t\tif (event.key === \"Enter\") {\n\t\t\tdocument.getElementById(\"swipeForm\").submit();\n\t\t}\n\t }); \n}"
},
{
"alpha_fraction": 0.6269146800041199,
"alphanum_fraction": 0.636761486530304,
"avg_line_length": 25.114286422729492,
"blob_id": "6d8372f5480dd3aa1bff807dc792ad3b65eb3984",
"content_id": "696376093610a53174773380eb7bdb93ad3d99ec",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 914,
"license_type": "permissive",
"max_line_length": 95,
"num_lines": 35,
"path": "/src/checkin/forms.py",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "from django import forms\n\n\nclass EmailCheckinForm(forms.Form):\n\temail = forms.EmailField(widget=forms.EmailInput(\n attrs={'placeholder': 'Enter your registered email'}),\n\t\t\trequired=False,\n\t)\n\n\nclass SwipeCheckinForm(forms.Form):\n fsu_num = forms.CharField(widget=forms.PasswordInput(\n attrs={'placeholder': 'Swipe your FSU Card'}),\n\t\trequired=False,\n )\n\n # determines if the swipe is valid\n def valid_read(self):\n if len(self.cleaned_data['fsu_num']) > 2 and self.cleaned_data['fsu_num'][1] == 'B':\n return True\n return False\n\n # returns last 8 numbers of fsu number\n def parse(self):\n return self.cleaned_data['fsu_num'][10:18]\n\n\nclass WalkinForm(forms.Form):\n # iterable\n DIVISIONS = (\n (1, \"Upper\"),\n (2, \"Lower\"),\n )\n \n division = forms.ChoiceField(choices=DIVISIONS, widget=forms.RadioSelect(), required=False)\n"
},
{
"alpha_fraction": 0.6628308296203613,
"alphanum_fraction": 0.6795166730880737,
"avg_line_length": 26.799999237060547,
"blob_id": "1ca9467870f7dc36fc373027c8c9e267c93b399f",
"content_id": "97c553b1824394c986af61f1d64b000472cffe85",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 6952,
"license_type": "permissive",
"max_line_length": 102,
"num_lines": 250,
"path": "/src/contestsuite/settings.py",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "\"\"\"\nDjango settings for contestsuite project.\n\nGenerated by 'django-admin startproject' using Django 2.2.5.\n\nFor more information on this file, see\nhttps://docs.djangoproject.com/en/2.2/topics/settings/\n\nFor the full list of settings and their values, see\nhttps://docs.djangoproject.com/en/2.2/ref/settings/\n\"\"\"\n\nimport os\n\n\n# Build paths inside the project like this: os.path.join(BASE_DIR, ...)\n\nBASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))\n\n# Quick-start development settings - unsuitable for production\n# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/\n\n# SECURITY WARNING: keep the secret key used in production secret!\n# Raises django's ImproperlyConfigured exception if SECRET_KEY not in os.environ\n\nSECRET_KEY = os.environ.get('SECRET_KEY', '86@j2=z!=&1r_hoqboog1#*mb$jx=9mf0uw#hrs@lw&7m34sqz')\n\n# SECURITY WARNING: don't run with debug turned on in production!\n# False if not in os.environ\n\nif os.environ.get('DEBUG'):\n DEBUG = os.environ.get('DEBUG') == 'True'\nelse:\n DEBUG = True\n#DEBUG = os.environ.get('DEBUG'=='True', True)\n\nif DEBUG:\n ALLOWED_HOSTS = ['localhost', '127.0.0.1', '0.0.0.0', '[::1]']\nelse:\n ALLOWED_HOSTS = os.environ.get('ALLOWED_HOSTS').split(',')\n \n#ALLOWED_HOSTS = os.environ.get('ALLOWED_HOSTS', ['.localhost', '127.0.0.1', '[::1]'])\n\n\n# Debug Toolbar Access \n\nif DEBUG:\n INTERNAL_IPS = [\n 'localhost',\n '0.0.0.0',\n '127.0.0.1',\n ]\n\n\n# Application definition\n\nINSTALLED_APPS = [\n # Django built-ins\n 'django.contrib.admin',\n 'django.contrib.auth',\n 'django.contrib.contenttypes',\n 'django.contrib.sessions',\n 'django.contrib.messages',\n 'django.contrib.staticfiles',\n # User defined\n 'announcements.apps.AnnouncementsConfig',\n 'checkin.apps.CheckinConfig',\n 'contestadmin.apps.ContestAdminConfig',\n 'core.apps.CoreConfig',\n 'manager.apps.ManagerConfig',\n 'register.apps.RegisterConfig',\n # 3rd party packages\n 'import_export',\n]\n\n# Add debug_toolber only if site is in debug mode\nif DEBUG:\n INSTALLED_APPS.append('debug_toolbar')\n\nMIDDLEWARE = [\n 'django.middleware.security.SecurityMiddleware',\n 'django.contrib.sessions.middleware.SessionMiddleware',\n 'django.middleware.common.CommonMiddleware',\n 'django.middleware.csrf.CsrfViewMiddleware',\n 'django.contrib.auth.middleware.AuthenticationMiddleware',\n 'django.contrib.messages.middleware.MessageMiddleware',\n 'django.middleware.clickjacking.XFrameOptionsMiddleware',\n]\n\n# Add debug_toolber middleware only if site is in debug mode\nif DEBUG:\n MIDDLEWARE.append('debug_toolbar.middleware.DebugToolbarMiddleware')\n\nROOT_URLCONF = 'contestsuite.urls'\n\nTEMPLATES = [\n {\n 'BACKEND': 'django.template.backends.django.DjangoTemplates',\n 'DIRS': [os.path.join(BASE_DIR, 'templates')],\n 'APP_DIRS': True,\n 'OPTIONS': {\n 'context_processors': [\n 'django.template.context_processors.debug',\n 'django.template.context_processors.request',\n 'django.contrib.auth.context_processors.auth',\n 'django.contrib.messages.context_processors.messages',\n ],\n },\n },\n]\n\nWSGI_APPLICATION = 'contestsuite.wsgi.application'\n\n\n# Database\n# https://docs.djangoproject.com/en/2.2/ref/settings/#databases\n\n# Parse database connection url strings like psql://user:[email protected]:8458/db\n# read os.environ['DATABASE_URL'] and raises ImproperlyConfigured exception if not found\n\nDATABASES = {\n 'default': {\n 'ENGINE': 'django.db.backends.mysql',\n 'HOST': os.environ.get('SQL_HOST', 'localhost'),\n 'PORT': 
os.environ.get('SQL_PORT', '3306'),\n 'NAME': os.environ.get('SQL_DATABASE', 'contestsuite'),\n 'USER': os.environ.get('SQL_USER', 'dev'),\n 'PASSWORD': os.environ.get('SQL_PASSWORD','seminoles'),\n 'OPTIONS': {'charset': 'utf8mb4'},\n 'TIME_ZONE': 'America/New_York',\n 'AUTOCOMMIT': True,\n 'CONN_MAX_AGE': 0,\n }\n}\n\nDEFAULT_AUTO_FIELD = 'django.db.models.AutoField'\n\n\n# Celery\n# https://docs.celeryproject.org/en/stable/getting-started/first-steps-with-celery.html#configuration\n \nCELERY_BROKER_URL = os.environ.get('CELERY_BROKER', 'amqp://127.0.0.1:5672')\nCELERY_RESULT_BACKEND = os.environ.get('CELERY_BACKEND', 'redis://127.0.0.1:6379/1')\nCELERY_TASK_SERIALIZER = 'json'\nCELERY_RESULT_SERIALIZER = 'json'\nCELERY_ACCEPT_CONTENT = ['json']\nCELERY_TIMEZONE = 'America/New_York'\nCELERY_ENABLE_UTC = True\n\n\n# Cache\n# https://docs.djangoproject.com/en/2.2/ref/settings/#caches\n\nCACHES = {\n 'default': {\n 'BACKEND': 'django_redis.cache.RedisCache',\n 'LOCATION': os.environ.get('CACHE_LOCATION', 'redis://127.0.0.1:6379/0'),\n \"OPTIONS\": {\n \"CLIENT_CLASS\": \"django_redis.client.DefaultClient\",\n }\n }\n}\n\nif DEBUG:\n CACHE_TIMEOUT = 0\nelse:\n CACHE_TIMEOUT = int(os.environ.get('CACHE_TIMEOUT', 300))\n\n\n# Password validation\n# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators\n\nAUTH_PASSWORD_VALIDATORS = [\n {\n 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',\n },\n {\n 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',\n },\n {\n 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',\n },\n {\n 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',\n },\n]\n\n\n# Internationalization\n# https://docs.djangoproject.com/en/2.2/topics/i18n/\n\nLANGUAGE_CODE = 'en-us'\n\nTIME_ZONE = 'America/New_York'\n\nUSE_I18N = True\n\nUSE_L10N = True\n\nUSE_TZ = True\n\n\n# Static files (CSS, JavaScript, Images)\n# https://docs.djangoproject.com/en/2.2/howto/static-files/\n\nSTATIC_ROOT = os.path.join(BASE_DIR, \"static\")\nSTATIC_URL = '/static/'\n\n\n# Uploaded files (TSV)\n\nMEDIA_ROOT = os.path.join(BASE_DIR, \"media\")\n\n\n# Redirect to home URL after login (Default redirects to /accounts/profile/)\n\nLOGIN_REDIRECT_URL = '/manage/'\n\n\n# Sessions\n# https://docs.djangoproject.com/en/3.2/topics/http/sessions/\n\nSESSION_ENGINE = 'django.contrib.sessions.backends.cache'\nSESSION_SAVE_EVERY_REQUEST = True\n\nif not DEBUG:\n SESSION_EXPIRE_AT_BROWSER_CLOSE = True\n\n\n# Messages\n# https://docs.djangoproject.com/en/3.2/ref/contrib/messages/\n\nMESSAGE_STORAGE = 'django.contrib.messages.storage.session.SessionStorage'\n\n\n# Email\n# https://docs.djangoproject.com/en/3.1/topics/email/\n\nif DEBUG:\n EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'\nelse:\n EMAIL_BACKEND = os.environ.get('MAIL_BACKEND', 'django.core.mail.backends.console.EmailBackend') \n EMAIL_HOST = os.environ.get('MAIL_HOST', None)\n EMAIL_PORT = int(os.environ.get('MAIL_PORT', 465))\n EMAIL_HOST_USER = os.environ.get('MAIL_USER', None)\n EMAIL_HOST_PASSWORD = os.environ.get('MAIL_PASSWORD', None)\n EMAIL_USE_SSL = False\n EMAIL_USE_TLS = True\n\nDEFAULT_FROM_EMAIL = 'ACM Programming Contest<[email protected]>' \n"
},
{
"alpha_fraction": 0.4991873502731323,
"alphanum_fraction": 0.5060136318206787,
"avg_line_length": 34.36015319824219,
"blob_id": "55a38bd53cfe1ea95336e0577bccbd02c4057688",
"content_id": "c2f0a7f689f4de937cbf60f57dd155009d03d14d",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 9229,
"license_type": "permissive",
"max_line_length": 103,
"num_lines": 261,
"path": "/src/contestadmin/tasks.py",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "import csv\nimport os\n\nfrom django.contrib.auth.models import User\nfrom django.core.mail import send_mail\nfrom django.db import transaction\nfrom django.template.loader import render_to_string\nfrom django.utils.encoding import force_bytes\nfrom django.utils.http import urlsafe_base64_encode\n\nfrom celery import shared_task\nfrom celery.utils.log import get_task_logger\n\nfrom contestsuite.settings import MEDIA_ROOT, DEFAULT_FROM_EMAIL\nfrom contestadmin.models import Contest\nfrom manager.models import Course, Faculty, Profile\nfrom register.models import Team\n\n\nlogger = get_task_logger(__name__)\n\n\n@shared_task\[email protected]\ndef create_walkin_teams(division, total):\n logger.info('Starting walk-in team creation')\n\n if division == 1:\n base_name = 'Walk-in-U-'\n existing_count = Team.objects.filter(\n name__contains='Walk-in-U-').count()\n else:\n base_name = 'Walk-in-L-'\n existing_count = Team.objects.filter(\n name__contains='Walk-in-L-').count()\n\n for i in range(total):\n '''if division == 1:\n name = 'Walk-in-U-' + str(upper_count+i+1).zfill(3)\n else:\n name = 'Walk-in-L-' + str(lower_count+i+1).zfill(3)'''\n name = base_name + str(existing_count+i+1).zfill(3)\n pin = User.objects.make_random_password(length=6)\n Team.objects.create(name=name, division=division, pin=pin)\n logger.info('Created walk-in team %d' % (i+1))\n\n logger.info('Walk-in team creation complete')\n\n\n@shared_task\[email protected]\ndef generate_contest_files():\n count = 0\n teams = Team.objects.all()\n\n logger.info('Starting team credential creation')\n\n for team in teams:\n count += 1\n team.contest_id = 'acm-' + str(count).zfill(3)\n team.contest_password = User.objects.make_random_password(length=6)\n team.save()\n\n logger.info('Created credentials for %d teams' % count)\n\n \n for division in Team.DIVISION:\n if division[0] == 1:\n account_file = MEDIA_ROOT + '/contest_files/accounts_upper.tsv'\n group_file = MEDIA_ROOT + '/contest_files/groups_upper.tsv'\n team_file = MEDIA_ROOT + '/contest_files/teams_upper.tsv'\n else:\n account_file = MEDIA_ROOT + '/contest_files/accounts_lower.tsv'\n group_file = MEDIA_ROOT + '/contest_files/groups_lower.tsv'\n team_file = MEDIA_ROOT + '/contest_files/teams_lower.tsv'\n\n with open(account_file, 'w', newline='') as account_tsv:\n with open(group_file, 'w', newline='') as group_tsv:\n with open(team_file, 'w', newline='') as team_tsv:\n account_writer = csv.writer(\n account_tsv, delimiter='\\t', quoting=csv.QUOTE_MINIMAL)\n group_writer = csv.writer(\n group_tsv, delimiter='\\t', quoting=csv.QUOTE_MINIMAL)\n team_writer = csv.writer(\n team_tsv, delimiter='\\t', quoting=csv.QUOTE_MINIMAL)\n\n account_writer.writerow(['accounts', '1'])\n group_writer.writerow(['File_Version', '1'])\n # Upper Division Group -> 10\n # Lower Division Group -> 11\n group_writer.writerow([division[0]+9, division[1]])\n team_writer.writerow(['File_Version', '2']) \n\n teams = Team.objects.filter(division=division[0])\n for team in teams:\n account_writer.writerow([\n 'team', \n team.contest_id, \n team.contest_id, \n team.contest_password, \n int((team.contest_id).strip(\"acm-\")),\n ])\n\n team_writer.writerow([\n int((team.contest_id).strip(\"acm-\")), \n '', \n team.division, \n team.name, \n 'Florida State University', \n 'FSU', \n 'USA', \n '',\n ])\n\n logger.info('Successfully generated contest files')\n\n\n@shared_task\[email protected]\ndef check_in_out_users(action):\n # Check-in\n if action == 1 or action == 2:\n users = User.objects.all()\n\n for 
user in users:\n\n if user.profile.team == None or user.profile.checked_in == True:\n continue\n\n user.profile.checked_in = True\n user.save()\n\n \n if action == 1:\n subject = 'Programming Contest DOMjudge Credentials'\n message = render_to_string(\n 'checkin/team_credentials_email.html', {'user': user})\n else:\n subject = 'Practice Contest DOMjudge Credentials'\n message = render_to_string(\n 'checkin/team_credentials_practice_email.html', {'user': user})\n user.email_user(subject, message)\n\n logger.info('Sent credentials to %s' % user.username)\n # Check-out\n else:\n users = User.objects.all()\n\n for user in users:\n user.profile.checked_in = False\n user.save()\n\n\n@shared_task\ndef generate_ec_reports():\n num_courses = 0\n faculty_members = Faculty.objects.all()\n\n for faculty in faculty_members:\n courses = Course.objects.filter(instructor=faculty)\n num_files = 0\n\n for course in courses:\n students = User.objects.filter(profile__courses=course).filter(profile__checked_in=True)\n\n if students.exists():\n num_courses += 1\n num_files += 1\n filename = MEDIA_ROOT+'/ec_files/'+(faculty.email.split('@'))[0]+'_'+course.code+'.csv'\n\n with open(filename, 'w', newline='') as csvfile:\n writer = csv.writer(csvfile, quoting=csv.QUOTE_MINIMAL)\n writer.writerow(\n ['fsu_id', 'last_name', 'first_name', 'questions_answered', 'role'])\n for student in students:\n if student.profile.role == 1:\n role = 'Contestant'\n elif student.profile.role == 2:\n role = 'Proctor'\n elif student.profile.role == 3:\n role = 'Question Writer'\n else:\n role = 'Organizer'\n\n if student.profile.team is None:\n questions_answered = 0\n else:\n questions_answered = student.profile.team.questions_answered\n\n writer.writerow([\n student.profile.fsu_id,\n student.last_name,\n student.first_name,\n questions_answered,\n role\n ])\n else:\n continue\n \n logger.info(\n 'Processed extra credit files for %d courses' % num_courses)\n\n\n@ shared_task\ndef email_faculty(domain):\n faculty_members = Faculty.objects.all()\n fpath = MEDIA_ROOT + '/ec_files/'\n\n for faculty in faculty_members:\n for fname in os.listdir(fpath):\n uid=((faculty.email).split('@'))[0]\n if uid in fname: #not faculty_nanmer\n message = render_to_string('contestadmin/ec_available_email.html', {\n 'faculty': faculty,\n 'domain': domain,\n 'uid': urlsafe_base64_encode(force_bytes(uid)),\n })\n \n send_mail(\n 'Programming Contest EC files',\n message,\n DEFAULT_FROM_EMAIL,\n [faculty.email],\n fail_silently = False,\n )\n\n break\n\n\n\n@ shared_task\n@ transaction.atomic\ndef process_contest_results():\n num_teams = 0\n contest = Contest.objects.all().first()\n\n with open(contest.results.path) as resultsfile:\n results = csv.reader(resultsfile, delimiter=\"\\t\", quotechar='\"')\n for row in results:\n #if 'acm-' in row[0]:\n # Exclude header of file\n if 'results' not in row[0]:\n if int(row[0]) < 10:\n id='acm-00'+row[0]\n elif int(row[0]) < 100:\n id='acm-0'+row[0]\n else:\n id='acm-'+row[0]\n\n try:\n #team= Team.objects.get(contest_id=row[0])\n team = Team.objects.get(contest_id=id)\n team.questions_answered= row[3]\n team.save()\n num_teams += 1\n except:\n logger.info('Could not process contest results for %s teams' % id)\n else:\n pass\n\n logger.info('Processed contest results for %d teams' % num_teams)\n"
},
{
"alpha_fraction": 0.6846941113471985,
"alphanum_fraction": 0.6857381463050842,
"avg_line_length": 37.620967864990234,
"blob_id": "416fd06334985f38fbcb779b4555a5522ae140d9",
"content_id": "946b480bb84d98fb7530c6e53e5cc63b404142b4",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4789,
"license_type": "permissive",
"max_line_length": 156,
"num_lines": 124,
"path": "/src/checkin/views.py",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "from django.contrib import messages\nfrom django.contrib.admin.views.decorators import staff_member_required\nfrom django.contrib.auth.models import User\nfrom django.db import transaction\nfrom django.shortcuts import render, redirect\n\nfrom . import forms\nfrom . import tasks\nfrom register.models import Team\n\n# Create your views here.\n\n\n@staff_member_required\[email protected]\ndef checkin(request):\n\tcontext = {}\n\n\tif request.method == 'POST':\n\t\temail_form = forms.EmailCheckinForm(request.POST)\n\t\tswipe_form = forms.SwipeCheckinForm(request.POST)\n\t\twalkin_form = forms.WalkinForm(request.POST)\n\t\twalk_in_team = None\n\t\tis_walkin = False\n\n\t\tif email_form.is_valid() and swipe_form.is_valid() and walkin_form.is_valid():\n\t\t\tif walkin_form.cleaned_data['division']:\n\t\t\t\ttry:\n\t\t\t\t\tif walkin_form.cleaned_data['division'] == 1:\n\t\t\t\t\t\twalk_in_team = Team.objects.filter(name__contains='Walk-in-U-').filter(num_members=0).first()\n\t\t\t\t\telse:\n\t\t\t\t\t\twalk_in_team = Team.objects.filter(name__contains='Walk-in-L-').filter(num_members=0).first()\n\t\t\t\texcept:\n\t\t\t\t\tmessages.error(request, 'Walk-in team assignment failed. Please reattempt check-in.', fail_silently=True)\n\t\t\t\t\treturn redirect('checkin_result')\n\t\t\t\telse:\n\t\t\t\t\tis_walkin = True\n\t\t\t\t\tif walk_in_team is None:\n\t\t\t\t\t\tmessages.error(request, 'There are no Walk-in teams available.', fail_silently=True)\n\t\t\t\t\t\treturn redirect('checkin_result')\n\t\t\tif email_form.cleaned_data['email']:\n\t\t\t\ttry:\n\t\t\t\t\tuser = User.objects.get(email=email_form.cleaned_data['email'])\n\t\t\t\texcept:\n\t\t\t\t\tmessages.error(\n\t\t\t\t\t\trequest, 'Checkin failed. Email address not found.', fail_silently=True)\n\t\t\t\telse:\n\t\t\t\t\tif user.profile.checked_in:\n\t\t\t\t\t\tmessages.info(request, 'You are already checked in.', fail_silently=True)\n\t\t\t\t\telif user.profile.team is None:\n\t\t\t\t\t\tmessages.info(request, 'You are not a member of a registered team. Join a registered team, or select YES at the walk-in prompt.', fail_silently=True)\n\t\t\t\t\telse:\n\t\t\t\t\t\tuser.profile.checked_in = True\n\t\t\t\t\t\tif is_walkin == True:\n\t\t\t\t\t\t\tif user.profile.has_team:\n\t\t\t\t\t\t\t\tmessages.info(request, 'You are a member of a registered team. Walk-in selection ignored.', fail_silently=True)\n\t\t\t\t\t\t\telse:\n\t\t\t\t\t\t\t\t# update user\n\t\t\t\t\t\t\t\tuser.profile.team = walk_in_team\n\t\t\t\t\t\t\t\tuser.profile.team_admin = True\n\n\t\t\t\t\t\t\t\t#update team\n\t\t\t\t\t\t\t\twalk_in_team.num_members += 1\n\t\t\t\t\t\t\t\twalk_in_team.members.append(user.get_full_name())\n\t\t\t\t\t\t\t\twalk_in_team.save()\n\n\t\t\t\t\t\tuser.save()\n\n\t\t\t\t\t\ttasks.send_credentials.delay(user.username)\n\t\t\t\t\t\tmessages.success(request, str(user.first_name) + ', you are checked in!', fail_silently=True)\n\t\t\t\t\t\tmessages.info(request, 'Check your registered email or account dashboard for DOMJudge credentials.', fail_silently=True)\n\t\t\telif swipe_form.cleaned_data['fsu_num']:\n\t\t\t\tif swipe_form.valid_read():\n\t\t\t\t\tfsu_num = swipe_form.parse()\n\t\t\t\t\t\n\t\t\t\t\ttry:\n\t\t\t\t\t\tuser = User.objects.get(profile__fsu_num=fsu_num)\n\t\t\t\t\texcept:\n\t\t\t\t\t\tmessages.error(request, 'Checkin failed. 
FSU number not found.', fail_silently=True)\n\t\t\t\t\telse:\n\t\t\t\t\t\tif user.profile.checked_in:\n\t\t\t\t\t\t\tmessages.info(request, 'You are already checked in.', fail_silently=True)\n\t\t\t\t\t\telif user.profile.team is None:\n\t\t\t\t\t\t\tmessages.info(request, 'You are not a member of a registered team. Join a registered team, or select YES at the walk-in prompt.', fail_silently=True)\n\t\t\t\t\t\telse:\n\t\t\t\t\t\t\tuser.profile.checked_in = True\n\t\t\t\t\t\t\tif is_walkin == True:\n\t\t\t\t\t\t\t\tif user.profile.has_team:\n\t\t\t\t\t\t\t\t\tmessages.info(request, 'You are a member of a registered team. Walk-in selection ignored.', fail_silently=True)\n\t\t\t\t\t\t\t\telse:\n\t\t\t\t\t\t\t\t\t# update user\n\t\t\t\t\t\t\t\t\tuser.profile.team = walk_in_team\n\t\t\t\t\t\t\t\t\tuser.profile.team_admin = True\n\n\t\t\t\t\t\t\t\t\t#update team\n\t\t\t\t\t\t\t\t\twalk_in_team.num_members += 1\n\t\t\t\t\t\t\t\t\twalk_in_team.members.append(user.get_full_name())\n\t\t\t\t\t\t\t\t\twalk_in_team.save()\n\n\t\t\t\t\t\t\tuser.save()\n\n\t\t\t\t\t\t\ttasks.send_credentials.delay(user.username)\n\t\t\t\t\t\t\tmessages.success(request, str(user.first_name) + ', you are checked in!', fail_silently=True)\n\t\t\t\t\t\t\tmessages.info(request, 'Check your registered email or account dashboard for DOMJudge credentials.', fail_silently=True)\n\t\t\t\telse:\n\t\t\t\t\tmessages.error(request, 'Invalid card read. Please try again.', fail_silently=True)\n\t\t\telse:\n\t\t\t\tmessages.error(request, 'Invalid form submission. Please resubmit your information.', fail_silently=True)\n\n\t\t\treturn redirect('checkin_result')\n\telse:\n\t\temail_form = forms.EmailCheckinForm()\n\t\tswipe_form = forms.SwipeCheckinForm()\n\t\twalkin_form = forms.WalkinForm()\n\n\tcontext['email_form'] = email_form\n\tcontext['swipe_form'] = swipe_form\n\tcontext['walkin_form'] = walkin_form\n\treturn render(request, 'checkin/checkin.html', context)\n\n\n@staff_member_required\ndef checkin_result(request):\n\treturn render(request, 'checkin/checkin_result.html')\n"
},
{
"alpha_fraction": 0.7054161429405212,
"alphanum_fraction": 0.7054161429405212,
"avg_line_length": 31.913043975830078,
"blob_id": "1840dc752e4db170f4397dac6190f0613f31efd6",
"content_id": "a8d80084b3f4ccc131f52a97ac62a37ec6b7a35f",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 757,
"license_type": "permissive",
"max_line_length": 64,
"num_lines": 23,
"path": "/src/contestadmin/models.py",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "from django.db import models\nfrom django.contrib.auth.models import User\n\n\n# Create your models here.\n\nclass Contest(models.Model):\n \"\"\"\n Contest Model\n - Results field for DOMJudge contest results uploading\n \"\"\"\n\n contest_date = models.DateField(auto_now=False)\n contest_doors = models.TimeField(auto_now=False)\n contest_start = models.TimeField(auto_now=False)\n contest_freeze = models.TimeField(auto_now=False)\n contest_end = models.TimeField(auto_now=False)\n contest_awards = models.TimeField(auto_now=False)\n results = models.FileField(upload_to='uploads/', blank=True)\n ec_processed = models.BooleanField(default=False)\n \n def __str__(self):\n return (\"Programming Contest \"+str(self.contest_date))\n"
},
{
"alpha_fraction": 0.7372400760650635,
"alphanum_fraction": 0.7372400760650635,
"avg_line_length": 26.842105865478516,
"blob_id": "bc17bb42f3f556338d75625465a26b40b7d9b9bd",
"content_id": "d4ef0469cdb94bc11401da2bba3b5eded211419c",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 529,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 19,
"path": "/src/checkin/tasks.py",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "from django.contrib.auth.models import User\nfrom django.template.loader import render_to_string\n\nfrom celery import shared_task\nfrom celery.utils.log import get_task_logger\n\n\nlogger = get_task_logger(__name__)\n\n\n@shared_task\ndef send_credentials(username):\n user = User.objects.get(username=username)\n \n subject = 'Your DOMJudge Credentials'\n message = render_to_string('checkin/team_credentials_email.html', {'user': user})\n user.email_user(subject, message)\n\n logger.info('Sent credentials to %s' % username)\n"
},
{
"alpha_fraction": 0.6785714030265808,
"alphanum_fraction": 0.7053571343421936,
"avg_line_length": 17.66666603088379,
"blob_id": "25259c96bcacccac861e5193ec02a2ddf6247ebb",
"content_id": "fcf916ea76fa7b9432abdf3deb24ce8defb3e222",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 112,
"license_type": "permissive",
"max_line_length": 67,
"num_lines": 6,
"path": "/deploy/prod/contestsuite/scripts/celery/worker/start.sh",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "#!/bin/bash\n\nset -o errexit\nset -o nounset\n\ncelery -A contestsuite worker --autoscale=10,3 -n worker@%n -l INFO\n"
},
{
"alpha_fraction": 0.7215189933776855,
"alphanum_fraction": 0.7215189933776855,
"avg_line_length": 15.928571701049805,
"blob_id": "c9f3e6e829755cada60c346f4eebce0fdf4507bc",
"content_id": "b92557f91eb39198a2eef6c8f8e3258a04534196",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 237,
"license_type": "permissive",
"max_line_length": 38,
"num_lines": 14,
"path": "/src/manager/utils.py",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "def has_team(user):\n return user.profile.has_team()\n\n\ndef has_no_team(user):\n return not user.profile.has_team()\n\n\ndef team_admin(user):\n\treturn user.profile.team_admin\n\n\ndef not_registered(user):\n\treturn not user.is_authenticated\n"
},
{
"alpha_fraction": 0.7123894095420837,
"alphanum_fraction": 0.7654867172241211,
"avg_line_length": 21.700000762939453,
"blob_id": "fd2fcc6424c8de31034c85c83f93b2d23fa3dc07",
"content_id": "910d9e798f93316e3e58c1b9876758031d84296c",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 226,
"license_type": "permissive",
"max_line_length": 46,
"num_lines": 10,
"path": "/deploy/dev/scripts/django/entrypoint.sh",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "#!/bin/bash\n\nset -o errexit\nset -o pipefail\nset -o nounset\n\npython3 /app/manage.py migrate --noinput\npython3 /app/manage.py collectstatic --noinput\npython3 /app/manage.py initadmin\npython3 /app/manage.py runserver 0.0.0.0:8000"
},
{
"alpha_fraction": 0.6530014872550964,
"alphanum_fraction": 0.6603221297264099,
"avg_line_length": 21.032258987426758,
"blob_id": "93797db5e321cb3aa31c7162ed7a913523c9bae4",
"content_id": "8dad792de64f7ee30ebd18844a425c0107b2a6a2",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 683,
"license_type": "permissive",
"max_line_length": 63,
"num_lines": 31,
"path": "/src/contestadmin/forms.py",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "from django import forms\nfrom django.contrib.auth.models import User\nfrom django.contrib.admin.widgets import FilteredSelectMultiple\n\nfrom contestadmin.models import Contest\n\n\nclass CheckinUsersForm(forms.Form):\n ACTION = (\n (1, 'Live Checkin'),\n (2, 'Practice Checkin'),\n (3, 'Checkout')\n )\n\n action = forms.ChoiceField(choices=ACTION)\n\n\nclass GenerateWalkinForm(forms.Form):\n DIVISION = (\n (1, 'Upper Division'),\n (2, 'Lower Division')\n )\n\n total = forms.IntegerField()\n division = forms.ChoiceField(choices=DIVISION)\n\n\nclass ResultsForm(forms.ModelForm):\n class Meta:\n model = Contest\n fields = ['results']\n"
},
{
"alpha_fraction": 0.6777777671813965,
"alphanum_fraction": 0.6777777671813965,
"avg_line_length": 21.5,
"blob_id": "537d0eb3c3adabfe6085c558c60c0b46088e219e",
"content_id": "e5e911907ed875e7d3dbf299e67431d294bd9b50",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 180,
"license_type": "permissive",
"max_line_length": 65,
"num_lines": 8,
"path": "/src/checkin/urls.py",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "from django.urls import path\n\nfrom . import views\n\nurlpatterns = [\n path('', views.checkin, name='checkin'),\n path('result/', views.checkin_result, name='checkin_result'),\n]\n"
},
{
"alpha_fraction": 0.6433800458908081,
"alphanum_fraction": 0.6449487805366516,
"avg_line_length": 36.49803924560547,
"blob_id": "c4b3ec223d3dbf738bdcf1162141a434c4f9dbd6",
"content_id": "5759a28d1dbdbdb23809a0e31c0984059ce90abe",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 9562,
"license_type": "permissive",
"max_line_length": 170,
"num_lines": 255,
"path": "/src/manager/views.py",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "from django.contrib import messages\nfrom django.shortcuts import redirect, render\nfrom django.db import transaction\nfrom django.contrib.auth.decorators import login_required, user_passes_test\nfrom django.contrib.auth.models import User\nfrom django.db import transaction\nfrom django.shortcuts import get_object_or_404, redirect, render\n\nfrom . import forms\nfrom .utils import team_admin, has_no_team, has_team\nfrom .models import Profile\nfrom announcements.models import Announcement\nfrom register.models import Team \n\n# Create your views here.\n\n@login_required\ndef dashboard(request):\n context = {}\n context['courses'] = request.user.profile.courses.all()\n context['team_members'] = User.objects.filter(profile__team=request.user.profile.team)\n context['announcements'] = (Announcement.objects.filter(status=1))[:1]\n\n # Generate account some useful account notifications\n if not request.user.profile.has_team():\n messages.warning(\n request, 'You are not a member of a registered team. You must be a team member in order to compete. Check out the FAQ for more information.')\n if not request.user.profile.has_courses():\n messages.info(\n request, 'You have not added any extra credit courses. You must add them to your profile in order to receive credit. Check out the FAQ for more information.')\n if request.user.profile.fsu_id is None or request.user.profile.fsu_id == '':\n messages.info(\n request, 'Your FSU ID is blank. You must add it to your profile in order to receive extra credit. Check out the FAQ for more information.')\n if request.user.profile.fsu_num is None or request.user.profile.fsu_num == '':\n messages.info(\n request, 'Your FSU number is blank. You must add it to your profile in order to swipe check in on contest day. 
Check out the FAQ for more information.')\n\n return render(request, 'manager/dashboard.html', context)\n\n\n@login_required\[email protected]\ndef manage_profile(request):\n context = {}\n\n if request.method == 'POST':\n # Forms for both user and profile models\n user_form = forms.UserForm(request.POST, instance=request.user)\n profile_form = forms.ProfileForm(request.POST, instance=request.user.profile)\n\n if user_form.is_valid() and profile_form.is_valid():\n user_form.save()\n profile_form.save()\n \n messages.success(request, 'Your profile was successfully updated!', fail_silently=True)\n return redirect('manage_dashboard')\n else:\n messages.error(request, 'Please correct the error(s) below.')\n else:\n user_form = forms.UserForm(instance=request.user)\n profile_form = forms.ProfileForm(instance=request.user.profile)\n\n context['user_form'] = user_form\n context['profile_form'] = profile_form\n return render(request, 'manager/profile_form.html', context)\n\n\n@login_required\[email protected]\ndef manage_courses(request):\n context = {}\n\n if request.method == 'POST':\n form = forms.CourseForm(request.POST, instance=request.user.profile)\n if form.is_valid():\n request.user.profile.courses.set(form.cleaned_data['courses'])\n request.user.save()\n\n messages.success(\n request, 'Your courses were successfully updated!', fail_silently=True)\n return redirect('manage_dashboard')\n else:\n form = forms.CourseForm(instance=request.user.profile)\n\n context['form'] = form\n return render(request, 'manager/course_form.html', context)\n\n\n# Remove all courses user has selected for extra credit\n@login_required\[email protected]\ndef clear_courses(request):\n request.user.profile.courses.clear()\n request.user.save()\n\n messages.success(\n request, 'Your courses were successfully cleared.', fail_silently=True)\n return redirect('manage_dashboard')\n\n\n# Only team admin can access view\n@login_required\n@user_passes_test(team_admin, login_url='/manage/')\[email protected]\ndef manage_team(request):\n context = {}\n\n if request.method == 'POST':\n form = forms.TeamForm(request.POST, instance=request.user.profile.team)\n if form.is_valid():\n form.save()\n messages.success(\n request, 'Your team was successfully updated!', fail_silently=True)\n return redirect('manage_dashboard')\n else:\n messages.error(request, 'Please correct the error(s) below.', fail_silently=True)\n else:\n form = forms.TeamForm(instance=request.user.profile.team)\n team_members = User.objects.filter(\n profile__team=request.user.profile.team).exclude(username=request.user.username)\n\n context['form'] = form\n context['team_members'] = team_members\n return render(request, 'manager/team_form.html', context)\n\n\n# Only person not on a team can access view\n@login_required\n@user_passes_test(has_no_team, login_url='/manage/')\[email protected]\ndef join_team(request):\n context = {}\n\n if request.method == 'POST':\n form = forms.JoinForm(request.POST)\n\n if form.is_valid():\n # Check team is not full\n if form.cleaned_data['team'].num_members <= 2:\n # Check if user entered pin matchs team pin\n if form.cleaned_data['pin'] == form.cleaned_data['team'].pin:\n # Update user\n request.user.profile.team = form.cleaned_data['team']\n request.user.save()\n\n # Update team\n request.user.profile.team.num_members += 1\n request.user.profile.team.save()\n\n messages.success(\n request, 'You have joined the team!', fail_silently=True)\n return redirect('manage_dashboard')\n else:\n messages.error(\n request, 'The PIN you 
entered is incorrect. Please try again', fail_silently=True)\n else:\n messages.error(\n request, 'The team you have selected is full. Please select another team, or create your own.', fail_silently=True)\n else:\n form = forms.JoinForm()\n\n context['form'] = form\n return render(request, 'manager/join_form.html', context)\n\n\n# Only person on a team can access view.\n@login_required\n@user_passes_test(has_team, login_url='/manage/')\[email protected]\ndef leave_team(request):\n if request.user.profile.team_admin:\n # If admin tries to leave a solo team, then just delete it\n if request.user.profile.team.num_members == 1:\n request.user.profile.team.delete()\n request.user.profile.team = None\n request.user.profile.team_admin = False\n request.user.save()\n # If admin leaves a team with 2 or more people, then reassign admin credential first\n else:\n members = Profile.objects.filter(team=request.user.profile.team)\n\n # Find first non admin and assign them admin credential\n for member in members:\n if not member.team_admin:\n member.team_admin = True\n member.save()\n break\n \n # Update the team\n request.user.profile.team.num_members -= 1\n request.user.profile.team.save()\n\n # Update user\n request.user.profile.team_admin = False\n request.user.profile.team = None\n request.user.profile.save()\n # If user only a team member, then simply leave the team\n else:\n request.user.profile.team.num_members -= 1\n request.user.profile.team.save()\n\n request.user.profile.team = None\n request.user.save()\n\n messages.success(\n request, 'You have left the team.', fail_silently=True)\n return redirect('manage_dashboard')\n\n\n# Only team admin can access delete view\n@login_required\n@user_passes_test(team_admin, login_url='/manage/')\[email protected]\ndef delete_team(request):\n try:\n members = Profile.objects.filter(team=request.user.profile.team)\n \n # Remove all non team admins from team\n for member in members:\n if not member.team_admin:\n member.team = None\n member.save()\n\n request.user.profile.team.delete()\n request.user.profile.team = None\n request.user.profile.team_admin = False\n request.user.save()\n\n messages.success(request, 'You have deleted the team.', fail_silently=True)\n except:\n messages.error(request, 'Unable to delete team. Please try again later.', fail_silently=True)\n return redirect('manage_dashboard')\n\n\n# Only team admin can access remove view\n@login_required\n@user_passes_test(team_admin, login_url='/manage/')\[email protected]\ndef remove_member(request, username):\n try:\n #member = get_object_or_404(User, username=username)\n member = User.objects.get(username=username)\n \n # Update team \n member.profile.team.num_members -= 1\n member.profile.team.save()\n\n #Update user being removed\n member.profile.team = None\n member.profile.save()\n \n messages.success(request, str(member.get_full_name()) + ' removed from the team.', fail_silently=True)\n except:\n messages.error(request, 'Unable to remove member from the team. Please try again later.', fail_silently=True)\n return redirect('manage_dashboard')\n"
},
{
"alpha_fraction": 0.6234626173973083,
"alphanum_fraction": 0.633869469165802,
"avg_line_length": 34.266666412353516,
"blob_id": "384cf12c9f63580418744145e88a67a3ca2a0abd",
"content_id": "bee6144bbe71ffabbd1f11a534fe96be97b07d62",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1057,
"license_type": "permissive",
"max_line_length": 93,
"num_lines": 30,
"path": "/src/register/forms.py",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "from django import forms\nfrom django.contrib.auth.models import User\nfrom django.contrib.auth.forms import UserCreationForm\n\nfrom .models import Team\n\n# Extend built-in User form to include email, first name, and last name fields\nclass ExtendedUserCreationForm(UserCreationForm):\n email = forms.EmailField(required=True)\n # name lengths as specified by Django 3.0.* documentation\n first_name = forms.CharField(max_length=30, required=True)\n last_name = forms.CharField(max_length=150, required=True)\n\n class Meta:\n model = User\n fields = ['first_name', 'last_name', 'email', 'username', 'password1', 'password2']\n\nclass TeamForm(forms.ModelForm):\n class Meta:\n model = Team\n fields = ['name', 'division']\n help_texts = {\n 'name': '30 characters max. Keep it PG-13 please!',\n 'division': 'The division in which your team will compete.',\n }\n error_messages = {\n 'name': {\n 'max_length': \"This team name is too long.\",\n },\n }"
},
{
"alpha_fraction": 0.7377049326896667,
"alphanum_fraction": 0.7413479089736938,
"avg_line_length": 27.894737243652344,
"blob_id": "090f17401bb34abff2aaf934f6ce5c33c0de0a73",
"content_id": "d9ca568c95d1415d3ae8d74d02fa994e807c0fc2",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Dockerfile",
"length_bytes": 1098,
"license_type": "permissive",
"max_line_length": 83,
"num_lines": 38,
"path": "/Dockerfile",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "FROM python:3.9-slim\nLABEL maintainer=\"ACM at FSU <[email protected]>\"\n\nENV PYTHONUNBUFFERED 1\nENV PYTHONDONTWRITEBYTECODE 1\n\nRUN apt-get update \\\n # Dependencies for building Python packages\n && apt-get install -y build-essential \\\n # Translations dependencies\n && apt-get install -y gettext \\\n # MariaDB dependency\n && apt-get install -y libmariadb-dev \\\n # cleaning up unused files\n && apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \\\n && rm -rf /var/lib/apt/lists/*\n\n# Requirements are installed here to ensure they will be cached.\nRUN pip install --upgrade pip\nCOPY ./requirements.txt requirements.txt\nRUN pip install -r requirements.txt\n\nCOPY ./src app\n\nRUN mkdir -p /app/static\nRUN mkdir -p /app/media/contest_files\nRUN mkdir -p /app/media/ec_files\nRUN mkdir -p /app/media/uploads\n\nWORKDIR /app/\n\nCOPY ./deploy/prod/contestsuite/scripts/django/start.sh start\nRUN sed -i 's/\\r$//g' start\nRUN chmod +x start\n\nCOPY ./deploy/prod/contestsuite/scripts/celery/worker/start.sh start-celeryworker\nRUN sed -i 's/\\r$//g' start-celeryworker\nRUN chmod +x start-celeryworker\n"
},
{
"alpha_fraction": 0.5845410823822021,
"alphanum_fraction": 0.6231883764266968,
"avg_line_length": 22.11111068725586,
"blob_id": "15911c1ac4d6f1cfb595d014614ffce7abcf2eaf",
"content_id": "0c5689c2bea149f65141246b38064002fa737f11",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "HTML",
"length_bytes": 207,
"license_type": "permissive",
"max_line_length": 46,
"num_lines": 9,
"path": "/src/templates/400.html",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "{% extends 'core/base.html' %}\n{% load static %}\n\n{% block title %}400:Bad Request{% endblock %}\n\n{% block content %}\n\t<h1 class=\"text-center\">400</h1>\n\t<p class=\"text-center\">Bad Request.</p>\n{% endblock %}"
},
{
"alpha_fraction": 0.6686131358146667,
"alphanum_fraction": 0.6751824617385864,
"avg_line_length": 33.25,
"blob_id": "ea761e892ca698f21a95f1a9e9a29f18aa3e624a",
"content_id": "1f94dd1c161fbac9e57a4e3e787d43b72a20e238",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1370,
"license_type": "permissive",
"max_line_length": 89,
"num_lines": 40,
"path": "/src/register/models.py",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "from django.db import models\nfrom django.contrib.auth.models import User\n\n\n# Create your models here.\t\n\nclass Team(models.Model):\n \"\"\"\n Team Model\n - name attribute is unique but not primary key\n - division is the contest division\n - pin used to allow nonmembers to join team\n - contest_id used by domjudge as team login username\n - contest_password used by domjudge as team login password\n - num_members used to avoid extra DB queries\n \"\"\"\n DIVISION = (\n (1, 'Upper Division'),\n (2, 'Lower Division')\n )\n \n name = models.CharField(max_length=30, unique=True)\n division = models.PositiveSmallIntegerField(choices=DIVISION)\n pin = models.CharField(max_length=6, unique=True)\n contest_id = models.CharField(max_length=7, unique=True, blank=True, null=True)\n contest_password = models.CharField(max_length=6, unique=True, blank=True, null=True)\n questions_answered = models.PositiveSmallIntegerField(default=0)\n num_members = models.PositiveSmallIntegerField(default=0)\n\n def __str__(self):\n return (str(self.name) + ' : ' + str(self.division))\n\n def get_members(self):\n members = User.objects.filter(profile__team=self)\n\n member_names = []\n for member in members:\n member_names.append(member.first_name+' '+member.last_name)\n\n return member_names\n"
},
{
"alpha_fraction": 0.7358490824699402,
"alphanum_fraction": 0.7409948706626892,
"avg_line_length": 28.149999618530273,
"blob_id": "7223cc50e2c55737e10b8a19751c559674e6e0be",
"content_id": "2de09f85c1cd0c5b73285516b181d4af21acf51b",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 583,
"license_type": "permissive",
"max_line_length": 74,
"num_lines": 20,
"path": "/src/announcements/feeds.py",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "from django.contrib.syndication.views import Feed\nfrom django.template.defaultfilters import truncatewords\nfrom django.urls import reverse\n\nfrom . import models\n\n\nclass LatestAnnouncementsFeed(Feed):\n title = \"ACM Programming Contest Announcements\"\n link = \"/announcements/\"\n description = \"Latest announcments from The Programming Contest Team.\"\n\n def items(self):\n return models.Announcement.objects.filter(status=1)\n\n def item_title(self, item):\n return item.title\n\n def item_description(self, item):\n return truncatewords(item.content, 30)\n"
},
{
"alpha_fraction": 0.719763994216919,
"alphanum_fraction": 0.719763994216919,
"avg_line_length": 32.900001525878906,
"blob_id": "a88a924a929e8fb54a68dead31d78063ca2b5766",
"content_id": "8440914494d7c9c82e90182ea089de5a51fed4ac",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 339,
"license_type": "permissive",
"max_line_length": 93,
"num_lines": 10,
"path": "/src/announcements/urls.py",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "from django.urls import path\n\nfrom . import feeds\nfrom . import views\n\nurlpatterns = [\n path('', views.AnnouncementListView.as_view(), name='announcements'),\n path(\"feed/rss\", feeds.LatestAnnouncementsFeed(), name=\"announcements_feed\"),\n path('<slug:slug>/', views.AnnouncementDetailView.as_view(), name='announcement_detail'),\n]\n"
},
{
"alpha_fraction": 0.6208978295326233,
"alphanum_fraction": 0.6247393488883972,
"avg_line_length": 36.34016418457031,
"blob_id": "bfacf1040557dfb83214a5a5979d54f92db0669c",
"content_id": "cf9977c3df2c8309939f677650a670e394e288f3",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 9111,
"license_type": "permissive",
"max_line_length": 159,
"num_lines": 244,
"path": "/src/contestadmin/views.py",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "import os\n\nfrom django.contrib import messages\nfrom django.contrib.admin.views.decorators import staff_member_required\nfrom django.contrib.auth.models import User\nfrom django.db import transaction\nfrom django.http import HttpResponse\nfrom django.utils.encoding import force_text\nfrom django.utils.http import urlsafe_base64_decode\nfrom django.shortcuts import redirect, render\nfrom django.views import View\n\nfrom io import BytesIO\nfrom zipfile import ZipFile\n\nfrom . import forms\nfrom . import tasks\nfrom contestadmin.models import Contest\nfrom contestsuite.settings import MEDIA_ROOT\nfrom manager.models import Course, Faculty, Profile\nfrom register.models import Team\n\n# Create your views here.\n\nclass DownloadExtraCreditFiles(View):\n\n def get(self, request):\n in_memory = BytesIO()\n zip = ZipFile(in_memory, \"a\")\n \n fpath = MEDIA_ROOT + '/ec_files/'\n for fname in os.listdir(fpath):\n zip.write(fpath+fname, fname)\n\n # fix for Linux zip files read in Windows\n for file in zip.filelist:\n file.create_system = 0 \n\n zip.close()\n\n response = HttpResponse(content_type=\"application/zip\")\n response['Content-Disposition'] = \"attachment; filename=all_ec_files.zip\"\n \n in_memory.seek(0) \n response.write(in_memory.read())\n \n return response\n\nclass DownloadTSVFiles(View):\n\n def get(self, request):\n in_memory = BytesIO()\n zip = ZipFile(in_memory, \"a\")\n \n fpath = MEDIA_ROOT + '/contest_files/'\n for fname in os.listdir(fpath):\n zip.write(fpath+fname, fname)\n\n # fix for Linux zip files read in Windows\n for file in zip.filelist:\n file.create_system = 0 \n\n zip.close()\n\n response = HttpResponse(content_type=\"application/zip\")\n response['Content-Disposition'] = \"attachment; filename=dj_tsv_files.zip\"\n \n in_memory.seek(0) \n response.write(in_memory.read())\n \n return response\n\n\nclass EmailFaculty(View):\n\n def get(self, request):\n tasks.email_faculty.delay(request.META['HTTP_HOST'])\n messages.info(request, 'Email faculty task scheduled.', fail_silently=True)\n\n return redirect('admin_dashboard')\n\n\nclass FacultyDashboard(View):\n\n def get(self, request, uidb64):\n try:\n faculty_member = Faculty.objects.get(email__contains=force_text(urlsafe_base64_decode(uidb64)))\n except: #(TypeError, ValueError, OverflowError):\n faculty_member = None\n\n if faculty_member is not None:\n context = {}\n context['first_name'] = faculty_member.first_name\n context['last_name'] = faculty_member.last_name\n context['uid'] = uidb64\n\n return render(request,'contestadmin/faculty_dashboard.html', context)\n \n def download(self, uidb64):\n try:\n faculty_member = force_text(urlsafe_base64_decode(uidb64))\n except: #(TypeError, ValueError, OverflowError):\n faculty_member = None\n\n if faculty_member is not None:\n in_memory = BytesIO()\n zip = ZipFile(in_memory, 'a')\n\n fpath = MEDIA_ROOT + '/ec_files/'\n for fname in os.listdir(fpath):\n if faculty_member in fname:\n zip.write(fpath+fname, fname)\n\n # fix for Linux zip files read in Windows\n for file in zip.filelist:\n file.create_system = 0\n\n zip.close()\n \n response = HttpResponse(content_type=\"application/zip\")\n response['Content-Disposition'] = 'attachment; filename=' + faculty_member + '_ec_files.zip'\n\n in_memory.seek(0)\n response.write(in_memory.read())\n\n return response\n else:\n return HttpResponse('Unable to serve extra credit files. 
Please try again later or contact the ACM team.')\n\n\nclass GenerateDomJudgeTSV(View):\n\n def get(self, request):\n tasks.generate_contest_files.delay()\n messages.info(request, 'Generate Contest TSVs task scheduled. Refresh page in a few seconds use download link.', fail_silently=True)\n\n return redirect('admin_dashboard')\n\n\nclass GenerateExtraCreditReports(View):\n\n def get(self, request):\n tasks.generate_ec_reports.delay()\n messages.info(request, 'Generate extra credit reports task scheduled. Refresh page in a few seconds use download and email links.', fail_silently=True)\n\n return redirect('admin_dashboard')\n\n\n@staff_member_required\[email protected]\ndef dashboard(request):\n context = {}\n\n if request.method == 'POST':\n walkin_form = forms.GenerateWalkinForm(request.POST)\n file_form = forms.ResultsForm(request.POST, request.FILES)\n checkin_form = forms.CheckinUsersForm(request.POST)\n if walkin_form.is_valid():\n tasks.create_walkin_teams.delay(int(walkin_form.cleaned_data['division']), int(walkin_form.cleaned_data['total'])) \n messages.info(request, 'Create teams task scheduled.', fail_silently=True)\n elif checkin_form.is_valid():\n tasks.check_in_out_users.delay(\n int(checkin_form.cleaned_data['action']))\n messages.info(request, 'Check in/out task scheduled.',\n fail_silently=True)\n elif file_form.is_valid():\n if Contest.objects.all().count() == 0:\n file_form.save()\n tasks.process_contest_results.delay()\n messages.success(\n request, 'Results uploaded.', fail_silently=True)\n else:\n try:\n contest = Contest.objects.all().first()\n except:\n messages.error(\n request, 'Failed to upload results. Please try again.', fail_silently=True)\n else:\n messages.success(\n request, str(file_form.cleaned_data['results']), fail_silently=True)\n contest.results = request.FILES['results']\n contest.save()\n tasks.process_contest_results.delay()\n messages.success(\n request, 'Results uploaded.', fail_silently=True)\n\n return redirect('admin_dashboard')\n else:\n walkin_form = forms.GenerateWalkinForm()\n file_form = forms.ResultsForm()\n checkin_form = forms.CheckinUsersForm()\n \n '''try:\n contest = Contest.objects.all().first().get()\n except:\n context['ec_available'] = False\n else:\n context['ec_available'] = contest.ec_processed'''\n\n '''if Team.objects.exclude(questions_answered=0).count() > 0:\n context['dj_results_processed'] = True\n else:\n context['dj_results_processed'] = False'''\n \n if len(os.listdir(MEDIA_ROOT + '/uploads/')) > 0:\n context['dj_results_processed'] = True\n else:\n context['dj_results_processed'] = False\n\n if len(os.listdir(MEDIA_ROOT + '/ec_files/')) > 0:\n context['ec_files_available'] = True\n else:\n context['ec_files_available'] = False\n\n if len(os.listdir(MEDIA_ROOT + '/contest_files/')) > 0:\n context['dj_files_available'] = True\n else:\n context['dj_files_available'] = False\n \n \n context['users_registered'] = User.objects.all().count()\n context['users_verified'] = User.objects.filter(is_active=True).count()\n context['added_fsu_num'] = Profile.objects.exclude(fsu_num=None).count()\n context['added_fsu_id'] = Profile.objects.exclude(fsu_id=None).count()\n context['added_courses'] = Profile.objects.exclude(courses=None).count()\n\n context['total_teams'] = Team.objects.all().count()\n context['registered_teams'] = Team.objects.exclude(name__contains='Walk-in-').count()\n context['total_walkin'] = Team.objects.filter(name__contains='Walk-in-').count()\n context['walkin_used'] = 
Team.objects.filter(name__contains='Walk-in-').exclude(num_members=0).count()\n\n context['num_upper_teams'] = Team.objects.filter(division=1).exclude(name__contains='Walk-in-').count()\n context['num_upper_reg_participants'] = Profile.objects.filter(team__division=1).exclude(team__name__contains='Walk-in-').count()\n context['num_upper_walkin_participants'] = Profile.objects.filter(team__division=1).filter(team__name__contains='Walk-in-').count()\n\n context['num_lower_teams'] = Team.objects.filter(division=2).exclude(name__contains='Walk-in-').count()\n context['num_lower_reg_participants'] = Profile.objects.filter(team__division=2).exclude(team__name__contains='Walk-in-').count()\n context['num_lower_walkin_participants'] = Profile.objects.filter(team__division=2).filter(team__name__contains='Walk-in-').count()\n\n context['checkin_form'] = checkin_form\n context['file_form'] = file_form\n context['gen_walkin_form'] = walkin_form\n context['courses'] = Course.objects.all()\n return render(request, 'contestadmin/dashboard.html', context)\n"
},
{
"alpha_fraction": 0.5281767845153809,
"alphanum_fraction": 0.5436463952064514,
"avg_line_length": 31.321428298950195,
"blob_id": "92e213ca3eb26e3565b16a9b38db6eb64a03be29",
"content_id": "6f1837b6c4c1237df7898936735c9fecbbc6ffae",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 905,
"license_type": "permissive",
"max_line_length": 114,
"num_lines": 28,
"path": "/src/contestadmin/migrations/0001_initial.py",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.1 on 2021-10-10 16:36\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n ]\n\n operations = [\n migrations.CreateModel(\n name='Contest',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('contest_date', models.DateField()),\n ('contest_doors', models.TimeField()),\n ('contest_start', models.TimeField()),\n ('contest_freeze', models.TimeField()),\n ('contest_end', models.TimeField()),\n ('contest_awards', models.TimeField()),\n ('results', models.FileField(blank=True, upload_to='uploads/')),\n ('ec_processed', models.BooleanField(default=False)),\n ],\n ),\n ]\n"
},
{
"alpha_fraction": 0.5917636752128601,
"alphanum_fraction": 0.5971351861953735,
"avg_line_length": 33.921875,
"blob_id": "ad3c725ef7e29a8266b70079833d1852e1b827f3",
"content_id": "f9e249d14684c41e408d36c5925d9f1fe2154cb5",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "HTML",
"length_bytes": 2234,
"license_type": "permissive",
"max_line_length": 184,
"num_lines": 64,
"path": "/src/manager/templates/manager/team_form.html",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "{% extends 'core/base.html' %}\n{% load static %}\n\n{% block title %}Manage Team{% endblock %}\n\n{% block content %}\n\t<h1 class=\"text-center\">Manage Team</h1>\n\t<!-- Messages -->\n\t{% include 'core/messages.html' %}\n\t<!-- End Messages -->\n\t<div class=\"row justify-content-center\">\n\t\t<div class=\"col col-sm-4\">\n\t\t\t<form method=\"post\">\n\t\t\t\t{% csrf_token %}\n\t\t\t\t{% for field in form %}\n\t\t\t\t\t<p>\n\t\t\t\t\t\t<h5 class=\"mb-0\">{{ field.label_tag }}</h5>\n\t\t\t\t\t\t{{ field }}<br>\n\t\t\t\t\t\t<small class=\"form-text text-muted\">\n\t\t\t\t\t\t\t{% for error in field.errors %}\n\t\t\t\t\t\t\t<p class=\"text-monospace font-weight-lighter text-danger mb-0\">-{{ error }}</p>\n\t\t\t\t\t\t\t{% endfor %}\n\t\t\t\t\t\t\t{% if field.help_text %}\n\t\t\t\t\t\t\t\t{{ field.help_text }}\n\t\t\t\t\t\t\t{% endif %}\n\t\t\t\t\t\t</small>\n\t\t\t\t\t</p>\n\t\t\t\t{% endfor %}\n\t\t\t\t<button class=\"btn btn-primary btn-sm\" type=\"submit\">Save</button>\n\t\t\t\t{% if request.user.profile.team.num_members > 1 %}\n\t\t\t\t\t<button type=\"button\" class=\"btn btn-danger btn-sm\" data-toggle=\"modal\" data-target=\"#removeTeammateModal\">Remove Members\n\t\t\t\t\t</button>\n\t\t\t\t{% endif %}\n\t\t\t\t<a class=\"btn btn-secondary btn-sm\" href=\"{% url 'manage_dashboard' %}\" role=\"button\">Cancel</a>\n\t\t\t</form>\n\t\t</div>\n\t</div>\n\n\t<!-- Modal -->\n\t<div class=\"modal fade\" id=\"removeTeammateModal\" tabindex=\"-1\" role=\"dialog\" aria-labelledby=\"removeTeammateModalCenterTitle\"\n\t\taria-hidden=\"true\">\n\t\t<div class=\"modal-dialog modal-dialog-centered\" role=\"document\">\n\t\t\t<div class=\"modal-content\">\n\t\t\t\t<div class=\"modal-header\">\n\t\t\t\t\t<h5 class=\"modal-title\" id=\"removeTeammateModalLongTitle\">Team members</h5>\n\t\t\t\t\t<button type=\"button\" class=\"close\" data-dismiss=\"modal\" aria-label=\"Close\">\n\t\t\t\t\t\t<span aria-hidden=\"true\">×</span>\n\t\t\t\t\t</button>\n\t\t\t\t</div>\n\t\t\t\t<div class=\"modal-body\">\n\t\t\t\t\t{% for member in team_members %}\n\t\t\t\t\t\t<span>{{ member.first_name }} {{member.last_name }}</span><a class=\"btn btn-danger btn-sm ml-3\" href=\"{% url 'remove_member' username=member.username %}\" role=\"button\">Remove</a>\n\t\t\t\t\t\t{% if not forloop.last %}\n\t\t\t\t\t\t\t<br><br>\n\t\t\t\t\t\t{% endif %}\n\t\t\t\t\t{% endfor %}\n\t\t\t\t</div>\n\t\t\t\t<div class=\"modal-footer\">\n\t\t\t\t\t<button type=\"button\" class=\"btn btn-secondary\" data-dismiss=\"modal\">Close</button>\n\t\t\t\t</div>\n\t\t\t</div>\n\t\t</div>\n\t</div>\t\n{% endblock %}"
},
{
"alpha_fraction": 0.567986249923706,
"alphanum_fraction": 0.5783132314682007,
"avg_line_length": 29.63157844543457,
"blob_id": "52e8db38a8559e34f9d98caa15311425956de407",
"content_id": "556d92039527eab106e807ef46283ab392934984",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "YAML",
"length_bytes": 581,
"license_type": "permissive",
"max_line_length": 49,
"num_lines": 19,
"path": "/deploy/prod/judgehosts/docker-compose.yaml",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "version: '3.8'\nservices:\n judgehost:\n image: 'acmfsu/judgehost:7.3.3'\n privileged: true\n environment:\n - CONTAINER_TIMEZONE=America/New_York\n - JUDGEDAEMON_USERNAME=judgehost\n - JUDGEDAEMON_PASSWORD=jdpw\n - DOMSERVER_BASEURL=http://domserver/\n - DOMJUDGE_CREATE_WRITABLE_TEMP_DIR=1\n volumes: \n - '/sys/fs/cgroup:/sys/fs/cgroup:ro'\n networks:\n - domserver-jh-backend\nnetworks:\n domserver-jh-backend:\n external: true\n name: domserver-jh-backend"
},
{
"alpha_fraction": 0.7319587469100952,
"alphanum_fraction": 0.7319587469100952,
"avg_line_length": 13,
"blob_id": "e9120e265b608ecc28d764bf7a7d33a8f6adb36d",
"content_id": "0df4ca93fe13b922be8d47170afea52e9a540b77",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 97,
"license_type": "permissive",
"max_line_length": 37,
"num_lines": 7,
"path": "/deploy/dev/scripts/celery/worker/entrypoint.sh",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "#!/bin/bash\n\nset -o errexit\nset -o pipefail\nset -o nounset\n\ncelery -A contestsuite worker -l INFO"
},
{
"alpha_fraction": 0.7288888692855835,
"alphanum_fraction": 0.7333333492279053,
"avg_line_length": 31.178571701049805,
"blob_id": "298470fb3e50afe0b8e85387409026dadb00e7cb",
"content_id": "1e7ecfb95798112e261c0f5c2dda5311c345a304",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 900,
"license_type": "permissive",
"max_line_length": 74,
"num_lines": 28,
"path": "/src/register/tasks.py",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "from django.contrib.auth.models import User\nfrom django.core.mail import send_mail\nfrom django.template.loader import render_to_string\nfrom django.utils.encoding import force_bytes\nfrom django.utils.http import urlsafe_base64_encode\n\nfrom celery import shared_task\nfrom celery.utils.log import get_task_logger\n\nfrom .tokens import account_activation_token\n\nlogger = get_task_logger(__name__)\n\n\n@shared_task\ndef send_validation_email(domain, username):\n user = User.objects.get(username=username)\n subject = 'Activate Your Programming Contest Account'\n message = render_to_string('register/account_activation_email.html', {\n 'user': user,\n 'domain': domain,\n 'uid': urlsafe_base64_encode(force_bytes(user.pk)),\n 'token': account_activation_token.make_token(user),\n })\n\n user.email_user(subject, message)\n\n logger.info('Validation sent to %s' % user.email)"
},
{
"alpha_fraction": 0.699999988079071,
"alphanum_fraction": 0.703529417514801,
"avg_line_length": 27.33333396911621,
"blob_id": "7315a52a246f5a4e1e2d24b73753b28075b531df",
"content_id": "5ec1f938940956a1a57207f3ed5ea57364233ee0",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1700,
"license_type": "permissive",
"max_line_length": 89,
"num_lines": 60,
"path": "/src/core/views.py",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "from django.shortcuts import render\n\n\nfrom announcements.models import Announcement\nfrom contestadmin.models import Contest\nfrom contestsuite.settings import CACHE_TIMEOUT\nfrom register.models import Team\nfrom manager.models import Course, Profile\n\n# Create your views here.\n\n# Display index page\ndef index(request):\n context = {}\n\n context['cache_timeout'] = CACHE_TIMEOUT\n try:\n context['contest'] = Contest.objects.first()\n except:\n context['contest'] = None\n \n context['announcements'] = (Announcement.objects.filter(status=1))[:1]\n context['courses'] = Course.objects.all() \n \n\n return render(request, 'core/index.html', context)\n\n\n# Display contact us page\ndef contact(request):\n return render(request, 'core/contact.html')\n\n\n# Display faq page\ndef faq(request):\n return render(request, 'core/faq.html')\n\n\n# Display teams page\ndef teams(request):\n context = {}\n\n teams_set = Team.objects.all()\n participants_set = Profile.objects.all()\n\n context['cache_timeout'] = CACHE_TIMEOUT\n\n # Aggregate upper division team and participant info\n upper_teams_set = teams_set.filter(division=1)\n context['upper_teams'] = upper_teams_set\n context['num_upper_teams'] = upper_teams_set.count()\n context['num_upper_participants'] = participants_set.filter(team__division=1).count()\n\n # Aggregate division team and participant info\n lower_teams_set = teams_set.filter(division=2)\n context['lower_teams'] = lower_teams_set\n context['num_lower_teams'] = lower_teams_set.count()\n context['num_lower_participants'] = participants_set.filter(team__division=2).count()\n\n return render(request, 'core/teams.html', context)\n"
},
{
"alpha_fraction": 0.7233009934425354,
"alphanum_fraction": 0.7815533876419067,
"avg_line_length": 19.700000762939453,
"blob_id": "200462608551af3c059af695b78f8a362fbcd7bd",
"content_id": "274e7226334e58839f874e8777902407649077e4",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 206,
"license_type": "permissive",
"max_line_length": 41,
"num_lines": 10,
"path": "/deploy/dev/scripts/django/start.sh",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "#!/bin/bash\n\nset -o errexit\nset -o pipefail\nset -o nounset\n\npython3 manage.py migrate --noinput\npython3 manage.py collectstatic --noinput\npython3 manage.py initadmin\npython3 manage.py runserver 0.0.0.0:8000"
},
{
"alpha_fraction": 0.7327766418457031,
"alphanum_fraction": 0.736952006816864,
"avg_line_length": 62.86666488647461,
"blob_id": "66ef142ce2ea47e7d97ffcd7dea53fbe57d42050",
"content_id": "29cbb56384dea30976cf8cf18058535c21fa742d",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 958,
"license_type": "permissive",
"max_line_length": 121,
"num_lines": 15,
"path": "/src/contestadmin/urls.py",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "from django.contrib.admin.views.decorators import staff_member_required\nfrom django.urls import path\n\nfrom . import views\n\nurlpatterns = [\n path('', views.dashboard, name='admin_dashboard'),\n path('dj_tsv/download', staff_member_required(views.DownloadTSVFiles.as_view()), name='download_dj_files'),\n path('dj_tsv/generate', staff_member_required(views.GenerateDomJudgeTSV.as_view()), name='gen_dj_files'),\n path('ec_files/download', staff_member_required(views.DownloadExtraCreditFiles.as_view()), name='download_ec_files'),\n path('ec_files/email_faculty', staff_member_required(views.EmailFaculty.as_view()), name='email_faculty'),\n path('ec_files/generate', staff_member_required(views.GenerateExtraCreditReports.as_view()), name='gen_ec_reports'),\n path('faculty/<uidb64>/', views.FacultyDashboard.as_view(), name='fac_ec_dashboard'),\n path('faculty/<uidb64>/download', views.FacultyDashboard.download, name='fac_ec_files_dl'),\n]\n"
},
{
"alpha_fraction": 0.4798046946525574,
"alphanum_fraction": 0.5024411678314209,
"avg_line_length": 25.83333396911621,
"blob_id": "84da50ab8de4f3224091c5db05ec2c1f9f0f9298",
"content_id": "ff2a0951d3f155e62da0f193e8949534a22a8ddd",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "YAML",
"length_bytes": 2253,
"license_type": "permissive",
"max_line_length": 73,
"num_lines": 84,
"path": "/deploy/dev/docker-compose.yaml",
"repo_name": "FSU-ACM/Programming-Contest-Suite-v2",
"src_encoding": "UTF-8",
"text": "version: '3.8'\n\nservices:\n django:\n build:\n context: ../..\n dockerfile: Dockerfile-dev\n image: contestsuite:dev \n # '/start' is the shell script used to run the service\n #command: /app/start\n ports:\n - 8000:8000\n environment:\n - DEBUG=True\n - SECRET_KEY=86@j2=z!=&1r_hoqboog1#*mb$jx=9mf0uw#hrs@lw&7m34sqz\n\n - CACHE_LOCATION=redis://redis:6379/0\n - CELERY_BACKEND=redis://redis:6379/1\n - CELERY_BROKER=amqp://rabbitmq:5672\n\n - SQL_HOST=mariadb\n - SQL_PORT=3306\n - SQL_DATABASE=contestsuite\n - SQL_USER=dev\n - SQL_PASSWORD=seminoles\n volumes:\n - ../../src:/app\n networks:\n - contestsuite-dev\n depends_on:\n - redis\n - rabbitmq\n - mariadb\n - celery_worker\n celery_worker:\n image: contestsuite:dev\n entrypoint: /app/entrypoint-celery\n environment:\n - DEBUG=True\n - SECRET_KEY=86@j2=z!=&1r_hoqboog1#*mb$jx=9mf0uw#hrs@lw&7m34sqz\n\n - CACHE_LOCATION=redis://redis:6379/0\n - CELERY_BACKEND=redis://redis:6379/1\n - CELERY_BROKER=amqp://rabbitmq:5672\n\n - MAIL_BACKEND=django.core.mail.backends.console.EmailBackend\n\n - SQL_HOST=mariadb\n - SQL_PORT=3306\n - SQL_DATABASE=contestsuite\n - SQL_USER=dev\n - SQL_PASSWORD=seminoles\n volumes:\n - ../../src/media:/app/media\n networks:\n - contestsuite-dev\n depends_on:\n - redis\n - rabbitmq\n - mariadb\n mariadb:\n image: mariadb:10.5-focal\n volumes:\n - django_db_data:/var/lib/mysql\n environment:\n - MARIADB_ROOT_PASSWORD=rootpw\n - MARIADB_DATABASE=contestsuite\n - MARIADB_USER=dev\n - MARIADB_PASSWORD=seminoles\n networks:\n - contestsuite-dev\n redis:\n image: redis:5-buster\n networks:\n - contestsuite-dev\n rabbitmq:\n image: rabbitmq:3\n networks:\n - contestsuite-dev \nvolumes:\n django_db_data:\nnetworks:\n contestsuite-dev:\n name: contestsuite-dev"
}
] | 46 |
davidcheon/lineupdate | https://github.com/davidcheon/lineupdate | 8d571ca1d325bd711086157f02b89a88100caa47 | dc4b01202674ef095222c239981b25fa3f64a74d | 5428a76be9980b5338a34b68775b8f78e7d1109d | refs/heads/master | 2016-08-11T13:06:07.104604 | 2015-12-10T13:51:30 | 2015-12-10T13:51:30 | 47,073,863 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6329530477523804,
"alphanum_fraction": 0.6667113900184631,
"avg_line_length": 29.34623146057129,
"blob_id": "30b9ce8060d71bdcacdea1e17b68f2c196e492af",
"content_id": "ed48d1883b13e0fded7c6d8715b2fc7773603b51",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 14900,
"license_type": "no_license",
"max_line_length": 296,
"num_lines": 491,
"path": "/sendcontacts.py",
"repo_name": "davidcheon/lineupdate",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python\n#!-*- coding:utf-8 -*-\nimport time\nimport line\nfrom line.curve import ttypes\nimport threading\nimport os\ndef checkislogin(arg):\n\tif arg:\n\t\tdef _func(func):\n\t\t\tdef wrapper(*agr1,**agr2):\n\t\t\t\treturn func(*arg1,**arg2)\n\telse:\n\t\tprint 'login first'\nclass mycontactsender(object):\n\tdef __init__(self,id,pwd,device):\n\t\tself.id=id\n\t\tself.device=device\n\t\tself.pwd=pwd\n\t\tself.groupid='c606c0578c586e81a38fa9c1a2c547f08'\n\t\tself.status=False\n\t\tself.lastsend=0\n\t\tself.mids=[]\n\t\tself.values={}\n\t\tself.authtoken=None\n\t\tself.stop=0\n\t\tself.deletecounts=None\n\t\tself.blocked={}\n\t\tself.errorcount=0\n\t\tself._login()\n\t\tself.savemids()\n\tdef savemids(self):\n\t\tif self.status:\n\t\t\tfilename=os.path.join('mids','mids-%s.txt'%(self.device.split(':')[1]))\n\t\t\tline=0\n\t\t\tif os.path.exists(filename):\n\t\t\t\twith open(filename,'r') as f:\n\t\t\t\t\tline=len(f.readlines())\n\t\t\t\tif line<len(self.contacts)-1:\n\t\t\t\t\twith open(filename,'wb+') as f:\n\t\t\t\t\t\tfor contact in self.contacts:\n\t\t\t\t\t\t\t#LINE's id\n\t\t\t\t\t\t\tif contact.id!='u085311ecd9e3e3d74ae4c9f5437cbcb5':\n\t\t\t\t\t\t\t\tf.writelines('%s\\n'%contact.id)\n\t\t\t\t\tprint 'Mids updated <%s> succeed.'%(filename)\n\t\t\telse:\n\t\t\t\t\twith open(filename,'wb+') as f:\n\t\t\t\t\t\tfor contact in self.contacts:\n\t\t\t\t\t\t\t#LINE's id\n\t\t\t\t\t\t\tif contact.id!='u085311ecd9e3e3d74ae4c9f5437cbcb5':\n\t\t\t\t\t\t\t\tf.writelines('%s\\n'%contact.id)\n\t\t\t\t\tprint 'Mids saved in <%s> succeed.'%(filename)\t\n\t\telse:\n\t\t\tprint 'login first'\t\t\n\t\t\t\n\tdef _login(self):\n\t\ttry:\n\t\t\tif self.authtoken is None:\n\t\t\t\tself.client=line.LineClient(self.id,self.pwd)\n\t\t\t\tself.authtoken=self.client.authToken\n\t\t\telse:\n\t\t\t\tself.client=line.LineClient(authToken=self.authtoken)\n\t\t\tself.status=True\n\t\t\tself.getcontacts()\n\t\texcept Exception,e:\n\t\t\tprint 'login failed'\n\t\t\tself.status=False\n\tdef unblockcontacts(self):\n\t\tif self.status:\n\t\t\tcount=1\n\t\t\tfor id,name in self.blocked.items():\n\t\t\t\tself.client._unblockContact(id)\n\t\t\t\tprint '%d:<%s> unblock succeed.'%(count,name)\n\t\t\t\tcount+=1\n\t\t\t\ttime.sleep(3)\n\tdef getcontacts(self):\n\t\tif self.status:\n\t\t\tself.contacts=self.client.contacts\n\t\t\tfor contact in self.contacts:\n\t\t\t\tself.values[contact.id]=contact.name\n\t\t\t\tself.mids.append(contact.id)\n\t\t\tself.mids.sort()\n\tdef sendaction(self,start=None,end=None):\n\t\tif start is not None:\n\t\t\tself.lastsend=start-1\n\t\telse:\n\t\t\tstart=self.lastsend+1\n\t\tif end is None:\n\t\t\tif self.stop!=0:\n\t\t\t\tend=self.stop+1\n\t\t\telse:\n\t\t\t\tself.stop=len(self.contacts)-1\n\t\t\t\tend=self.stop+1\n\t\t\tcou=start-1\n\t\telse:\n\t\t\tself.stop=end-1\n\t\t\tcou=start\n\t\tif start <=0 or start>end or end>len(self.contacts):\n\t\t\tprint 'invalid start or end'\n\t\telse:\n\t\t\tif self.deletecounts is None:\n\t\t\t\tself.deletecounts=end-start+1\n\t\t\tif self.errorcount>0:self._login()\n\t\t\tif self.status:\n\t\t\t\tnum=0\n\t\t\t\tself.errorcount=0\n\t\t\t\tfor id in self.mids[self.lastsend:end+1]:\t\t\t\t\n\t\t\t\t\ttry:\n\t\t\t\t\t\tname=self.values[id]\n\t\t\t\t\t\t#LINE's id 1448745488229\n\t\t\t\t\t\tif id!='u085311ecd9e3e3d74ae4c9f5437cbcb5' or 
name!='LINE':\n\t\t\t\t\t\t\tmsg=ttypes.Message\t\t(contentType=13,hasContent=False,text=None,to=self.groupid,contentPreview=None,location=None,deliveredTime=int(time.time()*1000),createdTime=int(time.time()*1000),_from='ua112440ce9f59c46054b39ac892a8cc7', contentMetadata={'displayName': name, 'mid': id, 'seq': '13'}, \ntoType=2)\n\t\t\t\t\t\t\tself.client.groups[0].sendContactMessage(msg)\t\t\t\t\t\t\t\n\t\t\t\t\t\t\tself.lastsend=cou\n\t\t\t\t\t\t\tself.stop=end-1\n#\t\t\t\t\t\t\tself.client._blockContact(id)\n\t\t\t\t\t\t\tself.blocked[id]=name\n\t\t\t\t\t\t\tprint '%d:<%s> sent and block succeed.'%(cou,name)\n\t\t\t\t\t\t\tcou+=1\n\t\t\t\t\t\t\tnum+=1\n\t\t\t\t\t\t\ttime.sleep(4)\n\t\t\t\t\texcept Exception,e:\n\t\t\t\t\t\tself.errorcount+=1\n\t\t\t\t\t\tself.lastsend=cou\n\t\t\t\t\t\tself.stop=end-1\n\t\t\t\t\t\tprint '%s send <%d> items failed'%(name,cou)\n\t\t\t\t\t\tprint 'Total send <%d> items succeed'%num\n\t\t\t\t\t\tbreak\n\t\t\t\tprint 'Total send <%d> items succeed'%(self.deletecounts)\n\t\t\t\tself.deleteblockedfriends(self.deletecounts)\n\t\n\tdef deleteblockedfriends(self,num):\n\t\tstatus=os.system('adb -s %s shell \"su -c \\'[ ! -d /vendor/test/shieldfriends/data ] && mkdir -p /vendor/test/shieldfriends/data && chmod -R 777 /vendor\\'\"'%(self.device))\n\t\tif status==0:\n\t\t\tstatus=os.system('adb -s %s push %s /vendor/test/shieldfriends/'%(self.device,os.path.join('D:\\\\','david','test','shieldfriends','shieldanddeletefriends.sh')))\n\t\t\tif status==0:\n\t\t\t\tstatus=os.system('adb -s %s shell sh /vendor/test/shieldfriends/shieldanddeletefriends.sh %d'%(self.device,num))\n\t\t\t\tif status==0:\n\t\t\t\t\tprint 'delete <%d> items succeed'%num\n\t\t\t\telse:\n\t\t\t\t\tprint 'delete action failed'\n\t\t\telse:\n\t\t\t\tprint 'push shell failed'\n\t\telse:\n\t\t\tprint 'mkdir failed'\nclass mycontactreceiver(object):\n\tdef __init__(self,id,pwd,counts=10000):\n\t\tself.id=id\n\t\tself.pwd=pwd\n\t\tself.status=False\n\t\tself.groupid='c606c0578c586e81a38fa9c1a2c547f08'\n\t\tself.counts=counts\n\t\tself.lastsend=0\n\t\tself.lastidindex=0\n\t\tself.errorcounts=0\n\t\tself.fromusertoken=None\n\t\tself.tousertoken=None\n\t\tself.totalsend=None\n\t\tself._login()\n\tdef _login(self):\n\t\ttry:\n\t\t\tif self.fromusertoken is None:\n\t\t\t\tself.client=line.LineClient(self.id,self.pwd)\n\t\t\t\tself.fromusertoken=self.client.authToken\n\t\t\telse:\n\t\t\t\tself.client=line.LineClient(authToken=self.fromusertoken)\n\t\t\tself.status=True\n\t\t\tself._getcontactids()\n\t\t\tself._getgroupreceived()\n#\t\t\tself.getgroupreceivedupdate()\n\t\texcept Exception,e:\n\t\t\tself.status=False\n\t\t\tprint 'login failed'\t\n\tdef _getcontactids(self):\n\t\tself.contactids=set([n.id for n in self.client.contacts])\n\t\t#LINE's id #### acang's id ## guanhao1's id\n\t\tdeletecontactids=set(['u085311ecd9e3e3d74ae4c9f5437cbcb5','u8cd5f4f7fefc80ec159f1daee05dec84','ua112440ce9f59c46054b39ac892a8cc7'])\n\t\tself.contactids=list(self.contactids-deletecontactids)\n\tdef _getgroupreceived(self):\n\t\tself.messages=[]\n\t\tif self.status:\n\t\t\tgroup=self.client.getGroupById(self.groupid)\n\t\t\ttmps=group.getRecentMessages(count=self.counts)\n\t\t\tfor temp in tmps:\n\t\t\t\ttry:\n\t\t\t\t\tmsg=temp._message\n\t\t\t\t\tif not msg.hasContent:\n\t\t\t\t\t\tself.messages.append(msg)\n\t\t\t\texcept Exception,e:\n\t\t\t\t\tpass\n\t\t\tself.messages=list(set(self.messages))\n\t\t\tself.dicts={}\n\t\t\tfor message in self.messages:\n\t\t\t\ttry:\n\t\t\t\t\tif 
self.dicts.has_key(message.contentMetadata['mid']):\n\t\t\t\t\t\tcontinue\n\t\t\t\t\tself.dicts[message.contentMetadata['mid']]=message\n\t\t\t\texcept Exception,e:\n\t\t\t\t\tpass\n\t\t\tself.messages=self.dicts.values()\n\t\t\tprint 'You got %d items'%len(self.messages)\n\tdef getgroupreceivedupdate(self):\n\t\tself.messages=[]\n\t\tif self.status:\n\t\t\tfor poll in self.client.longPoll(count=self.count):\n\t\t\t\treceiver=poll[1]\n\t\t\t\tif receiver.id==self.groupid:\n\t\t\t\t\tmsg=poll[2]._message\n\t\t\t\t\tif not msg.hasContent:\n\t\t\t\t\t\tself.messages.append(msg)\n\tdef sendcontactstofriends(self,start=None,stop=None):\n\t\tstop= len(self.messages) if stop is None else stop\n\t\tif start is None:\n\t\t\tstart=self.lastsend+1\n\t\telse:\n\t\t\tself.lastsend=start-1\n\t\tif start<=0 or start>stop or stop>len(self.messages):\n\t\t\tprint 'invalid start or stop'\n\t\telse:\n#\t\t\ttry:\n#\t\t\t\tif self.tousertoken is None:\n#\t\t\t\t\ttouser=line.LineClient(touserid,touserpwd)\n#\t\t\t\t\tself.tousertoken=touser.authToken\n#\t\t\t\telse:\n#\t\t\t\t\ttouser=line.LineClient(authToken=self.tousertoken)\n#\t\t\texcept Exception,e:\n#\t\t\t\tprint 'login failed'\n#\t\t\telse:\n#\t\t\t\ttousercontacts=touser.contacts\n#\t\t\t\ttouserids=[t.id for t in tousercontacts]\t\t\t\n\t\t\t\tto=self.contactids\n\t\t\t\tif self.errorcounts>0:\n\t\t\t\t\tif self.fromusertoken is None:\n\t\t\t\t\t\tself.client=line.LineClient(self.id,self.pwd)\n\t\t\t\t\t\tself.fromusertoken=self.client.authToken\n\t\t\t\t\telse:\n\t\t\t\t\t\tself.client=line.LineClient(authToken=self.fromusertoken)\n\t\t\t\tself.lastsend=start-1 if self.lastsend==0 else self.lastsend\n\t\t\t\tif self.status and self.messages[self.lastsend:stop]!=[]:\n\t\t\t\t\tfor i,id in enumerate(to[self.lastidindex:]):\n\t\t\t\t\t\tiii=2 if self.lastidindex==0 else 0\n\t\t\t\t\t\t#LINE's id\n\t\t\t\t\t\tif id!='u085311ecd9e3e3d74ae4c9f5437cbcb5' and i==iii:\n\t\t\t\t\t\t\tname=self.client.getContactById(id).name\n\t\t\t\t\t\t\tself.errorcounts=0\n\t\t\t\t\t\t\tco=self.lastsend+1\n\t\t\t\t\t\t\tprint '='*10,stop\n\t\t\t\t\t\t\tfor msg in self.messages[self.lastsend:stop]:\n\t\t\t\t\t\t\t\ttry:\n\t\t\t\t\t\t\t\t\tmsgid=msg.contentMetadata['mid']\n\t\t\t\t\t\t\t\t\tprint '-'*10\n\t\t\t\t\t\t\t\t\tmsg.to=id\n\t\t\t\t\t\t\t\t\t#chuangjianqunzu's id\n\t\t\t\t\t\t\t\t\tmsg._from='u0a51561594a2a774c0c64b8501f308b7'\n\t\t\t\t\t\t\t\t\tmsg.toType=0\n\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\tself.client.groups[0].sendContactMessage(msg)\n\t\t\t\t\t\t\t\t\tprint name,co\n\t\t\t\t\t\t\t\t\tco+=1\n\t\t\t\t\t\t\t\t\ttime.sleep(5)\n\t\t\t\t\t\t\t\texcept Exception,e:\n\t\t\t\t\t\t\t\t\tprint '%s error occured:%s'%(name,str(e))\n\t\t\t\t\t\t\t\t\tself.errorcounts+=1\n\t\t\t\t\t\t\t\t\tself.lastsend=co-1\n\t\t\t\t\t\t\t\t\tself.lastidindex=i\n\t\t\t\t\t\t\t\t\tbreak\n\t\t\t\t\t\t\n\t\t\t\t\t\t\tprint '%s send <%d> items succeed.'%(name,(co-start))\n\t\t\t\t\t\t\tif self.errorcounts>0:break\n\tdef sendcontactstofriendsupdate(self,touserid,touserpwd,start=None,stop=None):\n\t\tstop= len(self.messages)-1 if stop is None else stop\n\t\tif start is None:\n\t\t\tstart=self.lastsend+1\n\t\telse:\n\t\t\tself.lastsend=start-1\n\t\tif start<=0 or start>stop or stop>len(self.messages):\n\t\t\tprint 'invalid start or stop'\n\t\telse:\n\t\t\ttry:\n\t\t\t\tif self.tousertoken is None:\n\t\t\t\t\ttouser=line.LineClient(touserid,touserpwd)\n\t\t\t\t\tself.tousertoken=touser.authToken\n\t\t\t\telse:\n\t\t\t\t\ttouser=line.LineClient(authToken=self.tousertoken)\n\t\t\texcept 
Exception,e:\n\t\t\t\tprint 'touser login failed'\n\t\t\telse:\n\t\t\t\ttousercontacts=touser.contacts\n\t\t\t\ttouserids=[t.id for t in tousercontacts]\t\t\t\n\t\t\t\tto=self.contactids\n\t\t\t\tif self.fromusertoken is None:\n\t\t\t\t\tself.client=line.LineClient(self.id,self.pwd)\n\t\t\t\t\tself.fromusertoken=self.client.authToken\n\t\t\t\telse:\n\t\t\t\t\tself.client=line.LineClient(authToken=self.fromusertoken)\n#\t\t\t\tself.lastsend=start-1 if self.lastsend==0 else self.lastsend\n\t\t\t\tif self.status and self.messages[self.lastsend:stop+1]!=[]:\n#\t\t\t\t\tself.getgroupreceived()\n\t\t\t\t\tfor i,id in enumerate(to[self.lastidindex:]):\n\t\t\t\t\t\tiii=2 if self.lastidindex==0 else 0\n\t\t\t\t\t\t#LINE's id\n\t\t\t\t\t\tif id!='u085311ecd9e3e3d74ae4c9f5437cbcb5' and i==iii:\n\t\t\t\t\t\t\tname=self.client.getContactById(id).name\n\t\t\t\t\t\t\tself.errorcounts=0\n#\t\t\t\t\t\t\tco=self.lastsend+1\n\t\t\t\t\t\t\tco=start\n\t\t\t\t\t\t\tnum=0\n\t\t\t\t\t\t\tfor msg in self.messages[self.lastsend:stop+1]:\n\t\t\t\t\t\t\t\ttry:\n\t\t\t\t\t\t\t\t\tmsgid=msg.contentMetadata['mid']\n\t\t\t\t\t\t\t\t\tif msgid not in touserids:\n\t\t\t\t\t\t\t\t\t\tmsg.to=id\n\t\t\t\t\t\t\t\t\t\t#chuangjianqunzu's id\n\t\t\t\t\t\t\t\t\t\tmsg._from='u0a51561594a2a774c0c64b8501f308b7'\n\t\t\t\t\t\t\t\t\t\tmsg.toType=0\n\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\tself.client.groups[0].sendContactMessage(msg)\n\t\t\t\t\t\t\t\t\t\tprint '%d:<%s> send succeed.'%(co,name)\n\t\t\t\t\t\t\t\t\t\tco+=1\n\t\t\t\t\t\t\t\t\t\tnum+=1\n\t\t\t\t\t\t\t\t\t\tself.lastidindex=i\n\t\t\t\t\t\t\t\t\t\ttime.sleep(5)\n\t\t\t\t\t\t\t\t\tself.lastidindex=i\n\t\t\t\t\t\t\t\t\tself.lastsend+=1\n\t\t\t\t\t\t\t\texcept Exception,e:\n\t\t\t\t\t\t\t\t\tprint '%s error occured:%s'%(name,str(e))\n\t\t\t\t\t\t\t\t\tself.errorcounts+=1\n#\t\t\t\t\t\t\t\t\tself.lastsend=co-1\n\t\t\t\t\t\t\t\t\tbreak\n\t\t\t\t\t\t\n\t\t\t\t\t\t\tprint '%s send <%d> items succeed.'%(name,num)\n\t\t\t\t\t\t\tif self.errorcounts>0:break\n\tdef sendcontactstofriendsupdate2(self,start=None,stop=None):\n\t\tstop= len(self.messages) if stop is None else stop\n\t\tif start is None:\n\t\t\tstart=self.lastsend+1\n\t\telse:\n\t\t\tself.lastsend=start-1\n\t\tif start<=0 or start>stop or stop>len(self.messages):\n\t\t\tprint 'invalid start or stop'\n\t\telse:\n\n\t\t\tif self.status and self.messages[self.lastsend:stop]!=[]:\n\t\t\t\tif self.totalsend is None:\n\t\t\t\t\tself.totalsend=stop-start+1\n\t\t\t\tco=start\n\t\t\t\tname='<'\n\t\t\t\tfor cont in self.contactids:\n\t\t\t\t\tid=cont.id\n\t\t\t\t\tcont=self.client.getContactById(id)\n\t\t\t\t\tname+=cont.name+','\n\t\t\t\tname+='>'\n\t\t\t\tfor msg in self.messages[self.lastsend:stop]:\n\t\t\t\t\ttry:\n\t\t\t\t\t\tmsgid=msg.contentMetadata['mid']\n\t\t\t\t\t\t#chuangjianqunzu's id\n\t\t\t\t\t\tmsg._from='u0a51561594a2a774c0c64b8501f308b7'\n\t\t\t\t\t\tmsg.toType=0\n\t\t\t\t\t\tfor id in self.contactids:\n\t\t\t\t\t\t\tmsg.to=id\n\t\t\t\t\t\t\tcontact=self.client.getContactById(id)\n\t\t\t\t\t\t\tcontact.sendContactMessage(msg)\n\t\t\t\t\t\tprint '%d:<%s> send succeed.'%(co,name)\n\t\t\t\t\t\tco+=1\n\t\t\t\t\t\ttime.sleep(5)\n\t\t\t\t\t\tself.lastsend+=1\n\t\t\t\t\texcept Exception,e:\n\t\t\t\t\t\tprint 'error occured:%s'%(str(e))\n\t\t\t\t\t\tbreak\n\t\t\t\t\t\t\n\t\t\t\tprint 'Total send <%d> items succeed.'%(self.totalsend)\nclass myaddfriendsbymid(object):\n\tdef __init__(self,id,pwd):\n\t\tself.id=id\n\t\tself.pwd=pwd\n\t\tself.status=False\n\t\tself._login()\n\tdef _login(self):\n\t\ttry:\n\t\t\tif not 
self.status:\n\t\t\t\tself.client=line.LineClient(self.id,self.pwd)\n\t\t\t\tself.status=True\n\t\t\t\tself._getreceivedmids()\n\t\texcept Exception,e:\n\t\t\tself.status=False\n\tdef _getreceivedmids(self):\n\t\tif self.status:\n\t\t\t#id=guanhao's id\n\t\t\tfromuser=self.client.getContactById('ua112440ce9f59c46054b39ac892a8cc7')\n\t\t\tif fromuser is not None:\n\t\t\t\tlinemessages=fromuser.getRecentMessages(count=100000)\n\t\t\t\tself.mids=[]\n\t\t\t\tfor linemessage in linemessages:\n\t\t\t\t\ttry:\n\t\t\t\t\t\tif isinstance(linemessage,line.LineMessage):\n\t\t\t\t\t\t\tmid=linemessage._message.contentMetadata['mid']\t\t\t\t\t\t\t\n\t\t\t\t\t\t\tself.mids.append(mid)\t\n\t\t\t\t\texcept Exception,e:\n\t\t\t\t\t\tpass\n\t\t\telse:\n\t\t\t\tself.mids=None\n\tdef _getreceivedmidsupdate(self):\n\t\tif self.status:\n\t\t\t#id=guanhao's id\n\t\t\tself.mids=[]\n\t\t\tfor pol in self.client.longPoll(count=100000):\n\t\t\t\tsender=pol[0]\n\t\t\t\tmsg=pol[2]\n\t\t\t\tif sender.id=='ua112440ce9f59c46054b39ac892a8cc7':\n\t\t\t\t\ttry:\n\t\t\t\t\t\tmid=msg._message.contentMetadata['mid']\n\t\t\t\t\t\tself.mids.append(mid)\n\t\t\t\t\texcept Exception,e:\n\t\t\t\t\t\tpass\n\t\t\telse:\n\t\t\t\tself.mids=None\n\t\t\t\n\tdef addfriendsbymids(self):\n\t\tif self.status and self.mids is not None and self.mids!=[]:\n\t\t\tflag=True\n\t\t\tfor mid in self.mids:\n\t\t\t\ttry:\n\t\t\t\t\tself.client._findAndAddContactsByMid(mid)\n\t\t\t\t\tprint 'add <%s> succeed'%mid\n#\t\t\t\t\ttime.sleep(5)\n\t\t\t\texcept Exception,e:\n\t\t\t\t\tflag=False\n\t\t\t\t\tbreak\n\t\t\tprint 'succeed' if flag else 'failed'\n\t\telse:\n\t\t\tprint 'login first or no mids'\n\t\t\t\t\t\n\nclass myaddfriendsbymidupdate(object):\n\tdef __init__(self,id,pwd):\n\t\tself.id=id\n\t\tself.pwd=pwd\n\t\tself.status=False\n\t\tself.sender='u0a51561594a2a774c0c64b8501f308b7'\n\t\tself.authtoken=None\n\t\tself._login()\n\tdef _login(self):\n\t\ttry:\n#\t\t\tif not self.status:\n\t\t\tif self.authtoken is None:\n\t\t\t\tself.client=line.LineClient(self.id,self.pwd)\n\t\t\t\tself.authtoken=self.client.authToken\n\t\t\telse:\n\t\t\t\tself.client=line.LineClient(authToken=self.authtoken)\n\t\t\tself.contactids=[contact.id for contact in self.client.contacts]\n\t\t\tself.status=True\n\t\t\tself._getreceivedmids()\n\t\texcept Exception,e:\n\t\t\tprint 'login failed:%s'%str(e)\n\tdef getaddingcontactscounts(self):\n\t\treturn len(self.mids)\n\tdef _getreceivedmids(self):\n\t\tself.mids={}\n\t\tsender=self.client.getContactById(self.sender)\n\t\tif sender is not None:\n\t\t\tmsgs=sender.getRecentMessages(count=10000)\n\t\t\tif msgs!=[]:\n\t\t\t\tfor msg in msgs:\n\t\t\t\t\tmid=msg.contentMetadata['mid']\n\t\t\t\t\tif mid not in self.contactids:\n\t\t\t\t\t\tname=msg.contentMetadata['displayName']\n\t\t\t\t\t\tself.mids[mid]=name\n\t\telse:\n\t\t\tprint 'can not find sender'\n\t\t\t\t\t\n\tdef addfriendsbymids(self):\n\t\tif self.status and self.mids!={}:\n\t\t\tcount=1\n\t\t\tfor mid,name in self.mids.items():\n\t\t\t\ttry:\n\t\t\t\t\tself.client._findAndAddContactsByMid(mid)\n\t\t\t\t\tprint '%d: add <%s> succeed'%(count,name)\n\t\t\t\t\tcount+=1\n#\t\t\t\t\ttime.sleep(5)\n\t\t\t\texcept Exception,e:\n\t\t\t\t\tprint 'error occured:%s'%str(e)\n\t\t\t\t\tbreak\n\t\telse:\n\t\t\tprint 'login first or no mids'\n\nif __name__=='__main__':\n#\tms=mycontactsender('[email protected]','521125dane')\n#\tms.sendaction()\n\tmr=myaddfriendsbymid('[email protected]','521125dane')\n\tmr.addfriendsbymids()\n"
}
] | 1 |
asimfarooq5/quotes | https://github.com/asimfarooq5/quotes | dd8add95a1ce583aac8a7f7ce5ceea9e8aecab13 | 8e2edb52a8407118e4c38e9d30343e394c0b00a7 | 78ff355b7ab3f457c00c7b0ac89324ff542a5be2 | refs/heads/master | 2022-12-24T01:56:40.944002 | 2020-10-08T12:45:11 | 2020-10-08T12:45:11 | null | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6505211591720581,
"alphanum_fraction": 0.6574698686599731,
"avg_line_length": 31.1907901763916,
"blob_id": "c62852a2cc2a81e0025ce12ebe3b4e6fb0e19f99",
"content_id": "329eeca38369a0476efc52d5c1beb8e32553d7f5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4893,
"license_type": "no_license",
"max_line_length": 100,
"num_lines": 152,
"path": "/app.py",
"repo_name": "asimfarooq5/quotes",
"src_encoding": "UTF-8",
"text": "from flask import Flask, redirect, render_template, request, session\nfrom flask_sqlalchemy import SQLAlchemy\n\nfrom flask_admin.contrib.sqla import ModelView\nimport flask_admin as admin\nfrom flask_admin import expose\nfrom flask_admin.base import AdminIndexView\nfrom flask_admin.menu import MenuLink\n\nfrom flask_restful import Resource, Api, reqparse\nfrom flask_marshmallow import Marshmallow\n\napp = Flask(__name__, )\napp.config['SECRET_KEY'] = '5791628bb0b13ce0c676dfde280ba245'\napp.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///main.db'\napp.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False\n\ndb = SQLAlchemy()\nma = Marshmallow()\n\napi = Api(app)\n\n\nclass Quote(db.Model):\n id = db.Column(db.Integer, primary_key=True)\n date = db.Column(db.String(50), nullable=True)\n author = db.Column(db.String(50), nullable=True)\n english = db.Column(db.String(1000), nullable=True)\n portuguese = db.Column(db.String(1000), nullable=True)\n spanish = db.Column(db.String(1000), nullable=True)\n background_color = db.Column(db.String(20), nullable=True)\n elements_color = db.Column(db.String(20), nullable=True)\n\n\nclass QuoteSchema(ma.SQLAlchemyAutoSchema):\n class Meta:\n model = Quote\n\n\nclass QuoteResource(Resource):\n def post(self):\n parser = reqparse.RequestParser(bundle_errors=True)\n parser.add_argument('date', type=str, help='Enter date in format(d-m-y)', required=True)\n parser.add_argument('author', type=str, help='Enter author', required=True)\n parser.add_argument('english', type=str, help='Enter quote in english', required=True)\n parser.add_argument('portoguese', type=str, help='Enter quote in portoguese', required=True)\n parser.add_argument('spanish', type=str, help='Enter quote in spanish', required=True)\n parser.add_argument('background_color', type=str, help='Enter color in #', required=True)\n parser.add_argument('elements_color', type=str, help='Enter color in #', required=True)\n args = parser.parse_args(strict=True)\n\n custom_args = {}\n for k, v in args.items():\n if v:\n custom_args.update({k: v})\n\n quote = Quote(**custom_args)\n\n db.session.add(quote)\n db.session.commit()\n\n schema = QuoteSchema()\n return schema.jsonify(quote)\n\n\nclass AuthourQouteResource(Resource):\n def get(self, author):\n quote = Quote.query.filter_by(author=author).all()\n if not quote:\n return {'message': 'No qoute found'}, 404\n\n schema = QuoteSchema(many=True)\n return schema.jsonify(quote)\n\n\nclass GetQuoteResource(Resource):\n def get(self):\n quote = Quote.query.all()\n schema = QuoteSchema(many=True)\n result = {f'Total Quotes': f'{len(quote)}',\n 'quotes': schema.dump(quote)}\n return result, 200\n\n\[email protected]('/create', methods=['POST'])\ndef create():\n dated = request.form['date']\n author = request.form['author']\n english = request.form['english']\n spanish = request.form['spanish']\n portuguese = request.form['portuguese']\n background_color = request.form['bg-color']\n elements_color = request.form['txt&ele']\n quote = Quote()\n quote.date = dated\n quote.author = author\n quote.english = english\n quote.spanish = spanish\n quote.portuguese = portuguese\n quote.background_color = background_color\n quote.elements_color = elements_color\n db.session.add(quote)\n db.session.commit()\n return redirect('/quote')\n\n\[email protected]('/', methods=['GET', 'POST'])\ndef home():\n return redirect('/admin')\n\n\[email protected]('/login', methods=['POST'])\ndef login():\n if request.form['username'] == '[email protected]' and request.form['password'] 
== 'Rumi%94Ei':\n session['logged_in'] = True\n return redirect('/admin')\n return render_template('login.html')\n\n\[email protected]('/logout')\ndef logout():\n session['logged_in'] = False\n return render_template('login.html')\n\n\nclass MyAdminIndexView(AdminIndexView):\n @expose('/')\n def index(self):\n if not session.get('logged_in'):\n return render_template('login.html')\n # date = datetime.today().strftime('%d-%m-%Y')\n # quote = Quote.query.filter_by(date=date).first()\n # return self.render('admin/index.html')\n return redirect('/quote')\n\n\nclass QuoteModelView(ModelView):\n can_edit = True\n can_create = True\n\n\nadmin = admin.Admin(app, name='R', index_view=MyAdminIndexView(name=' '), url='/admin', )\nadmin.add_view(QuoteModelView(Quote, db.session, name='Quotes', url='/quote'))\nadmin.add_link(MenuLink(name='Logout', category='', url=\"/logout\"))\napi.add_resource(AuthourQouteResource, '/api/author_quotes/<author>')\napi.add_resource(GetQuoteResource, '/api/quotes/')\n\nif __name__ == '__main__':\n db.init_app(app)\n ma.init_app(app)\n db.create_all(app=app)\n app.run(host='0.0.0.0', port=7000, debug=True)\n"
},
{
"alpha_fraction": 0.8740741014480591,
"alphanum_fraction": 0.8740741014480591,
"avg_line_length": 12.5,
"blob_id": "8065cd6c91dbb8e7c769db63bd80e047658c9005",
"content_id": "77d17013cb75ea217d0c4bf0c401de82f1a976d5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 135,
"license_type": "no_license",
"max_line_length": 22,
"num_lines": 10,
"path": "/requirements.txt",
"repo_name": "asimfarooq5/quotes",
"src_encoding": "UTF-8",
"text": "flask\nflask_sqlalchemy\nflask_marshmallow\nflask_admin\nflask_migrate\nflask_script\nwerkzeug\nflask_restful\nmarshmallow-sqlalchemy\nrequests\n"
}
] | 2 |
nkiyuu/banner-yatu | https://github.com/nkiyuu/banner-yatu | 21125e672721d0cbacd794f06b50babe0c654c3c | ec81d9006d97fbed005add947ec9d3da9f21e2a7 | c311c2961ac9e9d82c5b3636763bf4d55fe5131c | refs/heads/master | 2023-02-17T04:59:54.488723 | 2021-01-20T15:08:35 | 2021-01-20T15:08:35 | 330,984,661 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.7673267126083374,
"alphanum_fraction": 0.7772276997566223,
"avg_line_length": 21.44444465637207,
"blob_id": "e58a6f2f88166399801b594c3543e8f1f807ff62",
"content_id": "a8e7cf3e0690e9cb4d18464736bd1465ab69a1d7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Dockerfile",
"length_bytes": 202,
"license_type": "no_license",
"max_line_length": 43,
"num_lines": 9,
"path": "/Dockerfile",
"repo_name": "nkiyuu/banner-yatu",
"src_encoding": "UTF-8",
"text": "FROM python:3\nENV PYTHONUNBUFFERD 1\nRUN apt-get update\nRUN apt-get install -y default-mysql-client\nRUN mkdir /app\nWORKDIR /app\nADD requirements.txt /app/\nRUN pip install -r requirements.txt\nADD . /app/\n"
},
{
"alpha_fraction": 0.75,
"alphanum_fraction": 0.8166666626930237,
"avg_line_length": 11.199999809265137,
"blob_id": "bb09c13901cb41fc449206420451c533aad92f53",
"content_id": "2929f009d2fbe869adbfbd054e37084ef16d739c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 60,
"license_type": "no_license",
"max_line_length": 15,
"num_lines": 5,
"path": "/requirements.txt",
"repo_name": "nkiyuu/banner-yatu",
"src_encoding": "UTF-8",
"text": "Django == 2.0.0\npsycopg2\nmysqlclient\ndjango-environ\ngunicorn"
},
{
"alpha_fraction": 0.5808966755867004,
"alphanum_fraction": 0.5808966755867004,
"avg_line_length": 31.0625,
"blob_id": "63e5f5cfb7aeb94fec0de8354c04991f296d240b",
"content_id": "4cbe37040a20d1d8904205d9dfca89d33473818a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 513,
"license_type": "no_license",
"max_line_length": 71,
"num_lines": 16,
"path": "/app/settings/production.py",
"repo_name": "nkiyuu/banner-yatu",
"src_encoding": "UTF-8",
"text": "from .common import *\n\nDEBUG = False\n\nDATABASES = {\n 'default': {\n 'ENGINE': 'django.db.backends.mysql',\n 'NAME': env.get_value('DB_NAME', cast=str, default=''),\n 'USER': env.get_value('DB_USER', cast=str, default=''),\n 'PASSWORD': env.get_value('DB_PASSWORD', cast=str, default=''),\n 'PORT': env.get_value('DB_PORT', cast=int, default=None),\n 'HOST': env.get_value('DB_HOST', cast=str, default=''),\n }\n}\n\nSECRET_KEY = env.get_value('SECRET_KEY', default='secret')\n"
},
{
"alpha_fraction": 0.612500011920929,
"alphanum_fraction": 0.637499988079071,
"avg_line_length": 14.285714149475098,
"blob_id": "a135147ccc0fcf23a72a94f9c99c59bc07a9697e",
"content_id": "63aa5b14b8fcc1f640bbd08534d8f1b5e3d1dcda",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 320,
"license_type": "no_license",
"max_line_length": 79,
"num_lines": 21,
"path": "/bin/wait-for-mysql.sh",
"repo_name": "nkiyuu/banner-yatu",
"src_encoding": "UTF-8",
"text": "#!/bin/bash\n# wait-for-postgres.sh\n\nset -e\n\nhost=\"$1\"\nshift\nuser=\"$1\"\nshift\nport=\"$1\"\nshift\ncmd=\"$@\"\n\necho \"waiting for mysql\"\nwhile ! mysql -h\"$host\" -u\"$user\" --port=\"$port\" -e status > /dev/null 2>&1; do\n >&2 echo \"MySQL is unavailable - sleeping\"\n sleep 1\ndone\n\n>&2 echo \"Mysql is up - executing command\"\nexec $cmd"
},
{
"alpha_fraction": 0.6006768345832825,
"alphanum_fraction": 0.6006768345832825,
"avg_line_length": 28.549999237060547,
"blob_id": "2e9dc04daeac1ed9f79f0959122bfe63becf0c61",
"content_id": "5eb1ea3e372e45c5e5e00bd8048e2f8b02bd1757",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 591,
"license_type": "no_license",
"max_line_length": 71,
"num_lines": 20,
"path": "/app/settings/local.py",
"repo_name": "nkiyuu/banner-yatu",
"src_encoding": "UTF-8",
"text": "from .common import *\nimport os\nimport environ\n\nenviron.Env.read_env(os.path.join(BASE_DIR, '.env'))\n\nDEBUG = True\n\nDATABASES = {\n 'default': {\n 'ENGINE': 'django.db.backends.mysql',\n 'NAME': env.get_value('DB_NAME', cast=str, default=''),\n 'USER': env.get_value('DB_USER', cast=str, default=''),\n 'PASSWORD': env.get_value('DB_PASSWORD', cast=str, default=''),\n 'PORT': env.get_value('DB_PORT', cast=int, default=None),\n 'HOST': env.get_value('DB_HOST', cast=str, default=''),\n }\n}\n\nSECRET_KEY = env.get_value('SECRET_KEY', default='secret')\n"
}
] | 5 |
juno249/ex-py-elasticsearch | https://github.com/juno249/ex-py-elasticsearch | 211160260e11bfa7e0bc5cdd959037012904c306 | 136394b426a38769577df8e64f713c8df7456e11 | 0c50806e92917fee8b8cac25aa92bc06c6bab45c | refs/heads/master | 2023-05-12T10:37:54.255831 | 2019-10-29T19:33:15 | 2019-10-29T19:33:15 | 218,366,069 | 0 | 0 | null | 2019-10-29T19:21:59 | 2019-10-29T19:33:47 | 2023-05-01T20:37:04 | CSS | [
{
"alpha_fraction": 0.3030303120613098,
"alphanum_fraction": 0.5757575631141663,
"avg_line_length": 16,
"blob_id": "ffb89aef10386cc8874312c11a598286857096b3",
"content_id": "d8911012a8e812c87ef5e0889ed2927b74703ebd",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 33,
"license_type": "no_license",
"max_line_length": 22,
"num_lines": 2,
"path": "/gunicorn_config.py",
"repo_name": "juno249/ex-py-elasticsearch",
"src_encoding": "UTF-8",
"text": "bind = \"0.0.0.0::8005\"\nworker = 5"
},
{
"alpha_fraction": 0.751599133014679,
"alphanum_fraction": 0.7622601389884949,
"avg_line_length": 32.53571319580078,
"blob_id": "11aece0f1f5d18f5098814ddd4eb5368338ad0fa",
"content_id": "9d16e0081519137531c1414ddbca16d9e8f11c68",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Dockerfile",
"length_bytes": 938,
"license_type": "no_license",
"max_line_length": 86,
"num_lines": 28,
"path": "/Dockerfile",
"repo_name": "juno249/ex-py-elasticsearch",
"src_encoding": "UTF-8",
"text": "# Python support can be specified down to the minor or micro version\n# (e.g. 3.6 or 3.6.3).\n# OS Support also exists for jessie & stretch (slim and full).\n# See https://hub.docker.com/r/library/python/ for all supported Python\n# tags from Docker Hub.\nFROM python:3\n\n# Copying the requirements.txt first to leverage Docker cache\nCOPY ./requirements.txt /app/requirements.txt\n\n# WORKDIR is nothing but current directory (cd app)\nWORKDIR /app\n\n# Install the requirements in the current directory.\nRUN pip install -r requirements.txt\n\n# Copying the entire application to the docker container in the app directory.\nCOPY . /app\n\n# Setting environmental path to app directory. path environment variables tells shell,\n# which directories to search for executable files.\nENV PATH /app:$PATH\n\n# It executes the command python app.py in the app directory.\n# start gunicorn\nCMD [\"gunicorn\",\"--config\",\"/app/gunicorn_config.py\",\"app:app\"]\n\nEXPOSE 8005"
}
] | 2 |
ericbhanson/cashtag_analyzer | https://github.com/ericbhanson/cashtag_analyzer | 4a07b47c3a8f82191832b0dd67a09fe0caa0fb87 | f4f2cd31a0b92304d334ed00e6a62939f8123644 | 40146b76a4fdacbf59a59bc2921d5e26d2eed89a | refs/heads/master | 2021-05-11T02:55:41.966165 | 2018-01-24T16:19:02 | 2018-01-24T16:19:02 | 117,898,911 | 4 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.7135888338088989,
"alphanum_fraction": 0.7170731425285339,
"avg_line_length": 26.615385055541992,
"blob_id": "18b5ab0b9bb15a662ee0446eba11727cdd7306c9",
"content_id": "29b4f285ad77c3d709bfb5b524d48e425057afa8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1435,
"license_type": "no_license",
"max_line_length": 125,
"num_lines": 52,
"path": "/cashtag_analyzer/__init__.py",
"repo_name": "ericbhanson/cashtag_analyzer",
"src_encoding": "UTF-8",
"text": "import sqlalchemy\nimport sys\nimport yaml\n\ndef connect_to_db(db_settings):\n\tprotocol = db_settings['protocol']\n\tuser = db_settings['user']\n\tpassword = db_settings['password']\n\thost = db_settings['host']\n\tdbname = db_settings['dbname']\n\tengine = sqlalchemy.create_engine(protocol + '://' + user + ':' + password + '@' + host + '/' + dbname + '?charset=utf8mb4',\n\t\t\t\t\t\t\t\t\t pool_recycle=30)\n\tdb_connection = engine.connect()\n\n\treturn db_connection\n\n\ndef get_row_count(db_connection, table):\n\tselect_query = table.select()\n\tresults = db_connection.execute(select_query)\n\tresults_text = '{} row(s) are currently in MySQL database.'.format(len(results.fetchall()))\n\n\treturn results_text\n\n\ndef get_table(db_connection, table_name):\n\ttable = sqlalchemy.Table(table_name, sqlalchemy.MetaData(), autoload=True, autoload_with=db_connection)\n\n\treturn table\n\n\ndef insert_data(db_connection, data_to_insert, table):\n\tinsert_query = table.insert(data_to_insert)\n\n\ttry:\n\t\tdb_connection.execute(insert_query)\n\n\texcept sqlalchemy.exc.DBAPIError:\n\t\traise\n\n\telse:\n\t\tresults_text = 'Post-INSERT row count: ' + get_row_count(db_connection, table)\n\t\tprint(results_text)\n\t\tprint('Results collected and available for analysis.')\n\n\ndef load_settings(file_location=sys.argv[1], file_name='settings.yaml'):\n\twith open(file_location + file_name, 'rb') as settings_file:\n\t\tyaml_settings = settings_file.read()\n\t\tsettings = yaml.load(yaml_settings)\n\n\treturn settings"
},
{
"alpha_fraction": 0.7404098510742188,
"alphanum_fraction": 0.7461902499198914,
"avg_line_length": 81.73912811279297,
"blob_id": "e22eb06eff892a2270349de7b773c10ed02bf8ee",
"content_id": "24d981affea20d822c907693cdffaf0d548a22af",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1903,
"license_type": "no_license",
"max_line_length": 286,
"num_lines": 23,
"path": "/README.md",
"repo_name": "ericbhanson/cashtag_analyzer",
"src_encoding": "UTF-8",
"text": "# Cashtag Analyzer\nA Python module to analyze Twitter cashtags. For a given list of Twitter screen names, it will:\n* Connect to the Twitter API.\n* Download that screen name's Tweets.\n* Extract those Tweets that contain a [cashtag](http://money.cnn.com/2012/07/31/technology/twitter-cashtag/index.htm) and store them in a database.\n* Determine which cashtags are traded on a specified exchange.\n* Get market data for the time period around the timestamp of the Tweet.\n* Store both the Tweets and the market data in a database for further analysis.\n\nMakes use of a YAML-based settings file (see included sample) to specify the following user-specific parameters:\n* exchange_options: \n * exchange_id: The ccxt ID of the exchange to query for market data. See the [CCXT wiki](https://github.com/ccxt/ccxt/wiki/Exchange-Markets) for a list of supported exchanges and their IDs.\n * limit: The number of candlesticks to collect from the exchange API.\n * timeframe: The size of the candles (i.e. 1 minute, 5 minutes, 1 hour, etc.). The timeframes attribute of each ccxt.exchange lists the supported timeframes for a given exchange. See the [CCXT](https://github.com/ccxt/ccxt/wiki/Manual#exchange-structure) for additional information.\n * mysql_connection:\n * dbname: The name of the MySQL database where Tweet data will be stored.\n * host: The MySQL database host name.\n * password: The MySQL database password.\n * results_table: The name of the table where market data for each cashtag will be stored.\n * tweets_table: The name of the table where the Tweet data will be stored.\n * user: The MySQL database user name.\n * screen_names: a comma-separated list of Twitter screen names the script should query for cashtags.\n * twitter_api: the consumer_key, consumer_secret, access_token, and access_token_secret keys provided by [Twitter](https://apps.twitter.com).\n"
},
{
"alpha_fraction": 0.749903678894043,
"alphanum_fraction": 0.749903678894043,
"avg_line_length": 36.62318801879883,
"blob_id": "f0222b6087d582a34670ceff56b24438a0911818",
"content_id": "529a0365cc60171edbbcc0cdd6ba35f2b4c75d44",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2595,
"license_type": "no_license",
"max_line_length": 115,
"num_lines": 69,
"path": "/cashtag_analyzer/tweet_collector.py",
"repo_name": "ericbhanson/cashtag_analyzer",
"src_encoding": "UTF-8",
"text": "import cashtag_analyzer\nimport re\nimport tweepy\n\n\n# Connect to the Twitter API using the authorization information provided in the settings file.\ndef connect_to_twitter(twitter_settings):\n\taccess_token = twitter_settings['access_token']\n\taccess_token_secret = twitter_settings['access_token_secret']\n\tconsumer_key = twitter_settings['consumer_key']\n\tconsumer_secret = twitter_settings['consumer_secret']\n\tauth = tweepy.OAuthHandler(consumer_key, consumer_secret)\n\tauth.set_access_token(access_token, access_token_secret)\n\tapi = tweepy.API(auth, wait_on_rate_limit=True, wait_on_rate_limit_notify=True)\n\n\treturn api\n\n\n# Get the timeline for each user in the screen name list and examine their Tweets for cashtags using the re module.\n# Assemble information about those cashtagged Tweets in a list for storage in a database.\ndef get_cashtag_tweets(screen_name, twitter_api):\n\tcashtag_tweets_list = []\n\ttimeline = tweepy.Cursor(twitter_api.user_timeline, screen_name=screen_name, include_rts=False).items()\n\n\tfor status in timeline:\n\t\ttweet_text = status.text\n\t\tregex_result = re.findall('\\$([A-Z]+)', tweet_text)\n\n\t\tif regex_result:\n\t\t\tcreated_at = status.created_at\n\t\t\tcashtags = ', '.join(regex_result)\n\t\t\tname = status.user.screen_name\n\t\t\ttweet_id = status.id\n\t\t\tcashtag_tweets_dict = {'cashtags': cashtags, 'created_at': created_at,\n\t\t\t\t\t\t\t\t 'screen_name': name, 'tweet_id': tweet_id, 'tweet_text': tweet_text}\n\t\t\tcashtag_tweets_list.append(cashtag_tweets_dict)\n\n\t\t\tprint(cashtag_tweets_dict)\n\n\treturn cashtag_tweets_list\n\n\n# Load the settings from the settings file.\nsettings = cashtag_analyzer.load_settings()\ntweets_table = settings['mysql_connection']['tweets_table']\n\n# Connect to the database.\ndb_connection = cashtag_analyzer.connect_to_db(settings['mysql_connection'])\ntable = cashtag_analyzer.get_table(db_connection, tweets_table)\n\n# Connect to Twitter's API.\ntwitter_api = connect_to_twitter(settings['twitter_api'])\n\n# Load the list of screen names to examined from the settings file.\nscreen_names = sorted(settings['screen_names'])\n\nfor screen_name in screen_names:\n\n\t# Get the list of cashtagged Tweets and store them in a list.\n\tcashtag_tweets_list = get_cashtag_tweets(screen_name, twitter_api)\n\n\tif (cashtag_tweets_list):\n\n\t\t# As a sanity check, get the number of rows in the table before executing the INSERT statement.\n\t\tresults_text = 'Pre-INSERT row count: ' + cashtag_analyzer.get_row_count(db_connection, table)\n\t\tprint(results_text)\n\n\t\t# Insert the list of cashtagged Tweets into the database.\n\t\tcashtag_analyzer.insert_data(db_connection, cashtag_tweets_list, table)"
},
{
"alpha_fraction": 0.7228582501411438,
"alphanum_fraction": 0.7273983359336853,
"avg_line_length": 37.09022521972656,
"blob_id": "dbfc2f9081b981064ec35d86175d22ecb2677418",
"content_id": "793537f8fdead014fca9aa48b8fc787791f41a76",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5066,
"license_type": "no_license",
"max_line_length": 118,
"num_lines": 133,
"path": "/cashtag_analyzer/market_data_collector.py",
"repo_name": "ericbhanson/cashtag_analyzer",
"src_encoding": "UTF-8",
"text": "import cashtag_analyzer\t\t# Import the modules from the __init__ script.\nimport ccxt\t\t\t\t\t# Import ccxt to connect to exchange APIs.\nimport collections\t\t\t# Import collections to create lists within dictionaries on the fly.\nimport datetime\t\t\t\t# Import datetime for the timedelta and utcfromtimestamp functions.\nimport numpy\t\t\t\t# Import numpy to compare the contents of lists.\nimport re\t\t\t\t\t# Import re to split up the lists of symbols into individual items.\nimport sqlalchemy\t\t\t# Import sqlalchemy to do specific data selection from the MySQL database.\n\n\n# Determines what symbols in the cashtag list are traded on the selected exchange.\ndef create_match_list(exchange, twitter_base_list, twitter_dict):\n\tprint('Checking list of cashtags against supported symbols in {}...'.format(exchange.name))\n\n\tmatch_list = []\n\tbase_set = set()\n\tbase_dict = collections.defaultdict(list)\n\tmarkets = exchange.load_markets()\n\n\tfor symbol in markets:\n\t\tbase = markets[symbol]['base']\n\t\tbase_set.add(base)\n\t\tbase_dict[base].append(symbol)\n\n\tbase_list = list(base_set)\n\tmatch = numpy.isin(base_list, twitter_base_list, assume_unique=True)\n\n\tfor i in range(len(base_list)):\n\n\t\tif (match[i] == True):\n\n\t\t\tfor created_at in twitter_dict[base_list[i]]:\n\t\t\t\tmatch_list.append([created_at, base_list[i], base_dict[base_list[i]]])\n\n\tprint('Supported symbols check complete.')\n\n\treturn match_list\n\n\n# Queries the exchange for market data for the time period around the Tweet each symbol in the match list.\ndef create_market_data_list(exchange, match_list, limit=2, timeframe='1d'):\n\tprint('Getting market data for each cashtag...')\n\n\tmarket_data_list = []\n\n\tfor i in range(len(match_list)):\n\t\tbase = match_list[i][1]\n\t\tcreated_at = match_list[i][0]\n\t\tsince = int((created_at - datetime.timedelta(days=1)).timestamp() * 1000)\n\t\tsymbols = match_list[i][2]\n\n\t\tfor symbol in symbols:\n\t\t\tuohlcv_list = exchange.fetch_ohlcv(symbol, limit=limit, since=since, timeframe=timeframe)\n\n\t\t\tif (uohlcv_list and len(uohlcv_list) == 2):\n\n\t\t\t\tfor uohlcv in uohlcv_list:\n\t\t\t\t\tprint(since, uohlcv)\n\t\t\t\t\tcandle_ts = datetime.datetime.utcfromtimestamp(uohlcv[0] // 1000)\n\t\t\t\t\tclose_price = float(uohlcv[4])\n\t\t\t\t\thigh_price = float(uohlcv[2])\n\t\t\t\t\tlow_price = float(uohlcv[3])\n\t\t\t\t\topen_price = float(uohlcv[1])\n\t\t\t\t\tvolume = float(uohlcv[5])\n\t\t\t\t\tuohlcv_dict = {'base': base, 'candle_ts': candle_ts, 'close': close_price, 'high': high_price,\n\t\t\t\t\t\t\t\t 'low': low_price, 'open': open_price, 'symbol': symbol, 'tweet_ts': created_at,\n\t\t\t\t\t\t\t\t 'volume': volume}\n\t\t\t\t\tmarket_data_list.append(uohlcv_dict)\n\n\tprint('Market data collection complete.')\n\n\treturn market_data_list\n\n\n# Get a list of cashtags for the current screen name and turn it into a list (for direct processing) and a dictionary\n# (for lookup purposes during the direct processing).\ndef create_twitter_lists(screen_name, table):\n\tprint('Creating list of cashtags...')\n\n\tselect_query = table.select(whereclause=\"`screen_name` = '{}'\".format(screen_name))\n\tresults = db_connection.execute(select_query)\n\ttwitter_base_set = set()\n\ttwitter_dict = collections.defaultdict(list)\n\n\tfor result in results.fetchall():\n\t\tregex_result = re.findall('(\\w+)', result[0])\n\n\t\tfor r in regex_result:\n\t\t\ttwitter_base_set.add(r)\n\t\t\ttwitter_dict[r].append(result['created_at'])\n\n\ttwitter_base_list = 
list(twitter_base_set)\n\n\tprint('Cashtag list created.')\n\n\treturn twitter_base_list, twitter_dict\n\n\n# Load the settings from the settings file and turn them into variables.\nsettings = cashtag_analyzer.load_settings()\nexchange_id = settings['exchange_options']['exchange_id']\nlimit = settings['exchange_options']['limit']\nresults_table = settings['mysql_connection']['results_table']\ntimeframe = settings['exchange_options']['timeframe']\ntweets_table = settings['mysql_connection']['tweets_table']\n\n# Dynamically load the exchange method from the ccxt module.\nexchange_method = getattr(ccxt, exchange_id)\nexchange = exchange_method()\n\n# Connect to the database.\ndb_connection = cashtag_analyzer.connect_to_db(settings['mysql_connection'])\ntable = cashtag_analyzer.get_table(db_connection, tweets_table)\n\n# Select a list of screen names from the database.\nselect_query = sqlalchemy.select([table.c['screen_name']]).distinct()\nresults = db_connection.execute(select_query)\n\n# Loop through the screen name list and collect market data for each cashtag.\nfor result in results:\n\tscreen_name = result[0]\n\n\tprint('Getting results for screen name {}...'.format(screen_name))\n\n\ttwitter_base_list, twitter_dict = create_twitter_lists(screen_name, table)\n\tmatch_list = create_match_list(exchange, twitter_base_list, twitter_dict)\n\tmarket_data_list = create_market_data_list(exchange, match_list, limit=limit, timeframe=timeframe)\n\n\t# As a sanity check, get the number of rows in the table before executing the INSERT statement and print the results.\n\tresults_text = 'Pre-INSERT row count: ' + cashtag_analyzer.get_row_count(db_connection, table)\n\tprint(results_text)\n\n\t# Insert the market data into the database.\n\tcashtag_analyzer.insert_data(db_connection, market_data_list, table)\n"
}
] | 4 |
rstorf25/finance_equations | https://github.com/rstorf25/finance_equations | 8b425bd8037d9b170489cdaccdf8e1d04cf738db | 3ce0008ac97e935d2fb72f4a555ed51f400ea71d | f20c835d9132247ee39860b6cde2c35866e8ea14 | refs/heads/main | 2023-04-20T07:04:17.436676 | 2021-04-23T15:42:52 | 2021-04-23T15:42:52 | 360,931,990 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.5924528241157532,
"alphanum_fraction": 0.6188679337501526,
"avg_line_length": 19.58333396911621,
"blob_id": "941582cc73aba0a9268025015f4fd21abd15a104",
"content_id": "5658de41dc0083beb88888d328d141fda728d6b3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 265,
"license_type": "no_license",
"max_line_length": 70,
"num_lines": 12,
"path": "/finance_equations/cash_flow.py",
"repo_name": "rstorf25/finance_equations",
"src_encoding": "UTF-8",
"text": "###Simple Cash Flow###\r\n# Cash flow shows how much you earn in relation to how much you spend.\r\n###Variables###\r\n# i = income\r\n# e = expenses\r\n\r\ndef simple_cash_flow(i,e):\r\n cf = i - e\r\n return cf\r\n\r\ntestscf= simple_cash_flow(1000,850)\r\nprint(testscf)\r\n\r\n\r\n\r\n"
},
{
"alpha_fraction": 0.5138461589813232,
"alphanum_fraction": 0.5876923203468323,
"avg_line_length": 19.53333282470703,
"blob_id": "ca5503f460e8e648612a93e1ae1a0b3fd525de3b",
"content_id": "02e12c8781e428db48137a02f709004f0af70c8c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 325,
"license_type": "no_license",
"max_line_length": 51,
"num_lines": 15,
"path": "/finance_equations/simple_interest.py",
"repo_name": "rstorf25/finance_equations",
"src_encoding": "UTF-8",
"text": "#Variables\r\n# p = principal\r\n#r = annual interest rate\r\n# t = number of pay periods\r\n\r\ndef simple_interest(p,r,t):\r\n si = p*(1+((r/12*t)))\r\n round = round(si,2)\r\n si = round\r\n return si\r\n\r\n#Example use\r\n# p = $1000, r = .045 or 4.5% Annual, t = 36 months\r\ntestsi = simple_interest(1000,.045,36)\r\nprint(testsi)\r\n\r\n"
},
{
"alpha_fraction": 0.6108312606811523,
"alphanum_fraction": 0.6486145853996277,
"avg_line_length": 22.75,
"blob_id": "adfe69f2e6acaad1ae913f104ebe88c30d38dbfe",
"content_id": "73ed01065ad8505397e48b47531ab9975f54c4a1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 794,
"license_type": "no_license",
"max_line_length": 52,
"num_lines": 32,
"path": "/finance_equations/compounding_annual_growth_rate.py",
"repo_name": "rstorf25/finance_equations",
"src_encoding": "UTF-8",
"text": "###Compounding Annual Growth Rate###\r\n###As the economy moves up and down,\r\n# so do investors' returns. To determine your yearly\r\n# growth rate over several years on an investment,\r\n# use the compound annual growth rate, CAGR.\r\n\r\n#Think of CAGR as the rate an investment would\r\n# grow if the rate were constant.\r\n\r\n#Variables#\r\n#carg = Compound Annual Growth Rate\r\n#ev = Ending Value\r\n#bv = Beginning Value\r\n# n = number of years\r\n\r\ndef CAGR(ev,bv,n):\r\n cagr = ((ev/bv)**(1/n)) - 1\r\n return cagr\r\n\r\ndef CAGR_percent(ev,bv,n):\r\n cagr = ((ev / bv) ** (1 / n)) - 1\r\n percent = cagr *100\r\n cagr = percent\r\n rounded = round(cagr,2)\r\n cagr = rounded\r\n return cagr\r\n\r\ntestcagr = CAGR(19000,10000,3)\r\nprint(testcagr)\r\n\r\ntestperc= CAGR_percent(19000,10000,3)\r\nprint(testperc)\r\n\r\n"
},
{
"alpha_fraction": 0.64371258020401,
"alphanum_fraction": 0.6706587076187134,
"avg_line_length": 29.3125,
"blob_id": "57cad3237e945280318cbb365c97ca09996d0860",
"content_id": "e4e140bfc487dd5a4801e7eba97ed7978e74142c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1004,
"license_type": "no_license",
"max_line_length": 91,
"num_lines": 32,
"path": "/finance_equations/Present_value_of_annuity.py",
"repo_name": "rstorf25/finance_equations",
"src_encoding": "UTF-8",
"text": "### Present Value of an Annuity###\r\n###The present value of an annuity equates a series of\r\n# payments in the future to a lump sum today by using the\r\n# time value of money (inflation)—a dollar today is worth more\r\n# than a dollar tomorrow.\r\n\r\n#Example: Deciding whether to take a pension or lottery prize as an annuity or a lump sum.\r\n\r\n###Variables ###\r\n# p = present value of an annuity\r\n# pmt = dollar amount of each annuity payment\r\n# r = interest rate or discount rate\r\n# Number of periods in which payments will be made\r\n\r\n\r\ndef present_value_annuity(pmt, r, n):\r\n p = (pmt * ((1 - (1 / ((1 + r) ** n))) / r))\r\n rounded = round(p, 2)\r\n p = rounded\r\n return p\r\n\r\n#Example\r\n#Assume a person has the opportunity to receive an ordinary\r\n# annuity that pays $50,000 per year for the next 25 years,\r\n# with a 6% discount rate, or take a $650,000 lump-sum payment.\r\n# Which is the better option?\r\npmt = 50000\r\nr = .06\r\nn = 25\r\n\r\ntestpva = present_value_annuity(pmt, r, n)\r\nprint(testpva)\r\n"
},
{
"alpha_fraction": 0.5534883737564087,
"alphanum_fraction": 0.5953488349914551,
"avg_line_length": 20.63157844543457,
"blob_id": "fb51c1f3b42d7f0aaed9c5b4b3c3e553a2d533db",
"content_id": "ed11b85760fe2c4c28ea41a7a2f45626863bd4e9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 430,
"license_type": "no_license",
"max_line_length": 46,
"num_lines": 19,
"path": "/finance_equations/amortization.py",
"repo_name": "rstorf25/finance_equations",
"src_encoding": "UTF-8",
"text": "#Variable Explanation\r\n# p = principal\r\n#r = Annaual Interest Rate\r\n#t = number of pay periods (Months)\r\n# n = payments per period\r\n\r\ndef monthly_payments(p,r,t,n):\r\n monthly_rate = r/12\r\n r = monthly_rate\r\n top = p * (r/n)\r\n bottom = (1-(1+(r/n))**(-n*t))\r\n mp = top / bottom\r\n round = round(mp,2)\r\n mp = round\r\n return mp\r\n\r\n#Example Use\r\nmortgage = monthly_payments(200000,.045,360,1)\r\nprint(mortgage)\r\n"
},
{
"alpha_fraction": 0.5825603008270264,
"alphanum_fraction": 0.6122449040412903,
"avg_line_length": 21.521739959716797,
"blob_id": "03ddb7acf42ec676bf6c44cae593d49072f7ee4e",
"content_id": "b7923769b4032fb34f4cad659d7208eaca40eef6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 539,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 23,
"path": "/finance_equations/future_value_of_annuity.py",
"repo_name": "rstorf25/finance_equations",
"src_encoding": "UTF-8",
"text": "###Future Value of an Ordinary Annuity\r\n### The time value of money is also an important\r\n# concept for the future value of an annuity,\r\n# or the worth of your payments down the line.\r\n\r\n###Variables###\r\n# fv = future value\r\n# pmt = payment\r\n# r = interest rate 5% = .05\r\n# t = number of payments 12 = 1 year\r\n\r\npmt = 1000\r\nr = .05\r\nt = 5\r\n\r\ndef future_value_annunity(pmt,r,t):\r\n fv = pmt * ((((1 + r) ** t) - 1) / r)\r\n rounded = round(fv, 2)\r\n fv = rounded\r\n return fv\r\n\r\ntestfv = future_value_annunity(pmt,r,t)\r\nprint(testfv)"
},
{
"alpha_fraction": 0.6202783584594727,
"alphanum_fraction": 0.6520874500274658,
"avg_line_length": 24.578947067260742,
"blob_id": "a13efe5c4620fe90477bb5ba4d9106b53bc03f5b",
"content_id": "b9b0d812100e70cd32986e4682a8b33a367b75ee",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 503,
"license_type": "no_license",
"max_line_length": 55,
"num_lines": 19,
"path": "/finance_equations/compound_interest.py",
"repo_name": "rstorf25/finance_equations",
"src_encoding": "UTF-8",
"text": "###Variables###\r\n# p = principal\r\n# r = interest rate annual\r\n# n = compoundings per period example monthly = 12\r\n# t = number of periods = Number of years in the future\r\n\r\n###What is Compound Interest?###\r\n# The compound interest is the interest earned\r\n# on the principal, and any interest accrued\r\n# in the past.\r\n\r\ndef compound_interest(p,r,n,t):\r\n ci = p*(1+(r/n))**(n*t)\r\n rounded = round(ci,2)\r\n ci = rounded\r\n return ci\r\n\r\ntestci = compound_interest(10000,.045,12,7.5)\r\nprint(testci)"
}
] | 7 |
anukaal/hacktober | https://github.com/anukaal/hacktober | cd75f1e0fd960114e714b172e6c7f5e16fdc0daf | f3fd19914810af301ac93c4423d94d4e52a6ffe7 | 5e50fdd44287a01872a8aeecb0bbf0d262d13a00 | refs/heads/master | 2020-08-08T23:48:40.211319 | 2019-10-09T15:25:56 | 2019-10-09T15:25:56 | 213,950,666 | 0 | 1 | null | 2019-10-09T15:10:31 | 2019-10-09T15:40:30 | 2019-10-09T15:40:29 | Python | [
{
"alpha_fraction": 0.7058823704719543,
"alphanum_fraction": 0.7058823704719543,
"avg_line_length": 16,
"blob_id": "ae66c159342a2155dc5a3ca755b20195b2fc94ea",
"content_id": "3b3c45d9d7bc042fc62ef6d43264d7a39dff8cfc",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 17,
"license_type": "no_license",
"max_line_length": 16,
"num_lines": 1,
"path": "/anurag.py",
"repo_name": "anukaal/hacktober",
"src_encoding": "UTF-8",
"text": "print(\"anukaal\")\n"
}
] | 1 |
alromh87/pyNBIS | https://github.com/alromh87/pyNBIS | e83c2e7f3ed8dcdf4cbe106ccbeb7bcca698b22b | 1fdae83a5f9649c819645b4d41993a28a433b646 | 0f77b2607a3f9f7b8402db33d7f405073782d54e | refs/heads/master | 2017-12-05T18:59:18.991938 | 2017-07-10T01:16:01 | 2017-07-10T01:16:01 | 80,237,602 | 1 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6767676472663879,
"alphanum_fraction": 0.6929292678833008,
"avg_line_length": 60.875,
"blob_id": "7c0d7cc347eb561802b93012c229fbe7fb534fd3",
"content_id": "e04dd2c5b4ea0371c090e10e018a2a94ca4587d1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 495,
"license_type": "no_license",
"max_line_length": 179,
"num_lines": 8,
"path": "/python_nbis/empaquetar.sh",
"repo_name": "alromh87/pyNBIS",
"src_encoding": "UTF-8",
"text": "#rm wsq_wrap.c\necho \"WSQ\"\nswig -python -Wall -I../install_dir/include/ wsq.i &&\ngcc -fPIC -Wall -shared wsq_wrap.c -o _wsq.so -L../install_dir/lib -lwsq -lioutil -lfet -ljpegl -lutil -I/usr/include/python2.7/ -lpython2.7 -I../install_dir/include/\n\necho \"NFIQ\"\nswig -python -Wall -I../install_dir/include/ nfiq.i &&\ngcc -fPIC -Wall -shared nfiq_wrap.c -o _nfiq.so -L../install_dir/lib -lnfiq -lmindtct -lmlp -lcblas -lioutil -lutil -I/usr/include/python2.7/ -lpython2.7 -I../install_dir/include/\n"
},
{
"alpha_fraction": 0.5832402110099792,
"alphanum_fraction": 0.6044692993164062,
"avg_line_length": 27.838708877563477,
"blob_id": "979c8659bf4faa31c150f96d2de65893ce50bc87",
"content_id": "8de079cac5c6e316526e320b1202fc444b895de4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 895,
"license_type": "no_license",
"max_line_length": 113,
"num_lines": 31,
"path": "/python_nbis/test-pyNBIS.py",
"repo_name": "alromh87/pyNBIS",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nimport array\nimport wsq\nimport nfiq\n\nfrom PIL import Image\n\nfrom subprocess import call\n\nhuella_raw =\"fingerprint.raw\"\nwith open(huella_raw, \"rb\") as image_file:\n huella = image_file.read()\n w = 256\n h = 400\n# img = Image.frombuffer('L', [w, h], huella, \"raw\", 'L', 0, 1)\n# img.show()\n\n bitrate = 2.25\n bitrate = 2\n depth = 8\n ppi = 500\n result = nfiq.comp_nfiq(huella, w, h, depth, depth)\n print \"Scan quality from pyNBIS: \", result, \" vs quality from nfiq: \"\n call([\"../install_dir/bin/nfiq\", \"-d\", huella_raw, \"-raw\", \"%d,%d,%d\"%(w, h,depth)])\n result = wsq.wsq_encode_mem(bitrate, huella, w, h, depth, ppi, \"\")\n print result[:2]\n with open(huella_raw+\"_py\", 'wb') as raw_file:\n raw_file.write(result[2])\n call([\"../install_dir/bin/cwsq\", \"%d\"%bitrate, \"wsq\", huella_raw, \"-raw_in\", \"%d,%d,%d,%d\"%(w, h, depth, ppi)])\n\n"
},
{
"alpha_fraction": 0.5133275985717773,
"alphanum_fraction": 0.5150473117828369,
"avg_line_length": 34.24242401123047,
"blob_id": "ae1285148fd40e62f22298e2de51c6dd85fc785e",
"content_id": "c38c512d217c2d98bad508fb4cf3b2eb7a15bd69",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1163,
"license_type": "no_license",
"max_line_length": 86,
"num_lines": 33,
"path": "/python_nbis/setup.py",
"repo_name": "alromh87/pyNBIS",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n\n\"\"\"\nsetup.py file for NBIS SWIG wrapper\n\"\"\"\n\nfrom setuptools import setup, Extension\n\nNBIS_DIR='../install_dir/'\nNBIS_INCLUDE_DIR=NBIS_DIR+'include/'\nNBIS_LIB_DIR=NBIS_DIR+'lib/'\n\nwsq_module = Extension('_wsq', ['wsq.i'], swig_opts=['-I'+NBIS_INCLUDE_DIR],\n include_dirs=[NBIS_INCLUDE_DIR],\n library_dirs=[NBIS_LIB_DIR],\n libraries=['wsq', 'ioutil', 'fet', 'jpegl', 'util'],\n )\nnfiq_module = Extension('_nfiq', ['nfiq.i'], swig_opts=['-I'+NBIS_INCLUDE_DIR],\n include_dirs=[NBIS_INCLUDE_DIR],\n library_dirs=[NBIS_LIB_DIR],\n libraries=['nfiq', 'mindtct', 'mlp', 'cblas', 'ioutil', 'util'],\n )\nsetup (name = \"pyNBIS\",\n# use_scm_version=True,\n setup_requires=['setuptools_scm'],\n# version\t\t= '0.1',\n author\t\t= \"Alejandro Romero <[email protected]>\",\n author_email\t= \"[email protected]\",\n description\t= \"\"\"SIWG based Python wrapper for NBIS\"\"\",\n# packages\t\t= [\"pyNBIS\"],\n ext_modules\t= [wsq_module, nfiq_module],\n py_modules\t= [\"wsq\", \"nfiq\"],\n )\n"
},
{
"alpha_fraction": 0.6111111044883728,
"alphanum_fraction": 0.6304348111152649,
"avg_line_length": 20.736841201782227,
"blob_id": "7e795c425f9e2a2d7c9609905454a8cf4d1cd0f9",
"content_id": "9839f1c92aab080480817796edc8c60a037aeb58",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Makefile",
"length_bytes": 414,
"license_type": "no_license",
"max_line_length": 99,
"num_lines": 19,
"path": "/python_nbis/Makefile",
"repo_name": "alromh87/pyNBIS",
"src_encoding": "UTF-8",
"text": "NBIS_INSTALL_DIR=$(realpath ../install_dir)\n\nifeq (\"$(shell arch)\", \"x86_64\")\n HOST_ARCH=--64\nelse\n HOST_ARCH=--32\nendif\n\nall:\n\tpython setup.py build\nNBIS:\n\tmkdir -p ../install_dir\n\tcd ../; ./setup.sh $(NBIS_INSTALL_DIR) $(HOST_ARCH); make config;\tmake it;\tmake install LIBNBIS=no\n\n\t@echo $(NBIS_INSTALL_DIR)\n\tls $(NBIS_INSTALL_DIR)\nclean:\n\trm -r *.c *.so *.pyc dist build *.egg-info/\n#//\trm *.py // este no!!!\n\n"
}
] | 4 |
giveclarity/NPSP | https://github.com/giveclarity/NPSP | 37ddb5b0b7458651d567307ce700c1da2e623012 | e6fe9250422f3defc52177d285606aa3d06c6971 | 6f723e088e37c638ef8e3564d1e0f1f887d5876b | refs/heads/master | 2023-02-15T02:27:23.662296 | 2020-06-18T01:25:32 | 2020-06-18T01:25:32 | 273,267,997 | 0 | 0 | BSD-3-Clause | 2020-06-18T15:01:21 | 2020-06-18T15:01:24 | 2020-06-18T16:01:02 | null | [
{
"alpha_fraction": 0.6353428959846497,
"alphanum_fraction": 0.6393159627914429,
"avg_line_length": 49.043479919433594,
"blob_id": "c661208bec15b159cd9d3a07f36f5ad081d07bec",
"content_id": "1e24b57fdaf96677fd0a37bca81d86adcf02aae0",
"detected_licenses": [
"LicenseRef-scancode-warranty-disclaimer",
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5789,
"license_type": "permissive",
"max_line_length": 124,
"num_lines": 115,
"path": "/robot/Cumulus/resources/GiftEntryPageObject.py",
"repo_name": "giveclarity/NPSP",
"src_encoding": "UTF-8",
"text": "import time\nimport re\nfrom cumulusci.robotframework.utils import capture_screenshot_on_error\nfrom cumulusci.robotframework.pageobjects import BasePage\nfrom cumulusci.robotframework.pageobjects import pageobject\nfrom BaseObjects import BaseNPSPPage\nfrom NPSP import npsp_lex_locators\nfrom logging import exception\n\nOID_REGEX = r\"^(%2F)?([a-zA-Z0-9]{15,18})$\"\n@pageobject(\"Custom\", \"GE_Gift_Entry\")\nclass GiftEntryPage(BaseNPSPPage, BasePage):\n\n \n def _go_to_page(self):\n \"\"\"To go to Gift Entry page\"\"\"\n url_template = \"{root}/lightning/n/{object}\"\n name = self._object_name\n object_name = \"{}{}\".format(self.cumulusci.get_namespace_prefix(), name)\n url = url_template.format(root=self.cumulusci.org.lightning_base_url, object=object_name)\n self.selenium.go_to(url)\n self.salesforce.wait_until_loading_is_complete()\n self.selenium.wait_until_page_contains(\"Templates\")\n\n def _is_current_page(self):\n \"\"\"\n Verifies that current page is Gift Entry landing page\n \"\"\"\n self.selenium.wait_until_location_contains(\"GE_Gift_Entry\", timeout=60, \n message=\"Current page is not Gift Entry landing page\")\n locator=npsp_lex_locators[\"gift_entry\"][\"id\"].format(\"datatable Batches\") \n self.selenium.wait_until_page_contains_element(locator) \n\n def click_gift_entry_button(self,title):\n \"\"\"clicks on Gift Entry button identified with title\"\"\"\n locator=npsp_lex_locators[\"gift_entry\"][\"button\"].format(title)\n self.selenium.wait_until_page_contains_element(locator)\n self.selenium.click_element(locator) \n\n def enter_value_in_field(self,**kwargs):\n \"\"\"Enter value in specified field\"\"\"\n for key,value in kwargs.items():\n if key=='Description':\n locator=npsp_lex_locators[\"gift_entry\"][\"field_input\"].format(key,\"textarea\")\n self.selenium.wait_until_page_contains_element(locator)\n self.salesforce._populate_field(locator, value)\n else:\n locator=npsp_lex_locators[\"gift_entry\"][\"field_input\"].format(key,\"input\")\n self.selenium.wait_until_page_contains_element(locator)\n self.salesforce._populate_field(locator, value) \n\n def select_template_action(self,name,action):\n \"\"\"From the template table, select template with name and select an action from the dropdown\"\"\"\n locator=npsp_lex_locators[\"gift_entry\"][\"actions_dropdown\"].format(name)\n self.selenium.click_element(locator)\n element=self.selenium.get_webelement(locator)\n status=element.get_attribute(\"aria-expanded\")\n if status==\"false\":\n self.selenium.wait_until_page_contains(\"Clone\") \n self.selenium.click_link(action)\n if action==\"Edit\" or action==\"Clone\":\n self.selenium.wait_until_page_contains(\"Gift Entry Template Information\")\n elif action==\"Delete\":\n self.selenium.wait_until_page_does_not_contain(name) \n\n def select_object_group_field(self,object_group,field):\n \"\"\"Select the specified field under specified object group \n to add the field to gift entry form and verify field is added\"\"\"\n locator=npsp_lex_locators[\"gift_entry\"][\"form_object_dropdown\"].format(object_group)\n self.selenium.scroll_element_into_view(locator)\n self.selenium.click_element(locator)\n element=self.selenium.get_webelement(locator)\n status=element.get_attribute(\"aria-expanded\")\n if status==\"false\":\n time.sleep(2) \n field_checkbox=npsp_lex_locators[\"gift_entry\"][\"object_field_checkbox\"].format(field) \n self.selenium.scroll_element_into_view(field_checkbox) \n self.selenium.click_element(field_checkbox)\n field_label=object_group+': 
'+field\n self.selenium.wait_until_page_contains(field_label)\n\n def verify_template_is_not_available(self,template):\n \"\"\"Verify that a gift template is not available for selection while creating a new batch\"\"\"\n field=npsp_lex_locators[\"adv_mappings\"][\"field_mapping\"].format(\"Template\")\n self.selenium.click_element(field)\n element=self.selenium.get_webelement(field)\n status=element.get_attribute(\"aria-activedescendant\")\n if status is not None:\n self.selenium.page_should_not_contain(template)\n else:\n self.selenium.wait_until_page_contains(\"Default Gift Entry Template\")\n self.selenium.page_should_not_contain(template) \n self.selenium.click_button(\"Cancel\")\n\n def get_template_record_id(self,template):\n \"\"\" Parses the current url to get the object id of the current record.\n Expects url format like: [a-zA-Z0-9]{15,18}\n \"\"\"\n locator=npsp_lex_locators[\"link-text\"].format(template)\n self.selenium.wait_until_page_contains_element(locator)\n element = self.selenium.get_webelement(locator)\n e=element.get_attribute(\"href\")\n print(f\"url is {e}\")\n for part in e.split(\"=\"):\n oid_match = re.match(OID_REGEX, part)\n if oid_match is not None:\n return oid_match.group(2)\n raise AssertionError(\"Could not parse record id from url: {}\".format(e))\n\n def store_template_record_id(self,template):\n \"\"\" Parses the template href to get the object id of the current record.\n Expects url format like: [a-zA-Z0-9]{15,18}\n \"\"\"\n id=self.get_template_record_id(template) \n self.salesforce.store_session_record(\"Form_Template__c\",id) \n \n\n \n\n \n\n"
},
{
"alpha_fraction": 0.5909591913223267,
"alphanum_fraction": 0.591116726398468,
"avg_line_length": 33.510868072509766,
"blob_id": "02122de490c360ecd740b483b5d192cd0b5b4411",
"content_id": "295c653f70e674c789ba4fc19b9a0da2efcfb4fc",
"detected_licenses": [
"LicenseRef-scancode-warranty-disclaimer",
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 6349,
"license_type": "permissive",
"max_line_length": 99,
"num_lines": 184,
"path": "/src/lwc/geReviewDonations/geReviewDonations.js",
"repo_name": "giveclarity/NPSP",
"src_encoding": "UTF-8",
"text": "import { LightningElement, api, track, wire } from 'lwc';\nimport { NavigationMixin } from 'lightning/navigation';\nimport { getRecord } from 'lightning/uiRecordApi';\nimport { handleError } from 'c/utilTemplateBuilder';\nimport { deepClone } from 'c/utilCommon';\nimport geLabelService from 'c/geLabelService';\n\nconst PAYMENT = 'payment';\nconst OPPORTUNITY = 'opportunity';\n\nexport default class geReviewDonations extends NavigationMixin(LightningElement) {\n\n CUSTOM_LABELS = geLabelService.CUSTOM_LABELS;\n\n @api donorId;\n @api dedicatedListenerEventName = 'geDonationMatchingEvent';\n @api selectedDonation;\n @api opportunities;\n\n @track donationType;\n @track donor;\n\n @wire(getRecord, { recordId: '$donorId', optionalFields: ['Account.Name', 'Contact.Name'] })\n wiredGetRecordMethod({ error, data }) {\n if (data) {\n this.donor = data;\n } else if (error) {\n handleError(error);\n }\n }\n\n get reviewDonationsComputedClass() {\n let baseClass = ['slds-box', 'slds-theme_shade', 'slds-m-bottom_small'];\n\n if (this.hasSelectedDonation) {\n baseClass.push('slds-box_extension-2');\n } else {\n baseClass.push('slds-box_extension');\n }\n\n return baseClass.join(' ');\n }\n\n get isUpdatingPayment() {\n return this.donationType === PAYMENT ? true : false;\n }\n\n get isUpdatingOpportunity() {\n return this.donationType === OPPORTUNITY &&\n !this.selectedDonation.hasOwnProperty('applyPayment') &&\n !this.selectedDonation.hasOwnProperty('new') ?\n true :\n false;\n }\n\n get isApplyingNewPayment() {\n return this.donationType === OPPORTUNITY &&\n this.selectedDonation.hasOwnProperty('applyPayment') ?\n true :\n false;\n }\n\n get isCreatingNewOpportunity() {\n return this.donationType === OPPORTUNITY &&\n this.selectedDonation.hasOwnProperty('new') ?\n true :\n false;\n }\n\n get hasSelectedDonation() {\n return this.donationType ? true : false;\n }\n\n get reviewDonationsMessage() {\n if (this.donor) {\n if (this.isCreatingNewOpportunity) {\n return this.CUSTOM_LABELS.geBodyMatchingNewOpportunity;\n }\n\n if (this.isApplyingNewPayment) {\n return this.CUSTOM_LABELS.geBodyMatchingApplyNewPayment;\n }\n\n if (this.isUpdatingOpportunity || this.isUpdatingPayment) {\n return this.CUSTOM_LABELS.geBodyMatchingUpdatingDonation;\n }\n }\n\n return this.CUSTOM_LABELS.geBodyMatchingPendingDonation;\n }\n\n get hasDonorLink() {\n return this.isApplyingNewPayment ||\n this.isUpdatingOpportunity ||\n this.isUpdatingPayment ?\n true :\n false;\n }\n\n /*******************************************************************************\n * @description Method constructs and dispatches an object (modalConfig) as part\n * of an event to the parent component. This object (modalConfig) is then used to\n * configure the modal created by the aura overlay library in the parent aura\n * component.\n * \n * modalConfig has two main properties, componentProperties and\n * modalProperties. componentProperties holds all the data for public (@api decorated)\n * properties in the lightning web component that's to be created within the modal\n * body. modalProperties holds all the data for the actual modal created by the \n * overlay library.\n */\n handleReviewDonations() {\n const donorRecordName = this.donor ? 
this.donor.fields.Name.value : '';\n const modalHeader = geLabelService.format(\n this.CUSTOM_LABELS.geHeaderMatchingReviewDonations,\n [donorRecordName]);\n const modalConfig = {\n componentProperties: {\n opportunities: deepClone(this.opportunities),\n dedicatedListenerEventName: this.dedicatedListenerEventName,\n selectedDonationId: this.hasSelectedDonation ? this.selectedDonation.Id : undefined\n },\n modalProperties: {\n cssClass: 'slds-modal_large',\n header: modalHeader,\n componentName: 'geDonationMatching',\n showCloseButton: true\n }\n };\n\n this.dispatchEvent(new CustomEvent('togglemodal', { detail: modalConfig }));\n }\n\n /*******************************************************************************\n * @description Method receives an event from the child geDonationMatching\n * component and sets the currently selected donation along with its type.\n * \n * @param {object} event: Custom Event object received from child component.\n */\n handleReceiveEvent(event) {\n if (event.detail.hasOwnProperty(PAYMENT)) {\n this.selectedDonation = event.detail.payment;\n this.donationType = PAYMENT;\n } else if (event.detail.hasOwnProperty(OPPORTUNITY)) {\n this.selectedDonation = event.detail.opportunity;\n this.donationType = OPPORTUNITY;\n } else {\n this.selectedDonation = this.donationType = undefined;\n }\n\n const detail = {\n selectedDonation: deepClone(this.selectedDonation),\n donationType: deepClone(this.donationType)\n }\n\n this.dispatchEvent(new CustomEvent('changeselecteddonation', { detail }));\n }\n\n /*******************************************************************************\n * @description Method generates a record detail page url based on the currently\n * selected donor (Account or Contact) and either opens a new tab or a new window\n * depending on the user's browser settings.\n */\n navigateToRecord() {\n this[NavigationMixin.GenerateUrl]({\n type: 'standard__recordPage',\n attributes: {\n recordId: this.selectedDonation.Id,\n actionName: 'view',\n },\n })\n .then(url => {\n window.open(url, '_blank');\n })\n .catch(error => {\n handleError(error);\n });\n }\n\n @api\n resetDonationType() {\n this.donationType = undefined;\n }\n}"
},
{
"alpha_fraction": 0.7561655044555664,
"alphanum_fraction": 0.7611815333366394,
"avg_line_length": 52.96992492675781,
"blob_id": "a176759ce06f64358a1e6602f794747b0d20577e",
"content_id": "e1e2de784975f5b9695593d29ab7e415849ba23b",
"detected_licenses": [
"LicenseRef-scancode-warranty-disclaimer",
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 7177,
"license_type": "permissive",
"max_line_length": 150,
"num_lines": 133,
"path": "/robot/Cumulus/resources/ObjectMangerPageObject.py",
"repo_name": "giveclarity/NPSP",
"src_encoding": "UTF-8",
"text": "import time\nfrom cumulusci.robotframework.utils import capture_screenshot_on_error\nfrom cumulusci.robotframework.pageobjects import BasePage\nfrom cumulusci.robotframework.pageobjects import pageobject\nfrom selenium.webdriver.common.keys import Keys\nfrom BaseObjects import BaseNPSPPage\nfrom NPSP import npsp_lex_locators\nfrom logging import exception\n\n\n@pageobject(\"Custom\", \"ObjectManager\")\nclass ObjectManagerPage(BaseNPSPPage, BasePage):\n\t\n\t@capture_screenshot_on_error\n\tdef open_fields_and_relationships(self, object_name):\n\t\t\"\"\"To go to object manager page for a specific object\"\"\"\n\t\turl_template = \"{root}/lightning/setup/ObjectManager/home\"\n\t\turl = url_template.format(root=self.cumulusci.org.lightning_base_url, object=object_name)\n\t\tself.selenium.go_to(url)\n\t\tsearch_button = npsp_lex_locators['object_manager']['input'].format(\"globalQuickfind\")\n\t\tself.salesforce.wait_until_loading_is_complete()\n\t\tself.selenium.wait_until_page_contains_element(search_button)\n\t\tself.selenium.get_webelement(search_button).send_keys(object_name)\n\t\tself.selenium.get_webelement(search_button).send_keys(Keys.ENTER)\n\t\tobject = npsp_lex_locators['object_manager']['object_result'].format(object_name)\n\t\tleftnavoption = npsp_lex_locators['link-text'].format(\"Fields & Relationships\")\n\t\tself.selenium.wait_until_page_contains_element(object)\n\t\tself.selenium.click_element(object)\n\t\tself.selenium.wait_until_location_contains(\"Details/view\", timeout=90)\n\t\tself.selenium.click_element(leftnavoption)\n\t\t\t\n\t@capture_screenshot_on_error\n\tdef create_currency_field(self,field_name):\n\t\t\"\"\"Creates a currency field by taking in the field name\"\"\"\n\t\tcurrency_locator=npsp_lex_locators['object_manager']['input'].format(\"dtypeC\")\n\t\tnext_button=npsp_lex_locators['object_manager']['button'].format(\"Next\")\n\t\tsave_button=npsp_lex_locators['object_manager']['button'].format(\"Save\")\n\t\tself.selenium.wait_until_page_contains_element(currency_locator,timeout=60)\n\t\tself.selenium.click_element(currency_locator)\n\t\ttime.sleep(1)\n\t\tself.selenium.click_element(next_button)\n\t\tself.salesforce.populate_field('Field Label', field_name)\n\t\tself.salesforce.populate_field('Length', '16')\n\t\tself.salesforce.populate_field('Decimal Places', '2')\n\t\tself.salesforce.populate_field('Description', \"This is a custom field generated during automation\")\n\t\tself.selenium.click_element(next_button)\n\t\tself.selenium.click_element(next_button)\n\t\tself.selenium.click_element(save_button)\n\t\tself.selenium.wait_until_location_contains(\"FieldsAndRelationships/view\", timeout=90, message=\"Fields And Relationships page did not load in 1 min\")\n\t\t\n\t@capture_screenshot_on_error\n\tdef create_formula_field(self,field_name,formula):\n\t\t\"\"\" Creates a formula field by providing the field_name, formula and forumla fields\"\"\"\n\t\tformula_locator = npsp_lex_locators['object_manager']['input'].format(\"dtypeZ\")\n\t\tnext_button = npsp_lex_locators['object_manager']['button'].format(\"Next\")\n\t\tsave_button = npsp_lex_locators['object_manager']['button'].format(\"Save\")\n\t\tcheckbox_option = npsp_lex_locators['object_manager']['input'].format(\"fdtypeB\")\n\t\tformula_txtarea = npsp_lex_locators['object_manager']['formula_txtarea'].format(\"CalculatedFormula\")\n\t\tcheck_syntax = npsp_lex_locators['object_manager']['button'].format(\"Check 
Syntax\")\n\t\t\n\t\tself.selenium.wait_until_page_contains_element(formula_locator,60)\n\t\tself.selenium.click_element(formula_locator)\n\t\ttime.sleep(1)\n\t\tself.selenium.click_element(next_button)\n\t\tself.salesforce.populate_field('Field Label', field_name)\n\t\tself.selenium.wait_until_page_contains_element(checkbox_option,60)\n\t\tself.selenium.click_element(checkbox_option)\n\t\tself.selenium.click_element(next_button)\n\t\tself.selenium.wait_until_page_contains_element(formula_txtarea,60)\n\t\tself.selenium.get_webelement(formula_txtarea).send_keys(formula)\n\t\tself.selenium.click_element(check_syntax)\n\t\tself.selenium.click_element(next_button)\n\t\tself.selenium.click_element(next_button)\n\t\tself.selenium.click_element(save_button)\n\t\tself.selenium.wait_until_location_contains(\"FieldsAndRelationships/view\", timeout=90,\n\t\t\t\t\t\t\t\t\t\t\t\t message=\"Detail page did not load in 1 min\")\n\t\t\n\tdef create_lookup_field(self,field_name, related):\n\t\t\"\"\"Creates a Lookpup field by taking in the inputs field_name and related field\"\"\"\n\t\tlookup_locator = npsp_lex_locators['object_manager']['input'].format(\"dtypeY\")\n\t\tnext_button = npsp_lex_locators['object_manager']['button'].format(\"Next\")\n\t\tsave_button = npsp_lex_locators['object_manager']['button'].format(\"Save\")\n\t\toption = npsp_lex_locators['object_manager']['select_related_option'].format(related)\n\t\tfield_label = npsp_lex_locators['object_manager']['input'].format(\"MasterLabel\")\n\t\trelated = npsp_lex_locators['object_manager']['select_related'].format(\"DomainEnumOrId\")\n\t\tself.selenium.wait_until_page_contains_element(lookup_locator,60)\n\t\tself.selenium.click_element(lookup_locator)\n\t\ttime.sleep(1)\n\t\tself.selenium.click_element(next_button)\n\t\tself.selenium.wait_until_page_contains_element(related,60)\n\t\tself.selenium.scroll_element_into_view(related)\n\t\tself.selenium.get_webelement(related).click()\n\t\tself.selenium.click_element(option)\n\t\ttime.sleep(2)\n\t\tself.selenium.click_element(next_button)\n\t\tself.salesforce.populate_field('Field Label', field_name)\n\t\tself.salesforce.populate_field('Description', \"This is a custom field generated during automation\")\n\t\tself.selenium.click_element(next_button)\n\t\tself.selenium.click_element(next_button)\n\t\tself.selenium.click_element(next_button)\n\t\tself.selenium.click_element(save_button)\n\t\tself.selenium.wait_until_location_contains(\"FieldsAndRelationships/view\", timeout=90,\n\t\t\t\t\t\t\t\t\t\t\t\t message=\"Detail page did not load in 1 min\")\n\t\n\t@capture_screenshot_on_error\n\tdef create_custom_field(self, type, field_name, related_to=None, formula=None):\n\t\t\"\"\"Ensure that the custom field does not exist prior and Creates a custom field based on type paramenter and the field_name\n\t\t IF the custom field exists it will not create the custom field and exits out of object manager\n\t\t \"\"\"\n\t\tsearch_button = npsp_lex_locators['object_manager']['input'].format(\"globalQuickfind\")\n\t\tself.selenium.wait_until_page_contains_element(search_button,60)\n\t\tself.selenium.get_webelement(search_button).send_keys(field_name)\n\t\tself.selenium.get_webelement(search_button).send_keys(Keys.ENTER)\n\t\ttime.sleep(1)\n\t\tself.builtin.log(formula)\n\t\tself.salesforce.wait_until_loading_is_complete()\n\t\tsearch_results = npsp_lex_locators['object_manager']['search_result'].format(field_name)\n\t\tcount = len(self.selenium.get_webelements(search_results))\n\t\tif count == 
1:\n\t\t\treturn\n\t\telse:\n\t\t\tlocator = npsp_lex_locators['button-with-text'].format(\"New\")\n\t\t\tself.selenium.wait_until_page_contains_element(locator,60)\n\t\t\tself.selenium.get_webelement(locator).click()\n\t\t\tself.salesforce.wait_until_loading_is_complete()\n\t\t\tself.npsp.wait_for_locator('frame_new', 'vfFrameId', 'vfFrameId')\n\t\t\tself.npsp.choose_frame('vfFrameId')\n\t\t\tif type.lower() == 'lookup':\n\t\t\t\tself.create_lookup_field(field_name,related_to)\n\t\t\telif type.lower() == 'currency':\n\t\t\t\tself.create_currency_field(field_name)\n\t\t\telif type.lower() == 'formula':\n\t\t\t\tself.create_formula_field(field_name,formula)"
},
{
"alpha_fraction": 0.6423237919807434,
"alphanum_fraction": 0.645966112613678,
"avg_line_length": 55.04081726074219,
"blob_id": "01ee1489a5cca4274a06bab77412069f334ba3a9",
"content_id": "bef664e424de5633ac0620770fff02ad28edf4c1",
"detected_licenses": [
"LicenseRef-scancode-warranty-disclaimer",
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5491,
"license_type": "permissive",
"max_line_length": 155,
"num_lines": 98,
"path": "/robot/Cumulus/resources/RecurringDonationsPageObject.py",
"repo_name": "giveclarity/NPSP",
"src_encoding": "UTF-8",
"text": "from cumulusci.robotframework.pageobjects import ListingPage\nfrom cumulusci.robotframework.pageobjects import DetailPage\nfrom cumulusci.robotframework.pageobjects import pageobject\nfrom cumulusci.robotframework.utils import capture_screenshot_on_error\nfrom BaseObjects import BaseNPSPPage\nfrom NPSP import npsp_lex_locators\nfrom datetime import datetime\nfrom dateutil.relativedelta import relativedelta\n\n@pageobject(\"Listing\", \"npe03__Recurring_Donation__c\")\nclass RDListingPage(BaseNPSPPage, ListingPage):\n object_name = \"npe03__Recurring_Donation__c\"\n\n\n\n@pageobject(\"Details\", \"npe03__Recurring_Donation__c\")\nclass RDDetailPage(BaseNPSPPage,DetailPage ):\n object_name = \"npe03__Recurring_Donation__c\"\n \n def _is_current_page(self):\n \"\"\" Verify we are on the Account detail page\n by verifying that the url contains '/view'\n \"\"\"\n self.selenium.wait_until_location_contains(\"/view\", timeout=60, message=\"Record view did not open in 1 min\")\n self.selenium.location_should_contain(\"/lightning/r/npe03__Recurring_Donation__c/\",message=\"Current page is not a Recurring Donations record view\")\n \n def refresh_opportunities(self):\n \"\"\"Clicks on more actions dropdown and click the given title\"\"\"\n locator=npsp_lex_locators['link-contains'].format(\"more actions\")\n self.selenium.click_element(locator)\n self.selenium.wait_until_page_contains(\"Refresh Opportunities\")\n link_locator=npsp_lex_locators['link'].format('Refresh_Opportunities','Refresh_Opportunities')\n \n def click_actions_button(self,button_name):\n \"\"\"Clicks on action button based on API version\"\"\"\n if self.npsp.latest_api_version == 47.0:\n self.selenium.click_link(button_name)\n else:\n self.selenium.click_button(button_name)\n \n @capture_screenshot_on_error\n def validate_field_values_under_section(self, section=None, **kwargs):\n \"\"\"Based on the section name , navigates to the sections and validates the key. 
value pair values passed in kwargs.\n If the section is current schedule, waits for the Current schedule section card on the side bar\n Validates the display fields in the card match with the values passed in the key value pair\"\"\"\n \n if section == \"Current Schedule\":\n active_schedule_card = npsp_lex_locators[\"erd\"][\"active_schedules_card\"].format(section)\n number_fields = ['Amount','Installment Frequency']\n date_fields = ['Effective Date']\n self.selenium.wait_until_element_is_visible(active_schedule_card,60)\n for label, value in kwargs.items():\n if label in number_fields:\n locator = npsp_lex_locators[\"erd\"][\"formatted_number\"].format(label)\n actual_value=self.selenium.get_webelement(locator).text\n elif label in date_fields:\n locator = npsp_lex_locators[\"erd\"][\"formatted_date\"].format(label)\n actual_value=self.selenium.get_webelement(locator).text\n else:\n locator = npsp_lex_locators[\"erd\"][\"formatted_text\"].format(label)\n actual_value=self.selenium.get_webelement(locator).text\n \n if self.npsp.check_if_element_exists(locator):\n print(f\"element exists {locator}\")\n actual_value=self.selenium.get_webelement(locator).text\n print(f\"actual value is {actual_value}\")\n self.builtin.log(f\"actual value is {actual_value}\")\n assert value == actual_value, \"Expected {} value to be {} but found {}\".format(label,value, actual_value)\n else:\n self.builtin.log(\"element Not found\")\n else:\n for label, value in kwargs.items():\n self.npsp.navigate_to_and_validate_field_value(label, \"contains\", value, section)\n \n \n \n @capture_screenshot_on_error\n def validate_upcoming_schedules(self, num_payments,startdate,dayofmonth):\n \"\"\"Takes in the parameter (number of payments) and the donation start date\n verifies that the payment schedules created on UI reflect the total number\n verifies that the next payment dates are reflected correctly for all the schedules\"\"\"\n \n installmentrow = npsp_lex_locators[\"erd\"][\"installment_row\"]\n installments = self.selenium.get_webelements(installmentrow)\n count = len(installments)\n print(f\"Number of installments created is {count}\")\n assert count == int(num_payments), \"Expected installments to be {} but found {}\".format(num_payments, count)\n if count == int(num_payments):\n i = 1\n while i < count:\n datefield = npsp_lex_locators[\"erd\"][\"installment_date\"].format(i)\n installment_date = self.selenium.get_webelement(datefield)\n date_object = datetime.strptime(startdate, '%m/%d/%Y').date()\n expected_date = (date_object+relativedelta(months=+i)).replace(day=int(dayofmonth))\n actual_date=self.selenium.get_webelement(installment_date).text\n formatted_actual = datetime.strptime(actual_date, '%m/%d/%Y').date()\n assert formatted_actual == expected_date, \"Expected date to be {} but found {}\".format(expected_date,formatted_actual)\n i=i+1"
},
{
"alpha_fraction": 0.5769299268722534,
"alphanum_fraction": 0.5799013376235962,
"avg_line_length": 40.7568244934082,
"blob_id": "a7316c990b0dc06012cb2a9e23ca643f48b7e574",
"content_id": "5297da710f4d1be2509c2cc0efdb068787773a43",
"detected_licenses": [
"LicenseRef-scancode-warranty-disclaimer",
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 16827,
"license_type": "permissive",
"max_line_length": 136,
"num_lines": 403,
"path": "/src/lwc/bdiObjectMappings/bdiObjectMappings.js",
"repo_name": "giveclarity/NPSP",
"src_encoding": "UTF-8",
"text": "import { LightningElement, track, api } from 'lwc';\nimport { ShowToastEvent } from 'lightning/platformShowToastEvent';\nimport getObjectMappings from '@salesforce/apex/BDI_ManageAdvancedMappingCtrl.getObjectMappings';\nimport getObjectOptions from '@salesforce/apex/BDI_ManageAdvancedMappingCtrl.getObjectOptions';\nimport createDataImportObjectMapping\n from '@salesforce/apex/BDI_ManageAdvancedMappingCtrl.createDataImportObjectMapping';\nimport { registerListener, unregisterAllListeners, fireEvent} from 'c/pubsubNoPageRef';\nimport getNamespaceWrapper\n from '@salesforce/apex/BDI_ManageAdvancedMappingCtrl.getNamespaceWrapper';\n\nimport stgUnknownError from '@salesforce/label/c.stgUnknownError';\nimport bdiOMUIChildParentLabel from '@salesforce/label/c.bdiOMUIChildParentLabel';\nimport bdiOMUIGroupNameLabel from '@salesforce/label/c.bdiOMUIGroupNameLabel';\nimport bdiOMUIImportStatusLabel from '@salesforce/label/c.bdiOMUIImportStatusLabel';\nimport bdiOMUILinkToRecordLabel from '@salesforce/label/c.bdiOMUILinkToRecordLabel';\nimport bdiOMUIObjectNameLabel from '@salesforce/label/c.bdiOMUIObjectNameLabel';\nimport bdiOMUIOfGroupLabel from '@salesforce/label/c.bdiOMUIOfGroupLabel';\nimport bdiOMUIThroughFieldLabel from '@salesforce/label/c.bdiOMUIThroughFieldLabel';\nimport bdiFMUISuccessful from '@salesforce/label/c.bdiFMUISuccessful';\nimport bdiFMUIUnsuccessful from '@salesforce/label/c.bdiFMUIUnsuccessful';\nimport bdiFMUIUpdate from '@salesforce/label/c.bdiFMUIUpdate';\nimport bdiFMUITryAgain from '@salesforce/label/c.bdiFMUITryAgain';\n\nimport bdiAdvancedMapping from '@salesforce/label/c.bdiAdvancedMapping';\nimport bdiOMUICreateNewObjectGroup from '@salesforce/label/c.bdiOMUICreateNewObjectGroup';\nimport bdiOMUIObjectGroupsTitle from '@salesforce/label/c.bdiOMUIObjectGroupsTitle';\nimport bdiOMUIPageDescriptionPt1 from '@salesforce/label/c.bdiOMUIPageDescriptionPt1';\nimport bdiOMUIPageDescriptionPt2 from '@salesforce/label/c.bdiOMUIPageDescriptionPt2';\nimport bdiOMUIPageDescriptionPt3 from '@salesforce/label/c.bdiOMUIPageDescriptionPt3';\nimport bdiOMUIViewFieldMappingsLabel from '@salesforce/label/c.bdiOMUIViewFieldMappingsLabel';\nimport bgeActionDelete from '@salesforce/label/c.bgeActionDelete';\nimport stgBtnEdit from '@salesforce/label/c.stgBtnEdit';\nimport stgHelpAdvancedMapping3 from '@salesforce/label/c.stgHelpAdvancedMapping3';\n\nimport bdiOMUILongDeployment from '@salesforce/label/c.bdiOMUILongDeployment';\nimport bdiFMUILongDeploymentLink from '@salesforce/label/c.bdiFMUILongDeploymentLink';\nimport bdiFMUILongDeploymentMessage from '@salesforce/label/c.bdiFMUILongDeploymentMessage';\n\nexport default class bdiObjectMappings extends LightningElement {\n @track displayObjectMappings = true;\n @track isLoading = true;\n @track isModalOpen = false;\n @track columns = [];\n @track objectMappings;\n\n @api objectMapping;\n @api objectOptions;\n @api shouldRender;\n\n\n @track npspSettingsURL = '/lightning/n/npsp__NPSP_Settings'; \n\n deploymentTimer;\n deploymentTimeout = 10000;\n\n diObjectMappingSetDevName;\n npspNS;\n namespace;\n namespaceWrapper;\n \n customLabels = {\n bdiOMUIChildParentLabel,\n bdiOMUIGroupNameLabel,\n bdiOMUIImportStatusLabel,\n bdiOMUILinkToRecordLabel,\n bdiOMUIObjectNameLabel,\n bdiOMUIOfGroupLabel,\n bdiOMUIThroughFieldLabel,\n bdiFMUISuccessful,\n bdiFMUIUnsuccessful,\n bdiFMUITryAgain,\n bdiAdvancedMapping,\n bdiOMUICreateNewObjectGroup,\n bdiOMUIObjectGroupsTitle,\n bdiOMUIPageDescriptionPt1,\n bdiOMUIPageDescriptionPt2,\n 
bdiOMUIPageDescriptionPt3,\n bdiOMUIViewFieldMappingsLabel,\n bgeActionDelete,\n stgBtnEdit,\n stgHelpAdvancedMapping3,\n bdiOMUILongDeployment,\n bdiFMUILongDeploymentLink,\n bdiFMUILongDeploymentMessage\n };\n\n constructor() {\n super();\n this.columns =[\n {label: this.customLabels.bdiOMUIGroupNameLabel, fieldName: 'MasterLabel', type: 'text'},\n {label: this.customLabels.bdiOMUIObjectNameLabel, fieldName: 'Object_API_Name', type: 'text'},\n {label: this.customLabels.bdiOMUIChildParentLabel, fieldName: 'Relationship_To_Predecessor', type: 'text', fixedWidth: 150},\n {label: this.customLabels.bdiOMUIOfGroupLabel, fieldName: 'Predecessor_Label_Name', type: 'text'},\n {label: this.customLabels.bdiOMUIThroughFieldLabel, fieldName: 'Relationship_Field', type: 'text'},\n {label: this.customLabels.bdiOMUILinkToRecordLabel, fieldName: 'Imported_Record_Field_Name', type: 'text'},\n {label: this.customLabels.bdiOMUIImportStatusLabel, fieldName: 'Imported_Record_Status_Field_Name', type: 'text'},\n {type: 'action', typeAttributes: { rowActions: this.getRowActions }}];\n }\n\n /*******************************************************************************\n * @description Called when the component is first loaded to set up listeners and \n * prepare data.\n */\n connectedCallback() {\n registerListener('showobjectmappings', this.handleShowObjectMappings, this);\n registerListener('showfieldmappings', this.handleShowFieldMappings, this);\n registerListener('deploymentResponse', this.handleDeploymentResponse, this);\n registerListener('startDeploymentTimeout', this.handleDeploymentTimeout, this);\n registerListener('refresh', this.refresh, this);\n\n this.retrieveObjectMappings();\n this.retrieveObjectOptions();\n this.getPackageNamespace();\n }\n\n /*******************************************************************************\n * @description Called when the component is unloaded to unregister event listeners.\n */\n disconnectedCallback() {\n unregisterAllListeners(this);\n }\n\n /*******************************************************************************\n * @description retrieves the namespace prefix\n */\n getPackageNamespace() {\n getNamespaceWrapper()\n .then((data) => {\n this.namespaceWrapper = data;\n this.namespace = data.currentNamespace;\n this.npspNS = data.npspNamespace;\n //if we are not in a namespaced npsp org then remove the prefix from\n //the page url.\n if (this.namespace !== this.npspNS) {\n let newPrefix;\n\n if (this.namespace) {\n newPrefix = this.namespace + '__';\n } else {\n newPrefix = '';\n }\n\n this.npspSettingsURL = this.npspSettingsURL.replace(this.npspNS +'__',newPrefix);\n }\n })\n .catch((error) => {\n this.handleError(error);\n }); \n }\n\n /*******************************************************************************\n * @description Refreshes object mappings data. 
Usually called after save/delete.\n */\n @api\n refresh() {\n if (this.displayObjectMappings) {\n this.isLoading = true;\n this.retrieveObjectMappings();\n }\n }\n\n /*******************************************************************************\n * @description Call apex method 'getObjectMappings' to get\n * a list of all non-deleted object mappings\n */\n retrieveObjectMappings() {\n getObjectMappings()\n .then((data) => {\n this.objectMappings = data;\n this.diObjectMappingSetDevName = this.objectMappings[0].Data_Import_Object_Mapping_Set_Dev_Name;\n this.isLoading = false;\n })\n .catch((error) => {\n this.isLoading = false;\n this.handleError(error);\n });\n }\n\n /*******************************************************************************\n * @description Call apex method 'getObjectOptions' to get\n * a list of all objects that will be valid for creating object mappings on.\n */\n retrieveObjectOptions() {\n getObjectOptions()\n .then(result => {\n this.objectOptions = result;\n })\n .catch(error => {\n this.error = error;\n this.handleError(error);\n });\n }\n \n /*******************************************************************************\n * @description shows the object mappings component and refreshes the data\n */\n handleShowObjectMappings() {\n this.displayObjectMappings = true;\n this.refresh();\n }\n\n /*******************************************************************************\n * @description Shows the field mappings component and passes in the selected\n * object mapping.\n */\n handleShowFieldMappings(event) {\n this.objectMapping = event.objectMapping;\n this.displayObjectMappings = false;\n }\n\n /*******************************************************************************\n * @description Opens the object mapping modal passing in the relevant details\n */\n handleOpenModal() {\n if (this.displayObjectMappings) {\n fireEvent(this.pageRef, 'openModal', \n { objectMapping: null, row: undefined });\n }\n }\n\n /*******************************************************************************\n * @description Action handler for datatable row actions (i.e. edit, delete)\n *\n * @param event: Event containing row details of the action\n */\n handleRowAction(event) {\n const actionName = event.detail.action.name;\n const row = event.detail.row;\n let rowString;\n switch (actionName) {\n case 'goToFieldMappings':\n fireEvent(this.pageRef,'showfieldmappings', {objectMapping:row});\n break;\n\n case 'delete':\n this.isLoading = true;\n \n row.Is_Deleted = true;\n row.Data_Import_Object_Mapping_Set = this.diObjectMappingSetDevName;\n\n rowString = JSON.stringify(row);\n\n createDataImportObjectMapping({objectMappingString: rowString})\n .then((deploymentId) => {\n this.handleDeleteDeploymentId(deploymentId);\n })\n .catch((error) => {\n this.isLoading = false;\n this.showToast(\n 'Error',\n '{0}. {1}. 
{2}.',\n 'error',\n 'sticky',\n [error.body.exceptionType, error.body.message, error.body.stackTrace]);\n });\n break;\n\n case 'edit':\n fireEvent(this.pageRef,'openModal', {\n row: row });\n break;\n\n default:\n }\n }\n\n /*******************************************************************************\n * @description Dynamically gets the appropriate row actions depending on whether \n * it is a core object mapping.\n */\n getRowActions(row, doneCallback) {\n\n const actions = [\n { label: bdiOMUIViewFieldMappingsLabel, name: 'goToFieldMappings' }\n ];\n\n if (row.Relationship_To_Predecessor !== 'No Predecessor'\n && row.MasterLabel !== 'Opportunity Contact Role 1' \n && row.MasterLabel !== 'Opportunity Contact Role 2' \n && row.MasterLabel !== 'GAU Allocation 1' \n && row.MasterLabel !== 'GAU Allocation 2') {\n actions.push({ label: stgBtnEdit, name: 'edit' });\n actions.push({ label: bgeActionDelete, name: 'delete' });\n }\n\n setTimeout(() => {\n doneCallback(actions); \n }, 0);\n }\n \n /*******************************************************************************\n * @description Handles the timeout toast of deployments whenever a deployment\n * that's registered with platformEventListener takes 10 seconds or longer to\n * send out a response.\n */\n handleDeploymentTimeout(event) {\n if (this.displayObjectMappings) {\n let that = this;\n this.deploymentTimer = setTimeout(function() {\n that.isLoading = false;\n fireEvent(this.pageRef, 'closeModal', {});\n\n let url =\n '/lightning/setup/DeployStatus/page?' +\n 'address=%2Fchangemgmt%2FmonitorDeploymentsDetails.apexp%3FasyncId%3D' +\n event.deploymentId +\n '%26retURL%3D%252Fchangemgmt%252FmonitorDeployment.apexp';\n\n that.showToast(\n bdiOMUILongDeployment,\n bdiFMUILongDeploymentMessage + ' {0}',\n 'warning',\n 'sticky',\n [{url, label: bdiFMUILongDeploymentLink}]);\n }, this.deploymentTimeout, that);\n }\n } \n\n /*******************************************************************************\n * @description Listens for an event from the platformEventListener component.\n * Upon receiving an event refreshes the field mappings records, closes the modal,\n * and creates a toast.\n *\n * @param {object} platformEvent: Object containing the platform event payload\n */\n handleDeploymentResponse(platformEvent) {\n if (this.displayObjectMappings) {\n clearTimeout(this.deploymentTimer);\n fireEvent(this.pageRef, 'refresh', {});\n fireEvent(this.pageRef, 'closeModal', {});\n\n const payload = platformEvent.response.data.payload;\n const status = payload.Status__c || payload.npsp__Status__c;\n\n const successful = bdiFMUISuccessful.charAt(0).toUpperCase() + bdiFMUISuccessful.slice(1);\n const unsuccessful = bdiFMUIUnsuccessful.charAt(0).toUpperCase() + bdiFMUIUnsuccessful.slice(1);\n const successMessage = `${successful} ${bdiOMUIObjectGroupsTitle} ${bdiFMUIUpdate}.`;\n const failMessage = `${unsuccessful} ${bdiOMUIObjectGroupsTitle} ${bdiFMUIUpdate}. ${bdiFMUITryAgain}.`;\n const succeeded = status === 'Succeeded';\n \n this.showToast(\n `${succeeded ? successMessage : failMessage}`,\n '',\n succeeded ? 
'success' : 'error');\n }\n }\n /*******************************************************************************\n * @description Creates and dispatches a CustomEvent 'deployment' for deletion\n * letting the platformEventListener know that we have an id to register and monitor.\n * After dispatching the CustomEvent, start the deployment timeout.\n *\n * @param {string} deploymentId: Custom Metadata Deployment Id\n */\n handleDeleteDeploymentId(deploymentId) {\n if (this.displayObjectMappings) {\n const deploymentEvent = new CustomEvent('deployment', {\n bubbles: true,\n composed: true,\n detail: {deploymentId}\n });\n this.dispatchEvent(deploymentEvent);\n\n this.handleDeploymentTimeout({ deploymentId: deploymentId });\n }\n }\n\n /*******************************************************************************\n * @description Creates and dispatches a ShowToastEvent\n *\n * @param {string} title: Title of the toast, dispalyed as a heading.\n * @param {string} message: Message of the toast. It can contain placeholders in\n * the form of {0} ... {N}. The placeholders are replaced with the links from\n * messageData param\n * @param {string} mode: Mode of the toast\n * @param {array} messageData: List of values that replace the {index} placeholders\n * in the message param\n */\n showToast(title, message, variant, mode, messageData) {\n const event = new ShowToastEvent({\n title: title,\n message: message,\n variant: variant,\n mode: mode,\n messageData: messageData\n });\n this.dispatchEvent(event);\n }\n\n /*******************************************************************************\n * @description Creates and dispatches an error toast\n *\n * @param {object} error: Event holding error details\n */\n handleError(error) {\n if (error && error.status && error.body) {\n this.showToast(`${error.status} ${error.statusText}`, error.body.message, 'error', 'sticky');\n } else if (error && error.name && error.message) {\n this.showToast(`${error.name}`, error.message, 'error', 'sticky');\n } else {\n this.showToast(stgUnknownError, '', 'error', 'sticky');\n }\n }\n}"
},
{
"alpha_fraction": 0.5747800469398499,
"alphanum_fraction": 0.5835776925086975,
"avg_line_length": 27.41666603088379,
"blob_id": "1112cacb200f7b918deff0116f5f9abfed33dd42",
"content_id": "aa42d8c037438ab183ad7acbdb14d777c675dc17",
"detected_licenses": [
"LicenseRef-scancode-warranty-disclaimer",
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 341,
"license_type": "permissive",
"max_line_length": 60,
"num_lines": 12,
"path": "/tasks/multicurrency.py",
"repo_name": "giveclarity/NPSP",
"src_encoding": "UTF-8",
"text": "from cumulusci.tasks.salesforce import BaseSalesforceApiTask\n\nclass ConfigureCAD(BaseSalesforceApiTask):\n\n def _run_task(self):\n self.sf.CurrencyType.create({\n 'IsoCode': 'CAD',\n 'IsCorporate': False,\n 'IsActive': True,\n 'DecimalPlaces': 2,\n 'ConversionRate': 1.3,\n })\n"
},
{
"alpha_fraction": 0.5510355830192566,
"alphanum_fraction": 0.5523358583450317,
"avg_line_length": 32.44099426269531,
"blob_id": "0e7bfe7c151a7e4ca5bcc1bee2932fa2280d4e98",
"content_id": "ed9476d963f7cc67f605bee88b3fca57dc2c45e7",
"detected_licenses": [
"LicenseRef-scancode-warranty-disclaimer",
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 10767,
"license_type": "permissive",
"max_line_length": 125,
"num_lines": 322,
"path": "/src/lwc/geBatchGiftEntryTable/geBatchGiftEntryTable.js",
"repo_name": "giveclarity/NPSP",
"src_encoding": "UTF-8",
"text": "import {LightningElement, api, track} from 'lwc';\nimport getDataImportModel\n from '@salesforce/apex/BGE_DataImportBatchEntry_CTRL.getDataImportModel';\nimport getDataImportRows\n from '@salesforce/apex/BGE_DataImportBatchEntry_CTRL.getDataImportRows';\nimport GeFormService from 'c/geFormService';\nimport STATUS_FIELD from '@salesforce/schema/DataImport__c.Status__c';\nimport FAILURE_INFORMATION_FIELD\n from '@salesforce/schema/DataImport__c.FailureInformation__c';\nimport DONATION_AMOUNT from '@salesforce/schema/DataImport__c.Donation_Amount__c';\nimport {deleteRecord} from 'lightning/uiRecordApi';\nimport {handleError} from 'c/utilTemplateBuilder';\nimport runBatchDryRun from '@salesforce/apex/BGE_DataImportBatchEntry_CTRL.runBatchDryRun';\nimport geDonorColumnLabel from '@salesforce/label/c.geDonorColumnLabel';\nimport geDonationColumnLabel from '@salesforce/label/c.geDonationColumnLabel';\nimport bgeActionDelete from '@salesforce/label/c.bgeActionDelete';\nimport geBatchGiftsCount from '@salesforce/label/c.geBatchGiftsCount';\nimport geBatchGiftsTotal from '@salesforce/label/c.geBatchGiftsTotal';\nimport commonOpen from '@salesforce/label/c.commonOpen';\nimport { isNotEmpty, isUndefined } from 'c/utilCommon';\n\nexport default class GeBatchGiftEntryTable extends LightningElement {\n @api batchId;\n @track ready = false;\n\n _batchLoaded = false;\n @track data = [];\n @track hasData;\n\n _columnsLoaded = false;\n _columns = [\n {label: 'Status', fieldName: STATUS_FIELD.fieldApiName, type: 'text'},\n {label: 'Errors', fieldName: FAILURE_INFORMATION_FIELD.fieldApiName, type: 'text'},\n {\n label: geDonorColumnLabel, fieldName: 'donorLink', type: 'url',\n typeAttributes: {label: {fieldName: 'donorName'}}\n },\n {\n label: geDonationColumnLabel, fieldName: 'matchedRecordUrl', type: 'url',\n typeAttributes: {label: {fieldName: 'matchedRecordLabel'}}\n }\n ];\n _actionsColumn = {\n type: 'action',\n typeAttributes: {\n rowActions: [\n {label: commonOpen, name: 'open'},\n {label: bgeActionDelete, name: 'delete'}\n ],\n menuAlignment: 'auto'\n }\n };\n _columnsBySourceFieldApiName = {};\n\n\n @api title;\n @api total;\n @api expectedTotal;\n @api count;\n @api expectedCount;\n @api userDefinedBatchTableColumnNames;\n @track isLoaded = true;\n\n @api\n handleSectionsRetrieved(sections) {\n if (!this._batchLoaded) {\n this.loadBatch(sections);\n }\n }\n\n setReady() {\n this.ready = this._columnsLoaded && this._batchLoaded;\n }\n\n loadBatch(sections) {\n getDataImportModel({batchId: this.batchId})\n .then(\n response => {\n const dataImportModel = JSON.parse(response);\n this.setTableProperties(dataImportModel);\n this.buildColumnsFromSections(sections);\n this.batchLoaded();\n }\n )\n .catch(\n error => {\n handleError(error);\n }\n );\n }\n\n batchLoaded() {\n this._batchLoaded = true;\n this.setReady();\n }\n\n setTableProperties(dataImportModel) {\n this._count = dataImportModel.totalCountOfRows;\n this._total = dataImportModel.totalRowAmount;\n dataImportModel.dataImportRows.forEach(row => {\n this.data.push(Object.assign(row, row.record));\n });\n this.data = [...this.data];\n this.hasData = this.data.length > 0 ? 
true : false;\n }\n\n get columns() {\n if (!this._columnsLoaded) return [];\n if (this._columnsLoaded) return [...this.computedColumns, this._actionsColumn];\n }\n\n get computedColumns() {\n const hasUserDefinedColumns =\n this.userDefinedBatchTableColumnNames && this.userDefinedBatchTableColumnNames.length > 0;\n if (hasUserDefinedColumns) {\n return this.getUserDefinedColumns();\n }\n\n return this.getAllColumns();\n }\n\n getAllColumns() {\n let allColumns = [];\n for (const columnValue in this._columnsBySourceFieldApiName) {\n allColumns.push(this._columnsBySourceFieldApiName[columnValue]);\n }\n return allColumns;\n }\n\n getUserDefinedColumns() {\n let userDefinedColumns = [];\n this.userDefinedBatchTableColumnNames.forEach(columnName => {\n if (isUndefined(this._columnsBySourceFieldApiName[columnName])) return;\n userDefinedColumns.push(this._columnsBySourceFieldApiName[columnName]);\n });\n return userDefinedColumns;\n }\n\n buildColumnsFromSections(sections) {\n this.addSpecialCasedColumns();\n if (!sections) return;\n\n sections.forEach(\n section => {\n section.elements\n .filter(e => e.elementType === 'field')\n .forEach(\n element => {\n const fieldWrapper = GeFormService.getFieldMappingWrapper(element.dataImportFieldMappingDevNames[0]);\n if (isNotEmpty(fieldWrapper)) {\n const column = {\n label: element.customLabel,\n fieldName: fieldWrapper.Source_Field_API_Name,\n type: GeFormService.getInputTypeFromDataType(\n element.dataType\n ) === 'date' ? 'date-local' :\n GeFormService.getInputTypeFromDataType(element.dataType)\n };\n\n this._columnsBySourceFieldApiName[column.fieldName] = column;\n }\n }\n );\n }\n );\n\n this.columnsLoaded();\n }\n\n /**\n * @description Adds special cased columns to the map of columns. These\n * four special cased fields are the Donor, Donation, Status,\n * Failure Information fields. Donor and Donation are derived\n * fields and constructed in the BGE_DataImportBatchEntry_CTRL\n * class. 
Status and Failure Information are fields on the\n * DataImport__c object.\n */\n addSpecialCasedColumns() {\n this._columnsBySourceFieldApiName[this._columns[0].fieldName] = this._columns[0];\n this._columnsBySourceFieldApiName[this._columns[1].fieldName] = this._columns[1];\n this._columnsBySourceFieldApiName[this._columns[2].fieldName] = this._columns[2];\n this._columnsBySourceFieldApiName[this._columns[3].fieldName] = this._columns[3];\n }\n\n columnsLoaded() {\n this._columnsLoaded = true;\n this.setReady();\n }\n\n @api\n upsertData(dataRow, idProperty) {\n const existingRowIndex = this.data.findIndex(row =>\n row[idProperty] === dataRow[idProperty]\n );\n\n if (existingRowIndex !== -1) {\n this.data.splice(existingRowIndex, 1, dataRow);\n this.data = [...this.data];\n } else {\n this.data = [dataRow, ...this.data];\n if (this.hasData == false) {\n this.hasData = true;\n }\n }\n }\n\n handleRowActions(event) {\n switch (event.detail.action.name) {\n case 'open':\n this.loadRow(event.detail.row);\n break;\n case 'delete':\n deleteRecord(event.detail.row.Id).then(() => {\n this.deleteDIRow(event.detail.row);\n }).catch(error => {\n handleError(error);\n }\n );\n break;\n }\n }\n\n deleteDIRow(rowToDelete) {\n const isRowToDelete = row => row.Id == rowToDelete.Id;\n const index = this.data.findIndex(isRowToDelete);\n this.data.splice(index, 1);\n this.data = [...this.data];\n this.dispatchEvent(new CustomEvent('delete', {\n detail: {\n amount: rowToDelete[DONATION_AMOUNT.fieldApiName]\n }\n }));\n }\n\n loadMoreData(event) {\n event.target.isLoading = true;\n const disableInfiniteLoading = function () {\n this.enableInfiniteLoading = false;\n }.bind(event.target);\n\n const disableIsLoading = function () {\n this.isLoading = false;\n }.bind(event.target);\n\n getDataImportRows({batchId: this.batchId, offset: this.data.length})\n .then(rows => {\n rows.forEach(row => {\n this.data.push(\n Object.assign(row, row.record)\n );\n }\n );\n this.data = [...this.data];\n if (this.data.length >= this.count) {\n disableInfiniteLoading();\n }\n disableIsLoading();\n })\n .catch(error => {\n handleError(error);\n });\n }\n\n @api\n runBatchDryRun(callback) {\n runBatchDryRun({\n batchId: this.batchId,\n numberOfRowsToReturn: this.data.length\n })\n .then(result => {\n const dataImportModel = JSON.parse(result);\n this._count = dataImportModel.totalCountOfRows;\n this._total = dataImportModel.totalRowAmount;\n dataImportModel.dataImportRows.forEach((row, idx) => {\n this.upsertData(\n Object.assign(row, row.record), 'Id');\n });\n })\n .catch(error => {\n handleError(error);\n })\n .finally(() => {\n callback();\n });\n }\n\n get geBatchGiftsCountLabel() {\n return geBatchGiftsCount;\n }\n\n get geBatchGiftsTotalLabel() {\n return geBatchGiftsTotal;\n }\n\n loadRow(row) {\n this.dispatchEvent(new CustomEvent('loaddata', {\n detail: row\n }));\n }\n\n /**\n * @description Internal setters used to communicate the current count and total\n * up to the App, which needs them to keep track of whether the batch's\n * expected totals match.\n */\n set _count(count) {\n this.dispatchEvent(new CustomEvent('countchanged', {\n detail: {\n value: count\n }\n }));\n }\n\n set _total(total) {\n this.dispatchEvent(new CustomEvent('totalchanged', {\n detail: {\n value: total\n }\n }));\n }\n\n}"
},
{
"alpha_fraction": 0.6537467837333679,
"alphanum_fraction": 0.6537467837333679,
"avg_line_length": 44.55882263183594,
"blob_id": "186f96ab362af79fcac9d2be0ac7534ccc6a1746",
"content_id": "71e1d34b4b1581791103da4b6fcd9728162519ee",
"detected_licenses": [
"LicenseRef-scancode-warranty-disclaimer",
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 1548,
"license_type": "permissive",
"max_line_length": 90,
"num_lines": 34,
"path": "/src/lwc/utilCustomErrors/exceptionDataError.js",
"repo_name": "giveclarity/NPSP",
"src_encoding": "UTF-8",
"text": "class ExceptionDataError extends Error {\n\n /*******************************************************************************\n * @description Custom error class to transform any kind of apex exception\n * into something that the `handleCatchOnSave` method in geFormRenderer can\n * consume.\n *\n * @param {object} apexException: An exception of some kind received from apex\n */\n constructor(apexException) {\n super();\n if (apexException.body && apexException.body.exceptionType) {\n this.exceptionType = apexException.body.exceptionType;\n this.errorMessage = apexException.body.message;\n this.stackTrace = apexException.body.stackTrace;\n this.isUserDefinedException = apexException.body.isUserDefinedException;\n this.DMLErrorMessageMapping = {};\n this.DMLErrorFieldNameMapping = {};\n\n } else if (apexException.body && apexException.body.message) {\n // This looks like an instance of the apex wrapper class ERR_ExceptionData\n const apexExceptionWrapper = JSON.parse(apexException.body.message);\n this.exceptionType = apexExceptionWrapper.exceptionType;\n this.errorMessage = apexExceptionWrapper.errorMessage;\n this.DMLErrorMessageMapping = apexExceptionWrapper.DMLErrorMessageMapping;\n this.DMLErrorFieldNameMapping = apexExceptionWrapper.DMLErrorFieldNameMapping;\n\n } else {\n this.message = apexException;\n }\n }\n}\n\nexport default ExceptionDataError;"
}
] | 8 |
ESEGroup/Brasil | https://github.com/ESEGroup/Brasil | 8c64507f1748b92551fb12cda2c2583fba8db116 | 803ab0a1fc3922c21f9d7bb08c994380f986862c | 972985319be87409e2943920699deae99693a619 | refs/heads/master | 2021-01-12T10:42:41.193972 | 2016-12-20T02:19:08 | 2016-12-20T02:19:08 | 72,641,683 | 0 | 2 | null | 2016-11-02T13:19:14 | 2016-11-06T22:06:21 | 2016-11-09T23:56:00 | HTML | [
{
"alpha_fraction": 0.7215686440467834,
"alphanum_fraction": 0.7215686440467834,
"avg_line_length": 53.64285659790039,
"blob_id": "6de3f9df9d0ba24a051d736477b671ca9384b88c",
"content_id": "a58d845503bf29b9d1a87bd852fc2fb9748f8dc1",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 765,
"license_type": "permissive",
"max_line_length": 70,
"num_lines": 14,
"path": "/app/models/__init__.py",
"repo_name": "ESEGroup/Brasil",
"src_encoding": "UTF-8",
"text": "#https://code.djangoproject.com/wiki/CookBookSplitModelsToFiles\nfrom .usuario import Usuario\nfrom .recurso import Recurso\nfrom .agendamento import Agendamento\nfrom .cadastro import Cadastro\nfrom .notificador import Notificador\nfrom .notificador_cadastro import NotificadorCadastro\nfrom .notificador_agendamento import NotificadorAgendamento\nfrom .busca import Busca, BuscaRecurso, BuscaUsuario\nfrom .cadastro_recurso import CadastroRecurso\nfrom .cadastro_usuario import CadastroUsuario\nfrom .cadastro_agendamento import CadastroAgendamento\nfrom .gerenciador import GerenciadorAgendamento\nfrom .settingsgroups import SettingsUserGroups\n"
},
{
"alpha_fraction": 0.5789473652839661,
"alphanum_fraction": 0.5926773548126221,
"avg_line_length": 18,
"blob_id": "fa13e82d09088c449e77054e846ad5cc529ca9e7",
"content_id": "937bbceb8baaa16d02761a3f1c4440eb223b0a63",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 437,
"license_type": "permissive",
"max_line_length": 91,
"num_lines": 23,
"path": "/app/models/cadastro.py",
"repo_name": "ESEGroup/Brasil",
"src_encoding": "UTF-8",
"text": "from django.db import models\n\nclass Cadastro (models.Model):\n # no getters and setters please (http://dirtsimple.org/2004/12/python-is-not-java.html)\n\n acesso = None\n\n def cadastrar ():\n return False\n\n def atualizar ():\n return False\n\n def deletar ():\n return False\n\n def notificar ():\n return False\n\n class Meta:\n abstract = True\n managed = False\n app_label = 'app'\n"
},
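The abstract `Cadastro` model above only fixes the registry contract (`cadastrar`/`atualizar`/`deletar`/`notificar`); the concrete classes later in this dump (`CadastroRecurso`, `CadastroUsuario`, `CadastroAgendamento`) fill it in against the ORM. A minimal sketch of a hypothetical subclass, just to make the intended shape explicit:

```python
# Hypothetical subclass sketch (not part of the repo): concrete registries
# such as CadastroRecurso, shown further down, follow exactly this shape.
from app.models import Cadastro

class CadastroExemplo(Cadastro):

    def cadastrar(self, **campos):
        # would persist a new record and return True on success
        return True

    def deletar(self, pk):
        # would remove (or deactivate) the record identified by pk
        return True

    class Meta:
        managed = False
        app_label = 'app'
```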
{
"alpha_fraction": 0.5592885613441467,
"alphanum_fraction": 0.5604743361473083,
"avg_line_length": 29.119047164916992,
"blob_id": "9c03a7e4efb0d0deeb9c88a1c22f12aa0cc71b3c",
"content_id": "f0a34a9d8565cc0d99750b580dff8ea989cc1418",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2536,
"license_type": "permissive",
"max_line_length": 131,
"num_lines": 84,
"path": "/app/models/busca.py",
"repo_name": "ESEGroup/Brasil",
"src_encoding": "UTF-8",
"text": "from django.db import models\nfrom django.core.exceptions import ObjectDoesNotExist\nfrom app.models import Recurso, Usuario\nimport json\n\nclass Busca (models.Model):\n\n acesso = None\n params = \"{}\"\n # {\n # 'type': 'match'/'complex',\n # (if type == match) 'id': integer id,\n # (if type == 'complex') 'texto' : queried text or '' (empty)\n # (if type == 'complex') 'categorias' : string array,\n # (if type == 'complex') 'enderecos' : string array,\n # (if type == 'complex') 'disponibilidades' : string array\n #}\n resultados_busca = \"{}\"\n lista_resultados = []\n\n def busca (json):\n return False\n\n def buscar ():\n return False\n\n class Meta:\n abstract = True\n managed = False\n app_label = 'app'\n\n\nclass BuscaRecurso (Busca):\n\n def buscar (self):\n\n if self.params == '{}':\n return None\n\n # creating dynamic object from json\n query = lambda:None\n query.__dict__ = json.loads(self.params)\n\n # returns single resource, if found\n if query.type == 'match':\n try:\n return Recurso.objects.get(patrimonio = query.id)\n except ObjectDoesNotExist:\n return \"DoesNotExist ERROR\"\n\n # returns list of resources\n elif query.type == 'complex':\n # Remember: querySets are lazy – the act of creating a QuerySet doesn’t involve any database activity.\n # You can stack filters together all day long, and Django won’t actually run the query until the QuerySet is evaluated.\n res = Recurso.objects.exclude(nome=\"\")\n if query.texto.strip():\n # this is actually an OR\n res = res.filter(nome__icontains=query.texto) | res.filter(descricao__icontains=query.texto)\n if len(query.categorias) > 0:\n res = res.filter(categoria__in=query.categorias)\n if len(query.enderecos) > 0:\n res = res.filter(endereco__in=query.enderecos)\n if len(query.disponibilidades) > 0:\n res = res.filter(estado__in=query.disponibilidades)\n try:\n return res\n except DoesNotExist:\n return \"DoesNotExist ERROR\"\n\n return None\n\n class Meta:\n managed = False\n app_label = 'app'\n\n\nclass BuscaUsuario (Busca):\n\n def buscar ():\n return False\n\n class Meta:\n managed = False\n app_label = 'app'\n"
},
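`BuscaRecurso` is driven entirely through the JSON contract documented in the comments above. A short sketch of both query types, assuming a configured Django environment for this project; `json.dumps` is used here instead of the hand-concatenated strings that `views.py` (later in this section) builds:

```python
# Sketch: exercising BuscaRecurso through its documented params contract.
import json
from app.models import BuscaRecurso

# exact-match lookup by patrimonio
s = BuscaRecurso()
s.params = json.dumps({"type": "match", "id": 42})
recurso = s.buscar()            # a Recurso, or the "DoesNotExist ERROR" sentinel

# filtered catalogue search; empty lists mean "do not filter on this axis"
s.params = json.dumps({
    "type": "complex",
    "texto": "projetor",        # matched against nome and descricao
    "categorias": [],
    "enderecos": [],
    "disponibilidades": ["Disponível"],
})
resultados = s.buscar()         # a lazy QuerySet of Recurso
```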
{
"alpha_fraction": 0.4797728955745697,
"alphanum_fraction": 0.509226381778717,
"avg_line_length": 16.07878875732422,
"blob_id": "02c01cabcbfdd5b8a579b729811f55e3f15e1e73",
"content_id": "91e56f48cdcb13cfcc11fe2217326c6977fe73e8",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 2841,
"license_type": "permissive",
"max_line_length": 52,
"num_lines": 165,
"path": "/static/javascript/fakeresources.js",
"repo_name": "ESEGroup/Brasil",
"src_encoding": "UTF-8",
"text": "fakeresources = [\n {\n name: \"res1\",\n department : \"dep1\",\n id: 1,\n status: \"disponível\",\n description: \"Alguma descrição do recurso res1\",\n category: \"A\",\n },\n {\n name: \"res2\",\n department : \"dep1\",\n id: 2,\n status: \"disponível\",\n description: \"desc\",\n category: \"A\",\n },\n {\n name: \"res3\",\n department : \"dep1\",\n id: 3,\n status: \"disponível\",\n description: \"desc\",\n category: \"A\",\n },\n {\n name: \"res4\",\n department : \"dep1\",\n id: 4,\n status: \"disponível\",\n description: \"desc\",\n category: \"B\",\n },\n {\n name: \"res5\",\n department : \"dep1\",\n id: 5,\n status: \"disponível\",\n description: \"desc\",\n category: \"B\",\n },\n {\n name: \"res6\",\n department : \"dep1\",\n id: 6,\n status: \"disponível\",\n description: \"desc\",\n category: \"C\",\n },\n {\n name: \"res7\",\n department : \"dep4\",\n id: 7,\n status: \"disponível\",\n description: \"desc\",\n category: \"D\",\n },\n {\n name: \"res8\",\n department : \"dep4\",\n id: 8,\n status: \"disponível\",\n description: \"desc\",\n category: \"D\",\n },\n {\n name: \"res9\",\n department : \"dep4\",\n id: 9,\n status: \"disponível\",\n description: \"desc\",\n category: \"D\",\n },\n {\n name: \"res10\",\n department : \"dep4\",\n id: 10,\n status: \"disponível\",\n description: \"desc\",\n category: \"D\",\n },\n {\n name: \"res11\",\n department : \"dep4\",\n id: 11,\n status: \"disponível\",\n description: \"desc\",\n category: \"D\",\n },\n {\n name: \"res12\",\n department : \"dep4\",\n id: 12,\n status: \"disponível\",\n description: \"desc\",\n category: \"E\",\n },\n {\n name: \"res13\",\n department : \"dep4\",\n id: 13,\n status: \"disponível\",\n description: \"desc\",\n category: \"E\",\n },\n {\n name: \"res14\",\n department : \"dep2\",\n id: 14,\n status: \"disponível\",\n description: \"desc\",\n category: \"E\",\n },\n {\n name: \"res15\",\n department : \"dep3\",\n id: 15,\n status: \"disponível\",\n description: \"desc\",\n category: \"E\",\n },\n {\n name: \"res16\",\n department : \"dep3\",\n id: 16,\n status: \"disponível\",\n description: \"desc\",\n category: \"E\",\n },\n {\n name: \"res17\",\n department : \"dep3\",\n id: 17,\n status: \"disponível\",\n description: \"desc\",\n category: \"F\",\n },\n {\n name: \"res18\",\n department : \"dep3\",\n id: 18,\n status: \"disponível\",\n description: \"desc\",\n category: \"F\",\n },\n {\n name: \"res19\",\n department : \"dep2\",\n id: 19,\n status: \"indisponível\",\n description: \"desc\",\n category: \"F\",\n },\n {\n name: \"res20\",\n department : \"dep2\",\n id: 20,\n status: \"manutenção\",\n description: \"desc\",\n category: \"F\",\n },\n];\n\nfilteredfakeresources = fakeresources;\nsortedfakeresources = filteredfakeresources;\n"
},
{
"alpha_fraction": 0.2916666567325592,
"alphanum_fraction": 0.4313725531101227,
"avg_line_length": 16.69565200805664,
"blob_id": "58bce7562dde038a257d48d439c25949267195e9",
"content_id": "a0b13a8b8f35f4201db9606f5791d3d109691fb4",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 409,
"license_type": "permissive",
"max_line_length": 33,
"num_lines": 23,
"path": "/static/javascript/fakeavailability.js",
"repo_name": "ESEGroup/Brasil",
"src_encoding": "UTF-8",
"text": "/**\n * Created by Caio on 13/11/2016.\n */\nfakeavailability = {\n name: \"res1\",\n status: \"ON\",\n dates: [{\n username : \"GHT\",\n date: \"18/10/2016\",\n return: \"20/10/2016\",\n },\n {\n username : \"Fábio\",\n date: \"26/10/2016\",\n return: \"27/10/2016\",\n },\n {\n username : \"Hilmer\",\n date: \"29/10/2016\",\n return: \"29/10/2016\",\n }]\n\n};\n\n"
},
{
"alpha_fraction": 0.7251461744308472,
"alphanum_fraction": 0.7309941649436951,
"avg_line_length": 33.20000076293945,
"blob_id": "501b64e1b54399268970de7a7e54d9e77c894d6b",
"content_id": "76f2f3d35cff4884c684302cdbf53c32a9796e40",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": true,
"language": "Python",
"length_bytes": 171,
"license_type": "permissive",
"max_line_length": 64,
"num_lines": 5,
"path": "/env/bin/django-admin.py",
"repo_name": "ESEGroup/Brasil",
"src_encoding": "UTF-8",
"text": "#!/home/selene/Documentos/Brasil-agendae-backend/env/bin/python3\nfrom django.core import management\n\nif __name__ == \"__main__\":\n management.execute_from_command_line()\n"
},
{
"alpha_fraction": 0.6562045812606812,
"alphanum_fraction": 0.6875908374786377,
"avg_line_length": 22.575342178344727,
"blob_id": "d7e9176ea2d3e1a6989728b48b2d9b53a645aa3a",
"content_id": "10d26b788ef1cc7142db44b8d5361a6e30a1c1c5",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 3498,
"license_type": "permissive",
"max_line_length": 157,
"num_lines": 146,
"path": "/app/README.md",
"repo_name": "ESEGroup/Brasil",
"src_encoding": "UTF-8",
"text": "#Serviços REST\n\n##Geral\nPara o mozilla aconselho a extensão [restclient](https://addons.mozilla.org/pt-br/firefox/addon/restclient/)\n\n\n##Login\nRecebe usuário e senha, e caso autenticado, devolve um token para o acesso\n- Endereço: http://localhost:8000/ws/login/\n- Método: POST\n\nExemplo:\n\nEntrada:\n```\nHeader: Content-Type: application/json\nBody: {\"username\":\"usertest\",\"password\":\"m1m2m3m4\"}\n```\nSaída\n```\n{\"token\": \"6f1483b1dd4983620ef17a588de1d0e83dcf236f\"}\n```\nSaída (Exceção)\n```\n{\"non_field_errors\":[\"Impossível fazer login com as credenciais fornecidas.\"]}\n```\n \n##Template \nDeve ser usado para qualquer requisição após o login\n- Endereço: http://localhost:8000/ws/{{ nome }}/\n- Métodos: Variados, sempre há pelo menos POST\n\nEntrada:\n```\nHeader: Content-Type: application/json\nHeader: Authorization: Token 0135ba7654e857b61832705002e5a2ad9e76423e\n```\nSaída (Exceção)\n```\n{\"non_field_errors\":[\"Unexpected error:\" + stacktrace]}\n\n```\n\n##Logout\nRecebe token de um usuário e o renova internamente\n- Endereço: http://localhost:8000/ws/logout/\n- Método: POST, DELETE\n\nExemplo:\n\nEntrada:\n```\nBody: vazio ou qualquer\n```\nSaída\n```\n{\"status\":\"sucesso\"}\n```\n##Cadastro de Usuários\nCadastro para Funcionários, Administradores e SuperAdministradores\n- Endereços:http://localhost:8000/ws/cadastro/funcionario/\n http://localhost:8000/ws/cadastro/administrador/\n http://localhost:8000/ws/cadastro/superadministrador/\n\n- Método: POST, GET\n\nURLs diferentes para níveis de acesso diferentes definidos pelo token\nExemplo:\n\nEntrada:\n```\nBody: {\"username\":\"any\", \"email\":\"[email protected]\", \"first_name\" : \" \", \"last_name\" : \" \", \"registro\" : \"58\", \"departamento\" : \"CT\"}\n```\nSaída\n```\n{\"status\":\"sucesso\"}\n```\n##Atualização de Usuários\nAtualização de informações para Funcionários, Administradores e SuperAdministradores\n- Endereço: http://localhost:8000/ws/update/funcionario/\n http://localhost:8000/ws/update/administrador/\n http://localhost:8000/ws/update/superadministrador/\n- Método: POST, GET\n\nURLs diferentes para níveis de acesso diferentes definidos pelo token\nExemplo:\n\nEntrada:\n```\nBody: {\"pk\":\"6\",\"username\":\"any\", \"password\":\"pass\",\"email\":\"[email protected]\", \"first_name\" : \" \", \"last_name\" : \" \", \"registro\" : \"58\", \"departamento\" : \"CT\"}\n```\nSaída\n```\n{\"status\":\"sucesso\"}\n```\n\n##Deleção de Usuários\nTorna Funcionários, Administradores ou SuperAdministradores inativos\n- Endereço: http://localhost:8000/ws/delete/funcionario/\n http://localhost:8000/ws/delete/administrador/\n http://localhost:8000/ws/delete/superadministrador/\n- Método: POST, GET\n\nURLs diferentes para níveis de acesso diferentes definidos pelo token\nExemplo:\n\nEntrada:\n```\nBody: {\"pk\":\"6\",\"username\":\"any\", \"password\":\"pass\",\"email\":\"[email protected]\", \"first_name\" : \" \", \"last_name\" : \" \", \"registro\" : \"58\", \"departamento\" : \"CT\"}\n```\nSaída\n```\n{\"status\":\"sucesso\"}\n```\n\n##Cadastro de Agendamentos\nCadastro para Agendamentos\n- Endereço: http://localhost:8000/ws/cadastro/agendamento/\n- Método: POST, GET\n\nExemplo:\n\nEntrada:\n```\n{\"username\":\"usertest\", \"patrimonio\":\"777474\",\"inicio\":\"2006-10-25 14:30:59\",\"periodo\":\"7\"}\n```\nSaída\n```\n{\"status\":\"sucesso\",\"PrimaryKey\":\"6\"}\n```\n\n##Deleção de Agendamentos\nCadastro para Agendamentos\n- Endereço: 
http://localhost:8000/ws/cadastro/agendamento/\n- Método: POST, GET\n\nExemplo:\n\nEntrada:\n```\n{\"pk\":\"6\",\"username\":\"usertest\", \"patrimonio\":\"777474\",\"inicio\":\"2006-10-25 14:30:59\",\"periodo\":\"7\"}\n```\nSaída\n```\n{\"status\":\"sucesso\"}\n```"
},
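The README above fully specifies the token flow, so a client is easy to sketch. A hypothetical script using the `requests` library, with the host, credentials and payloads taken from the README examples:

```python
# Hypothetical REST client for the endpoints documented above.
import requests

BASE = "http://localhost:8000/ws"

# 1. login: exchange credentials for a token
resp = requests.post(BASE + "/login/",
                     json={"username": "usertest", "password": "m1m2m3m4"})
token = resp.json()["token"]

# 2. every subsequent call carries the token in the Authorization header
headers = {"Authorization": "Token " + token}
resp = requests.post(BASE + "/cadastro/agendamento/", headers=headers,
                     json={"username": "usertest", "patrimonio": "777474",
                           "inicio": "2006-10-25 14:30:59", "periodo": "7"})
print(resp.json())   # e.g. {"status": "sucesso", "PrimaryKey": "6"}

# 3. logout renews the token server-side, invalidating the old one
requests.post(BASE + "/logout/", headers=headers)
```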
{
"alpha_fraction": 0.650586724281311,
"alphanum_fraction": 0.6675358414649963,
"avg_line_length": 39.3684196472168,
"blob_id": "a17f7c9805188c2c8ccd3754ac099c70c248684e",
"content_id": "2ab84c21ec8b953dfcf251a71c63011abfde3e6d",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 767,
"license_type": "permissive",
"max_line_length": 105,
"num_lines": 19,
"path": "/app/models/usuario.py",
"repo_name": "ESEGroup/Brasil",
"src_encoding": "UTF-8",
"text": "from django.contrib.auth.models import User\nfrom django.db import models\n\nclass Usuario (models.Model):\n # no getters and setters please (http://dirtsimple.org/2004/12/python-is-not-java.html)\n user = models.OneToOneField(User, default=0, related_name='profile',on_delete=models.CASCADE)\n registro = models.PositiveIntegerField()\n #nome = models.CharField(max_length=200)\n departamento = models.CharField(max_length=200)\n #estado = models.PositiveSmallIntegerField()\n #tipo_perfil = models.PositiveSmallIntegerField()\n #email = models.EmailField()\n\n def __str__(self):\n return self.user.username + ' - '+ self.user.first_name\n\n class Meta:\n db_table = 'Usuarios'\n app_label = 'app'\n"
},
{
"alpha_fraction": 0.7103448510169983,
"alphanum_fraction": 0.7310344576835632,
"avg_line_length": 17.25,
"blob_id": "37166dd4b22719bb697f64e76fcac8b2c5157755",
"content_id": "02f08c2bd424514d2722f702677e58048f1f756a",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 145,
"license_type": "permissive",
"max_line_length": 39,
"num_lines": 8,
"path": "/app/models/settingsgroups.py",
"repo_name": "ESEGroup/Brasil",
"src_encoding": "UTF-8",
"text": "from django.db import models\n\n#primary keys\n\nclass SettingsUserGroups(models.Model):\n SuperAdminGroup = 1\n AdminGroup = 2\n FuncGroup = 3"
},
{
"alpha_fraction": 0.5432432293891907,
"alphanum_fraction": 0.5945945978164673,
"avg_line_length": 28.600000381469727,
"blob_id": "94db268800f45d048b0aacb8b656a6d4544c32a4",
"content_id": "6bd3fd8165b5eb3c6fbd95a3e53d960c63477aac",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 749,
"license_type": "permissive",
"max_line_length": 184,
"num_lines": 25,
"path": "/app/migrations/0004_auto_20161115_1756.py",
"repo_name": "ESEGroup/Brasil",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.10.3 on 2016-11-15 19:56\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('app', '0003_auto_20161113_1922'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='recurso',\n name='categoria',\n field=models.CharField(max_length=200),\n ),\n migrations.AlterField(\n model_name='recurso',\n name='estado',\n field=models.CharField(choices=[('Disponível', 'Disponível'), ('Indisponível', 'Indisponível'), ('Em manutenção', 'Em manutenção')], default='Indisponível', max_length=13),\n ),\n ]\n"
},
{
"alpha_fraction": 0.5972871780395508,
"alphanum_fraction": 0.6031879782676697,
"avg_line_length": 37.7210693359375,
"blob_id": "6d283fa0d830d60b31c5d699c824c8435b19edcf",
"content_id": "4394ec6fbb425d3c42ba0c6a679903b891c991eb",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 13063,
"license_type": "permissive",
"max_line_length": 131,
"num_lines": 337,
"path": "/app/views.py",
"repo_name": "ESEGroup/Brasil",
"src_encoding": "UTF-8",
"text": "import sys\nimport json\nfrom django.shortcuts import render\nfrom django.http import HttpResponse, Http404\nfrom django.conf import settings\nfrom app.models import BuscaRecurso, Recurso, CadastroRecurso\nfrom django.core import serializers\nfrom rest_framework.response import Response\nfrom rest_framework import status\nfrom rest_framework.decorators import api_view, authentication_classes, permission_classes\nfrom rest_framework.authentication import TokenAuthentication\nfrom app.permissions import AllowAll,AdminOnly,SuperAdminOnly\nfrom app.models import CadastroUsuario, SettingsUserGroups, Usuario, CadastroAgendamento\nfrom rest_framework.authtoken.models import Token\n\n#static pages\n\ndef index(request):\n return render(request, 'app/index.html', {})\n\ndef catalog(request):\n return render(request, 'app/catalog.html', {})\n\ndef about(request):\n return render(request, 'app/about.html', {})\n\ndef people(request):\n return render(request, 'app/people.html', {})\n\ndef person(request):\n return render(request, 'app/person.html', {})\n\ndef newResource(request):\n return render(request, 'app/new_resource.html', {})\n\n# ajax\n\ndef searchCatalog(request):\n if request.method == 'GET':\n\n texto = str(request.GET.get('texto')) or ''\n categorias = str(request.GET.get('categorias')) if str(request.GET.get('categorias')) != 'None' else '[]'\n enderecos = str(request.GET.get('enderecos')) if str(request.GET.get('enderecos')) != 'None' else '[]'\n disponibilidades = str(request.GET.get('disponibilidades')) if str(request.GET.get('disponibilidades')) != 'None' else '[]'\n\n s = BuscaRecurso()\n s.params = '{\"type\": \"complex\",'\n s.params += '\"texto\": \"' + texto + '\",'\n s.params += '\"categorias\": ' + categorias + ','\n s.params += '\"enderecos\": ' + enderecos + ','\n s.params += '\"disponibilidades\": ' + disponibilidades + ' }'\n res = s.buscar()\n\n if res == \"DoesNotExist ERROR\":\n raise Http404(\"Nenhum recurso possui o número de patrimônio buscado!\")\n\n response_data = {}\n response_data['result'] = serializers.serialize('json', res)\n #print()\n #print (s.params)\n #print (response_data)\n return HttpResponse(\n json.dumps(response_data),\n content_type=\"application/json\"\n )\n else:\n return HttpResponse(\n json.dumps({\"Info\": \"request method not supported\"}),\n content_type=\"application/json\"\n )\n\ndef resource(request, id):\n # search\n s = BuscaRecurso()\n s.params = '{\"type\": \"match\", \"id\": ' + str(id) + '}'\n res = s.buscar()\n # go to 404 if not found\n if res == \"DoesNotExist ERROR\":\n raise Http404(\"Nenhum recurso possui o número de patrimônio buscado!\")\n # set the data\n #context = {'id': id}\n context = {'res': res}\n # do it\n return render(request, 'app/resource.html', context)\n\ndef createNewResource(request):\n if request.method == 'GET':\n\n nome = str(request.GET.get('nome'))\n patrimonio = str(request.GET.get('patrimonio'))\n endereco = str(request.GET.get('endereco'))\n categoria = str(request.GET.get('categoria'))\n descricao = str(request.GET.get('descricao'))\n\n cr = CadastroRecurso()\n if (cr.cadastrar (nome, patrimonio, endereco, categoria, descricao)):\n response_data = {}\n response_data['result'] = patrimonio\n response_data['error'] = \"\"\n return HttpResponse(\n json.dumps(response_data),\n content_type=\"application/json\"\n )\n else:\n response_data = {}\n response_data['result'] = '\"\"'\n response_data['error'] = \"Resource already exists\"\n return HttpResponse(\n json.dumps(response_data),\n 
content_type=\"application/json\"\n )\n else:\n return HttpResponse(\n json.dumps({\"Info\": \"request method not supported\"}),\n content_type=\"application/json\"\n )\n\ndef updateResource(request,patrimonio):\n if request.method == 'GET':\n\n nome = str(request.GET.get('nome'))\n endereco = str(request.GET.get('endereco'))\n categoria = str(request.GET.get('categoria'))\n descricao = str(request.GET.get('descricao'))\n estado = str(request.GET.get('estado'))\n patrimonio = str(request.GET.get('patrimonio'))\n\n cr = CadastroRecurso()\n if(cr.atualizar (patrimonio, nome, descricao, endereco, categoria, estado)):\n response_data = {}\n response_data['result'] = patrimonio\n response_data['error'] = \"\"\n return HttpResponse(\n json.dumps(response_data),\n content_type=\"application/json\"\n )\n else:\n response_data = {}\n response_data['result'] = '\"\"'\n response_data['error'] = \"Resource not found\"\n return HttpResponse(\n json.dumps(response_data),\n content_type=\"application/json\"\n )\n else:\n return HttpResponse(\n json.dumps({\"Info\": \"request method not supported\"}),\n content_type=\"application/json\"\n )\n\n\nfrom .models import Usuario\n@api_view(['POST','GET'])\n@authentication_classes((TokenAuthentication,))\n@permission_classes((AllowAll,))\ndef getInfoUsuario(request):\n data ={}\n if settings.DEBUG:\n print (\"Input: {\\\"function\\\":\\\"\",str(sys._getframe().f_code.co_name),\"} \",end=\"\")\n print (\"{\\\"user:\\\"\\\"\",str(request.user),\"\\\"}\")\n try:\n request.usuario = Usuario.objects.get(user=request.user)\n data = {\n 'username:': request.usuario.user.username,\n 'group:':request.usuario.user.groups.all()[0].name,\n 'first_name': request.usuario.user.first_name,\n 'last_name': request.usuario.user.last_name,\n 'is_active': request.usuario.user.is_active,\n 'last_login': request.usuario.user.last_login,\n 'date_joined': request.usuario.user.date_joined,\n 'departamento':request.usuario.departamento,\n 'registro': request.usuario.registro\n }\n\n return Response(data,status=status.HTTP_202_ACCEPTED)\n except:\n data = {\"non_field_errors\":[\"Unexpected error:\" + str(sys.exc_info()[0])]}\n return Response(data,status=status.HTTP_400_BAD_REQUEST)\n finally:\n if settings.DEBUG: print (\"Output: \",data)\n\n@api_view(['POST','DELETE'])\n@authentication_classes((TokenAuthentication,))\n@permission_classes((AllowAll,))\ndef logout(request):\n data ={}\n if settings.DEBUG:\n print (\"Input: {\\\"function\\\":\\\"\",str(sys._getframe().f_code.co_name),\"} \",end=\"\")\n print (\"{\\\"user:\\\"\\\"\",str(request.user),\"\\\"}\")\n try:\n t=Token.objects.get(user=request.user)\n t.delete()\n Token.objects.create(user=request.user)\n data = {\"status\":\"sucesso\"}\n return Response(data,status=status.HTTP_202_ACCEPTED)\n except:\n data = {\"non_field_errors\":[\"Unexpected error:\" + str(sys.exc_info()[0])]}\n return Response(data,status=status.HTTP_400_BAD_REQUEST,exception=True)\n finally:\n if settings.DEBUG: print (\"Output: \",data)\n\n\n@api_view(['POST','GET'])\n@authentication_classes((TokenAuthentication,))\n@permission_classes((AdminOnly,))\ndef CadastroFuncionario(request,typeOp):\n settingsUserGroups = SettingsUserGroups()\n jsonInput=json.loads(request.body.decode(\"utf-8\"))\n data ={}\n group = settingsUserGroups.FuncGroup\n if settings.DEBUG:\n print (\"Input: {\\\"function\\\":\\\"\",str(sys._getframe().f_code.co_name),\"} \",end=\"\")\n print (jsonInput)\n try:\n cad = CadastroUsuario()\n cad.parser(jsonInput)\n cad.solicitante = 
request.user\n if not(cad.has_permission()):\n data = {\"detail\": \"Você não tem permissão para executar essa ação.\"}\n return Response(data,status=status.HTTP_401_UNAUTHORIZED)\n\n if typeOp == \"cadastrar\" or typeOp == \"cadastro\" or typeOp == \"create\":\n data[\"PrimaryKey\"] = cad.cadastrar(group=group)\n elif typeOp == \"atualizar\" or typeOp == \"atualizacao\" or typeOp == \"update\":\n cad.atualizar()\n elif typeOp == \"deletar\" or typeOp == \"delecao\" or typeOp == \"delete\":\n cad.deletar()\n else:\n return Response(data,status=status.HTTP_404_NOT_FOUND)\n\n data[\"status\"] = \"sucesso\"\n return Response(data,status=status.HTTP_202_ACCEPTED)\n except:\n data = {\"non_field_errors\":[\"Unexpected error:\" + str(sys.exc_info()[0])]}\n return Response(data,status=status.HTTP_400_BAD_REQUEST,exception=True)\n finally:\n if settings.DEBUG: print (\"Output: \",data)\n\n@api_view(['POST','GET'])\n@authentication_classes((TokenAuthentication,))\n@permission_classes((SuperAdminOnly,))\ndef CadastroAdministrador(request,typeOp):\n settingsUserGroups = SettingsUserGroups()\n jsonInput=json.loads(request.body.decode(\"utf-8\"))\n data ={}\n group = settingsUserGroups.AdminGroup\n if settings.DEBUG:\n print (\"Input: {\\\"function\\\":\\\"\",str(sys._getframe().f_code.co_name),\"} \",end=\"\")\n print (jsonInput)\n try:\n cad = CadastroUsuario()\n cad.parser(jsonInput)\n cad.solicitante = request.user\n\n if typeOp == \"cadastrar\" or typeOp == \"cadastro\" or typeOp == \"create\":\n data[\"PrimaryKey\"] = cad.cadastrar(group=group)\n elif typeOp == \"atualizar\" or typeOp == \"atualizacao\" or typeOp == \"update\":\n cad.atualizar()\n elif typeOp == \"deletar\" or typeOp == \"delecao\" or typeOp == \"delete\":\n cad.deletar()\n else:\n return Response(data,status=status.HTTP_404_NOT_FOUND)\n\n data[\"status\"] = \"sucesso\"\n return Response(data,status=status.HTTP_202_ACCEPTED)\n except:\n data = {\"non_field_errors\":[\"Unexpected error:\" + str(sys.exc_info()[0])]}\n return Response(data,status=status.HTTP_400_BAD_REQUEST,exception=True)\n finally:\n if settings.DEBUG: print (\"Output: \",data)\n\n@api_view(['POST','GET'])\n@authentication_classes((TokenAuthentication,))\n@permission_classes((SuperAdminOnly,))\ndef CadastroSuperAdministrador(request,typeOp):\n settingsUserGroups = SettingsUserGroups()\n jsonInput=json.loads(request.body.decode(\"utf-8\"))\n data ={}\n group = settingsUserGroups.SuperAdminGroup\n if settings.DEBUG:\n print (\"Input: {\\\"function\\\":\\\"\",str(sys._getframe().f_code.co_name),\"} \",end=\"\")\n print (jsonInput)\n try:\n cad = CadastroUsuario()\n cad.parser(jsonInput)\n cad.solicitante = request.user\n\n if typeOp == \"cadastrar\" or typeOp == \"cadastro\" or typeOp == \"create\":\n data[\"PrimaryKey\"] = cad.cadastrar(group=group)\n elif typeOp == \"atualizar\" or typeOp == \"atualizacao\" or typeOp == \"update\":\n cad.atualizar()\n else:\n return Response(data,status=status.HTTP_404_NOT_FOUND)\n\n data[\"status\"] = \"sucesso\"\n return Response(data,status=status.HTTP_202_ACCEPTED)\n except:\n data = {\"non_field_errors\":[\"Unexpected error:\" + str(sys.exc_info()[0])]}\n return Response(data,status=status.HTTP_400_BAD_REQUEST,exception=True)\n finally:\n if settings.DEBUG: print (\"Output: \",data)\n\n@api_view(['POST','GET'])\n@authentication_classes((TokenAuthentication,))\n@permission_classes((AllowAll,))\ndef CadastroAgendamentoController(request,typeOp):\n settingsUserGroups = SettingsUserGroups()\n 
jsonInput=json.loads(request.body.decode(\"utf-8\"))\n    data ={}\n    if settings.DEBUG:\n        print (\"Input: {\\\"function\\\":\\\"\",str(sys._getframe().f_code.co_name),\"} \",end=\"\")\n        print (jsonInput)\n    try:\n        cad = CadastroAgendamento()\n        cad.parser(jsonInput)\n        cad.solicitante = request.user\n        if not(cad.has_permission()):\n            data = {\"detail\": \"Você não tem permissão para executar essa ação.\"}\n            return Response(data,status=status.HTTP_401_UNAUTHORIZED)\n\n        if typeOp == \"cadastrar\" or typeOp == \"cadastro\" or typeOp == \"create\":\n            data[\"PrimaryKey\"] = cad.cadastrar()\n        elif typeOp == \"deletar\" or typeOp == \"delecao\" or typeOp == \"delete\":\n            cad.deletar()\n        else:\n            return Response(data,status=status.HTTP_404_NOT_FOUND)\n\n        data[\"status\"] = \"sucesso\"\n        return Response(data,status=status.HTTP_202_ACCEPTED)\n    except:\n        data = {\"non_field_errors\":[\"Unexpected error:\" + str(sys.exc_info()[0])]}\n        return Response(data,status=status.HTTP_400_BAD_REQUEST,exception=True)\n    finally:\n        if settings.DEBUG: print (\"Output: \",data)\n\n# Create your views here.\n"
},
{
"alpha_fraction": 0.5429113507270813,
"alphanum_fraction": 0.550972044467926,
"avg_line_length": 39.17142868041992,
"blob_id": "d2face260b6b60a40b0806fff175b2ed368f6f4f",
"content_id": "149d8104ee436ea63992486089980c30c256fcda",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4225,
"license_type": "permissive",
"max_line_length": 117,
"num_lines": 105,
"path": "/app/tests/teste_recurso.py",
"repo_name": "ESEGroup/Brasil",
"src_encoding": "UTF-8",
"text": "from django.test import TestCase\nfrom app.models import BuscaRecurso, CadastroRecurso\nfrom random import randint\n\n\nclass RecursoTests( TestCase ):\n\n test_registers_number = 0\n nome = \"nome\"\n patrimonio_inicial = 1\n endereco = \"endereco\"\n categoria = \"000\"\n descricao = \"test resource\"\n\n def setUp (self):\n self.test_registers_number = randint(10, 100)\n print (\"Testing for \" + str(self.test_registers_number) + \" resources\")\n patrimonio = self.patrimonio_inicial\n cr = CadastroRecurso()\n for i in range(self.test_registers_number):\n cr.cadastrar (self.nome, patrimonio, self.endereco, self.categoria, self.descricao)\n patrimonio += 1\n\n#=====[ testes de cadastro ]====================================================\n\n def test_cadastro(self):\n patrimonio = self.patrimonio_inicial + self.test_registers_number + 1\n\n cr = CadastroRecurso()\n\n # simple (pass)\n self.assertEqual(cr.cadastrar (self.nome, patrimonio, self.endereco, self.categoria, self.descricao), True)\n # same id (fail)\n self.assertEqual(cr.cadastrar (self.nome, patrimonio, self.endereco, self.categoria, self.descricao), False)\n # delete last register\n self.assertEqual(cr.deletar (patrimonio), True)\n self.assertEqual(cr.deletar (patrimonio), False)\n # 200 character name (pass)\n patrimonio += 1\n nome = \"nxe40kpXTvCPa0T88aJSXemKYWZDXv06ssZfE4gW0xsJgsHKLRWIgamYlYceoZ5hcHGVDAeLZQNJm4tEJxcVypHhV0liPtI9mInlcm0MQemP1qS9qPf1I8bVgniH3Y2OFXF5tOPmX4NTz2q73YfL660sMYtz7JVQQZfBR8jchSUEo2PRrOBFHuxj52rNMy2ToJ49BvMP\"\n self.assertEqual(cr.cadastrar (self.nome, patrimonio, self.endereco, self.categoria, self.descricao), True)\n self.assertEqual(cr.deletar (patrimonio), True)\n # 201 character name (pass)\n patrimonio += 1\n nome += \"A2323\"\n self.assertEqual(cr.cadastrar (self.nome, patrimonio, self.endereco, self.categoria, self.descricao), True)\n self.assertEqual(cr.deletar (patrimonio), True)\n\n#=====[ testes de busca ]=======================================================\n\n def test_busca(self):\n # search all\n s = BuscaRecurso()\n s.params = '{\"type\": \"complex\",'\n s.params += '\"texto\": \"\",'\n s.params += '\"categorias\": [],'\n s.params += '\"enderecos\": [],'\n s.params += '\"disponibilidades\": [] }'\n res = s.buscar()\n self.assertEqual(len(res), self.test_registers_number)\n # search all by one of the categories\n s.params = '{\"type\": \"complex\",'\n s.params += '\"texto\": \"\",'\n s.params += '\"categorias\": [\"000\"],'\n s.params += '\"enderecos\": [],'\n s.params += '\"disponibilidades\": [] }'\n res = s.buscar()\n self.assertEqual(len(res), self.test_registers_number)\n s = BuscaRecurso()\n s.params = '{\"type\": \"complex\",'\n s.params += '\"texto\": \"\",'\n s.params += '\"categorias\": [],'\n s.params += '\"enderecos\": [],'\n s.params += '\"disponibilidades\": [\"Indisponível\"] }'\n res = s.buscar()\n self.assertEqual(len(res), self.test_registers_number)\n s = BuscaRecurso()\n s.params = '{\"type\": \"complex\",'\n s.params += '\"texto\": \"\",'\n s.params += '\"categorias\": [],'\n s.params += '\"enderecos\": [\"endereco\"],'\n s.params += '\"disponibilidades\": [] }'\n res = s.buscar()\n self.assertEqual(len(res), self.test_registers_number)\n\n\n#=====[ testes de atualização e deleção ]=======================================\n\n def test_edit(self):\n patrimonio = self.patrimonio_inicial\n cr = CadastroRecurso()\n for i in range(self.test_registers_number):\n self.assertEqual(cr.atualizar ( patrimonio, \"novo nome\", \"\", \"novo endereco\", \"001\", 
\"Disponível\"), True)\n s = BuscaRecurso()\n s.params = '{\"type\": \"match\", \"id\": ' + str(patrimonio) + '}'\n res = s.buscar()\n self.assertEqual(res.nome, \"novo nome\")\n self.assertEqual(res.estado, \"Disponível\")\n self.assertEqual(res.descricao, \"\")\n self.assertEqual(res.endereco, \"novo endereco\")\n self.assertEqual(res.categoria, \"001\")\n cr.deletar(patrimonio)\n patrimonio += 1\n\n#===============================================================================\n"
},
{
"alpha_fraction": 0.6588628888130188,
"alphanum_fraction": 0.6605350971221924,
"avg_line_length": 25,
"blob_id": "e36ca524d83f5ecbc3fa0801f02544f78eff7bd9",
"content_id": "f41b78c75c49e6c9296a1916b31e9c8429446eb3",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 598,
"license_type": "permissive",
"max_line_length": 75,
"num_lines": 23,
"path": "/app/serializers.py",
"repo_name": "ESEGroup/Brasil",
"src_encoding": "UTF-8",
"text": "from django.contrib.auth.models import User, Group\nfrom rest_framework import serializers\nfrom .models import Usuario\n\n\nclass UserSerializer(serializers.ModelSerializer):\n class Meta:\n model = User\n #fields = '__all__'\n exclude = ('is_staff','user_permissions','password','is_superuser')\n\nclass UsarioSerializer(serializers.ModelSerializer):\n user = UserSerializer()\n class Meta:\n model = Usuario\n # depth = 1\n fields = '__all__'\n\n\nclass GroupSerializer(serializers.ModelSerializer):\n class Meta:\n model = Group\n fields = ('name')\n"
},
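Because `UsarioSerializer` nests `UserSerializer`, serializing a profile emits the related `auth.User` fields under a `user` key. A quick sketch, assuming a saved `Usuario` exists in a configured Django environment:

```python
# Sketch: what the nested serializer above produces for one profile.
from app.models import Usuario
from app.serializers import UsarioSerializer

usuario = Usuario.objects.select_related('user').first()
data = UsarioSerializer(usuario).data
# e.g. {'id': 1, 'registro': 58, 'departamento': 'CT',
#       'user': {'id': 1, 'username': 'usertest', ...}}
```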
{
"alpha_fraction": 0.5406758189201355,
"alphanum_fraction": 0.5844805836677551,
"avg_line_length": 29.730770111083984,
"blob_id": "91eb61a0baad8a36619b90d8c1d9b325719ff9f3",
"content_id": "0235cc8791632a2d07e5b1954b8b492c43bb39c3",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 799,
"license_type": "permissive",
"max_line_length": 164,
"num_lines": 26,
"path": "/app/migrations/0006_auto_20161219_1317.py",
"repo_name": "ESEGroup/Brasil",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.10.3 on 2016-12-19 15:17\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('app', '0005_auto_20161119_1727'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='SettingsUserGroups',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ],\n ),\n migrations.AlterField(\n model_name='agendamento',\n name='estado',\n field=models.CharField(choices=[('Agendado', 'Agendado'), ('Cancelado', 'Cancelado'), ('Confirmado', 'Confirmado')], default='Agendado', max_length=12),\n ),\n ]\n"
},
{
"alpha_fraction": 0.702479362487793,
"alphanum_fraction": 0.7520661354064941,
"avg_line_length": 59.5,
"blob_id": "823bd8b03c123d359318c8eadc79a410a021f9e0",
"content_id": "601e5cfb896055f1b612f91a09859e6011825f84",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 121,
"license_type": "permissive",
"max_line_length": 91,
"num_lines": 2,
"path": "/app/models.py",
"repo_name": "ESEGroup/Brasil",
"src_encoding": "UTF-8",
"text": "from django.db import models\n # no getters and setters please (http://dirtsimple.org/2004/12/python-is-not-java.html)\n"
},
{
"alpha_fraction": 0.6064247488975525,
"alphanum_fraction": 0.6115091443061829,
"avg_line_length": 28.026845932006836,
"blob_id": "3e3fa198a43aa3e3da49d126bba681432f5e2466",
"content_id": "0d2ce7ad597864acd90d5bda5b5a259f6cbe7d4e",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 4328,
"license_type": "permissive",
"max_line_length": 125,
"num_lines": 149,
"path": "/static/javascript/resources.js",
"repo_name": "ESEGroup/Brasil",
"src_encoding": "UTF-8",
"text": "/**\n * Created by Caio on 11/11/2016.\n */\n\nvar editInfo = false;\nvar editSchedule = false;\n\nfunction getResourceInfo(id) {\n\n\n var html = '';\n var it;\n\n //Fill fields\n //getResourceById\n var resource = data.resources[id - 1];\n var field_name = document.getElementById('text-resource-name');\n var field_loc = document.getElementById('text-resource-location');\n var field_id = document.getElementById('text-resource-id');\n var field_cat = document.getElementById('text-resource-cat');\n var field_desc = document.getElementById('text-resource-desc');\n\n field_name.value = resource.name;\n field_loc.value = resource.department;\n field_id.value = resource.id;\n field_cat.value = resource.category;\n field_desc.value = resource.description;\n document.getElementById('resourceName').innerHTML = resource.name;\n\n\n\n //History table\n html += '<table id=\"history-table\" class=\"resourceTable\">';\n html += '<tr>';\n\n html += '<td>Funcionário</td>';\n html += '<td>Agendamento</td>';\n html += '<td>Retorno</td>';\n html += '</tr>';\n\n for(it = 0 ; it < fakehistory.length; it += 1){\n html += '<tr>';\n html += '<td>' + fakehistory[it].username + '</td>';\n html += '<td>' + fakehistory[it].date + '</td>';\n html += '<td>' + fakehistory[it].return + '</td>';\n html += '</tr>';\n }\n\n html += '</table>';\n document.getElementById('table-history').innerHTML = html;\n\n //Fill Time Picker\n html='';\n for(it=0 ; it <schedule.length ; it+=2){\n\n }\n\n\n\n //Calendar\n new Kalendae('date-picker', {\n months: 1,\n mode: 'single',\n selected: Kalendae.moment(),\n subscribe: {\n 'change': function (date) {\n //getDateSchedules\n console.log(date, this.getSelected());\n document.getElementById('date-title').innerHTML = '<b>'+this.getSelected()+'</b>';\n fillTableSchedule(this.getSelected());\n }\n }\n });\n\n\n //Availability\n //getAvailability\n document.getElementById('resource-availability').onclick = function() { \n if (document.getElementById('resource-availability').checked){\n document.getElementById('label-switch-agendamento').innerHTML='<b>Agendamento <font color=\"green\">ON</font></b>';\n } else {\n document.getElementById('label-switch-agendamento').innerHTML='<b>Agendamento <font color=\"red\">OFF</font></b>';\n }\n }\n if(fakeavailability.status === \"ON\"){\n document.getElementById('resource-availability').checked=true;\n document.getElementById('label-switch-agendamento').innerHTML='<b>Agendamento <font color=\"green\">ON</font></b>';\n } else {\n document.getElementById('resource-availability').checked=false;\n document.getElementById('label-switch-agendamento').innerHTML='<b>Agendamento <font color=\"red\">OFF</font></b>';\n }\n\n\n}\n\nfunction fillTableSchedule(date){\n\n}\n\nfunction blankResource(){\n // document.getElementById(\"title-resource\").style.display = 'none';\n\n document.getElementById(\"resourceName\").innerHTML = 'Novo recurso';\n document.getElementById(\"button-salvar\").style.display = 'none';\n\n document.getElementById(\"resource-schedule-box\").style.display = 'none';\n document.getElementById(\"resource-history-box\").style.display = 'none';\n document.getElementById(\"resource-info-container-buttons\").style.display = 'none';\n\n document.getElementById(\"card-info-center\").style = 'display: flex;justify-content: center; width: auto;'; \n document.getElementById(\"resource-container\").style = 'width:50vh'; \n document.getElementById(\"text-container\").style = 'padding:0'; \n \n\n 
document.getElementById(\"button-cadastrar-recurso\").style = 'display: flex; justify-content:center';\n \n \n \n\n\n \n}\n\n\nfunction getResource(id) {\n\n // var id = parseInt(location.search.substr(1).split(\"&\")[0].split(\"=\")[1]);\n //if normal\n // if (id > 0) {\n // getResourceInfo(id);\n // scheduleResource(id);\n // }\n\n\n //if admin\n if (id > 0) {\n getResourceInfo(id);\n // scheduleResource(id);\n // getResourceHistory(id);\n // adminOn();\n } else {\n blankResource();\n // editOnResourceData();\n\n }\n\n\n\n}\n\n\n"
},
{
"alpha_fraction": 0.48140949010849,
"alphanum_fraction": 0.5018225908279419,
"avg_line_length": 37.10185241699219,
"blob_id": "b7f8a9384c78bdcbdeaa8391ad82031bac375314",
"content_id": "7e4dcd42a0e3e763ac381a2c0c8446b19c59b6ff",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4123,
"license_type": "permissive",
"max_line_length": 502,
"num_lines": 108,
"path": "/app/migrations/0001_initial.py",
"repo_name": "ESEGroup/Brasil",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.10.3 on 2016-11-13 18:58\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n ]\n\n operations = [\n migrations.CreateModel(\n name='BancoAcesso',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ],\n options={\n 'managed': False,\n },\n ),\n migrations.CreateModel(\n name='BuscaRecurso',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ],\n options={\n 'managed': False,\n },\n ),\n migrations.CreateModel(\n name='BuscaUsuario',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ],\n options={\n 'managed': False,\n },\n ),\n migrations.CreateModel(\n name='CadastroRecurso',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ],\n options={\n 'managed': False,\n },\n ),\n migrations.CreateModel(\n name='CadastroUsuario',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ],\n options={\n 'managed': False,\n },\n ),\n migrations.CreateModel(\n name='NotificadorAgendamento',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ],\n options={\n 'managed': False,\n },\n ),\n migrations.CreateModel(\n name='NotificadorCadastro',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ],\n options={\n 'managed': False,\n },\n ),\n migrations.CreateModel(\n name='Recurso',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('nome', models.CharField(max_length=200)),\n ('endereco', models.CharField(max_length=200)),\n ('descricao', models.CharField(max_length=200)),\n ('patrimonio', models.PositiveIntegerField()),\n ('estado', models.CharField(choices=[('000', 'Desconhecido'), ('001', 'Monitor'), ('002', 'Projetor'), ('003', 'Acessório para computador'), ('004', 'Computador'), ('005', 'Material de escritório'), ('006', 'Móvel'), ('007', 'Sala de Aula'), ('008', 'Auditório'), ('009', 'Imóvel'), ('010', 'Equipamento laboratorial'), ('011', 'Equipamento de limpeza'), ('012', 'Equipamento métrico'), ('013', 'Equipamento de manutenção'), ('014', 'Outro equipamento')], default='000', max_length=3)),\n ],\n options={\n 'db_table': 'Recursos',\n },\n ),\n migrations.CreateModel(\n name='Usuario',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('registro', models.PositiveIntegerField()),\n ('nome', models.CharField(max_length=200)),\n ('departamento', models.CharField(max_length=200)),\n ('estado', models.PositiveSmallIntegerField()),\n ('tipo_perfil', models.PositiveSmallIntegerField()),\n ('email', models.EmailField(max_length=254)),\n ],\n options={\n 'db_table': 'Usuarios',\n },\n ),\n ]\n"
},
{
"alpha_fraction": 0.6254205703735352,
"alphanum_fraction": 0.6328971982002258,
"avg_line_length": 34.66666793823242,
"blob_id": "1fd7bf9664de6012e630844ac2d798b12cdb3c5e",
"content_id": "5b4a911a14a4629c6a66a228c2d763dc7c8033ab",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2677,
"license_type": "permissive",
"max_line_length": 112,
"num_lines": 75,
"path": "/app/models/cadastro_agendamento.py",
"repo_name": "ESEGroup/Brasil",
"src_encoding": "UTF-8",
"text": "from django.db import models\nfrom app.models import Cadastro, Usuario, Agendamento, Recurso\nfrom django.contrib.auth.models import User, Group\nfrom .settingsgroups import SettingsUserGroups\n\nclass CadastroAgendamento(Cadastro):\n\n notificador = None #NotificadorAgendamento\n solicitante = Usuario()\n settingsUserGroups = SettingsUserGroups()\n agendamento = Agendamento()\n agendamentoTemplate = Agendamento()\n agendamentoTemplate.usuario = Usuario()\n agendamentoTemplate.recurso = Recurso()\n\n\n def has_permission(self):\n if self.solicitante.groups.all()[0].pk == self.settingsUserGroups.SuperAdminGroup:\n return True\n elif self.solicitante.groups.all()[0].pk == self.settingsUserGroups.AdminGroup:\n usuario = Usuario.objects.get(user=self.solicitante)\n if self.agendamentoTemplate.usuario.departamento == usuario.departamento:\n return True\n else:\n return False\n elif self.solicitante.groups.all()[0].pk == self.settingsUserGroups.FuncGroup:\n if self.agendamentoTemplate.usuario.user == self.solicitante:\n return True\n else:\n return False\n else:\n return False\n\n return True \n \n def parser (self,json):\n #model : {\"pk\":\"9\",\"username\":\"anything\", \"patrimonio\":\"7\",\"inicio\":\"2006-10-25 14:30:59\",\"periodo\":\"7\"}\n self.agendamentoTemplate.inicio = json[\"inicio\"]\n self.agendamentoTemplate.periodo = json[\"periodo\"]\n user = User.objects.get(username=json[\"username\"])\n self.agendamentoTemplate.usuario = Usuario.objects.get(user=user)\n self.agendamentoTemplate.recurso = Recurso.objects.get(patrimonio=int(json[\"patrimonio\"]))\n if 'pk' in json.keys() and json['pk'] != '':\n self.agendamentoTemplate.pk=int(json['pk'])\n self.agendamento = Agendamento.objects.get(pk=self.agendamentoTemplate.pk)\n \n \n def cadastrar (self):\n self.agendamento = Agendamento.objects.create(\n usuario= self.agendamentoTemplate.usuario,\n recurso = self.agendamentoTemplate.recurso,\n inicio = self.agendamentoTemplate.inicio,\n periodo = self.agendamentoTemplate.periodo,\n estado = \"Agendado\"\n )\n\n return self.agendamento.pk\n #falta notificação\n\n def deletar (self):\n self.agendamento.estado = \"Cancelado\"\n self.agendamento.save()\n\n '''\n def atualizar (self):\n \n\n \n\n def notificar (self):\n return False\n '''\n class Meta:\n managed = False\n app_label = 'app'\n"
},
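`CadastroAgendamento` is meant to be fed the same JSON documented in `parser`'s comment, and `CadastroAgendamentoController` in `views.py` above drives it exactly this way. A condensed sketch of that flow, assuming the user and resource from the README examples already exist:

```python
# Sketch of the booking flow CadastroAgendamentoController performs.
from django.contrib.auth.models import User
from app.models import CadastroAgendamento

payload = {"username": "usertest", "patrimonio": "777474",
           "inicio": "2006-10-25 14:30:59", "periodo": "7"}

cad = CadastroAgendamento()
cad.parser(payload)
cad.solicitante = User.objects.get(username="usertest")

if cad.has_permission():
    pk = cad.cadastrar()   # creates an Agendamento with estado="Agendado"
    cad.deletar()          # soft delete: flips estado to "Cancelado"
```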
{
"alpha_fraction": 0.5238515734672546,
"alphanum_fraction": 0.5865724086761475,
"avg_line_length": 44.279998779296875,
"blob_id": "461400abcf7a358fca057a5723d1255c47392566",
"content_id": "e9d581b1944710fe2e1a8da2fb869de7ef984c6f",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1144,
"license_type": "permissive",
"max_line_length": 492,
"num_lines": 25,
"path": "/app/migrations/0003_auto_20161113_1922.py",
"repo_name": "ESEGroup/Brasil",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.10.3 on 2016-11-13 19:22\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('app', '0002_agendamento'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='recurso',\n name='categoria',\n field=models.CharField(choices=[('000', 'Desconhecido'), ('001', 'Monitor'), ('002', 'Projetor'), ('003', 'Acessório para computador'), ('004', 'Computador'), ('005', 'Material de escritório'), ('006', 'Móvel'), ('007', 'Sala de Aula'), ('008', 'Auditório'), ('009', 'Imóvel'), ('010', 'Equipamento laboratorial'), ('011', 'Equipamento de limpeza'), ('012', 'Equipamento métrico'), ('013', 'Equipamento de manutenção'), ('014', 'Outro equipamento')], default='000', max_length=3),\n ),\n migrations.AlterField(\n model_name='recurso',\n name='estado',\n field=models.CharField(choices=[('AV', 'Disponível'), ('UV', 'Indisponível'), ('MA', 'Em manutenção')], default='UV', max_length=2),\n ),\n ]\n"
},
{
"alpha_fraction": 0.7745762467384338,
"alphanum_fraction": 0.7762711644172668,
"avg_line_length": 30.864864349365234,
"blob_id": "9b7b4537a813fe68d75a1fb6eb4bd46aeb7032e8",
"content_id": "100171577fae34cc6c4162c96038ed8da0879130",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1180,
"license_type": "permissive",
"max_line_length": 157,
"num_lines": 37,
"path": "/app/permissions.py",
"repo_name": "ESEGroup/Brasil",
"src_encoding": "UTF-8",
"text": "from rest_framework.permissions import BasePermission\nfrom rest_framework.compat import is_authenticated\nfrom django.contrib.auth.models import User, Group\nfrom .models import SettingsUserGroups\n\nsettingsUserGroups = SettingsUserGroups()\n\nclass AllowAll(BasePermission):\n\n #Allows access only to authenticated users.\n\n\tdef has_permission(self, request, view):\n\t\treturn request.user and is_authenticated(request.user)\n\nclass AdminOnly(BasePermission):\n \n #Allows access only to authenticated Admins.\n\tdef has_permission(self, request, view):\n\t\t#group = request.user.groups.all()[0].pk\n\n\t\tif request.user and is_authenticated(request.user):\n\t\t\treturn request.user.groups.filter(pk=settingsUserGroups.SuperAdminGroup).exists() or request.user.groups.filter(pk=settingsUserGroups.AdminGroup).exists()\n\t\telse:\n\t\t\treturn False\n\n\nclass SuperAdminOnly(BasePermission):\n \n #Allows access only to authenticated SuperAdmin.\n\n\tdef has_permission(self, request, view):\n\t\t#group = request.user.groups.all()[0].pk\n\t\t\n\t\tif request.user and is_authenticated(request.user):\n\t\t\treturn request.user.groups.filter(pk=settingsUserGroups.SuperAdminGroup).exists()\n\t\telse:\n\t\t\treturn False\n\n"
},
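These permission classes are attached to function-based views through the DRF decorator stack, as every webservice in `views.py` above does. A minimal sketch of the wiring:

```python
# Minimal sketch: attaching the permission classes above to a DRF view,
# mirroring the decorator stack used throughout views.py.
from rest_framework.decorators import (api_view, authentication_classes,
                                       permission_classes)
from rest_framework.authentication import TokenAuthentication
from rest_framework.response import Response
from app.permissions import AdminOnly

@api_view(['GET'])
@authentication_classes((TokenAuthentication,))
@permission_classes((AdminOnly,))
def exemplo_admin(request):
    # reached only when the token belongs to an Admin or SuperAdmin
    return Response({"ok": True})
```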
{
"alpha_fraction": 0.5093677043914795,
"alphanum_fraction": 0.5497658252716064,
"avg_line_length": 28.96491241455078,
"blob_id": "a174fd44938d9150a28cf455a982bf6ea3489478",
"content_id": "7ea285bdacab209284362418b79517ab37d3da8f",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1725,
"license_type": "permissive",
"max_line_length": 91,
"num_lines": 57,
"path": "/app/models/recurso.py",
"repo_name": "ESEGroup/Brasil",
"src_encoding": "UTF-8",
"text": "from django.db import models\n\nclass Recurso(models.Model):\n # no getters and setters please (http://dirtsimple.org/2004/12/python-is-not-java.html)\n\n nome = models.CharField(max_length=200)\n endereco = models.CharField(max_length=200)\n descricao = models.CharField(max_length=200)\n patrimonio = models.PositiveIntegerField()\n\n # state choices:\n ESTADO_CHOICES = (\n ('Disponível', 'Disponível'),\n ('Indisponível', 'Indisponível'),\n ('Em manutenção', 'Em manutenção'),\n )\n estado = models.CharField(\n max_length = 13,\n choices = ESTADO_CHOICES,\n default = 'Indisponível',\n )\n\n categoria = models.CharField(max_length=200)\n # category choices:\n #CATEGORIA_CHOICES = (\n # # unknown\n # ('000', 'Desconhecido'),\n # # classroom equipments\n # ('001', 'Monitor'),\n # ('002', 'Projetor'),\n # ('003', 'Acessório para computador'),\n # ('004', 'Computador'),\n # ('005', 'Material de escritório'),\n # ('006', 'Móvel'),\n # # rooms and buildings\n # ('007', 'Sala de Aula'),\n # ('008', 'Auditório'),\n # ('009', 'Imóvel'),\n # # other equipments\n # ('010', 'Equipamento laboratorial'),\n # ('011', 'Equipamento de limpeza'),\n # ('012', 'Equipamento métrico'),\n # ('013', 'Equipamento de manutenção'),\n # ('014', 'Outro equipamento'),\n #)\n #categoria = models.CharField(\n # max_length = 3,\n # choices = CATEGORIA_CHOICES,\n # default = '000',\n #)\n\n def __str__(self):\n return self.nome + ' - '+ str(self.patrimonio)\n\n class Meta:\n db_table = 'Recursos'\n app_label = 'app'\n"
},
{
"alpha_fraction": 0.5916030406951904,
"alphanum_fraction": 0.5954198241233826,
"avg_line_length": 15.375,
"blob_id": "40cb603fbb6ae3ff2b9b53ac7804c28ed124e4d3",
"content_id": "1792cc5f1c8765fc884b4522f3ec3ee3114537b3",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 262,
"license_type": "permissive",
"max_line_length": 32,
"num_lines": 16,
"path": "/app/models/gerenciador.py",
"repo_name": "ESEGroup/Brasil",
"src_encoding": "UTF-8",
"text": "from django.db import models\n\nclass GerenciadorAgendamento ():\n\n acesso = None\n notificador = None\n\n def dataUltimaChecagem ():\n return 0\n\n def rotina ():\n return False\n\n class Meta:\n managed = False\n app_label = 'app'\n"
},
{
"alpha_fraction": 0.7475685477256775,
"alphanum_fraction": 0.7634836435317993,
"avg_line_length": 38.68421173095703,
"blob_id": "0cd10d137b04654361909af54adb07b32747a0fe",
"content_id": "1105326cc756999a31ef2df6e3a96292022db3ce",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 2326,
"license_type": "permissive",
"max_line_length": 270,
"num_lines": 57,
"path": "/README.md",
"repo_name": "ESEGroup/Brasil",
"src_encoding": "UTF-8",
"text": "# Brasil - Agendae\n\n\n## Sobre o projeto\n\nO projeto Agendaê é um sistema de agendamento de recursos acadêmicos desenvolvido no contexto da matédia de Engenharia de Software *(EEL873)*, ministrada pelo professor Guilherme Horta Travassos na Universidade Federal do Rio de Janeiro em 2016.2.\n\nAo longo do curso foram levantados diversos requisitos e estabelecidas inumeras restrições e alterações. Todo essetrabalho culminou no conteúdo desse repositório, que em sua totalidade contém a implementação prática do projeto suas minúcias de especificação na aba Wiki.\n\n## Como utilizar\n\n### Requisitos Técnicos\n\nAs seguintes tecnologias são necessárias para o bom funcionamento do sistema:\n\n- Python 3\n\nTodas as outras dependências e bibliotecas externas já estão acopladas nesse repositório e não necessitam de maior atenção do usuário. O sistema de base de dados utilizado foi o `sqlite3` fornecido pelo próprio Django.\n\n#### Inicializando o ambiente\n\n- Faça download o clone o repositório para um diretório dentro de sua preferência. Em caso de download do arquivo .zip não se esqueça de extrair os aqruivos.\n- Utillizando o terminal, navegue até o diretório que você extraiu esse repositório e digite o seguinte comando:\n```\nsource env/bin/activate\n\n```\n\n### Inicializando o servidor\n\nPara usuários do sistema operacional Linux, basta navegar até o diretório que foi extraido o repositório e digitar o comando a seguir no terminal de controle.\n\n```\npython3 manage.py runserver\n```\n\n- Acesse a aplicação em `http://127.0.0.1:8000/app/` ou `localhost:8000/app/`\n- Utilize o usuário **superadmin** e a senha **superadmin** para se autenticar no sistema.\n\n#### Criando novo usuário\n- Acesse http://127.0.0.1:8000/admin/\n- Utilize o usuário **superadmin** e a senha **superadmin** para se autenticar no sistema.\n- Na aba **Autenticação e Autorização administração** crie um usuário para o sistema\n- Na aba **APP** crie um usuário para a aplicação específica, no caso o Agendê. Esse usuário deve estar associado a um usuário já criado no sistema.\n\n### Testando\n\n```\npython3 manage.py test app/tests/\n```\n\n## Responsáveis\n\n- [Caio Riqueza](https://github.com/caiocrr)\n- [Lucas Rolim](https://github.com/lucaslrolim)\n- [Pedro Boueke](https://github.com/pboueke)\n- [Vinícius Alves](https://github.com/vinicius-alves)\n"
},
{
"alpha_fraction": 0.6863468885421753,
"alphanum_fraction": 0.6863468885421753,
"avg_line_length": 19.846153259277344,
"blob_id": "19517de4713a10151581df4e36a7d973ddafb6ac",
"content_id": "4aa67f3763ba037b83a15fda0368d7241354436e",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 271,
"license_type": "permissive",
"max_line_length": 44,
"num_lines": 13,
"path": "/app/models/notificador_cadastro.py",
"repo_name": "ESEGroup/Brasil",
"src_encoding": "UTF-8",
"text": "from django.db import models\nfrom app.models import Notificador\n\nclass NotificadorCadastro (Notificador):\n\n cadastro_usuario = None #CadastroUsuario\n\n def construirMensagem ():\n return False\n\n class Meta:\n managed = False\n app_label = 'app'\n"
},
{
"alpha_fraction": 0.7009456157684326,
"alphanum_fraction": 0.7033097147941589,
"avg_line_length": 51.875,
"blob_id": "1f2d3e86ee8e3e04f0577c9baa67db059501fdd5",
"content_id": "0596b4827c53f408e391adb7d7e2519cdc001229",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1692,
"license_type": "permissive",
"max_line_length": 126,
"num_lines": 32,
"path": "/app/urls.py",
"repo_name": "ESEGroup/Brasil",
"src_encoding": "UTF-8",
"text": "from django.conf.urls import url\nfrom django.contrib.staticfiles.urls import staticfiles_urlpatterns\nfrom rest_framework.authtoken import views as authviews\nfrom rest_framework.urlpatterns import format_suffix_patterns\n\nfrom . import views\n\nurlpatterns = [\n url(r'^$', views.index, name='index'),\n url(r'about/$', views.about, name='about'),\n url(r'people/$', views.people, name='people'),\n url(r'person/$', views.person, name='person'),\n url(r'catalog/$', views.catalog, name='catalog'),\n url(r'catalog/searchCatalog/$', views.searchCatalog, name='search-catalog'),\n url(r'catalog/newResource/createNewResource/$', views.createNewResource, name='create_new_resource'),\n url(r'catalog/newResource/$', views.newResource, name='new_resource'),\n url(r'catalog/(?P<id>-?[0-9]+)/$', views.resource, name='resource'),\n url(r'catalog/(?P<patrimonio>-?[0-9]+)/updateResource/$', views.updateResource, name='update_resource'),\n]\n\nurlpatterns += staticfiles_urlpatterns()\n\n#rest webservices\nurlpatterns +=[\n url(r'^ws/login/$', authviews.obtain_auth_token, name='login'),\n url(r'^ws/logout/$', views.logout, name='logout'),\n url(r'^ws/getinfousuario/$', views.getInfoUsuario, name='getInfoUsuario'),\n url(r'^ws/(?P<typeOp>[\\w\\-]+)/funcionario/$', views.CadastroFuncionario, name='cadastroFuncionario'),\n url(r'^ws/(?P<typeOp>[\\w\\-]+)/administrador/$', views.CadastroAdministrador, name='cadastroAdministrador'),\n url(r'^ws/(?P<typeOp>[\\w\\-]+)/superadministrador/$', views.CadastroSuperAdministrador, name='cadastroSuperAdministrador'),\n url(r'^ws/(?P<typeOp>[\\w\\-]+)/agendamento/$', views.CadastroAgendamentoController, name='cadastroAgendamento'),\n]\n"
},
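Note on the routes above: `ws/login/` maps to Django REST framework's `obtain_auth_token` view, so a client exchanges credentials for a token and then sends it in the `Authorization` header on later webservice calls. A minimal sketch with the `requests` library, assuming the local server and the **superadmin** credentials from the README (whether `getinfousuario` accepts a plain GET is an assumption):

```
import requests

BASE = 'http://127.0.0.1:8000/app'

# DRF's obtain_auth_token takes POSTed credentials and replies with {"token": "..."}
resp = requests.post(BASE + '/ws/login/',
                     data={'username': 'superadmin', 'password': 'superadmin'})
token = resp.json()['token']

# Later webservice calls authenticate with the "Token <key>" header scheme
info = requests.get(BASE + '/ws/getinfousuario/',
                    headers={'Authorization': 'Token ' + token})
print(info.status_code, info.text)
```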
{
"alpha_fraction": 0.5729166865348816,
"alphanum_fraction": 0.5729166865348816,
"avg_line_length": 26.200000762939453,
"blob_id": "79d5807c10ec6dc1f4365853341d9619aaf4cd0b",
"content_id": "4e401609c079a6a3d132b15c32472a7efe3a5785",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1632,
"license_type": "permissive",
"max_line_length": 116,
"num_lines": 60,
"path": "/app/models/cadastro_recurso.py",
"repo_name": "ESEGroup/Brasil",
"src_encoding": "UTF-8",
"text": "from django.db import models\nfrom app.models import Usuario, Recurso, Cadastro, BuscaRecurso\n\nclass CadastroRecurso(Cadastro):\n\n notificador = None #NotificadorCadastro\n recurso = None\n solicitante = None\n\n def cadastrar (self, nome, patrimonio, endereco, categoria, descricao):\n s = BuscaRecurso()\n s.params = '{\"type\": \"match\", \"id\": ' + str(patrimonio) + '}'\n #print(s.params)\n res = s.buscar()\n\n if res != \"DoesNotExist ERROR\":\n return False\n\n rec = Recurso(nome=nome, patrimonio=patrimonio, endereco=endereco, categoria=categoria, descricao=descricao)\n rec.save()\n return True\n\n def atualizar (self, patrimonio, nome, descricao, endereco, categoria, estado):\n\n s = BuscaRecurso()\n s.params = '{\"type\": \"match\", \"id\": ' + str(patrimonio) + '}'\n #print(s.params)\n res = s.buscar()\n\n if res == \"DoesNotExist ERROR\":\n # if resource already exists..\n return False\n\n res.nome=nome\n res.endereco=endereco\n res.categoria=categoria\n res.descricao=descricao\n res.estado=estado\n res.save()\n return True\n\n def deletar (self, patrimonio):\n s = BuscaRecurso()\n s.params = '{\"type\": \"match\", \"id\": ' + str(patrimonio) + '}'\n #print(s.params)\n res = s.buscar()\n\n if res == \"DoesNotExist ERROR\":\n # if resource already exists..\n return False\n\n res.delete()\n return True\n\n def notificar ():\n return False\n\n class Meta:\n managed = False\n app_label = 'app'\n"
},
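For reference, the three methods above all probe `BuscaRecurso` with a JSON `params` string keyed on the `patrimonio` number before acting. A small usage sketch, meant for a configured Django shell; the field values are made up:

```
from app.models.cadastro_recurso import CadastroRecurso

c = CadastroRecurso()

# cadastrar only succeeds when no resource with this patrimonio exists yet
created = c.cadastrar(nome='Projetor', patrimonio=42, endereco='Bloco H',
                      categoria='Eletronicos', descricao='Projetor da sala H-324')
print('created:', created)

# atualizar/deletar return False when the patrimonio cannot be found
c.atualizar(42, 'Projetor HD', 'Projetor novo', 'Bloco H', 'Eletronicos', 'Disponivel')
c.deletar(42)
```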
{
"alpha_fraction": 0.6105651259422302,
"alphanum_fraction": 0.6203930974006653,
"avg_line_length": 27.068965911865234,
"blob_id": "c381b933abc242cc8936b0d1315ba9ec0f7159fb",
"content_id": "bd41755b18bcf6cc9ea824e259e0e34bba3e52d4",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 814,
"license_type": "permissive",
"max_line_length": 91,
"num_lines": 29,
"path": "/app/models/agendamento.py",
"repo_name": "ESEGroup/Brasil",
"src_encoding": "UTF-8",
"text": "from django.db import models\nfrom app.models import Recurso, Usuario\n\nclass Agendamento (models.Model):\n # no getters and setters please (http://dirtsimple.org/2004/12/python-is-not-java.html)\n\n usuario = models.ForeignKey(Usuario)\n recurso = models.ForeignKey(Recurso)\n inicio = models.DateTimeField()\n periodo = models.PositiveIntegerField() #seconds\n # state choices:\n ESTADO_CHOICES = (\n ('Agendado', 'Agendado'),\n ('Cancelado', 'Cancelado'),\n ('Confirmado', 'Confirmado'),\n )\n estado = models.CharField(\n max_length = 12,\n choices = ESTADO_CHOICES,\n default = 'Agendado',\n )\n\n def __str__(self):\n\n return self.recurso.nome + ' - '+ str(self.inicio)\n\n class Meta:\n db_table = 'Agendamentos'\n app_label = 'app'\n"
},
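To illustrate the model above: an `Agendamento` ties a `Usuario` to a `Recurso` with a start time, a duration in seconds and one of the three `estado` choices. A hypothetical shell snippet (the lookups assume existing rows; `Recurso` having a `patrimonio` field matches its use elsewhere in the app):

```
from datetime import datetime
from app.models import Agendamento, Recurso, Usuario

# Book resource patrimonio=42 for the user with pk=1, one hour long; estado defaults to 'Agendado'
booking = Agendamento(usuario=Usuario.objects.get(pk=1),
                      recurso=Recurso.objects.get(patrimonio=42),
                      inicio=datetime.now(),
                      periodo=3600)
booking.save()
print(booking)  # prints "<resource name> - <start time>" per __str__
```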
{
"alpha_fraction": 0.6469643712043762,
"alphanum_fraction": 0.6479280591011047,
"avg_line_length": 40.50666809082031,
"blob_id": "58247aa1400e6ddcab2b299776311670de10c85c",
"content_id": "27f9456fe6425f491e50148da9aed030e627be4d",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3115,
"license_type": "permissive",
"max_line_length": 114,
"num_lines": 75,
"path": "/app/models/cadastro_usuario.py",
"repo_name": "ESEGroup/Brasil",
"src_encoding": "UTF-8",
"text": "from django.db import models\nfrom app.models import Cadastro, Usuario\nfrom django.contrib.auth.models import User, Group\nfrom .settingsgroups import SettingsUserGroups\n\nclass CadastroUsuario(Cadastro):\n\n notificador = None #NotificadorCadastro\n usuario = Usuario()\n usuarioTemplate = Usuario()\n solicitante = Usuario()\n settingsUserGroups = SettingsUserGroups()\n\n def has_permission(self):\n if self.solicitante.groups.all()[0].pk == self.settingsUserGroups.AdminGroup:\n if Usuario.objects.get(user=self.solicitante).departamento != self.usuarioTemplate.departamento:\n return False\n #if self.usuario.departamento != self.usuarioTemplate.departamento:\n # return False \n return True \n\n def parser (self,json):\n #model : {\"pk\":\"1\",\"username\":\"anything\", \"password\":\"pass\", \"email\":\"[email protected]\", \"first_name\" : \" \", \n #\"last_name\" : \" \", \"registro\" : \"5\", \"departamento\" : \"\"}\n self.usuarioTemplate.usr = User()\n self.usuarioTemplate.usr.username = json['username']\n if 'password' in json.keys():\n self.usuarioTemplate.usr.set_password(json['password'])\n self.usuarioTemplate.usr.email = json['email']\n self.usuarioTemplate.usr.first_name = json['first_name']\n self.usuarioTemplate.usr.last_name = json['last_name']\n if 'pk' in json.keys() and json['pk'] != '':\n self.usuarioTemplate.pk=int(json['pk'])\n self.usuario = Usuario.objects.get(pk=self.usuarioTemplate.pk)\n self.usuarioTemplate.registro = int(json['registro'])\n self.usuarioTemplate.departamento = json['departamento']\n \n def cadastrar (self,group):\n self.usuario.usr = User.objects.create_user(\n username=self.usuarioTemplate.usr.username,\n email=self.usuarioTemplate.usr.email, \n first_name=self.usuarioTemplate.usr.first_name, \n last_name=self.usuarioTemplate.usr.last_name\n )\n self.usuario.usr.groups.add(Group.objects.get(pk=group))\n\n self.usuario = Usuario.objects.create(\n user=self.usuario.usr,\n registro=self.usuarioTemplate.registro,\n departamento=self.usuarioTemplate.departamento\n )\n return self.usuario.pk\n #falta notificação\n\n def atualizar (self):\n self.usuario.user.username = self.usuarioTemplate.usr.username\n self.usuario.user.password = self.usuarioTemplate.usr.password\n self.usuario.user.email = self.usuarioTemplate.usr.email\n self.usuario.user.first_name = self.usuarioTemplate.usr.first_name\n self.usuario.user.last_name = self.usuarioTemplate.usr.last_name\n self.usuario.registro = self.usuarioTemplate.registro\n self.usuario.departamento = self.usuarioTemplate.departamento\n self.usuario.user.save()\n self.usuario.save()\n\n def deletar (self):\n self.usuario.user.is_active = False\n self.usuario.user.save()\n\n def notificar (self):\n return False\n\n class Meta:\n managed = False\n app_label = 'app'\n"
},
{
"alpha_fraction": 0.5732483863830566,
"alphanum_fraction": 0.5881103873252869,
"avg_line_length": 20.409090042114258,
"blob_id": "d5bfee36a1f1f856a461d2d59c8613ef9b0892c3",
"content_id": "6be1c1d060520f30bc91fe501d9551135a5fb613",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 471,
"license_type": "permissive",
"max_line_length": 91,
"num_lines": 22,
"path": "/app/models/notificador.py",
"repo_name": "ESEGroup/Brasil",
"src_encoding": "UTF-8",
"text": "from django.db import models\n\nclass Notificador(models.Model):\n # no getters and setters please (http://dirtsimple.org/2004/12/python-is-not-java.html)\n\n remetente = \"<EMAIL DO SISTEMA>\"\n\n mensagem = \"\"\n assunto = \"\"\n tipo_mensagem = 0\n\n def enviarMensagem ():\n res = None\n return res\n\n def construirMensagem ():\n return False\n\n class Meta:\n abstract = True\n managed = False\n app_label = 'app'\n"
},
{
"alpha_fraction": 0.6830188632011414,
"alphanum_fraction": 0.6830188632011414,
"avg_line_length": 19.384614944458008,
"blob_id": "e7d68bab601a4279344b8def11f233e4d650de53",
"content_id": "37943af6e39a153b3a0942dab232e1de04d1b394",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 265,
"license_type": "permissive",
"max_line_length": 43,
"num_lines": 13,
"path": "/app/models/notificador_agendamento.py",
"repo_name": "ESEGroup/Brasil",
"src_encoding": "UTF-8",
"text": "from django.db import models\nfrom app.models import Notificador\n\nclass NotificadorAgendamento (Notificador):\n\n agendamento = None #Agendamento\n\n def construirMensagem ():\n return False\n\n class Meta:\n managed = False\n app_label = 'app'\n"
},
{
"alpha_fraction": 0.5498627424240112,
"alphanum_fraction": 0.5809698104858398,
"avg_line_length": 27.0256404876709,
"blob_id": "34d4f40779654da6f11c5cc26fc2bcb11f4fdd91",
"content_id": "11440dc2c626d595c7fa1541ad2bb464b58f5610",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1093,
"license_type": "permissive",
"max_line_length": 148,
"num_lines": 39,
"path": "/app/migrations/0005_auto_20161119_1727.py",
"repo_name": "ESEGroup/Brasil",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.10.3 on 2016-11-19 19:27\nfrom __future__ import unicode_literals\n\nfrom django.conf import settings\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n migrations.swappable_dependency(settings.AUTH_USER_MODEL),\n ('app', '0004_auto_20161115_1756'),\n ]\n\n operations = [\n migrations.RemoveField(\n model_name='usuario',\n name='email',\n ),\n migrations.RemoveField(\n model_name='usuario',\n name='estado',\n ),\n migrations.RemoveField(\n model_name='usuario',\n name='nome',\n ),\n migrations.RemoveField(\n model_name='usuario',\n name='tipo_perfil',\n ),\n migrations.AddField(\n model_name='usuario',\n name='user',\n field=models.OneToOneField(default=0, on_delete=django.db.models.deletion.CASCADE, related_name='profile', to=settings.AUTH_USER_MODEL),\n ),\n ]\n"
}
] | 31 |
smaibom/DiscordSignupBot | https://github.com/smaibom/DiscordSignupBot | 833492e9bb0a1b5ae0795e3ec8dbbcc82cf3ca9c | 0bf1d2abfca9e73eda991337a0accd2d664d76ef | 78405e98ccddeaa9fb574fcf736487e3db1e60b7 | refs/heads/master | 2020-03-25T10:50:59.447469 | 2018-08-07T09:41:12 | 2018-08-07T09:41:12 | 143,707,997 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6869080662727356,
"alphanum_fraction": 0.6896935701370239,
"avg_line_length": 26.600000381469727,
"blob_id": "f129c2edbd2f190f0633bdee52bcdf6b480ddef1",
"content_id": "a74f9f6a42069514e8023e4eb1d44491d5103485",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1795,
"license_type": "no_license",
"max_line_length": 108,
"num_lines": 65,
"path": "/gsheetsapi.py",
"repo_name": "smaibom/DiscordSignupBot",
"src_encoding": "UTF-8",
"text": "import gspread\nfrom oauth2client import file, client, tools\n\nclass Spreadsheet(object):\n \"\"\"docstring for Spreadsheet\"\"\"\n def __init__(self, sheetID):\n self.sheet = load_spreadsheet(sheetID)\n\n def add_worksheet(self,sheetName,srows = 40,scols = 50):\n \"\"\"\n Adds a new worksheet to the spreadsheet object\n\n Args:\n sheetName(string): Name of the worksheet to be created\n srows(int): The number of rows the worksheet is initialized with\n scols(int): The number of columns the worksheet is initialized with\n\n Returns:\n worksheet. A worksheet object on success, None if worksheet name already exist or failed to be created\n \"\"\"\n try:\n worksheet = self.sheet.add_worksheet(title=sheetName, rows=str(srows), cols=str(scols))\n except gspread.exceptions.APIError as e:\n worksheet = None\n return worksheet\n\n def get_worksheets(self):\n \"\"\"\n Returns the worksheets in the spreadsheet\n \"\"\"\n worksheets = self.sheet.worksheets()\n return worksheets\n\n def del_worksheet(self,worksheet):\n \"\"\"\n Deletes a worksheet from the spreadsheet\n\n Args:\n worksheet(Worksheet): A gspread worksheet object to be deleted\n\n Returns:\n True on success, False if failed to be deleted or does not exist\n\n \"\"\"\n try:\n self.sheet.del_worksheet(worksheet)\n return True\n except Exception as e:\n return False\n\n def append_col(self,colnum,worksheet):\n \n\n\n\n\ndef load_spreadsheet(sheetID):\n scope = 'https://www.googleapis.com/auth/spreadsheets'\n store = file.Storage('token.json')\n creds = store.get()\n if not creds or creds.invalid:\n flow = client.flow_from_clientsecrets('credentials.json', scope)\n creds = tools.run_flow(flow, store)\n gc = gspread.authorize(creds)\n return gc.open_by_key(sheetID)\n\n"
},
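A quick usage sketch for the wrapper above; the spreadsheet key is a placeholder, and `token.json`/`credentials.json` must be present as expected by `load_spreadsheet`:

```
from gsheetsapi import Spreadsheet

sheet = Spreadsheet('YOUR-SPREADSHEET-KEY')

ws = sheet.add_worksheet('Signups')            # None if the name already exists
print([w.title for w in sheet.get_worksheets()])

if ws is not None:
    sheet.del_worksheet(ws)                    # True on success, False otherwise
```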
{
"alpha_fraction": 0.6425542235374451,
"alphanum_fraction": 0.6545711755752563,
"avg_line_length": 26.914474487304688,
"blob_id": "0b355d7eb66e5eede20c8b2e7172dc1c89a03db0",
"content_id": "9101e1b2a624feeae363f985706152608a943d27",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4244,
"license_type": "no_license",
"max_line_length": 128,
"num_lines": 152,
"path": "/signupsystem.py",
"repo_name": "smaibom/DiscordSignupBot",
"src_encoding": "UTF-8",
"text": "import numpy as np\nimport gspread\nimport gsheetsapi\n\nclass SignupSystem(object):\n \"\"\"docstring for SignupSystem\"\"\"\n def __init__(self,sheetID):\n self.spreadsheet = gsheetsapi.Spreadsheet(sheetID)\n self.worksheets = dict()\n wsl = self.spreadsheet.get_worksheets()\n for ws in wsl:\n self.worksheets[ws.title] = ws\n \n\n def register(self, userID, numChars=1):\n \"\"\"\n Register user to system\n\n Args:\n userID(string): Discord userID\n numChars(int): Number of chars to register, defaults to 1\n\n Returns:\n True on successfull registration, False if user exists\n\n TODO:\n Fix the upcoming sheet by updating the sumif and adding -values to upcoming events\n \"\"\"\n usersheet = self.worksheets[\"Users\"]\n pastsheet = self.worksheets[\"Past\"]\n upcomingsheet = self.worksheets[\"Upcoming\"]\n users = usersheet.col_values(1)[1:]\n if not userID in users:\n #Starting index for inserting values\n si = 2+len(users)\n numevents = len(upcomingsheet.row_values(1))-1\n usersheet.insert_row([userID,numChars],si)\n pastsheet.insert_row([userID],si)\n upcomingsheet.insert_row([userID] + [-1] * numevents,si)\n return True\n else:\n return False\n\n def unregister(self,userID):\n \"\"\"\n Removes user from system\n\n Args:\n userID(string): Discord userID\n\n Returns:\n True on successfull removal, False if userID does not exist\n\n TODO:\n Add changes to the signup sheet\n \"\"\"\n usersheet = self.worksheets[\"Users\"]\n pastsheet = self.worksheets[\"Past\"]\n upcomingsheet = self.worksheets[\"Upcoming\"]\n try:\n cell = usersheet.find(userID)\n usersheet.delete_row(cell.row)\n pastsheet.delete_row(cell.row)\n upcomingsheet.delete_row(cell.row)\n return True\n except gspread.exceptions.CellNotFound:\n return False\n\n\n def update_num_chars(self,userID,numChars):\n \"\"\"\n Updates the number of chars registrated to a userID\n\n Args:\n userID(string): Discord userID\n numChars(int): The new number of chars\n\n Returns:\n True on successfull update, False if userID does not exists\n \"\"\"\n try:\n cell = self.worksheets[\"Users\"].find(userID)\n self.worksheets[\"Users\"].update_cell(cell.row,2,numChars)\n return True\n except gspread.exceptions.CellNotFound:\n return False\n\n def get_num_chars(self,userID):\n \"\"\"\n Gets the number of chars of a user\n\n Args:\n userID(string): Discord userID\n\n Returns:\n The number of chars, -1 if userID is not registered\n \"\"\"\n try:\n cell = self.worksheets[\"Users\"].find(userID)\n val = self.worksheets[\"Users\"].cell(cell.row,2)\n return val\n except gspread.exceptions.CellNotFound:\n return -1\n\n def create_event(self,date):\n \"\"\"\n \"\"\"\n upcoming = self.worksheets[\"Upcoming\"]\n length = len(upcoming.col_values(1))\n \n #If we only have 2 values we do not have any registered users as first and last entry is reserved\n if length <= 2:\n return False\n \n col = len(upcoming.row_values(1))\n #The letter for a range \n letter = chr(65+col)\n\n #gspread dosent support insert column so we have to get the range of cells and update each cell\n cellrange = letter + '1:'+letter+str(length)\n\n cells = upcoming.range(cellrange)\n \n #Format of event is date, signup for each registered person and ending is a sumif for total chars available based on signups\n cells[0].value = \"placeholder\"\n\n for i in range(1,len(cells)-1):\n cells[i].value = -1\n\n #Need to create a sumif statement rangeing from 2:len-1\n #example: =sumif(B2:B5,=1,Users!B2:B5)\n sumif = \"=sumif(\" + letter + '2:' + letter + str(length-1) \n sumif += 
',\"=1\",'\n sumif += 'Users!B2:B' + str(length-1) + ')'\n\n cells[-1].value = sumif\n\n upcoming.update_cells(cells[:-1])\n #Update cells has a bug where it appends a ' to the front of a cell starting with = causing it to give a wrong statement\n upcoming.update_acell(letter+str(length),sumif)\n\n\n\n\ndef main():\n sheet = SignupSystem(\"19lDNiH55dpAJNG573fwxQvM3o3YmY-8M_8k8wDGkDD0\")\n #sheet.unregister('ragnors')\n sheet.register('ragn',5)\n #sheet.create_event(\"tommorow\")\n\nif __name__ == '__main__':\n main()\n\n"
}
] | 2 |
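To make the formula construction in `create_event` above concrete: with `length = 5` (header row, three user rows, sum row) and one pre-existing column (`col = 1`, so `letter = 'B'`), the pieces assemble to a formula of the shape shown in the code comment:

```
letter, length = 'B', 5
sumif = "=sumif(" + letter + '2:' + letter + str(length-1)
sumif += ',"=1",'
sumif += 'Users!B2:B' + str(length-1) + ')'
print(sumif)  # =sumif(B2:B4,"=1",Users!B2:B4)
```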
DomJob/reddit-bot | https://github.com/DomJob/reddit-bot | daa687480bc1848370c2e26f7b93864f3457ae17 | 36287316f6084a13da32b8c087caa13c4c193bc8 | 67930b8b83073e688dc775517bed522732db7824 | refs/heads/master | 2017-06-24T19:21:55.283072 | 2017-03-05T06:26:40 | 2017-03-05T06:26:40 | 83,641,799 | 3 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.5167046189308167,
"alphanum_fraction": 0.5209757089614868,
"avg_line_length": 33.431373596191406,
"blob_id": "5f275d8cd5def84ffdd64de3e10088ee21a6ba4d",
"content_id": "c7fd39e9e7a8221c04d55bad0f1c38c8e548a72e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 10536,
"license_type": "no_license",
"max_line_length": 117,
"num_lines": 306,
"path": "/bot.py",
"repo_name": "DomJob/reddit-bot",
"src_encoding": "UTF-8",
"text": "import praw, random, time, pickle, sys, os, configparser, datetime\n\nclass Bot:\n def __init__(self, botInfo):\n self.botInfo = botInfo\n self.directory = botInfo['directory']\n self.testing = (botInfo['testing'].lower() == 'true')\n if self.testing:\n self.log(\"Test run initiated\")\n \n \n self.todoList = ['getInactiveMembers','selectNewMembers','kickUsers','addUsers','flairUsers','postRecap']\n \n self.doubleCheckSubmissions = True\n self.doubleCheckComments = True\n \n self.toBeKicked = []\n self.toBeAdded = []\n self.userNumbers = {}\n \n self.activeMembers = []\n self.timeLimit = time.time() - int(self.botInfo['hour_limit']) * 60 * 60\n \n def run(self):\n self.log(\"Running the bot - To do: %s\" % \", \".join(self.todoList))\n self.loadReddit()\n \n while self.todoList != []:\n action = self.todoList[0]\n self.log(\"Next action: %s\" % action)\n \n if action == 'getInactiveMembers':\n self.getInactiveMembers()\n if action == 'selectNewMembers':\n self.selectNewMembers()\n if action == 'kickUsers':\n self.kickUsers()\n if action == 'addUsers':\n self.addUsers()\n if action == 'flairUsers':\n self.flairUsers()\n if action == 'postRecap':\n self.postRecap()\n \n self.todoList.pop(0)\n self.logState()\n \n def getInactiveMembers(self):\n self.log(\"Getting list of inactive members...\")\n self.memberList = self.getMemberList()\n self.getActiveMembers()\n number = 1\n \n for username in self.memberList:\n if username not in self.activeMembers:\n kickUser = True\n if self.doubleCheckSubmissions:\n if self.hasUserPosted(username):\n kickUser = False\n \n if self.doubleCheckComments:\n if self.hasUserCommented(username):\n kickUser = False\n \n if kickUser:\n self.log(\" /u/%s has been inactive\" % username)\n self.toBeKicked.append(username)\n self.userNumbers[username] = number\n \n number += 1\n \n def selectNewMembers(self):\n numberOfMembersToAdd = int(self.botInfo['membercap']) - len(self.memberList) + len(self.toBeKicked)\n self.log(\"Getting %d new members\" % numberOfMembersToAdd)\n numberPicked = 0\n for comment in self.reddit.subreddit(\"all\").stream.comments():\n username = str(comment.author)\n \n if 'bot' in username.lower():\n continue\n \n karma = comment.author.comment_karma\n \n if karma < 1000 or karma > 75000:\n continue\n \n self.log(\" /u/%s selected\" % username)\n \n self.toBeAdded.append(username)\n numberPicked+=1\n \n if numberPicked == numberOfMembersToAdd:\n break\n \n def kickUsers(self):\n self.log(\"Kicking users\")\n if self.testing:\n return\n \n for username in self.toBeKicked:\n self.subreddit.contributor.remove(username)\n self.flairUser(username, 'Kicked', 'kicked')\n self.log(\"Kicked /u/%s\" % username)\n \n def addUsers(self):\n self.log(\"Adding users\")\n if self.testing:\n return\n \n for username in self.toBeAdded:\n self.subreddit.contributor.add(username)\n self.log(\" Added /u/%s\" % username)\n \n def flairUsers(self):\n self.log(\"Flairing users\")\n \n newMemberList = self.getMemberList()\n if self.testing:\n newMemberList += self.toBeAdded\n \n number = 1\n \n for username in newMemberList:\n flairText = \"#%d\" % number\n self.userNumbers[username] = number\n \n if username in self.toBeAdded:\n flairCSS = 'numbernew'\n else:\n flairCSS = 'number'\n \n self.flairUser(username, flairText, flairCSS)\n number += 1\n \n def postRecap(self):\n self.log(\"Generating and posting the recap... 
\", False)\n \n recapTitle = '%s - Bot Recap' % time.strftime('%Y-%m-%d', time.gmtime())\n recapBody = \"Kicked users:\\n\\n\"\n \n for username in self.toBeKicked:\n recapBody += \"* \\#%d - /u/%s\\n\\n\" % (self.userNumbers[username], username)\n \n recapBody += \"Added users:\\n\\n\"\n \n for username in self.toBeAdded:\n recapBody += \"* \\#%d - /u/%s\\n\\n\" % (self.userNumbers[username], username)\n \n if not self.testing:\n self.subreddit.submit(recapTitle, recapBody).mod.distinguish()\n else:\n open('recap_test.txt','w').write(recapBody)\n self.log(\"posted!\")\n \n def flairUser(self, username, flairText, flairCSS):\n if self.testing:\n return\n \n self.log(\"Flairing /u/%s to %s - CSS %s\" % (username, flairText, flairCSS) )\n self.subreddit.flair.set(username, flairText, flairCSS)\n \n def loadReddit(self):\n self.log(\"Loading reddit...\", False)\n \n self.reddit = praw.Reddit(client_id=botInfo['bot']['client_id'],\n client_secret=botInfo['bot']['client_secret'],\n user_agent=botInfo['bot']['user_agent'],\n username=botInfo['bot']['username'],\n password=botInfo['bot']['password'])\n self.subreddit = self.reddit.subreddit(self.botInfo['subreddit'])\n \n self.log(\"done\")\n \n def getMemberList(self):\n self.log(\"Fetching member list...\", False)\n memberList = []\n \n for member in self.subreddit.contributor(limit=None):\n username = str(member)\n if username not in ['Kovmar',self.botInfo['bot']['username']]: # Add users in this list to whitelist them\n memberList.append(username)\n \n memberList.reverse()\n self.log(\"done\")\n return memberList\n \n def getActiveMembers(self):\n self.log(\"Getting all active members\")\n \n self.log(\" Getting recent submissions...\")\n counter = 0\n for submission in self.subreddit.new(limit=1000):\n counter += 1\n author = str(submission.author)\n \n if author not in self.activeMembers:\n self.log(\" /u/%s has been active\" % author)\n self.activeMembers.append(author)\n\n if submission.created_utc < self.timeLimit:\n self.doubleCheckSubmissions = False\n break\n \n self.log(\" %d submissions\" % counter)\n \n self.log(\" Getting recent comments\")\n \n counter = 0\n for comment in self.subreddit.comments(limit=1000):\n counter += 1\n author = str(comment.author)\n \n if author not in self.activeMembers:\n self.log(\" /u/%s has been active\" % author)\n self.activeMembers.append(author)\n \n if comment.created_utc < self.timeLimit:\n self.doubleCheckComments = False\n break\n \n self.log(\" %d comments\" % counter)\n \n if self.doubleCheckSubmissions:\n self.log(\" Submissions will be double checked\")\n if self.doubleCheckComments:\n self.log(\" Comments will be double checked\")\n \n def logState(self):\n self.log(\"Logging the state\")\n tmp = [self.reddit, self.subreddit]\n self.reddit, self.subreddit = None, None\n \n stateFile = '%s/botstate.pkl' % (self.directory)\n with open(stateFile, 'wb') as file:\n pickle.dump(self, file, pickle.HIGHEST_PROTOCOL)\n \n self.reddit, self.subreddit = tmp[0], tmp[1]\n \n def hasUserPosted(self, username):\n self.log(\"Manually checking submissions of /u/%s\" % username)\n \n for submission in self.reddit.redditor(username).submissions.new(limit=1000):\n if submission.created_utc < self.timeLimit:\n return False\n elif submission.subreddit.display_name == self.botInfo['subreddit']:\n return True\n \n return False\n \n def hasUserCommented(self, username):\n self.log(\"Manually checking comments of /u/%s\" % username)\n \n for comment in self.reddit.redditor(username).comments.new(limit=1000):\n if 
comment.created_utc < self.timeLimit:\n return False\n elif comment.subreddit.display_name == self.botInfo['subreddit']:\n return (True, comment.id, comment.created_utc, self.timeLimit)\n \n return False\n\n def log(self, message, endLine = True):\n currentTime = time.strftime('%H:%M:%S', time.gmtime())\n toLog = \"%s : %s\" % (currentTime, message)\n if endLine:\n print(toLog)\n toLog += \"\\n\"\n else:\n print(toLog, end=' ')\n date = time.strftime('%Y-%m-%d', time.gmtime())\n open('%s/logs/%s.log' % (self.directory, date),'a').write(toLog)\n\n\n\n \ndirectory = os.path.dirname(os.path.realpath(__file__))\n \nconfig = configparser.ConfigParser()\nconfig.read(directory+'/praw.ini')\n\nbotInfo = dict(config._sections['config'])\nbotInfo['directory'] = directory\nbotInfo['bot'] = dict(config._sections['bot'])\n\ntry:\n os.mkdir(directory+\"/logs\")\nexcept:\n pass\n\nif __name__ == \"__main__\":\n if len(sys.argv) == 1:\n bot = Bot(botInfo)\n bot.run()\n else:\n argument = sys.argv[1]\n \n if argument == \"-retry\":\n stateFile = '%s/botstate.pkl' % (directory)\n \n try:\n with open(stateFile, 'rb') as file:\n bot = pickle.load(file)\n \n if bot.todoList != []:\n bot.run()\n except:\n print(\"No need for bot to restart\")\n"
},
{
"alpha_fraction": 0.7571915984153748,
"alphanum_fraction": 0.7752315998077393,
"avg_line_length": 40.040000915527344,
"blob_id": "02f45561d62c4ad79c1b20d6e08376d99d5ca042",
"content_id": "cdffbd742292957a7400f595a841d3222cc168a9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 2051,
"license_type": "no_license",
"max_line_length": 169,
"num_lines": 50,
"path": "/README.md",
"repo_name": "DomJob/reddit-bot",
"src_encoding": "UTF-8",
"text": "#Bot for auto-managing private subreddits\n\nWhat this bot does:\n\n* Kick users who haven't been active in the subreddit in a specified time frame\n\n* Add random users to replace them up to a certain amount of members\n\n* Give a numbered flair (in order) to every member based\n\nWhy:\n\n* not?\n\nHow to set this bot up:\n\n* Install praw if necessary. This bot was made on version 4.3.0, anything below 4.0 isn't compatible.\n\n* Set up your bot's app\n\n* Edit praw.ini.example with your bot's app and customize stuff like membercap and what not\n\n* rename it to praw.ini\n\n* Run bot.py to kick inactive members and add new ones.\n\nOptional: set up a cron to run bot.py automatically.\n\n\nI also recommend running \"bot.py -retry\" every 15-30 minutes as a backup in case the bot failed previously,\nin which case it will start from where it left off.\n\nIf it didn't fail, the script will simply stop without any request being made to reddit's API.\n\nThis bot has a small limitation which is good to know:\n\nReddit prevents any listing being made beyond 1000 items. \nAs this bot checks activity by loading all submissions and comments in the last [hour_limit] hours,\nThe listing will either stop when an item was made more than [hour_limit] hours ago, or at item #1000.\n\nIf item #1000 is fetched and was made within the last [hour_limit] hours (which is rare but can realistically happen for comments, especially on a more active subreddit)\nthere is a failsafe in place so that members who haven't been logged as being active can be given the benefit of the doubt,\nand the bot will manually check their overview to see if they have been active in the subreddit or not.\n\nOnce again, the 1000 item limitation could theoretically prevent the bot from seeing a valid activity if the user\nin question made 1000 comments or submissions and their 1001st comment was made in the subreddit within the allowed time frame.\n\nThat would be VERY rare, but still possible. \n\nI haven't tested it, but there might also be a problem for subreddits with over 1000 members due to this limitation."
}
] | 2 |
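A compact sketch of the failsafe described in the README above: when the subreddit listing maxes out at reddit's 1000-item cap, fall back to scanning an individual user's own comment history, mirroring `hasUserCommented` in bot.py (assumes a configured praw 4.x `reddit` instance; `HOUR_LIMIT` stands in for the hour_limit setting):

```
import time

HOUR_LIMIT = 72  # hypothetical value of the hour_limit setting
time_limit = time.time() - HOUR_LIMIT * 60 * 60

def was_active(reddit, subreddit_name, username):
    # A user's own listing is also capped at 1000 items by reddit
    for comment in reddit.redditor(username).comments.new(limit=1000):
        if comment.created_utc < time_limit:
            return False  # everything from here on is older than the window
        if comment.subreddit.display_name == subreddit_name:
            return True   # found in-window activity in the subreddit
    return False
```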
guoruize01/learned_iterative_algorithms | https://github.com/guoruize01/learned_iterative_algorithms | d94ffdd2727cb76d043b2c6ed7c6b03b72bf5ee2 | e8063fd7332d2f455a1eee03ebf0f940bbe6fb74 | cf319442753374d2200d2a070c4098f5f6a12e0b | refs/heads/master | 2022-04-08T06:58:12.289410 | 2020-03-16T17:37:38 | 2020-03-16T17:37:38 | null | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6382211446762085,
"alphanum_fraction": 0.653245210647583,
"avg_line_length": 22.785715103149414,
"blob_id": "aebde6cbd79c7415805c63f5e4e71479085ec0ac",
"content_id": "c6693bca8067b0debd6003179d31177e969b9cda",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1664,
"license_type": "permissive",
"max_line_length": 151,
"num_lines": 70,
"path": "/matrix_analysis.py",
"repo_name": "guoruize01/learned_iterative_algorithms",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python\nfrom __future__ import division\nfrom __future__ import print_function\n\"\"\"\nThis file serves as an example of how to \na) select a problem to be solved \nb) select a network type\nc) train the network to minimize recovery MSE\n\n\"\"\"\nimport numpy as np\nimport os\n\nos.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' # BE QUIET!!!!\nos.environ['KMP_DUPLICATE_LIB_OK']='True'\nimport tensorflow as tf\n\nnp.random.seed(1) # numpy is good about making repeatable output\ntf.set_random_seed(1) # on the other hand, this is basically useless (see issue 9171)\n\n# import our problems, networks and training modules\nfrom tools import problems,networks,train\n\n\n# Create the basic problem structure.\nprob = problems.bernoulli_gaussian_trial(kappa=None,M=250,N=500,L=1000,pnz=.1,SNR=40) #a Bernoulli-Gaussian x, noisily observed through a random matrix\n#prob = problems.random_access_problem(2) # 1 or 2 for compressive random access or massive MIMO\n# print('Problem created ...')\n# print('A is:')\n# print(prob.A)\n\nfilename = 'LAMP_bg_giid.npz'\n\nother = {}\nB_t = []\nU_t = []\nS_t = []\nV_t = []\ntheta_t = []\n\ntry:\n filecontent = np.load(filename).items()\n for k, d in filecontent:\n if k.startswith('B_'):\n B_t.append(d)\n print('restoring ' + k + ' is:' + str(d))\n elif k.startswith('theta_'):\n theta_t.append(d)\nexcept IOError:\n pass\n\n\n\nA = prob.A\nM,N = A.shape\nU_A, S_A, Vh_A = np.linalg.svd(A.transpose())\n\nfor i in range(len(B_t)):\n print(str(i))\n U, S, Vh = np.linalg.svd( B_t[i])\n U_t.append(U)\n S_t.append(S)\n V_t.append(Vh)\n\n\ndot_prod = U_A.transpose().dot(U_A)\n\ntest = np.amax(dot_prod, axis=0)\n\nstop = 1"
},
{
"alpha_fraction": 0.6765519976615906,
"alphanum_fraction": 0.6868327260017395,
"avg_line_length": 27.75,
"blob_id": "c6e876e383e76b79cb029a7ed1276188e6a08767",
"content_id": "366de313a31467bd264310ea8f149af5ee0f65e7",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2529,
"license_type": "permissive",
"max_line_length": 134,
"num_lines": 88,
"path": "/LAMP4SSCtests/ssc_problem_test.py",
"repo_name": "guoruize01/learned_iterative_algorithms",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python\nfrom __future__ import division\nfrom __future__ import print_function\n\"\"\"\nThis file serves as an example of how to \na) select a problem to be solved \nb) select a network type\nc) train the network to minimize recovery MSE\n\n\"\"\"\nimport numpy as np\nimport math\nimport numpy.linalg as la\nimport os\n\nos.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' # BE QUIET!!!!\nos.environ['KMP_DUPLICATE_LIB_OK']='True'\nimport tensorflow as tf\n\n\nnp.random.seed(1) # numpy is good about making repeatable output\ntf.set_random_seed(1) # on the other hand, this is basically useless (see issue 9171)\n\n# import our problems, networks and training modules\nfrom tools import problems,networks,train\n\n# Evaluating (fixed-)GAMP in TensorFlow\n# For debugging purposes I initialize a session here - Intialize the Session\nsess = tf.Session()\n\n# MC is the number of transmitted messages (Monte Carlo simulations)\nMC = 10\n# L is the number of sections\nL = 4\nbps = 4\n# B is the size of the section\nB = np.power(2, bps)\n# R is the rate of the code, bits per channel use\nR = 1.0\nn = int(L*bps / R)\n# N is the length of a uncoded SSC message\nN = B * L\nnoise_var = 1.\n\n# Create the basic problem structure.\nprob = problems.ssc_problem(n = n, L=L, bps=bps, MC=MC, SNR_dB=8)\n\n\nr_ = prob.xgen_ + tf.random_normal((N, MC), stddev=math.sqrt(noise_var))\n\nr_rs_ = tf.reshape(tf.transpose(r_ ), (-1,B))\nmessages_hat_ = tf.reshape(tf.arg_max(r_rs_, 1), (MC,L))\nx_hat_ = tf.one_hot(messages_hat_, depth=B)\nx_hat_ = tf.transpose(tf.reshape(x_hat_, [MC, N]))\n\n# BLER_ = 1 - tf.reduce_mean(tf.dtypes.cast(prob.messages_ == messages_hat_, dtype = tf.uint8))\nerror_matrix_ = tf.dtypes.cast(tf.math.equal(prob.messages_, tf.dtypes.cast(messages_hat_, tf.int32)), dtype = tf.float64)\nBLER_ = 1 - tf.reduce_mean(error_matrix_)\n\nsess = tf.Session()\n\nmessages, xgen, x_hat, messages_hat, BLER_tf, r, r_rs = sess.run([prob.messages_, prob.xgen_, x_hat_, messages_hat_, BLER_,r_, r_rs_])\n\nBLER = 1 - np.mean((messages==messages_hat))\n\n# print('xgen=\\n',xgen)\n# print('x_hat=\\n',x_hat)\nprint('xgen.shape=\\n',xgen.shape)\nprint('x_hat.shape=\\n',x_hat.shape)\nprint('r.shape=\\n',r.shape)\nprint('r_rs.shape=\\n',r_rs.shape)\n\nprint('messages=\\n',messages)\nprint('messages_hat=\\n',messages_hat)\nprint('BLER=\\n',BLER)\nprint('BLER_tf=\\n',BLER_tf)\n\n\nmessages, xgen, ygen = sess.run([prob.messages_, prob.xgen_, prob.ygen_])\n\n# print('messages=\\n',messages)\n# print('xgen=\\n',xgen)\n# print('ygen=\\n',ygen)\n\n# print('xgen.shape=\\n',xgen.shape)\n# print('ygen.shape=\\n',ygen.shape)\n\nsess.close()"
},
{
"alpha_fraction": 0.683698296546936,
"alphanum_fraction": 0.698296844959259,
"avg_line_length": 34.753623962402344,
"blob_id": "b5c4737d9e49731c6ea40be3c14c7d69010cdc6c",
"content_id": "d74584ef861bc973a74dacc4a2039ea5e091dfa3",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2466,
"license_type": "permissive",
"max_line_length": 131,
"num_lines": 69,
"path": "/LAMP4SSCtests/evaluate_AMPSSC2.py",
"repo_name": "guoruize01/learned_iterative_algorithms",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python\nfrom __future__ import division\nfrom __future__ import print_function\n\"\"\"\nThis file serves as an example of how to \na) select a problem to be solved \nb) select a network type\nc) train the network to minimize recovery MSE\n\n\"\"\"\nimport numpy as np\nimport math\nimport numpy.linalg as la\nimport os\n\nos.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' # BE QUIET!!!!\nos.environ['KMP_DUPLICATE_LIB_OK']='True'\nimport tensorflow as tf\n\n\nnp.random.seed(1) # numpy is good about making repeatable output\ntf.set_random_seed(1) # on the other hand, this is basically useless (see issue 9171)\n\n# import our problems, networks and training modules\nfrom tools import problems,networks,train\n\n# Evaluating (fixed-)GAMP in TensorFlow\n# For debugging purposes I initialize a session here - Intialize the Session\nsess = tf.Session()\n\n\nMC = 1000 # MC is the number of transmitted messages (Monte Carlo simulations)\nL = 32 # L is the number of sections\nbps = 4 # bits per section\nR = 1.0 # R is the rate of the code, bits per channel use\nn = int(L*bps/R) # number of channel uses, i.e., number of rows of A\n\n# Create the basic problem structure.\nprob = problems.ssc_problem(n=n, L=L, bps=bps, MC=MC, SNR_dB=8)\n\n\n# # build a LAMP network to solve the problem and get the intermediate results so we can greedily extend and then refine(fine-tune)\nlayers = networks.build_LAMP4SSC(prob,T=6,untied=False)\nprint('Building layers ... done')\n\n\n# plan the learning\ntraining_stages = train.setup_LAMP4SSCtraining(layers,prob,trinit=1e-3, refinements=(.5,) )\n# training_stages = train.setup_LAMP4SSCtraining(layers,prob,trinit=1e-3,refinements=(.5,.1,.01) )\nprint('Plan the learning ... done')\n\nsess = tf.Session()\nsess.run(tf.global_variables_initializer())\n\nprint('norms xval:{xval:.7f} yval:{yval:.7f}'.format(xval=la.norm(prob.xval), yval=la.norm(prob.yval)))\n\n# state = load_trainable_vars(sess, savefile) # must load AFTER the initializer\n#\n# must use this same Session to perform all training\n# if we start a new Session, things would replay and we'd be training with our validation set (no no)\n#\n# done = state.get('done', [])\n# log = str(state.get('log', ''))\n\nfor name, xhat_, loss_, nmse_, ser_, train_, var_list in training_stages:\n nmse, ser = sess.run([nmse_, ser_], feed_dict={prob.y_: prob.yval, prob.x_: prob.xval})\n print(name, '\\tnmse=', nmse, '\\tnmse/dB=', 10 * np.log10(nmse), '\\tser=', ser)\n\nsess.close()"
},
{
"alpha_fraction": 0.6677577495574951,
"alphanum_fraction": 0.6914893388748169,
"avg_line_length": 27.430233001708984,
"blob_id": "88cc9b35db87afcf535bab9de177cb9411f48452",
"content_id": "8ad9e74082d779ef2839cbe46ac42cd721bf0f43",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2444,
"license_type": "permissive",
"max_line_length": 146,
"num_lines": 86,
"path": "/evaluate_AMP.py",
"repo_name": "guoruize01/learned_iterative_algorithms",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python\nfrom __future__ import division\nfrom __future__ import print_function\n\"\"\"\nThis file serves as an example of how to \na) select a problem to be solved \nb) select a network type\nc) train the network to minimize recovery MSE\n\n\"\"\"\nimport numpy as np\nimport os\n\nos.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' # BE QUIET!!!!\nos.environ['KMP_DUPLICATE_LIB_OK']='True'\nimport tensorflow as tf\n\nnp.random.seed(1) # numpy is good about making repeatable output\ntf.set_random_seed(1) # on the other hand, this is basically useless (see issue 9171)\n\n# import our problems, networks and training modules\nfrom tools import problems,networks,train\n\nL=10000\nM=250\nN=500\nSNR=20\npnz=.1\nuntied=False\nT=8\nshrink='bg'\n\n# Create the basic problem structure.\nprob = problems.bernoulli_gaussian_trial(kappa=None,M=M,N=N,L=L,pnz=pnz,SNR=SNR) #a Bernoulli-Gaussian x, noisily observed through a random matrix\n#prob = problems.random_access_problem(2) # 1 or 2 for compressive random access or massive MIMO\nprint('Problem created ...')\nprint('A is:')\nprint(prob.A)\n\n# from scipy.io import savemat\n# # W = np.load(config.W)\n# dict = dict(D=prob.A)\n# savemat( 'D.mat', dict, oned_as='column' )\n\n\n# build a LAMP network to solve the problem and get the intermediate results so we can greedily extend and then refine(fine-tune)\nlayers = networks.build_LAMP(prob,T=T,shrink=shrink,untied=untied)\nprint('Building layers ... done')\n\n\nnmse_arrray = []\nmse_arrray = []\nsigma2_array = []\n\n# Evaluating (fixed-)GAMP in TensorFlow\n# For debugging purposes I initialize a session here - Intialize the Session\nsess = tf.Session()\ny,x_true = prob(sess)\n\n\nsess.run(tf.global_variables_initializer())\n\nfor name, xhat_, rvar_, var_list in layers:\n\n nmse_denom_ = tf.nn.l2_loss(prob.x_)\n nmse_ = tf.nn.l2_loss( xhat_ - prob.x_) / nmse_denom_\n\n mse_ = 2* tf.nn.l2_loss(xhat_ - prob.x_) / (L*N)\n\n rvar_mean_ = tf.reduce_mean(rvar_)\n\n x_hat, nmse, mse, rvar_mean = sess.run([xhat_, nmse_, mse_, rvar_mean_], feed_dict={prob.y_: y, prob.x_: x_true})\n\n if \"non-linear T=\" in name:\n nmse_arrray.append(nmse)\n mse_arrray.append(mse)\n sigma2_array.append(rvar_mean)\n\n print(name, '\\tnmse=', nmse,'\\tNMSE/dB=',10*np.log10(nmse),'\\tMSE/dB=',10*np.log10(mse), '\\t sigma2/dB=',10*np.log10(rvar_mean))\n\n\nsess.close()\n\nprint('nmse/dB=', 10*np.log10(nmse_arrray))\nprint('mse/dB=', 10*np.log10(mse_arrray))\nprint('sigma2/dB=', 10*np.log10(sigma2_array))"
},
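All of the evaluation scripts in this repo report errors as `10*log10(NMSE)`. A tiny standalone numpy check of that metric, with dummy arrays (the 1/2 factors in `tf.nn.l2_loss` cancel in the ratio):

```
import numpy as np

x_true = np.random.randn(500, 1000)                 # ground truth, N x L
x_hat = x_true + 0.1 * np.random.randn(500, 1000)   # a noisy estimate

# NMSE = ||x_hat - x_true||_F^2 / ||x_true||_F^2, reported in dB
nmse = np.sum((x_hat - x_true) ** 2) / np.sum(x_true ** 2)
print('NMSE = %.2f dB' % (10 * np.log10(nmse)))
```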
{
"alpha_fraction": 0.5890227556228638,
"alphanum_fraction": 0.6077643632888794,
"avg_line_length": 39.17204284667969,
"blob_id": "fda43788dd4cd1aea460e5b7ff1ffe6a7bffa5f6",
"content_id": "352701daaf40c0be035bfe38934f9195c1a77d0b",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3735,
"license_type": "permissive",
"max_line_length": 179,
"num_lines": 93,
"path": "/LGAMPtests/debug_LGAMP_nans.py",
"repo_name": "guoruize01/learned_iterative_algorithms",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python\nfrom __future__ import division\nfrom __future__ import print_function\n\"\"\"\nThis file serves as an example of how to \na) select a problem to be solved \nb) select a network type\nc) train the network to minimize recovery MSE\n\n\"\"\"\nimport numpy as np\nimport math\nimport numpy.linalg as la\nimport os\n\nos.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' # BE QUIET!!!!\nos.environ['KMP_DUPLICATE_LIB_OK']='True'\nimport tensorflow as tf\n\n# ------------------------------------------------------------------------------------------\n# ------------------------------------------------------------------------------------------\n# For this function you need to use the line below in networks.build_LGAMP!\n# layers.append( ('GLAMP T={0}'.format(t+1),xhat_,dxdr_,r_,rvar_,s_,svar_,p_,pvar_,(G_theta_,) ) )\n# ------------------------------------------------------------------------------------------\n# ------------------------------------------------------------------------------------------\n\n\nnp.random.seed(1) # numpy is good about making repeatable output\ntf.set_random_seed(1) # on the other hand, this is basically useless (see issue 9171)\n\n# import our problems, networks and training modules\nfrom tools import problems,networks,train\n\n# Evaluating (fixed-)GAMP in TensorFlow\n# For debugging purposes I initialize a session here - Intialize the Session\nsess = tf.Session()\n\n# Create the basic problem structure.\n# prob = one_bit_CS_with_BG_prior(kappa=None,M=5,N=8,L=2,pnz=1.00,SNR=2, tf_floattype=tf.float32) #a Bernoulli-Gaussian x, noisily observed through a random matrix\nprob = problems.one_bit_CS_with_BG_prior(kappa=None,M=2000,N=512,L=1000,pnz=.0625,SNR=2, tf_floattype=tf.float32) #a Bernoulli-Gaussian x, noisily observed through a random matrix\n# prob = one_bit_CS_with_BG_prior(kappa=None,M=512,N=250,L=1000,pnz=.1,SNR=2) #a Bernoulli-Gaussian x, noisily observed through a random matrix\n#prob = problems.random_access_problem(2) # 1 or 2 for compressive random access or massive MIMO\nprint('Problem created ...')\n\n# build a LGAMP network to solve the problem and get the intermediate results so we can greedily extend and then refine(fine-tune)\nlayers = networks.build_LGAMP(prob,T=7,shrink='onebitF',untied=True,tf_floattype = tf.float32)\nprint('Building layers ... 
done')\n\ny, x_true = prob(sess)\n\nsess.run(tf.global_variables_initializer())\n\n\nfor name, xhat_, var_list in layers:\n\n x_hat = sess.run(xhat_, feed_dict={prob.y_:y})\n\n NMSE = la.norm(x_true/la.norm(x_true, axis=0) - x_hat/la.norm(x_hat, axis=0), axis=0)**2\n L = len(NMSE)\n NMSE_no_Nan = NMSE[np.logical_not(np.isnan(NMSE))]\n\n NMSE_dB = 10*math.log10(np.mean(NMSE_no_Nan))\n print(name, 'NMSE=', NMSE_dB, '\\tdB with',L - len(NMSE_no_Nan),'instances of NaN (out of',L,')')\n\n\n\n# for name, xhat_, xvar_, r_, rvar_, s_, svar_, p_, pvar_, var_list in layers:\n#\n# x_hat, xvar, r, rvar, s, svar, p , pvar = sess.run([xhat_, xvar_, r_, rvar_, s_, svar_, p_, pvar_], feed_dict={prob.y_:y, prob.x_:x_true})\n#\n# NMSE = la.norm(x_true/la.norm(x_true, axis=0) - x_hat/la.norm(x_hat, axis=0), axis=0)**2\n# L = len(NMSE)\n# NMSE_no_Nan = NMSE[np.logical_not(np.isnan(NMSE))]\n#\n# NMSE_dB = 10*math.log10(np.mean(NMSE_no_Nan))\n# print(name, 'NMSE=', NMSE_dB, '\\tdB with',L - len(NMSE_no_Nan),'instances of NaN (out of',L,')')\n#\n# if L != len(NMSE_no_Nan):\n# print(\"Let's do some debugging ...\")\n# nan_indices = np.isnan(NMSE)\n#\n# p_nan = p[:,nan_indices]\n# pvar_nan = pvar[:,nan_indices]\n# s_nan = s[:,nan_indices]\n# svar_nan = svar[:,nan_indices]\n# r_nan = r[:,nan_indices]\n# rvar_nan = rvar[:,nan_indices]\n# xvar_nan = xvar[:,nan_indices]\n# x_hat_nan = x_hat[:,nan_indices]\n# break\n\n# Close the session\nsess.close()"
},
{
"alpha_fraction": 0.5501929521560669,
"alphanum_fraction": 0.5693076252937317,
"avg_line_length": 40.1682243347168,
"blob_id": "aefad5656e17f82057e511c6790d01394c0d094d",
"content_id": "3846d00d3fee1c41e69716f12cd6c8f0077c3bdc",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 22025,
"license_type": "permissive",
"max_line_length": 120,
"num_lines": 535,
"path": "/tools/networks.py",
"repo_name": "guoruize01/learned_iterative_algorithms",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python\nfrom __future__ import division\nfrom __future__ import print_function\nimport numpy as np\nimport numpy.linalg as la\nimport math\n\nimport tensorflow as tf\nimport tensorflow_probability as tfp\nimport tools.shrinkage as shrinkage\n\ndef build_LISTA(prob,T,initial_lambda=.1,untied=False):\n \"\"\"\n Builds a LISTA network to infer x from prob.y_ = matmul(prob.A,x) + AWGN\n return a list of layer info (name,xhat_,newvars)\n name : description, e.g. 'LISTA T=1'\n xhat_ : that which approximates x_ at some point in the algorithm\n newvars : a tuple of layer-specific trainable variables\n \"\"\"\n assert not untied,'TODO: untied'\n eta = shrinkage.simple_soft_threshold\n layers = []\n A = prob.A\n M,N = A.shape\n B = A.T / (1.01 * la.norm(A,2)**2)\n B_ = tf.Variable(B,dtype=tf.float32,name='B_0')\n S_ = tf.Variable( np.identity(N) - np.matmul(B,A),dtype=tf.float32,name='S_0')\n By_ = tf.matmul( B_ , prob.y_ )\n layers.append( ('Linear',By_,None) )\n\n initial_lambda = np.array(initial_lambda).astype(np.float32)\n if getattr(prob,'iid',True) == False:\n # create a parameter for each coordinate in x\n initial_lambda = initial_lambda*np.ones( (N,1),dtype=np.float32 )\n lam0_ = tf.Variable( initial_lambda,name='lam_0')\n xhat_ = eta( By_, lam0_)\n layers.append( ('LISTA T=1',xhat_, (lam0_,) ) )\n for t in range(1,T):\n lam_ = tf.Variable( initial_lambda,name='lam_{0}'.format(t) )\n xhat_ = eta( tf.matmul(S_,xhat_) + By_, lam_ )\n layers.append( ('LISTA T='+str(t+1),xhat_,(lam_,)) )\n return layers\n\ndef build_LBISTA(prob,T,initial_lambda=.1,untied=False):\n \"\"\"\n Builds a LISTA network to infer x from prob.y_ = matmul(prob.A,x) + AWGN\n\n prob - is a TFGenerator which contains problem parameters and def of how to generate training data\n initial_lambda - could be some parameter of Block ISTA <- DELETE if unnecessary\n untied - flag for tied or untied case\n\n Return a list of layer info (name,xhat_,newvars)\n name : description, e.g. 'LISTA T=1'\n xhat_ : that which approximates x_ at some point in the algorithm\n newvars : a tuple of layer-specific trainable variables\n \"\"\"\n\n layers = []\n\n \"\"\"\n # check other functions in this file (e.g., build_LISTA and build_LAMP4SSC) to implement LBISTA network\n # send me questions if needed\n \"\"\"\n\n return layers\n\n\ndef build_LAMP(prob,T,shrink,untied):\n \"\"\"\n Builds a LAMP network to infer x from prob.y_ = matmul(prob.A,x) + AWGN\n return a list of layer info (name,xhat_,newvars)\n name : description, e.g. 
'LISTA T=1'\n xhat_ : that which approximates x_ at some point in the algorithm\n newvars : a tuple of layer-specific trainable variables\n \"\"\"\n eta,theta_init = shrinkage.get_shrinkage_function(shrink, prob)\n print('theta_init='+repr(theta_init))\n layers=[]\n A = prob.A\n M,N = A.shape\n\n B = A.T / (1.01 * la.norm(A,2)**2)\n B_ = tf.Variable(B,dtype=tf.float32,name='B_0')\n\n By_ = tf.matmul( B_ , prob.y_ )\n layers.append( ('Linear',By_,tf.constant(0.0),tf.constant(0.0),None) )\n\n if getattr(prob,'iid',True) == False:\n # set up individual parameters for every coordinate\n theta_init = theta_init*np.ones( (N,1),dtype=np.float32 )\n theta_ = tf.Variable(theta_init,dtype=tf.float32,name='theta_0')\n OneOverM = tf.constant(float(1)/M,dtype=tf.float32)\n NOverM = tf.constant(float(N)/M,dtype=tf.float32)\n rhat_ = By_\n rvar_ = tf.reduce_sum(tf.square(prob.y_),0) * OneOverM\n\n (xhat_,dxdr_) = eta( rhat_, rvar_ , theta_ )\n layers.append( ('LAMP-{0} non-linear T=1'.format(shrink),xhat_,rhat_,rvar_,(theta_,) ) )\n\n vt_ = prob.y_\n for t in range(1,T):\n if len(dxdr_.get_shape())==2:\n dxdr_ = tf.reduce_mean(dxdr_,axis=0)\n bt_ = dxdr_ * NOverM\n vt_ = prob.y_ - tf.matmul( prob.A_ , xhat_ ) + bt_ * vt_\n rvar_ = tf.reduce_sum(tf.square(vt_),0) * OneOverM\n theta_ = tf.Variable(theta_init,name='theta_'+str(t))\n if untied:\n B_ = tf.Variable(B,dtype=tf.float32,name='B_'+str(t))\n rhat_ = xhat_ + tf.matmul(B_,vt_)\n layers.append( ('LAMP-{0} linear T={1}'.format(shrink,t+1),rhat_ ,(B_,) ) )\n else:\n rhat_ = xhat_ + tf.matmul(B_,vt_)\n\n (xhat_,dxdr_) = eta( rhat_ ,rvar_ , theta_ )\n layers.append( ('LAMP-{0} non-linear T={1}'.format(shrink,t+1),xhat_,rhat_,rvar_,(theta_,) ) )\n\n return layers\n\ndef build_GLAMP(prob,T,shrink,untied):\n \"\"\"\n Builds a GLAMP network to infer x from prob.y_ = matmul(prob.A,x) + AWGN\n return a list of layer info (name,xhat_,newvars)\n name : description, e.g. 
'LISTA T=1'\n xhat_ : that which approximates x_ at some point in the algorithm\n newvars : a tuple of layer-specific trainable variables\n \"\"\"\n eta,theta_init = shrinkage.get_shrinkage_function(shrink, prob)\n print('theta_init='+repr(theta_init))\n layers=[]\n A = prob.A\n M,N = A.shape\n\n\n B = A.T / (1.01 * la.norm(A,2)**2)\n B_ = tf.Variable(B,dtype=tf.float32,name='B_0')\n\n By_ = tf.matmul( B_ , prob.y_ )\n layers.append( ('Linear',By_,tf.constant(0.0),None) )\n\n if getattr(prob,'iid',True) == False:\n # set up individual parameters for every coordinate\n theta_init = theta_init*np.ones( (N,1),dtype=np.float32 )\n theta_ = tf.Variable(theta_init,dtype=tf.float32,name='theta_0')\n OneOverM = tf.constant(float(1)/M,dtype=tf.float32)\n NOverM = tf.constant(float(N)/M,dtype=tf.float32)\n rvar_ = tf.reduce_sum(tf.square(prob.y_),0) * OneOverM\n (xhat_,dxdr_) = eta( By_,rvar_ , theta_ )\n layers.append( ('LAMP-{0} non-linear T=1'.format(shrink),xhat_,rvar_,(theta_,) ) )\n\n vt_ = prob.y_\n for t in range(1,T):\n if len(dxdr_.get_shape())==2:\n dxdr_ = tf.reduce_mean(dxdr_,axis=0)\n bt_ = dxdr_ * NOverM\n vt_ = prob.y_ - tf.matmul( prob.A_ , xhat_ ) + bt_ * vt_\n rvar_ = tf.reduce_sum(tf.square(vt_),0) * OneOverM\n theta_ = tf.Variable(theta_init,name='theta_'+str(t))\n if untied:\n B_ = tf.Variable(B,dtype=tf.float32,name='B_'+str(t))\n rhat_ = xhat_ + tf.matmul(B_,vt_)\n layers.append( ('LAMP-{0} linear T={1}'.format(shrink,t+1),rhat_ ,(B_,) ) )\n else:\n rhat_ = xhat_ + tf.matmul(B_,vt_)\n\n D_1_exp_helper_ = tf.reshape(tf.stack([tf.eye(N), -tf.eye(N)]), [2 * N, -1])\n # D_1_exp_ = tf.constant(D_1_exp_helper_, dtype=tf.float32, name='D_1_')\n\n # lam = theta_[0] * tf.sqrt(rvar_)\n\n # b_1_ = tf.Variable(tf.zeros([2*N, 1]), dtype=tf.float32, name='b_1_')\n # b_1_ = tf.Variable(tf.ones([2 * N, 1]), dtype=tf.float32, name='b_1_')\n\n lam = theta_[0] * tf.sqrt(rvar_)\n scale = theta_[1]\n\n b_1_helper_ = tf.reshape(tf.stack([tf.ones([N, 1]), tf.ones([N, 1])]), [2 * N, -1])\n # b_1_ = tf.Variable(b_1_helper_, dtype=tf.float32, name='b_1_')\n\n\n rhat_expanded_1_ = tf.nn.relu(tf.matmul(D_1_exp_helper_, rhat_) - lam * b_1_helper_)\n\n dxdr_ = 2*tf.reduce_mean(tf.to_float(rhat_expanded_1_ > 0), 0)\n\n D_1_com_helper_ = tf.transpose(tf.reshape(tf.stack([tf.eye(N), -tf.eye(N)]), [2 * N, -1]))\n # D_1_com_ = tf.constant(D_1_com_helper_, dtype=tf.float32, name='B_com')\n\n xhat_ = tf.matmul(D_1_com_helper_, rhat_expanded_1_)\n\n xhat_ = scale * xhat_\n dxdr_ = scale * dxdr_\n\n layers.append( ('LAMP-{0} non-linear T={1}'.format(shrink,t+1),xhat_,rvar_,(theta_,) ) )\n\n return layers\n\ndef build_ALAMP(prob,T,shrink,untied):\n \"\"\"\n Builds a LAMP network to infer x from prob.y_ = matmul(prob.A,x) + AWGN\n return a list of layer info (name,xhat_,newvars)\n name : description, e.g. 
'LISTA T=1'\n xhat_ : that which approximates x_ at some point in the algorithm\n newvars : a tuple of layer-specific trainable variables\n \"\"\"\n eta,theta_init = shrinkage.get_shrinkage_function(shrink, prob)\n print('theta_init='+repr(theta_init))\n layers=[]\n A = prob.A\n M,N = A.shape\n\n # B = A.T\n B = prob.W\n B_ = tf.constant(B, dtype=tf.float32)\n\n By_ = tf.matmul( B_ , prob.y_ )\n\n if getattr(prob,'iid',True) == False:\n # set up individual parameters for every coordinate\n theta_init = theta_init*np.ones( (N,1),dtype=np.float32 )\n theta_ = tf.Variable(theta_init,dtype=tf.float32,name='theta_0')\n OneOverM = tf.constant(float(1)/M,dtype=tf.float32)\n NOverM = tf.constant(float(N)/M,dtype=tf.float32)\n rvar_ = tf.reduce_sum(tf.square(prob.y_),0) * OneOverM\n (xhat_,dxdr_) = eta( By_,rvar_ , theta_ )\n layers.append( ('LAMP-{0} non-linear T=1'.format(shrink),xhat_,rvar_,(theta_,) ) )\n\n vt_ = prob.y_\n for t in range(1,T):\n if len(dxdr_.get_shape())==2:\n dxdr_ = tf.reduce_mean(dxdr_,axis=0)\n bt_ = dxdr_ * NOverM\n vt_ = prob.y_ - tf.matmul( prob.A_ , xhat_ ) + bt_ * vt_\n rvar_ = tf.reduce_sum(tf.square(vt_),0) * OneOverM\n theta_ = tf.Variable(theta_init,name='theta_'+str(t))\n if untied:\n B_ = tf.Variable(B,dtype=tf.float32,name='B_'+str(t))\n rhat_ = xhat_ + tf.matmul(B_,vt_)\n layers.append( ('LAMP-{0} linear T={1}'.format(shrink,t+1),rhat_ ,(B_,) ) )\n else:\n rhat_ = xhat_ + tf.matmul(B_,vt_)\n\n (xhat_,dxdr_) = eta( rhat_ ,rvar_ , theta_ )\n layers.append( ('LAMP-{0} non-linear T={1}'.format(shrink,t+1),xhat_,rvar_,(theta_,) ) )\n\n return layers\n\ndef build_LGAMP(prob, T, shrink, untied, tf_floattype=tf.float32):\n \"\"\"\n Builds a LAMP network to infer x from prob.y_ = matmul(prob.A,x) + AWGN\n return a list of layer info (name,xhat_,newvars)\n name : description, e.g. 
'LISTA T=1'\n xhat_ : that which approximates x_ at some point in the algorithm\n newvars : a tuple of layer-specific trainable variables\n \"\"\"\n F, F_theta_init = shrinkage.get_shrinkage_function(shrink, prob)\n G, G_theta_init = shrinkage.get_shrinkage_function('bg', prob)\n\n F_theta_ = tf.constant(F_theta_init, dtype=tf_floattype, name='F_theta_0')\n G_theta_ = tf.Variable(G_theta_init, dtype=tf_floattype, name='G_theta_0')\n\n layers = []\n\n A = prob.A\n A_ = tf.dtypes.cast(prob.A_, dtype=tf_floattype)\n M, N = A.shape\n AT = A.T\n # This line causes numerical instability\n # AT = A.T / (1.01 * la.norm(A,2)**2)\n AT_ = tf.Variable(AT, dtype=tf_floattype, name='AT')\n A_A = np.multiply(A, A)\n A_A_ = tf.constant(A_A, dtype=tf_floattype, name='AA')\n A_AT_ = tf.transpose(A_A_)\n\n sigma_x2 = tf.constant(1, dtype=tf_floattype)\n sigmaw2 = tf.constant(prob.noise_var, dtype=tf_floattype)\n code_scale = prob.code_symbol\n\n xhat_ = 0 * tf.matmul(AT_, prob.y_)\n xvar_ = tf.constant(prob.pnz, dtype=tf_floattype) * sigma_x2 * (1 + xhat_)\n s_ = 0 * prob.y_\n\n for t in range(0, T):\n pvar_ = tf.matmul(A_A_, xvar_)\n p_ = tf.matmul(A_, xhat_) - pvar_ * s_\n\n (s_, svar_) = F(prob.y_, p_, pvar_, sigmaw2, M, F_theta_, code_scale)\n\n svar_rm = tf.reduce_mean(svar_, axis=0)\n svar_ = tf.matmul(1 + 0 * svar_, tf.matrix_diag(svar_rm))\n\n rvar_ = tf.math.reciprocal(tf.matmul(A_AT_, svar_))\n\n rvar_rm = tf.reduce_mean(rvar_, axis=0)\n rvar_ = tf.matmul(1 + 0 * rvar_, tf.matrix_diag(rvar_rm))\n\n r_ = xhat_ + tf.matmul(AT_, s_) * rvar_\n\n (xhat_, dxdr_) = G(r_, rvar_, G_theta_)\n xvar_ = rvar_ * dxdr_ # Rangan GAMP, eq 8b\n\n # layers.append( ('LGAMP T={0}'.format(t+1),xhat_,xvar_,r_,rvar_,s_,svar_,p_,pvar_,(G_theta_,) ) )\n layers.append(('LGAMP T={0}'.format(t + 1), xhat_, (G_theta_,)))\n\n return layers\n\ndef build_LAMP4SSC(prob,T,untied, alg_version):\n L = prob.L\n\n theta_init = 1.\n print('theta_init=' + repr(theta_init))\n theta_ = tf.Variable(theta_init, dtype=tf.float32, name='theta_0')\n\n layers = []\n A = prob.A\n A_ = prob.A_\n n, N = A.shape\n B = A.T\n B_ = tf.Variable(B, dtype=tf.float32, name='B_0')\n z_ = prob.y_\n\n if alg_version == 'tied' or alg_version == 'untied':\n s_ = tf.matmul(B_, z_)\n # var_list: Defaults to the list of variables collected in the graph under the key GraphKeys.TRAINABLE_VARIABLES\n layers.append(('LAMP for SSC Linear-B\\t', s_, None))\n elif \"tied S\" in alg_version:\n # initalization with randS = tf.random_normal((N, N), stddev=1.0 / math.sqrt(N)) is simply bad\n S_ = tf.Variable(tf.eye(N), dtype=tf.float32, name='S')\n s_amp_ = tf.matmul(B_, z_)\n s_ = tf.matmul(S_, s_amp_)\n layers.append(('LAMP for SSC Linear-B,Linear-S\\t', s_, None))\n\n\n tau_ = theta_ * tf.sqrt(tf.reduce_sum(z_ ** 2, axis=0) / n)\n s1_ = prob.sqrtnPl * tf.matmul((s_ - prob.sqrtnPl), tf.linalg.diag(1 / (tau_ ** 2)))\n s2_ = tf.reshape(tf.transpose(s1_), (-1, prob.B))\n beta_rs_ = prob.sqrtnPl * tf.nn.softmax(s2_, axis=1)\n beta_ = tf.transpose(tf.reshape(beta_rs_, [-1, N]))\n\n layers.append( ('LAMP for SSC non-linear T= 1\\t', beta_, (theta_,)) )\n\n for t in range(1,T):\n\n if \"no Onsager\" in alg_version:\n print('Guys no Onsager term!')\n z_ = prob.y_ - tf.matmul(A_, beta_)\n else:\n ons_ = tf.matmul(z_, tf.linalg.diag(1 / (tau_ ** 2)))\n ons_ = tf.matmul(ons_, tf.linalg.diag(prob.P - tf.norm(beta_, axis=0) ** 2 / n))\n z_ = prob.y_ - tf.matmul(A_, beta_) + ons_\n\n if alg_version == 'tied':\n s_ = beta_ + tf.matmul(B_, z_)\n elif alg_version == 'untied':\n B_ = 
tf.Variable(B, dtype=tf.float32, name='B_' + str(t))\n s_ = beta_ + tf.matmul(B_, z_)\n layers.append(('LAMP for SSC linear-B T={:2d}'.format(t + 1), s_, (B_,)))\n elif \"tied LAMP tied S\" in alg_version:\n print('tied LAMP tied S is in alg_version')\n s_amp_ = beta_ + tf.matmul(B_, z_)\n s_ = tf.matmul(S_, s_amp_)\n elif \"tied LAMP untied S\" in alg_version:\n print('untied LAMP tied S is in alg_version')\n S_ = tf.Variable(tf.eye(N), dtype=tf.float32, name='S_' + str(t))\n s_amp_ = beta_ + tf.matmul(B_, z_)\n s_ = tf.matmul(S_, s_amp_)\n layers.append(('LAMP for SSC linear-S T={:2d}'.format(t + 1), s_, (S_,)))\n else:\n s_ = beta_ + tf.matmul(B_, z_)\n print('Something is wrong with alg_version in build_LAMP4SSC')\n\n # if untied:\n # B_exp_tail_ = tf.random_normal((N, n), stddev=1.0 / math.sqrt(n))\n # # B_exp_tail_ = tf.zeros([N, n], tf.float32)\n # B_exp_helper_ = tf.reshape(tf.stack([B, B_exp_tail_]), [2 * N, -1])\n # B_exp_ = tf.Variable(B_exp_helper_, dtype=tf.float32, name='B_exp')\n #\n # # B_com_tail_ = tf.zeros([N, N], tf.float32)\n # B_com_tail_ = tf.random_normal((N, N), stddev=1.0 / math.sqrt(n))\n # B_com_helper_ = tf.transpose(tf.reshape(tf.stack([tf.eye(N), B_com_tail_]), [2 * N, -1]))\n # B_com_ = tf.Variable(B_com_helper_, dtype=tf.float32, name='B_com')\n #\n # s_ = beta_ + tf.matmul(B_com_, tf.nn.relu(tf.matmul(B_exp_, z_)))\n #\n # layers.append(('LAMP for SSC non-linear-B-expcom T={0}'.format(t + 1), s_, (B_com_, B_exp_)))\n\n # if untied:\n # # B_exp_tail_ = tf.random_normal((N, n), stddev=1.0 / math.sqrt(n))\n # B_exp_tail_ = tf.zeros([N, n], tf.float32)\n # B_exp_helper_ = tf.reshape(tf.stack([B, B_exp_tail_]), [2*N,-1])\n # B_exp_ = tf.Variable(B_exp_helper_, dtype=tf.float32,name='B_exp')\n #\n # B_inter = np.random.normal(size=(2*N, 2*N), scale=1.0 / math.sqrt(2*N)).astype(np.float32)\n # B_inter_ = tf.Variable(B_inter, name='B_inter')\n #\n # # B_com_tail_ = tf.zeros([N, N], tf.float32)\n # B_com_tail_ = tf.random_normal((N, N), stddev=1.0 / math.sqrt(n))\n # B_com_helper_ = tf.transpose(tf.reshape(tf.stack([tf.eye(N), B_com_tail_]), [2 * N, -1]))\n # B_com_ = tf.Variable(B_com_helper_, dtype=tf.float32, name='B_com')\n #\n # s_ = beta_ + tf.matmul(B_com_, tf.nn.relu(tf.matmul(B_inter_, tf.nn.relu(tf.matmul(B_exp_, z_)))))\n #\n # layers.append( ('LAMP for SSC non-linear-B-expcom T={:2d}'.format(t+1),s_ ,(B_com_,B_inter_,B_exp_) ) )\n # else:\n # s_ = beta_ + tf.matmul(B_, z_)\n\n\n theta_ = tf.Variable(theta_init, name='theta_' + str(t))\n tau_ = theta_ * tf.sqrt(tf.reduce_sum(z_ ** 2, axis=0) / n)\n\n s1_ = prob.sqrtnPl * tf.matmul((s_ - prob.sqrtnPl), tf.linalg.diag(1 / (tau_ ** 2)))\n s2_ = tf.reshape(tf.transpose(s1_), (-1, prob.B))\n beta_rs_ = prob.sqrtnPl * tf.nn.softmax(s2_, axis=1)\n\n beta_ = tf.transpose(tf.reshape(beta_rs_, [-1, N]))\n\n layers.append(('LAMP for SSC non-linear T={:2d}\\t'.format(t + 1), beta_, (theta_,)))\n\n # layers.append(('LAMP for SSC non-linear T={0}'.format(t + 1), beta_, z_, tau_, Bz_, s1_, s2_, beta_rs_, None))\n\n return layers\n\ndef build_LVAMP(prob,T,shrink):\n \"\"\"\n Build the LVMAP network with an SVD parameterization.\n Learns the measurement noise variance and nonlinearity parameters\n \"\"\"\n eta,theta_init = shrinkage.get_shrinkage_function(shrink)\n print('theta_init='+repr(theta_init))\n layers=[]\n A = prob.A\n M,N = A.shape\n AA = np.matmul(A,A.T)\n s2,U = la.eigh(AA) # this is faster than svd, but less precise if ill-conditioned\n s = np.sqrt(s2)\n V = np.matmul( A.T,U) / s\n print('svd reconstruction 
error={nmse:.3f}dB'.format(nmse=20*np.log10(la.norm(A-np.matmul(U*s,V.T))/la.norm(A) ) ) )\n assert np.allclose( A, np.matmul(U*s,V.T),rtol=1e-4,atol=1e-4)\n V_ = tf.constant(V,dtype=tf.float32,name='V')\n\n # precompute some tensorflow constants\n rS2_ = tf.constant( np.reshape( 1/(s*s),(-1,1) ).astype(np.float32) ) # reshape to (M,1) to allow broadcasting\n #rj_ = tf.zeros( (N,L) ,dtype=tf.float32)\n rj_ = tf.zeros_like( prob.x_)\n taurj_ = tf.reduce_sum(prob.y_*prob.y_,0)/(N)\n logyvar_ = tf.Variable( 0.0,name='logyvar',dtype=tf.float32)\n yvar_ = tf.exp( logyvar_)\n ytilde_ = tf.matmul( tf.constant( ((U/s).T).astype(np.float32) ) ,prob.y_) # inv(S)*U*y\n Vt_ = tf.transpose(V_)\n\n xhat_ = tf.constant(0,dtype=tf.float32)\n for t in range(T): # layers 0 thru T-1\n # linear step (LMMSE estimation and Onsager correction)\n varRat_ = tf.reshape(yvar_/taurj_,(1,-1) ) # one per column\n scale_each_ = 1/( 1 + rS2_*varRat_ ) # LMMSE scaling individualized per element {singular dimension,column}\n zetai_ = N/tf.reduce_sum(scale_each_,0) # one per column (zetai_ is 1/(1-alphai) from Phil's derivation )\n adjust_ = ( scale_each_*(ytilde_ - tf.matmul(Vt_,rj_))) * zetai_ # adjustment in the s space\n ri_ = rj_ + tf.matmul(V_, adjust_ ) # bring the adjustment back into the x space and apply it\n tauri_ = taurj_*(zetai_-1) # adjust the variance\n\n # non-linear step\n theta_ = tf.Variable(theta_init,dtype=tf.float32,name='theta_'+str(t))\n xhat_,dxdr_ = eta(ri_,tauri_,theta_)\n if t==0:\n learnvars = None # really means \"all\"\n else:\n learnvars=(theta_,)\n layers.append( ('LVAMP-{0} T={1}'.format(shrink,t+1),xhat_, learnvars ) )\n\n if len(dxdr_.get_shape())==2:\n dxdr_ = tf.reduce_mean(dxdr_,axis=0)\n zetaj_ = 1/(1-dxdr_)\n rj_ = (xhat_ - dxdr_*ri_)*zetaj_ # apply Onsager correction\n taurj_ = tauri_*(zetaj_-1) # adjust the variance\n\n return layers\n\ndef build_LVAMP_dense(prob,T,shrink,iid=False):\n \"\"\" Builds the non-SVD (i.e. 
dense) parameterization of LVAMP\n and returns a list of trainable points(name,xhat_,newvars)\n \"\"\"\n eta,theta_init = shrinkage.get_shrinkage_function(shrink)\n layers=[]\n A = prob.A\n M,N = A.shape\n\n Hinit = np.matmul(prob.xinit,la.pinv(prob.yinit) )\n H_ = tf.Variable(Hinit,dtype=tf.float32,name='H0')\n xhat_lin_ = tf.matmul(H_,prob.y_)\n layers.append( ('Linear',xhat_lin_,None) )\n\n if shrink=='pwgrid':\n theta_init = np.linspace(.01,.99,15).astype(np.float32)\n vs_def = np.array(1,dtype=np.float32)\n if not iid:\n theta_init = np.tile( theta_init ,(N,1,1))\n vs_def = np.tile( vs_def ,(N,1))\n\n theta_ = tf.Variable(theta_init,name='theta0',dtype=tf.float32)\n vs_ = tf.Variable(vs_def,name='vs0',dtype=tf.float32)\n rhat_nl_ = xhat_lin_\n rvar_nl_ = vs_ * tf.reduce_sum(prob.y_*prob.y_,0)/N\n\n xhat_nl_,alpha_nl_ = eta(rhat_nl_ , rvar_nl_,theta_ )\n layers.append( ('LVAMP-{0} T={1}'.format(shrink,1),xhat_nl_, None ) )\n for t in range(1,T):\n alpha_nl_ = tf.reduce_mean( alpha_nl_,axis=0) # each col average dxdr\n\n gain_nl_ = 1.0 /(1.0 - alpha_nl_)\n rhat_lin_ = gain_nl_ * (xhat_nl_ - alpha_nl_ * rhat_nl_)\n rvar_lin_ = rvar_nl_ * alpha_nl_ * gain_nl_\n\n H_ = tf.Variable(Hinit,dtype=tf.float32,name='H'+str(t))\n G_ = tf.Variable(.9*np.identity(N),dtype=tf.float32,name='G'+str(t))\n xhat_lin_ = tf.matmul(H_,prob.y_) + tf.matmul(G_,rhat_lin_)\n\n layers.append( ('LVAMP-{0} lin T={1}'.format(shrink,1+t),xhat_lin_, (H_,G_) ) )\n\n alpha_lin_ = tf.expand_dims(tf.diag_part(G_),1)\n\n eps = .5/N\n alpha_lin_ = tf.maximum(eps,tf.minimum(1-eps, alpha_lin_ ) )\n\n vs_ = tf.Variable(vs_def,name='vs'+str(t),dtype=tf.float32)\n\n gain_lin_ = vs_ * 1.0/(1.0 - alpha_lin_)\n rhat_nl_ = gain_lin_ * (xhat_lin_ - alpha_lin_ * rhat_lin_)\n rvar_nl_ = rvar_lin_ * alpha_lin_ * gain_lin_\n\n theta_ = tf.Variable(theta_init,name='theta'+str(t),dtype=tf.float32)\n\n xhat_nl_,alpha_nl_ = eta(rhat_nl_ , rvar_nl_,theta_ )\n alpha_nl_ = tf.maximum(eps,tf.minimum(1-eps, alpha_nl_ ) )\n layers.append( ('LVAMP-{0} nl T={1}'.format(shrink,1+t),xhat_nl_, (vs_,theta_,) ) )\n\n return layers\n"
},
{
"alpha_fraction": 0.5989698767662048,
"alphanum_fraction": 0.6193055510520935,
"avg_line_length": 36.53666687011719,
"blob_id": "750587ff9251d785ba3ffafa90b0c3d28e9b2f4c",
"content_id": "2b55df8b6b81ab3fcbb8647bd811d728b7600247",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 11261,
"license_type": "permissive",
"max_line_length": 139,
"num_lines": 300,
"path": "/tools/problems.py",
"repo_name": "guoruize01/learned_iterative_algorithms",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python\nfrom __future__ import division\nfrom __future__ import print_function\nimport numpy as np\nimport numpy.linalg as la\nimport math\nimport tensorflow as tf\n\nclass Generator(object):\n def __init__(self,A,**kwargs):\n self.A = A\n M,N = A.shape\n vars(self).update(kwargs)\n self.x_ = tf.placeholder( tf.float32,(N,None),name='x' )\n self.y_ = tf.placeholder( tf.float32,(M,None),name='y' )\n\nclass TFGenerator(Generator):\n def __init__(self,**kwargs):\n Generator.__init__(self,**kwargs)\n def __call__(self,sess):\n 'generates y,x pair for training'\n return sess.run( ( self.ygen_,self.xgen_ ) )\n\nclass NumpyGenerator(Generator):\n def __init__(self,**kwargs):\n Generator.__init__(self,**kwargs)\n\n def __call__(self,sess):\n 'generates y,x pair for training'\n return self.p.genYX(self.nbatches,self.nsubprocs)\n\n\ndef bernoulli_gaussian_trial(M=250,N=500,L=1000,pnz=.1,kappa=None,SNR=40):\n # This function returns an object called prob which contains:\n # the measurement matrix, both numpy array A and TensorFlow constant A_,\n # Tensors xgen, ygen_ which can be used in TensorFlow to generate new training data,\n # numpy arrays xval and yval which are used to evaluate the learned network\n # numpy arrays xinit and yinit, which I am not sure are used at all ???\n # and a scalar noise_var\n\n\n A = np.random.normal(size=(M, N), scale=1.0 / math.sqrt(M)).astype(np.float32)\n\n # A_pnz = 0.1\n # A_sparse = ((np.random.uniform(0, 1, (M, N)) < A_pnz) * A / math.sqrt(A_pnz)).astype(np.float32)\n # A = A_sparse\n\n # A_0pm1_pnz = 0.1\n # A_0pm1 = ((np.random.uniform(0, 1, (M, N)) < A_0pm1_pnz) * np.sign(A) / math.sqrt(M * A_0pm1_pnz)).astype(np.float32)\n # A = A_0pm1\n\n # U_helper = np.random.normal(size=(N, N), scale=1.0).astype(np.float32)\n # U, S, Vh = np.linalg.svd(U_helper)\n # # print(U.shape)\n # rows = np.random.permutation(np.arange(N))[0:M]\n # A = U[rows, :] * np.sqrt(N/M)\n\n col_normalized = False\n if col_normalized:\n A = A / np.sqrt(np.sum(np.square(A), axis=0, keepdims=True))\n\n if not(kappa is None):\n if kappa >= 1:\n # create a random operator with a specific condition number\n U,_,V = la.svd(A,full_matrices=False)\n s = np.logspace( 0, np.log10( 1/kappa),M)\n A = np.dot( U*(s*np.sqrt(N)/la.norm(s)),V).astype(np.float32)\n A_ = tf.constant(A,name='A')\n prob = TFGenerator(A=A,A_=A_,pnz=pnz,kappa=kappa,SNR=SNR)\n prob.name = 'Bernoulli-Gaussian, random A'\n\n from scipy.io import loadmat\n W_dict = loadmat('W.mat')\n prob.W = np.transpose(W_dict[\"W\"])\n\n bernoulli_ = tf.to_float( tf.random_uniform( (N,L) ) < pnz)\n xgen_ = bernoulli_ * tf.random_normal( (N,L) )\n noise_var = pnz*N/M * math.pow(10., -SNR / 10.)\n ygen_ = tf.matmul( A_,xgen_) + tf.random_normal( (M,L),stddev=math.sqrt( noise_var ) )\n\n prob.xval = ((np.random.uniform( 0,1,(N,L))<pnz) * np.random.normal(0,1,(N,L))).astype(np.float32)\n prob.yval = np.matmul(A,prob.xval) + np.random.normal(0,math.sqrt( noise_var ),(M,L))\n prob.xinit = ((np.random.uniform( 0,1,(N,L))<pnz) * np.random.normal(0,1,(N,L))).astype(np.float32)\n prob.yinit = np.matmul(A,prob.xinit) + np.random.normal(0,math.sqrt( noise_var ),(M,L))\n prob.xgen_ = xgen_\n prob.ygen_ = ygen_\n prob.noise_var = noise_var\n prob.pnz = pnz\n\n from scipy.io import loadmat\n W_dict = loadmat('W.mat')\n prob.W = np.transpose(W_dict[\"W\"])\n\n return prob\n\ndef one_bit_CS_with_BG_prior(M=500, N=500, L=1000, pnz=.1, kappa=None, SNR=None, tf_floattype = tf.float32):\n # This function returns an object called prob which 
contains:\n # the measurement matrix, both numpy array A and TensorFlow constant A_,\n # Tensors xgen, ygen_ which can be used in TensorFlow to generate new training data,\n # numpy arrays xval and yval which are used to evaluate the learned network\n # numpy arrays xinit and yinit, which I am not sure are used at all ???\n # and a scalar noise_va\n\n A = np.random.normal(size=(M, N), scale=1.0 / math.sqrt(M)).astype(np.float32)\n\n A_ = tf.constant(A, name='A', dtype=tf_floattype)\n prob = TFGenerator(A=A, tf_floattype=tf_floattype, A_=A_,pnz=pnz,kappa=kappa,SNR=SNR)\n prob.name = '1bit CS, BG prior, Gaussian A'\n prob.pnz = pnz\n prob.code_symbol = np.sqrt(prob.pnz*N/M)\n\n bernoulli_ = tf.to_float( tf.random_uniform( (N,L) ) < pnz)\n xgen_ = bernoulli_ * tf.random_normal( (N,L) )\n\n xgen_ = tf.dtypes.cast(xgen_, dtype=tf_floattype)\n\n if SNR is None:\n noise_var = 0\n else:\n # This definition is with correspondence to the MATLAB code\n # Here the SNR is related to P(y)/P(w)\n noise_var = math.pow(10., -SNR / 10.)*prob.code_symbol**2\n # where as in this definition SNR is related to P(x)/P(w)\n # noise_var = pnz * N / M * math.pow(10., -SNR / 10.)\n\n ygen_ = prob.code_symbol*tf.math.sign(tf.matmul(A_, xgen_)) + tf.random_normal((M, L), stddev=math.sqrt(noise_var), dtype=tf_floattype)\n ygen_ = tf.dtypes.cast(ygen_, dtype=tf_floattype)\n\n prob.xval = ((np.random.uniform( 0,1,(N,L))<pnz) * np.random.normal(0,1,(N,L))).astype(np.float32)\n prob.y_starval = prob.code_symbol*np.sign(np.matmul(A,prob.xval))\n prob.noise = np.random.normal(0,math.sqrt( noise_var ),(M,L))\n prob.yval = prob.y_starval + prob.noise\n\n prob.xinit = ((np.random.uniform( 0,1,(N,L))<pnz) * np.random.normal(0,1,(N,L))).astype(np.float32)\n prob.yinit = np.sign(np.matmul(A,prob.xinit)) + np.random.normal(0,math.sqrt( noise_var ),(M,L))\n\n prob.xgen_ = xgen_\n prob.ygen_ = ygen_\n prob.noise_var = noise_var\n\n y_star_test = prob.y_starval[:,1]\n\n y_starval_norm2 = la.norm(prob.y_starval[:,1])**2\n noise_norm2 = la.norm(prob.noise[:,1])**2\n\n # print('y_star_test size is', y_star_test.size)\n # print('y_starval norm2 is', y_starval_norm2)\n # print('prob.noise var=',prob.noise_var)\n # print('noise norm2 is', noise_norm2)\n\n SNR = 10*np.log10(y_starval_norm2 / noise_norm2)\n print('SNR=',SNR)\n\n return prob\n\n\ndef ssc_problem(n=50, L=4, bps=4, MC=1000, SNR_dB=None):\n # This function returns an object called prob which contains:\n # the measurement matrix, both numpy array A and TensorFlow constant A_,\n # Tensors xgen, ygen_ which can be used in TensorFlow to generate new training data,\n # numpy arrays xval and yval which are used to evaluate the learned network\n # numpy arrays xinit and yinit, which I am not sure are used at all ???\n # and a scalar noise_va\n\n # MC is the number of transmitted messages (Monte Carlo simulations)\n # L is the number of sections\n # bits per section\n # R is the rate of the code, bits per channel use\n # number of channel uses, i.e., number of rows of A\n\n\n B = np.power(2, bps) # B is the size of the section\n N = B * L # N is the length of a uncoded SSC message, i.e., number of columns of A\n k = L * bps # number of transmitted bits\n noise_var =1\n\n A = np.random.normal(size=(n, N), scale=1.0 / math.sqrt(n)).astype(np.float32)\n\n A_ = tf.constant(A, name='A')\n prob = TFGenerator(A=A, A_=A_, kappa=None, SNR=SNR_dB)\n prob.name = 'SSC, Gaussian A'\n prob.n = n\n prob.L = L\n prob.bps = bps\n prob.SNR_dB = SNR_dB\n prob.B = B\n prob.N = N\n prob.k = k\n prob.P = 
math.pow(10., SNR_dB / 10.)\n prob.Pl = prob.P/L\n prob.noise_var = noise_var\n prob.sqrtnPl = np.sqrt(n*prob.Pl)\n\n\n\n # Create tf vectors\n messages_ = tf.random.uniform((MC, L), maxval=B, dtype=tf.int32)\n prob.messages_ = messages_\n\n x_ = tf.one_hot(messages_, depth=B)\n xgen_ = prob.sqrtnPl*tf.transpose(tf.reshape(x_, [MC, N]))\n y_clean_gen_ = tf.matmul(A_, xgen_)\n noise_ = tf.random_normal((n, MC), stddev=math.sqrt(noise_var))\n ygen_ = y_clean_gen_ + noise_\n\n # Create validation vectors\n messages = np.random.randint(0, B, (L, MC))\n x = np.zeros((N, MC))\n for sample_index in range(MC):\n for i in range(0, L):\n x[i * B + messages[i, sample_index], sample_index] = prob.sqrtnPl\n\n prob.xval = x\n prob.y_cleanval = np.matmul(A, prob.xval)\n prob.noise = np.random.normal(0, math.sqrt(noise_var), (n, MC))\n prob.yval = np.matmul(A, prob.xval) + prob.noise\n\n # Uncomment for checking SNR\n y_clean_norm2 = np.mean(la.norm(prob.y_cleanval, axis=0)**2)\n noise_norm2 = np.mean(la.norm(prob.noise, axis=0)**2)\n SNR_empirical = y_clean_norm2/noise_norm2\n SNR_dB_empirical = 10*np.log10(SNR_empirical)\n print('y_clean_norm=',y_clean_norm2)\n print('noise_norm=',noise_norm2)\n print('SNR_empirical=', SNR_empirical)\n print('SNR_dB_empirical=', SNR_dB_empirical)\n\n # # Not sure if this is needed\n # prob.xinit = ((np.random.uniform( 0,1,(N,L))<pnz) * np.random.normal(0,1,(N,L))).astype(np.float32)\n # prob.yinit = np.sign(np.matmul(A,prob.xinit)) + np.random.normal(0,math.sqrt( noise_var ),(M,L))\n\n prob.xgen_ = xgen_\n prob.ygen_ = ygen_\n\n return prob\n\ndef block_gaussian_trial(m=128, L=32, B=16, MC=1000, pnz=.1, SNR_dB=20):\n\n N = B * L # N is the length of a the unknown block-sparse x\n A = np.random.normal(size=(m, N), scale=1.0 / math.sqrt(m)).astype(np.float32)\n A_ = tf.constant(A, name='A')\n prob = TFGenerator(A=A, A_=A_, kappa=None, SNR=SNR_dB)\n\n prob.name = 'block sparse, Gaussian A'\n prob.L = L\n prob.B = B\n prob.N = N\n prob.SNR_dB = SNR_dB\n prob.pnz = pnz\n\n # Create tf vectors\n active_blocks_ = tf.to_float(tf.random_uniform((L, 1, MC)) < pnz)\n ones_ = tf.ones([L, B, MC])\n\n product_ = tf.multiply(active_blocks_, ones_)\n xgen_ = tf.reshape(product_, [L * B, MC])\n\n # you should probably change the way noise_var is calculated\n noise_var = pnz * N / m * math.pow(10., -SNR_dB / 10.)\n ygen_ = tf.matmul(A_, xgen_) + tf.random_normal((m, MC), stddev=math.sqrt(noise_var))\n\n active_blocks_val = (np.random.uniform( 0,1,(L,MC))<pnz).astype(np.float32)\n active_entries_val = np.repeat(active_blocks_val, B, axis=0)\n xval = np.multiply(active_entries_val, np.random.normal(0,1,(N,MC)))\n yval = np.matmul(A,xval) + np.random.normal(0,math.sqrt( noise_var ),(m,MC))\n\n prob.xgen_ = xgen_\n prob.ygen_ = ygen_\n prob.xval = xval\n prob.yval = yval\n prob.noise_var = noise_var\n\n return prob\n\n\ndef random_access_problem(which=1):\n from tools import raputil as ru\n if which == 1:\n opts = ru.Problem.scenario1()\n else:\n opts = ru.Problem.scenario2()\n\n p = ru.Problem(**opts)\n x1 = p.genX(1)\n y1 = p.fwd(x1)\n A = p.S\n M,N = A.shape\n nbatches = int(math.ceil(1000 /x1.shape[1]))\n prob = NumpyGenerator(p=p,nbatches=nbatches,A=A,opts=opts,iid=(which==1))\n if which==2:\n prob.maskX_ = tf.expand_dims( tf.constant( (np.arange(N) % (N//2) < opts['Nu']).astype(np.float32) ) , 1)\n\n _,prob.noise_var = p.add_noise(y1)\n\n unused = p.genYX(nbatches) # for legacy reasons -- want to compare against a previous run\n (prob.yval, prob.xval) = p.genYX(nbatches)\n (prob.yinit, 
prob.xinit) = p.genYX(nbatches)\n import multiprocessing as mp\n prob.nsubprocs = mp.cpu_count()\n return prob\n"
},
{
"alpha_fraction": 0.688622772693634,
"alphanum_fraction": 0.7100798487663269,
"avg_line_length": 39.8979606628418,
"blob_id": "77e84ceda0d06be509de46f7470c9f25689d4007",
"content_id": "7fb025ca6bda00abede27cb51fce267e9a8d4ed4",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2004,
"license_type": "permissive",
"max_line_length": 146,
"num_lines": 49,
"path": "/LBISTA.py",
"repo_name": "guoruize01/learned_iterative_algorithms",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python\nfrom __future__ import division\nfrom __future__ import print_function\n\"\"\"\nThis file serves as an example of how to \na) select a problem to be solved \nb) select a network type\nc) train the network to minimize recovery MSE\n\nIt provides a basic skeleton for training a Learned Block-ISTA (LBISTA) network\n\"\"\"\n\nimport numpy as np\nimport os\n\nos.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' # BE QUIET!!!!\nimport tensorflow as tf\n\nnp.random.seed(1) # numpy is good about making repeatable output\ntf.set_random_seed(1) # on the other hand, this is basically useless (see issue 9171)\n\n# import our problems, networks and training modules\nfrom tools import problems,networks,train\n\nclass Parameters(object):\n def __init__(self,MC,**not_obligatory):\n self.MC = MC\n vars(self).update(not_obligatory)\n\nparameters = Parameters(MC = 1000) # MC is the training batch size\nparameters.L = 32 # L is the number of blocks\nparameters.B = 16 # size of each block\nparameters.R = 1.0 # R is the rate of the code, bits per channel use\nparameters.m = 128 # number of measurements, i.e., number of rows of A\nparameters.SNR_dB = 8 # training and evaluation SNR in dB\n\n# Create the basic problem structure.\nprob = problems.block_gaussian_trial(m=128, L=32, B=16, MC=1000, pnz=.1, SNR_dB=20) # a Block-Gaussian x, noisily observed through a random matrix\n\n\n# build a LBISTA network to solve the problem and get the intermediate results so we can greedily extend and then refine(fine-tune)\nlayers = networks.build_LBISTA(prob,T=6,initial_lambda=.1,untied=False)\n\n# plan the learning\n# if you want to minimize nmse you do not need to reimplement the following two lines. You can just play with the parameters of learning\ntraining_stages = train.setup_training(layers,prob,trinit=1e-3,refinements=(.5,.1,.01) )\n\n# do the learning (takes a while)\nsess = train.do_training(training_stages,prob,'LBISTA_block_Gauss_giid.npz')\n"
},
{
"alpha_fraction": 0.6888677477836609,
"alphanum_fraction": 0.7354900240898132,
"avg_line_length": 34.06666564941406,
"blob_id": "58a3e1301c591e3f824ab3cb51cc6cb849c45360",
"content_id": "afc4a510e9a1cab370f4d06493183081e7b03cef",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1057,
"license_type": "permissive",
"max_line_length": 182,
"num_lines": 30,
"path": "/LGAMPtests/check_prob.py",
"repo_name": "guoruize01/learned_iterative_algorithms",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python\nfrom __future__ import division\nfrom __future__ import print_function\n\"\"\"\nThis file serves as an example of how to \na) select a problem to be solved \nb) select a network type\nc) train the network to minimize recovery MSE\n\n\"\"\"\nimport numpy as np\nimport math\nimport numpy.linalg as la\nimport os\nimport matplotlib.pyplot as plt\n\nos.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' # BE QUIET!!!!\nos.environ['KMP_DUPLICATE_LIB_OK']='True'\nimport tensorflow as tf\nimport tensorflow_probability as tfp\n\nnp.random.seed(1) # numpy is good about making repeatable output\ntf.set_random_seed(1) # on the other hand, this is basically useless (see issue 9171)\n\nfrom tools import problems\n\nprob = problems.one_bit_CS_with_BG_prior(kappa=None,M=2000,N=512,L=1000,pnz=0.0625,SNR=2, tf_floattype = tf.float32) #a Bernoulli-Gaussian x, noisily observed through a random matrix\n\nprint('prob.noise_var=', prob.noise_var, ', and should be equal to 0.010095317511683')\nprint('la.norm(prob.yval[:,0])=', la.norm(prob.yval[:,0]), ', and should be close to 7.2552')"
},
{
"alpha_fraction": 0.5838876962661743,
"alphanum_fraction": 0.6000733375549316,
"avg_line_length": 42.390907287597656,
"blob_id": "2748240618e6ef1dc9eb0cca2120a5e4370767b1",
"content_id": "b31a63379167daabe5f80324990ec8f0c1057254",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 19091,
"license_type": "permissive",
"max_line_length": 176,
"num_lines": 440,
"path": "/tools/train.py",
"repo_name": "guoruize01/learned_iterative_algorithms",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python\nfrom __future__ import division\nfrom __future__ import print_function\nimport numpy as np\nimport numpy.linalg as la\nimport sys\nimport tensorflow as tf\nimport time\n\ndef save_trainable_vars(sess,filename,**kwargs):\n \"\"\"save a .npz archive in `filename` with\n the current value of each variable in tf.trainable_variables()\n plus any keyword numpy arrays.\n \"\"\"\n save={}\n for v in tf.trainable_variables():\n save[str(v.name)] = sess.run(v)\n save.update(kwargs)\n np.savez(filename,**save)\n\ndef load_trainable_vars(sess,filename):\n \"\"\"load a .npz archive and assign the value of each loaded\n ndarray to the trainable variable whose name matches the\n archive key. Any elements in the archive that do not have\n a corresponding trainable variable will be returned in a dict.\n \"\"\"\n other={}\n try:\n tv=dict([ (str(v.name),v) for v in tf.trainable_variables() ])\n for k,d in np.load(filename).items():\n if k in tv:\n print('restoring ' + k + ' is:' + str(d))\n sess.run(tf.assign( tv[k], d) )\n else:\n other[k] = d\n except IOError:\n pass\n return other\n\ndef get_train_variables(sess):\n \"\"\"save a .npz archive in `filename` with\n the current value of each variable in tf.trainable_variables()\n plus any keyword numpy arrays.\n \"\"\"\n save={}\n for v in tf.trainable_variables():\n save[str(v.name)] = sess.run(v)\n \n return save\n\ndef setup_training(layer_info,prob, trinit=1e-3,refinements=(.5,.1,.01),final_refine=None ):\n \"\"\" Given a list of layer info (name,xhat_,newvars),\n create an output list of training operations (name,xhat_,loss_,nmse_,trainop_ ).\n Each layer_info element will be split into one or more output training operations\n based on the presence of newvars and len(refinements)\n \"\"\"\n losses_=[]\n nmse_=[]\n trainers_=[]\n assert np.array(refinements).min()>0,'all refinements must be in (0,1]'\n assert np.array(refinements).max()<=1,'all refinements must be in (0,1]'\n\n maskX_ = getattr(prob,'maskX_',1)\n if maskX_ != 1:\n print('masking out inconsequential parts of signal x for nmse reporting')\n\n nmse_denom_ = tf.nn.l2_loss(prob.x_ *maskX_)\n\n tr_ = tf.Variable(trinit,name='tr',trainable=False)\n training_stages=[]\n for name,xhat_,rhat_,rvar_,var_list in layer_info:\n loss_ = tf.nn.l2_loss( xhat_ - prob.x_)\n nmse_ = tf.nn.l2_loss( (xhat_ - prob.x_)*maskX_) / nmse_denom_\n sigma2_ = tf.reduce_mean(rvar_)\n sigma2_empirical_ = tf.reduce_mean((rhat_ - prob.x_)**2)\n\n se_ = 2 * tf.nn.l2_loss(xhat_ - prob.x_)# to get MSE, divide by / (L * N)\n\n if var_list is not None:\n train_ = tf.train.AdamOptimizer(tr_).minimize(loss_, var_list=var_list)\n training_stages.append( (name,xhat_,sigma2_,loss_,nmse_,sigma2_empirical_,se_,train_,var_list) )\n for fm in refinements:\n train2_ = tf.train.AdamOptimizer(tr_*fm).minimize(loss_)\n training_stages.append( (name+' trainrate=' + str(fm) ,xhat_,sigma2_,loss_,nmse_,sigma2_empirical_,se_,train2_,()) )\n if final_refine:\n train2_ = tf.train.AdamOptimizer(tr_*final_refine).minimize(loss_)\n training_stages.append( (name+' final refine ' + str(final_refine) ,xhat_,sigma2_,loss_,nmse_,sigma2_empirical_,se_,train2_,()) )\n\n return training_stages\n\ndef binary_crossentropy(target, output, from_logits=False):\n \"\"\"Binary crossentropy between an output tensor and a target tensor.\n Arguments:\n target: A tensor with the same shape as `output`.\n output: A tensor.\n from_logits: Whether `output` is expected to be a logits tensor.\n By default, we consider that `output`\n encodes a 
probability distribution.\n Returns:\n A tensor.\n \"\"\"\n # Note: nn.sigmoid_cross_entropy_with_logits\n # expects logits, Keras expects probabilities.\n if not from_logits:\n # transform back to logits\n epsilon_ = tf.convert_to_tensor(1e-07, output.dtype.base_dtype)\n output = tf.clip_by_value(output, epsilon_, 1 - epsilon_)\n output = tf.log(output / (1 - output))\n return tf.nn.sigmoid_cross_entropy_with_logits(labels=target, logits=output)\n\ndef setup_LAMP4SSCtraining(layer_info,prob, trinit=1e-3,refinements=(.5,.1,.01),final_refine=None, parameters=None ):\n \"\"\" Given a list of layer info (name,xhat_,newvars),\n create an output list of training operations (name,xhat_,loss_,nmse_,trainop_ ).\n Each layer_info element will be split into one or more output training operations\n based on the presence of newvars and len(refinements)\n \"\"\"\n losses_=[]\n nmse_=[]\n trainers_=[]\n assert np.array(refinements).min()>0,'all refinements must be in (0,1]'\n assert np.array(refinements).max()<=1,'all refinements must be in (0,1]'\n\n nmse_denom_ = tf.nn.l2_loss(prob.x_)\n\n tr_ = tf.Variable(trinit,name='tr',trainable=False)\n training_stages=[]\n for name,xhat_,var_list in layer_info:\n nmse_ = tf.nn.l2_loss( xhat_ - prob.x_) / nmse_denom_\n\n\n xhat_rs_ = tf.reshape(tf.transpose(xhat_), (-1, prob.B))\n messages_hat_ = tf.reshape(tf.arg_max(xhat_rs_, 1), (-1, prob.L))\n\n xtrue_rs_ = tf.reshape(tf.transpose(prob.x_), (-1, prob.B))\n messages_true_ = tf.reshape(tf.arg_max(xtrue_rs_, 1), (-1, prob.L))\n\n error_matrix_ = tf.dtypes.cast(tf.math.equal(messages_true_, tf.dtypes.cast(messages_hat_, tf.int64)),\n dtype=tf.float32)\n ser_ = 1 - tf.reduce_mean(error_matrix_)\n\n if parameters.loss == 'nmse':\n loss_ = tf.nn.l2_loss(xhat_ - prob.x_)\n elif parameters.loss == 'log loss with probs':\n xhat_rs_01_ = xhat_rs_ / prob.sqrtnPl\n xtrue_rs_01_ = xtrue_rs_ / prob.sqrtnPl\n # sigmoid_cross_entropy_with_logits loss is not the way to go since 1.) it does not consider a binary problem\n # and 2.) each class is independent and not mutually exclusive. However it is interesting the it still kinda\n # wors just as well as l2_loss. 
It should be noted that I don't use logits, and use probabilities instead\n loss_ = tf.nn.sigmoid_cross_entropy_with_logits(labels=xtrue_rs_01_, logits=xhat_rs_01_)\n elif parameters.loss == 'binary crossentropy':\n # https://peltarion.com/knowledge-center/documentation/modeling-view/build-an-ai-model/loss-functions/binary-crossentropy\n loss_ = binary_crossentropy(xtrue_rs_01_, xhat_rs_01_)\n\n # loss_ = tf.nn.l2_loss( xhat_rs_01_*(1-xtrue_rs_01_) ) # this is shit\n\n # name = name + ' ' + parameters.loss\n\n\n if var_list is not None:\n train_ = tf.train.AdamOptimizer(tr_).minimize(loss_, var_list=var_list)\n training_stages.append( (name,xhat_,loss_,nmse_,ser_,train_,var_list) )\n for fm in refinements:\n train2_ = tf.train.AdamOptimizer(tr_*fm).minimize(loss_)\n training_stages.append( (name+' trainrate=' + str(fm) ,xhat_,loss_,nmse_,ser_,train2_,()) )\n if final_refine:\n train2_ = tf.train.AdamOptimizer(tr_*final_refine).minimize(loss_)\n training_stages.append( (name+' final refine ' + str(final_refine) ,xhat_,loss_,nmse_,ser_,train2_,()) )\n\n return training_stages\n\ndef setup_LGAMPtraining(layer_info,prob, trinit=1e-3,refinements=(.5,.1,.01),final_refine=None ):\n \"\"\" Given a list of layer info (name,xhat_,newvars),\n create an output list of training operations (name,xhat_,loss_,nmse_,trainop_ ).\n Each layer_info element will be split into one or more output training operations\n based on the presence of newvars and len(refinements)\n \"\"\"\n losses_=[]\n nmse_=[]\n trainers_=[]\n assert np.array(refinements).min()>0,'all refinements must be in (0,1]'\n assert np.array(refinements).max()<=1,'all refinements must be in (0,1]'\n\n tr_ = tf.Variable(trinit,name='tr',trainable=False)\n training_stages=[]\n for name,xhat_,var_list in layer_info:\n # loss_ = tf.nn.l2_loss( xhat_ - prob.x_)\n loss_ = tf.nn.l2_loss( xhat_/tf.norm(xhat_, axis=0) - prob.x_/tf.norm(prob.x_, axis=0))\n\n nmse_ = tf.reduce_mean(tf.norm(xhat_/tf.norm(xhat_, axis=0) - prob.x_/tf.norm(prob.x_, axis=0), axis=0)**2)\n # loss_ = nmse_\n\n if var_list is not None:\n train_ = tf.train.AdamOptimizer(tr_).minimize(loss_, var_list=var_list)\n training_stages.append( (name,xhat_,loss_,nmse_,train_,var_list) )\n for fm in refinements:\n train2_ = tf.train.AdamOptimizer(tr_*fm).minimize(loss_)\n training_stages.append( (name+' trainrate=' + str(fm) ,xhat_,loss_,nmse_,train2_,()) )\n if final_refine:\n train2_ = tf.train.AdamOptimizer(tr_*final_refine).minimize(loss_)\n training_stages.append( (name+' final refine ' + str(final_refine) ,xhat_,loss_,nmse_,train2_,()) )\n\n return training_stages\n\ndef do_training(training_stages,prob,savefile,ivl=10,maxit=1000000,better_wait=5000):\n \"\"\"\n ivl:how often should we compute the nmse of the validation set?\n maxit: max number of training iterations\n better_wait:wait this many iterations for an nmse that is better than the prevoius best of the current training session\n \"\"\"\n sess = tf.Session()\n sess.run(tf.global_variables_initializer())\n\n print('norms xval:{xval:.7f} yval:{yval:.7f}'.format(xval=la.norm(prob.xval), yval=la.norm(prob.yval) ) )\n\n state = load_trainable_vars(sess,savefile) # must load AFTER the initializer\n\n # must use this same Session to perform all training\n # if we start a new Session, things would replay and we'd be training with our validation set (no no)\n\n done=state.get('done',[])\n log=str(state.get('log',''))\n\n for name,xhat_,rvar_,loss_,nmse_,sigma2_empirical_,se_,train_,var_list in training_stages:\n start = time.time()\n if 
name in done:\n print('Already did ' + name + '. Skipping.')\n continue\n if len(var_list):\n describe_var_list = 'extending ' + ','.join([v.name for v in var_list])\n else:\n describe_var_list = 'fine tuning all ' + ','.join([v.name for v in tf.trainable_variables() ])\n\n print(name + ' ' + describe_var_list)\n nmse_history=[]\n for i in range(maxit+1):\n if i%ivl == 0:\n nmse = sess.run(nmse_,feed_dict={prob.y_:prob.yval,prob.x_:prob.xval})\n if np.isnan(nmse):\n raise RuntimeError('nmse is NaN')\n nmse_history = np.append(nmse_history,nmse)\n nmse_dB = 10*np.log10(nmse)\n nmsebest_dB = 10*np.log10(nmse_history.min())\n sys.stdout.write('\\ri={i:<6d} nmse={nmse:.6f} dB (best={best:.6f})'.format(i=i,nmse=nmse_dB,best=nmsebest_dB))\n sys.stdout.flush()\n if i%(100*ivl) == 0:\n print('')\n age_of_best = len(nmse_history) - nmse_history.argmin()-1 # how long ago was the best nmse?\n if age_of_best*ivl > better_wait:\n break # if it has not improved on the best answer for quite some time, then move along\n y,x = prob(sess)\n sess.run(train_,feed_dict={prob.y_:y,prob.x_:x} )\n done = np.append(done,name)\n \n end = time.time()\n time_log = 'Took me {totaltime:.3f} minutes, or {time_per_interation:.1f} ms per iteration'.format(totaltime = (end-start)/60, time_per_interation = (end-start)*1000/i)\n print(time_log)\n log = log+'\\n{name} nmse={nmse:.6f} dB in {i} iterations'.format(name=name,nmse=nmse_dB,i=i)\n\n state['done'] = done\n state['log'] = log\n save_trainable_vars(sess,savefile,**state)\n return sess\n\ndef do_LAMP4SSCtraining(training_stages, prob, savefile, ivl=10, maxit=1000000, better_wait=5000):\n \"\"\"\n ivl:how often should we compute the nmse of the validation set?\n maxit: max number of training iterations\n better_wait:wait this many iterations for an nmse that is better than the prevoius best of the current training session\n \"\"\"\n sess = tf.Session()\n sess.run(tf.global_variables_initializer())\n\n print('norms xval:{xval:.7f} yval:{yval:.7f}'.format(xval=la.norm(prob.xval), yval=la.norm(prob.yval)))\n\n state = load_trainable_vars(sess, savefile) # must load AFTER the initializer\n\n # must use this same Session to perform all training\n # if we start a new Session, things would replay and we'd be training with our validation set (no no)\n\n done = state.get('done', [])\n log = str(state.get('log', ''))\n\n for name, xhat_, loss_, nmse_, ser_, train_, var_list in training_stages:\n start = time.time()\n if name in done:\n print('Already did ' + name + '. 
Skipping.')\n continue\n if len(var_list):\n describe_var_list = 'extending ' + ','.join([v.name for v in var_list])\n else:\n describe_var_list = 'fine tuning all ' + ','.join([v.name for v in tf.trainable_variables()])\n\n print(name + ' ' + describe_var_list)\n nmse_history = []\n for i in range(maxit + 1):\n if i % ivl == 0:\n nmse, ser = sess.run([nmse_, ser_], feed_dict={prob.y_: prob.yval, prob.x_: prob.xval})\n if np.isnan(nmse):\n raise RuntimeError('nmse is NaN')\n nmse_history = np.append(nmse_history, nmse)\n nmse_dB = 10 * np.log10(nmse)\n nmsebest_dB = 10 * np.log10(nmse_history.min())\n sys.stdout.write(\n '\\ri={i:<6d} nmse={nmse:.6f} dB (best={best:.6f}) with ser={ser:.6f}'.format(i=i, nmse=nmse_dB, best=nmsebest_dB, ser=ser))\n sys.stdout.flush()\n if i % (100 * ivl) == 0:\n print('')\n age_of_best = len(nmse_history) - nmse_history.argmin() - 1 # how long ago was the best nmse?\n if age_of_best * ivl > better_wait:\n break # if it has not improved on the best answer for quite some time, then move along\n y, x = prob(sess)\n sess.run(train_, feed_dict={prob.y_: y, prob.x_: x})\n done = np.append(done, name)\n\n end = time.time()\n time_log = '\\nTook me {totaltime:.3f} minutes, or {time_per_interation:.1f} ms per iteration\\n'.format(\n totaltime=(end - start) / 60, time_per_interation=(end - start) * 1000 / i)\n print(time_log)\n log = log + '\\n{name} nmse={nmse:.6f} dB in {i} iterations'.format(name=name, nmse=nmse_dB, i=i)\n\n state['done'] = done\n state['log'] = log\n save_trainable_vars(sess, savefile, **state)\n return sess\n\ndef evaluate_nmse(sess, training_stages, prob, savefile, pnz=.1, SNR=40, L=1000):\n import math\n\n A = prob.A\n M,N = A.shape\n\n noise_var = pnz*N/M * math.pow(10., -SNR / 10.)\n\n data_set_size = 100;\n\n xtest = ((np.random.uniform( 0,1,(N,data_set_size))<pnz) * np.random.normal(0,1,(N,data_set_size))).astype(np.float32)\n ytest = np.matmul(A, xtest) + np.random.normal(0,math.sqrt( noise_var ),(M,data_set_size))\n\n nmse_dB_arrray = []\n mse_dB_arrray = []\n sigma2_dB_array = []\n sigma2_empirical_array = []\n\n for name, xhat_, sigma2_, loss_, nmse_, sigma2_empirical_, se_, train_, var_list in training_stages:\n\n if \" trainrate=\" not in name:\n nmse, se, sigma2, sigma2_empirical = sess.run([nmse_, se_, sigma2_, sigma2_empirical_], feed_dict={prob.y_: ytest, prob.x_: xtest})\n\n nmse_dB = 10 * np.log10(nmse)\n mse_dB = 10 * np.log10(se/(data_set_size*N))\n sigma2_dB = 10 * np.log10(sigma2)\n sigma2_empirical_dB = 10 * np.log10(sigma2_empirical)\n print('{name} nmse={nmse:.6f} dB'.format(name=name,nmse=nmse_dB))\n\n nmse_dB_arrray.append(nmse_dB)\n mse_dB_arrray.append(mse_dB)\n sigma2_dB_array.append(sigma2_dB)\n sigma2_empirical_array.append(sigma2_empirical_dB)\n\n print('nmse/dB=', nmse_dB_arrray)\n print('mse/dB=', mse_dB_arrray)\n print('sigma2/dB=', sigma2_dB_array)\n print('sigma2_empirical/dB=', sigma2_empirical_array)\n\ndef evaluate_LAMP4SSC_nmse(sess, training_stages, prob, savefile, SNR=40):\n import math\n\n L = prob.L\n B = prob.B\n A = prob.A\n n,N = A.shape\n\n noise_var = 1\n\n data_set_size = 1000;\n\n # Create validation vectors\n messages = np.random.randint(0, B, (L, data_set_size))\n xtest = np.zeros((N, data_set_size))\n for sample_index in range(data_set_size):\n for i in range(0, L):\n xtest[i * B + messages[i, sample_index], sample_index] = prob.sqrtnPl\n\n y_clean = np.matmul(A, xtest)\n noise = np.random.normal(0, math.sqrt(noise_var), (n, data_set_size))\n ytest= y_clean + noise\n\n ser_arrray = []\n 
nmse_arrray = []\n\n for name, xhat_, loss_, nmse_, ser_, train_, var_list in training_stages:\n\n if \" trainrate=\" not in name:\n nmse, ser = sess.run([nmse_, ser_], feed_dict={prob.y_: ytest, prob.x_: xtest})\n nmse_dB = 10 * np.log10(nmse)\n print('{name} nmse={nmse:.6f} dB with ser={ser:.6f}'.format(name=name,nmse=nmse_dB,ser=ser))\n nmse_arrray.append(nmse_dB)\n ser_arrray.append(ser)\n\n print('SER = [', ', '.join(['{:.5f}'.format(ser) for ser in ser_arrray]), '];')\n print('NMSE/dB = [', ', '.join(['{:.5f}'.format(nmse) for nmse in nmse_arrray]), '];')\n\n\ndef evaluate_LGAMP_nmse(sess, training_stages, prob, pnz=.1, SNR=2):\n import math\n\n A = prob.A\n M, N = A.shape\n\n scale = prob.code_symbol\n\n noise_var = math.pow(10., -SNR / 10.) * scale ** 2\n\n data_set_size = 1000;\n\n xtest = ((np.random.uniform(0, 1, (N, data_set_size)) < pnz) * np.random.normal(0, 1, (N, data_set_size))).astype(\n np.float32)\n y_starval = scale * np.sign(np.matmul(A, xtest))\n noise = np.random.normal(0, math.sqrt(noise_var), (M, data_set_size))\n ytest = y_starval + noise\n\n x_true = xtest\n\n for name, xhat_, rvar_, loss_, nmse_, se_, train_, var_list in training_stages:\n if \" trainrate=\" not in name:\n # The following 6 lines of code are used in debugging.\n # They provide NMSE and the count of nan occurrences\n\n\n # x_hat = sess.run(xhat_, feed_dict={prob.y_:ytest, prob.x_:xtest})\n #\n # NMSE = la.norm(x_true/la.norm(x_true, axis=0) - x_hat/la.norm(x_hat, axis=0), axis=0)**2\n # L = len(NMSE)\n # NMSE_no_Nan = NMSE[np.logical_not(np.isnan(NMSE))]\n # NMSE_dB = 10*math.log10(np.mean(NMSE_no_Nan))\n # print(name, 'NMSE=', NMSE_dB, '\\tdB with',L - len(NMSE_no_Nan),'instances of NaN (out of',L,')')\n\n nmse = sess.run(nmse_, feed_dict={prob.y_: ytest, prob.x_: xtest})\n nmse_dB = 10 * np.log10(nmse)\n print('{name} NMSE= {nmse:.6f} \\tdB'.format(name=name, nmse=nmse_dB))"
},
{
"alpha_fraction": 0.699448823928833,
"alphanum_fraction": 0.7159849405288696,
"avg_line_length": 42.64556884765625,
"blob_id": "115d28fbb69089315082a88a1a73852e96465e20",
"content_id": "ba92ebb33d05fd4437dc016a13364a51db9c5446",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3447,
"license_type": "permissive",
"max_line_length": 131,
"num_lines": 79,
"path": "/LAMP4SSC.py",
"repo_name": "guoruize01/learned_iterative_algorithms",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python\nfrom __future__ import division\nfrom __future__ import print_function\n\"\"\"\nThis file serves as an example of how to \na) select a problem to be solved \nb) select a network type\nc) train the network to minimize recovery MSE\n\n\"\"\"\nimport numpy as np\nimport math\nimport numpy.linalg as la\nimport os\n\nos.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' # BE QUIET!!!!\nos.environ['KMP_DUPLICATE_LIB_OK']='True'\n# import tensorflow as tf\n\nimport tensorflow.compat.v1 as tf\ntf.disable_v2_behavior()\n\n\nnp.random.seed(1) # numpy is good about making repeatable output\ntf.set_random_seed(1) # on the other hand, this is basically useless (see issue 9171)\n\n# import our problems, networks and training modules\nfrom tools import problems,networks,train\n\n# Evaluating (fixed-)GAMP in TensorFlow\n# For debugging purposes I initialize a session here - Intialize the Session\nsess = tf.Session()\n\nclass Parameters(object):\n def __init__(self,MC,**not_obligatory):\n self.MC = MC\n vars(self).update(not_obligatory)\n\nparameters = Parameters(MC = 1000) # MC is the number of transmitted messages (Monte Carlo simulations)\nparameters.L = 32 # L is the number of sections\nparameters.bps = 4 # bits per section\nparameters.R = 1.0 # R is the rate of the code, bits per channel use\nparameters.n = int(parameters.L*parameters.bps/parameters.R)\n # number of channel uses, i.e., number of rows of A\nparameters.SNR_dB = 8 # training and evaluation SNR in dB\nparameters.T = 8 # number of layers of the network/iterations of the algorithm\nparameters.Onsager = True # is the Onsager term included in the calculation of the residual\n# type of the loss function in the learning. Possible values 'nmse', 'log loss with probs', 'binary crossentropy'\nparameters.loss = 'nmse'\nparameters.untied_B = False # tied or untied B in LAMP\n# 'tied', 'untied', 'tied LAMP tied S', 'tied LAMP untied S', 'tied LAMP tied S no Onsager' .... 'tied LAMP untied S loss=nmse',\nparameters.alg_version = 'tied'\n\n# Create the basic problem structure.\nprob = problems.ssc_problem(n=parameters.n, L=parameters.L, bps=parameters.bps, MC=parameters.MC, SNR_dB=parameters.SNR_dB)\n\n# # build a LAMP network to solve the problem and get the intermediate results so we can greedily extend and then refine(fine-tune)\nlayers = networks.build_LAMP4SSC(prob,T=parameters.T,untied=parameters.untied_B, alg_version=parameters.alg_version)\nprint('Building layers ... done')\n\n\n# plan the learning\n# training_stages = train.setup_LAMP4SSCtraining(layers,prob,trinit=1e-3, refinements=(.5,) )\ntraining_stages = train.setup_LAMP4SSCtraining(layers,prob,trinit=1e-3,refinements=(.5,.1,.01), parameters = parameters)\nprint('Plan the learning ... done')\n\n# do the learning (takes a whixle)\nprint('Do the learning (takes a while)')\nsess = train.do_LAMP4SSCtraining(training_stages,prob,'LAMP4SSC.npz',10,100,10)\n# sess = train.do_LAMP4SSCtraining(training_stages,prob,'LAMP4SSC.npz',10,10000,500)\n# sess = train.do_LAMP4SSCtraining(training_stages,prob,'LAMP4SSC.npz')\n\n# train.plot_estimate_to_test_message(sess, training_stages, prob, 'LAMP_bg_giid.npz' )\n# train.test_vector_sizes(sess, training_stages, prob, 'LAMP_bg_giid.npz' )\ntrain.evaluate_LAMP4SSC_nmse(sess, training_stages, prob, 'LAMP4SSC.npz' )\n\ntrain_vars = train.get_train_variables(sess)\n\nsess.close()"
},
{
"alpha_fraction": 0.630839467048645,
"alphanum_fraction": 0.646294355392456,
"avg_line_length": 32.904762268066406,
"blob_id": "6630994344fd78b0c579d10dedd97aede1294ca0",
"content_id": "37a8a27097414efa8d2af741d7cbef09ef98fa12",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2847,
"license_type": "permissive",
"max_line_length": 131,
"num_lines": 84,
"path": "/LAMP4SSCtests/evaluate_AMPSSC.py",
"repo_name": "guoruize01/learned_iterative_algorithms",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python\nfrom __future__ import division\nfrom __future__ import print_function\n\"\"\"\nThis file serves as an example of how to \na) select a problem to be solved \nb) select a network type\nc) train the network to minimize recovery MSE\n\n\"\"\"\nimport numpy as np\nimport math\nimport numpy.linalg as la\nimport os\n\nos.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' # BE QUIET!!!!\nos.environ['KMP_DUPLICATE_LIB_OK']='True'\nimport tensorflow as tf\n\n\nnp.random.seed(1) # numpy is good about making repeatable output\ntf.set_random_seed(1) # on the other hand, this is basically useless (see issue 9171)\n\n# import our problems, networks and training modules\nfrom tools import problems,networks,train\n\n# Evaluating (fixed-)GAMP in TensorFlow\n# For debugging purposes I initialize a session here - Intialize the Session\nsess = tf.Session()\n\n\nMC = 1000 # MC is the number of transmitted messages (Monte Carlo simulations)\nL = 32 # L is the number of sections\nbps = 4 # bits per section\nR = 1.0 # R is the rate of the code, bits per channel use\nn = int(L*bps/R) # number of channel uses, i.e., number of rows of A\nT_max = 20 # max number of iterations/layers\nSNR_dB = 8 # training and evaluation SNR in dB\n\n# Create the basic problem structure.\nprob = problems.ssc_problem(n=n, L=L, bps=bps, MC=MC, SNR_dB=SNR_dB)\n\n\n# # build a LAMP network to solve the problem and get the intermediate results so we can greedily extend and then refine(fine-tune)\nlayers = networks.build_LAMP4SSC(prob,T=T_max,untied=False)\nprint('Building layers ... done')\n\nser_arrray = []\nnmse_arrray = []\n\n\ny,x_true = prob(sess)\nx_true_rs = np.reshape(np.transpose(x_true), (-1, prob.B))\nmessages_true = np.reshape(np.argmax(x_true_rs, 1), (-1, prob.L))\n\nsess.run(tf.global_variables_initializer())\n\nfor name, xhat_, var_list in layers:\n\n xhat_rs_ = tf.reshape(tf.transpose(xhat_), (-1, prob.B))\n messages_hat_ = tf.reshape(tf.arg_max(xhat_rs_, 1), (-1, prob.L))\n\n nmse_denom_ = tf.nn.l2_loss(prob.x_)\n nmse_ = tf.nn.l2_loss( xhat_ - prob.x_) / nmse_denom_\n\n\n messages_hat, x_hat, nmse = sess.run([messages_hat_, xhat_, nmse_], feed_dict={prob.y_: y, prob.x_: x_true})\n\n error_matrix = 1*np.equal(messages_true, messages_hat, dtype=np.int32)\n SER = 1 - np.mean(error_matrix)\n NMSE_dB = 10 * np.log10(nmse)\n\n if \" non-linear T=\" in name:\n nmse_arrray.append(NMSE_dB)\n ser_arrray.append(SER)\n\n MSE = np.mean((la.norm(x_true - x_hat , axis=0) ** 2)/(n*prob.P))\n print(name, '\\tMSE=', '{:.5f}'.format(MSE),'\\tMSE/dB=', '{:7.3f}'.format(NMSE_dB) , '\\tSER=', '{:.5f}'.format(SER))\n\n\nsess.close()\n\nprint('SER = [',' ,'.join(['{:.5f}'.format(ser) for ser in ser_arrray]),'];')\nprint('NMSE/dB = [',' ,'.join(['{:.5f}'.format(nmse) for nmse in nmse_arrray]),'];')"
},
{
"alpha_fraction": 0.7190786004066467,
"alphanum_fraction": 0.7381071448326111,
"avg_line_length": 30.21875,
"blob_id": "da231738c029442ce74a5aa58205dab2dcebc257",
"content_id": "79b5e2a765e08b092356ff8eae31d202e8c347f1",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1997,
"license_type": "permissive",
"max_line_length": 146,
"num_lines": 64,
"path": "/LAMP.py",
"repo_name": "guoruize01/learned_iterative_algorithms",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python\nfrom __future__ import division\nfrom __future__ import print_function\n\"\"\"\nThis file serves as an example of how to \na) select a problem to be solved \nb) select a network type\nc) train the network to minimize recovery MSE\n\n\"\"\"\nimport numpy as np\nimport os\n\nos.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' # BE QUIET!!!!\nos.environ['KMP_DUPLICATE_LIB_OK']='True'\nimport tensorflow as tf\n\nnp.random.seed(1) # numpy is good about making repeatable output\ntf.set_random_seed(1) # on the other hand, this is basically useless (see issue 9171)\n\n# import our problems, networks and training modules\nfrom tools import problems,networks,train\n\nL=10000\nM=250\nN=500\nSNR=20\npnz=.1\nuntied=False\nT=8\nshrink='bg'\n\n\n# Create the basic problem structure.\nprob = problems.bernoulli_gaussian_trial(kappa=None,M=M,N=N,L=L,pnz=pnz,SNR=SNR) #a Bernoulli-Gaussian x, noisily observed through a random matrix\n#prob = problems.random_access_problem(2) # 1 or 2 for compressive random access or massive MIMO\nprint('Problem created ...')\nprint('A is:')\nprint(prob.A)\n\n# build a LAMP network to solve the problem and get the intermediate results so we can greedily extend and then refine(fine-tune)\nlayers = networks.build_LAMP(prob,T=T,shrink=shrink,untied=False)\nprint('Building layers ... done')\n\n# plan the learning\ntraining_stages = train.setup_training(layers,prob,trinit=1e-3,refinements=(.5,.1,.01) )\nprint('Plan the learning ... done')\n\n# do the learning (takes a while)\nprint('Do the learning (takes a while)')\n# sess = train.do_training(training_stages,prob,'LAMP_bg_giid.npz')\nsess = train.do_training(training_stages,prob,'LAMP_bg_giid.npz',10,10,50)\n\n# train.plot_estimate_to_test_message(sess, training_stages, prob, 'LAMP_bg_giid.npz' )\n# train.test_vector_sizes(sess, training_stages, prob, 'LAMP_bg_giid.npz' )\nprint('Evaluating network on test data ...')\ntrain.evaluate_nmse(sess, training_stages, prob, 'LAMP_bg_giid.npz',SNR=SNR, L=L)\n\ntrain_vars = train.get_train_variables(sess)\n\n\n\n\nstop = 1;"
},
{
"alpha_fraction": 0.7148916125297546,
"alphanum_fraction": 0.7426955699920654,
"avg_line_length": 36.24561309814453,
"blob_id": "3f9eeb3d3c373b1e647a68cff06bd4b331fccdfc",
"content_id": "696bb9571cd7760993ea2a638f43558fe616e100",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2122,
"license_type": "permissive",
"max_line_length": 179,
"num_lines": 57,
"path": "/LGAMPtests/evaluate_GAMP.py",
"repo_name": "guoruize01/learned_iterative_algorithms",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python\nfrom __future__ import division\nfrom __future__ import print_function\n\"\"\"\nThis file serves as an example of how to \na) select a problem to be solved \nb) select a network type\nc) train the network to minimize recovery MSE\n\n\"\"\"\nimport numpy as np\nimport math\nimport numpy.linalg as la\nimport os\n\nos.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' # BE QUIET!!!!\nos.environ['KMP_DUPLICATE_LIB_OK']='True'\nimport tensorflow as tf\n\n\n\nnp.random.seed(1) # numpy is good about making repeatable output\ntf.set_random_seed(1) # on the other hand, this is basically useless (see issue 9171)\n\n# import our problems, networks and training modules\nfrom tools import problems,networks,train\n\n# Evaluating (fixed-)GAMP in TensorFlow\n# For debugging purposes I initialize a session here - Intialize the Session\nsess = tf.Session()\n\n# Create the basic problem structure.\nprob = problems.one_bit_CS_with_BG_prior(kappa=None,M=2000,N=512,L=1000,pnz=.0625,SNR=2, tf_floattype=tf.float32) #a Bernoulli-Gaussian x, noisily observed through a random matrix\n# prob = one_bit_CS_with_BG_prior(kappa=None,M=512,N=250,L=1000,pnz=.1,SNR=2) #a Bernoulli-Gaussian x, noisily observed through a random matrix\n#prob = problems.random_access_problem(2) # 1 or 2 for compressive random access or massive MIMO\nprint('Problem created ...')\n\n# build a LGAMP network to solve the problem and get the intermediate results so we can greedily extend and then refine(fine-tune)\nlayers = networks.build_LGAMP(prob,T=7,shrink='onebitF',untied=True,tf_floattype = tf.float32)\nprint('Building layers ... done')\n\ntraining_stages = train.setup_LGAMPtraining(layers,prob,trinit=1e-3,refinements=(.5,.1,.01) )\nprint('Plan the learning ... done')\n\n# # do the learning (takes a while)\n# print('Do the learning (takes a while)')\n# sess = train.do_training(training_stages,prob,'LGAMP_bg_giid.npz',10,30,5)\n\nsess.run(tf.global_variables_initializer())\ntrain.evaluate_LGAMP_nmse(sess, training_stages, prob, pnz=.0625, SNR=2)\n\n# Close the session\nsess.close()\nif (sess._closed == True):\n print('The session is now closed')\nelse:\n print('The session is NOT closed')"
}
] | 14 |
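Both example scripts above hand a .npz checkpoint filename to train.do_training. A minimal sketch for inspecting such a file, assuming (as the extension suggests, though tools/train.py is not shown here) that it is a standard NumPy archive of named arrays:

    import numpy as np

    # Assumption: 'LAMP_bg_giid.npz' is a plain NumPy .npz archive written by
    # train.do_training; the actual key names depend on tools/train.py.
    with np.load('LAMP_bg_giid.npz') as checkpoint:
        for name in checkpoint.files:
            print(name, checkpoint[name].shape)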
Sagisurya/CS238FinalProject | https://github.com/Sagisurya/CS238FinalProject | 5ab576583772bc08f84b7fb60deedff5195e2d8d | ae74e8a81ca1b828f56ab1ffe4840f952b5cc827 | 09a0d61c944de06064ad5c9680ba4f3bcb68e2fb | refs/heads/master | 2021-08-24T15:35:04.220741 | 2017-12-10T08:00:55 | 2017-12-10T08:00:55 | null | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.7830188870429993,
"alphanum_fraction": 0.7971698045730591,
"avg_line_length": 41.400001525878906,
"blob_id": "12811033113ddb01fb95bc072bd36776ad2d5f5a",
"content_id": "71d77c6ce0c2f8d30c61db71eaf0053bdfe158ad",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 212,
"license_type": "no_license",
"max_line_length": 105,
"num_lines": 5,
"path": "/README.md",
"repo_name": "Sagisurya/CS238FinalProject",
"src_encoding": "UTF-8",
"text": "# CS238FinalProject\n\nInstall nba_py by doing \"pip install nba_py\" (https://github.com/seemethere/nba_py)\n\ngetData.py contains functions and constants to help loading data from stats.nba.com into pandas dataframe\n"
},
{
"alpha_fraction": 0.4766697287559509,
"alphanum_fraction": 0.49588289856910706,
"avg_line_length": 28.54054069519043,
"blob_id": "3784ee1189d132a19922a0ef03d3316f8ccf9b0c",
"content_id": "d79ff7832c197db72b052071601bf6e512e6db90",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1093,
"license_type": "no_license",
"max_line_length": 58,
"num_lines": 37,
"path": "/fixingData.py",
"repo_name": "Sagisurya/CS238FinalProject",
"src_encoding": "UTF-8",
"text": "import pandas as pd\nimport numpy as np\n\ndef main():\n df = pd.read_csv(\"data127Night.csv\")\n matchups = df[\"MATCHUP\"].values\n firstteamPts = df[\"PTS\"].values\n i = 0\n for ptsList in df[\"points\"]:\n splitString = ptsList[1:-1].split()\n pts = []\n for j in range(len(splitString)):\n pts.append(float(splitString[j]))\n matchup = matchups[i]\n home = 0\n if '@' in matchup:\n home = 0\n else:\n home = 1\n pts = np.array(pts)\n indices = [k for k, e in enumerate(pts) if e != 0]\n newPts = pts.copy()\n if home:\n if (pts[indices[0]] != firstteamPts[i]):\n newPts[indices[0]] = pts[indices[1]]\n newPts[indices[1]] = pts[indices[0]]\n else:\n if (pts[indices[1]] != firstteamPts[i]):\n newPts[indices[0]] = pts[indices[1]]\n newPts[indices[1]] = pts[indices[0]]\n df = df.set_value(i, \"points\", newPts)\n i+=1\n df.to_csv(\"FIXED_DATA.csv\")\n return\n\nif __name__ == '__main__':\n main()\n"
}
] | 2 |
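To make the correction rule in fixingData.py above concrete, here is a self-contained sketch on toy (hypothetical) values:

    import numpy as np

    # Toy illustration of fixingData.py's rule: if the first nonzero entry of
    # the per-team points vector does not match the first team's PTS value,
    # the two nonzero scores were stored in the wrong order and get swapped.
    pts = np.array([0.0, 98.0, 0.0, 104.0])  # hypothetical per-team points row
    first_team_pts = 104.0                   # hypothetical PTS value (home game)
    idx = [k for k, e in enumerate(pts) if e != 0]
    if pts[idx[0]] != first_team_pts:
        pts[idx[0]], pts[idx[1]] = pts[idx[1]], pts[idx[0]]
    print(pts)  # -> [  0. 104.   0.  98.]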
cocpy/Tello-Python | https://github.com/cocpy/Tello-Python | 563326f578f23fe9f6400c35c32d986b9d720480 | d75a5467be674b33570837ae0f3780f1b6cdf204 | 317713de7c8e8232eaf16503ba782533dd82254b | refs/heads/master | 2023-07-01T19:41:06.990844 | 2021-08-14T01:40:54 | 2021-08-14T01:40:54 | 292,188,118 | 8 | 4 | null | null | null | null | null | [
{
"alpha_fraction": 0.5707316994667053,
"alphanum_fraction": 0.6048780679702759,
"avg_line_length": 21.88888931274414,
"blob_id": "711a93a81329a35a609693835460943786b3a187",
"content_id": "0ed227a8727074cdea610d669ddbd8bf86d0e421",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 209,
"license_type": "permissive",
"max_line_length": 36,
"num_lines": 9,
"path": "/tello/__init__.py",
"repo_name": "cocpy/Tello-Python",
"src_encoding": "UTF-8",
"text": "__title__ = 'tello-python'\n__author__ = 'C灵C'\n__liscence__ = 'MIT'\n__copyright__ = 'Copyright 2021 C灵C'\n__version__ = '1.1.6'\n__all__ = ['tello', 'stats']\n\nfrom .tello import Tello\nfrom .stats import Stats"
},
{
"alpha_fraction": 0.601783037185669,
"alphanum_fraction": 0.6225854158401489,
"avg_line_length": 15.047618865966797,
"blob_id": "87d7b869bc7f007a928148eba8c93c317f9619b5",
"content_id": "3b8891db11798b7718144dd25bd0fce5d9fc8957",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 673,
"license_type": "permissive",
"max_line_length": 96,
"num_lines": 42,
"path": "/README.md",
"repo_name": "cocpy/Tello-Python",
"src_encoding": "UTF-8",
"text": "# Tello-Python\n Control DJI Tello drone with python\n\n\n## Installation\n pip install tello-python\n\n\n## How to import\n import tello\n \n drone = tello.Tello()\n\n\n## Examples\n import tello\n \n drone = tello.Tello()\n \n drone.takeoff()\n \n drone.forward(100)\n \n drone.cw(90)\n \n drone.flip('r')\n \n drone.streamon()\n \n drone.land()\n \n\n### Distance\n Required. The distance to fly forward in cm. Has to be between 20 and 500.\n\n\n### Degrees\n Required. The number of degrees to rotate. Has to be between 1 and 360.\n\n\n## More\n For more commands, please refer to the methods and comments in the source code tello.py file"
},
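Building on the commands documented in the README above, a short sketch that flies a square (untested; it uses only takeoff, forward, cw and land, with the documented argument ranges):

    import tello

    drone = tello.Tello()
    drone.takeoff()

    # Fly a square: four 100 cm sides, turning 90 degrees clockwise after each.
    for _ in range(4):
        drone.forward(100)  # distance must be between 20 and 500 cm
        drone.cw(90)        # rotation must be between 1 and 360 degrees

    drone.land()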
{
"alpha_fraction": 0.6105263233184814,
"alphanum_fraction": 0.6631578803062439,
"avg_line_length": 7.636363506317139,
"blob_id": "473ba54cbe823e049a255cb04b13ca24aa271050",
"content_id": "c555b5046665c811c348b28eed32f49004e77de8",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 223,
"license_type": "permissive",
"max_line_length": 23,
"num_lines": 22,
"path": "/tello/demo.py",
"repo_name": "cocpy/Tello-Python",
"src_encoding": "UTF-8",
"text": "from tello import tello\n\n\ndrone = tello.Tello()\n\n# 起飞\ndrone.takeoff()\n\n# 前进100cm\ndrone.forward(100)\n\n# 旋转90°\ndrone.cw(90)\n\n# 左翻滚\ndrone.flip('l')\n\n# 打开视频流\ndrone.streamon()\n\n# 降落\ndrone.land()\n"
},
{
"alpha_fraction": 0.6283310055732727,
"alphanum_fraction": 0.6367461681365967,
"avg_line_length": 28.75,
"blob_id": "c4171549ab70499f742bd29e4d6bb5fa38f12680",
"content_id": "6eb2b7844cc073541c08c45d00f753cce4553410",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 715,
"license_type": "permissive",
"max_line_length": 61,
"num_lines": 24,
"path": "/setup.py",
"repo_name": "cocpy/Tello-Python",
"src_encoding": "UTF-8",
"text": "import setuptools\n\nwith open('README.md', 'r', encoding='utf-8') as fh:\n long_description = fh.read()\n\nsetuptools.setup(\n name='tello-python',\n version='1.1.6',\n author='C灵C',\n author_email='[email protected]',\n description='Control DJI Tello drone with Python3',\n long_description=long_description,\n long_description_content_type='text/markdown',\n url='https://github.com/cocpy/Tello-Python',\n packages=setuptools.find_packages(),\n install_requires=[\n 'opencv-python', 'flask', 'paddlepaddle', 'paddlehub'\n ],\n classifiers=[\n 'Programming Language :: Python :: 3',\n 'License :: OSI Approved :: MIT License',\n 'Operating System :: OS Independent',\n ],\n)"
}
] | 4 |
G3Code-CS/Sprint-Challenge--Algorithms | https://github.com/G3Code-CS/Sprint-Challenge--Algorithms | 54b9af5ec51bedaea8f773af45a67479d146044c | 386f6a4469aca5a02207148a457f00b429dce835 | 0572a952d0bac57f82a558a90f20e988763c283f | refs/heads/master | 2020-11-24T03:48:49.650455 | 2019-12-19T02:07:54 | 2019-12-19T02:07:54 | 227,952,602 | 0 | 0 | null | 2019-12-14T02:04:59 | 2019-12-14T02:05:01 | 2019-12-19T02:07:55 | null | [
{
"alpha_fraction": 0.6272806525230408,
"alphanum_fraction": 0.6359687447547913,
"avg_line_length": 29.263158798217773,
"blob_id": "360f2aaf6f113ea0c35a21552806902b31c45c90",
"content_id": "8875a248e872803f021fc9b4c80fb38588ee6f5d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1151,
"license_type": "no_license",
"max_line_length": 107,
"num_lines": 38,
"path": "/recursive_count_th/count_th.py",
"repo_name": "G3Code-CS/Sprint-Challenge--Algorithms",
"src_encoding": "UTF-8",
"text": "import math\n\n\n'''\nYour function should take in a single parameter (a string `word`)\nYour function should return a count of how many occurences of ***\"th\"*** occur within `word`. Case matters.\nYour function must utilize recursion. It cannot contain any loops.\n'''\ndef count_th(word):\n # Initializing the counter value to be returned\n count = 0\n\n # If the length of the word is less than 2 then there is no possibility of finding \n # the match and returns 0\n if len(word) <= 2:\n return 0\n \n # This is the recursive function which recurses once called\n def countVal(word, count, position):\n\n # Base criteria for recursion stop\n if (len(word) == position+1):\n return count\n \n # Checks if the letter and the next letter matches to 'th'\n if word[position] + word[position+1] == 'th':\n count = count + 1\n \n # Goes to the next position\n position = position + 1\n return countVal(word, count, position)\n \n # The first call to the recursion function\n return countVal(word, count, 0)\n\n \n\nprint(count_th(\"the king is the best thing\"))\n\n"
},
{
"alpha_fraction": 0.7563270330429077,
"alphanum_fraction": 0.7598961591720581,
"avg_line_length": 52.13793182373047,
"blob_id": "3c08d4f757844635687f7395848f017ecba5c87f",
"content_id": "ffb53e7c8d74c12fa495393d0969ca4f292432a3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 3082,
"license_type": "no_license",
"max_line_length": 170,
"num_lines": 58,
"path": "/Short-Answer/Algorithms_Answers.md",
"repo_name": "G3Code-CS/Sprint-Challenge--Algorithms",
"src_encoding": "UTF-8",
"text": "#### Please add your answers to the **_Analysis of Algorithms_** exercises here.\n\n## Exercise I\n\na) At the first look, there is one while loop. This while loop is based on the condition of n*n*n and\nn\\*n is getting added in the inside. However, the whole loop is based on the value of n. With one while loop\nwhich is based on the value of n, the run time complexity will be O(n).\n\nb) This code snippet has two loops and they are nested - one for loop with a while loop within O(n^2). However, on closer\nexamination we find that, the value of j within for-loop is always 1 and j is always going to be less than n. Even\nif the while loop was not present, the code snipped could have been implemented and this looping will be a O(log n). In summary,\nthe outer loop is O(n) and inner loop is O(log n). Therefore, the runtime complexity is O(n log n).\n\nc) The catch in this function is this is a recursive function. The return is a constant time. This function is recursive till\nbunnies gets to 0 (where bunnies is n). Hence the run time complexity of this function is O(n)\n\n## Exercise II\n\n### Algorithm for devising the least broken eggs:\n\nFrom the problem statement: \"Suppose also that an egg gets broken if it is thrown off floor f or higher, and doesn't get broken if dropped off a floor less than floor f.\"\n\nThis is a perfect problem to be solved using binary search. We have to find that floor from when on wards the egg will start breaking.\n\nNote: For binary search the main criteria is the list that we are searching has to be sorted. The building in this cases is like a sorted array list.\n\nFollowing will be the steps I will device to find the floor when the egg starts to break:\n\n1. Find the middle floor and drop the egg.\n ---> If the egg breaks then we need to move to the lower list of floors else we will need to move to higher list of floors.\n Assuming the egg breaks:\n\n2. From the lower list of floors, we will find the middle floor and drop the egg.\n ---> If the egg breaks then we need to move to the lower list of floors else we will need to move to higher list of floors.\n Assuming the egg breaks:\n\nAs you can see we find a pattern for recursion here. We will recursively use this strategy to find the exact floor where the egg will break.\n\n3. Finally, we will come to a point where we might have just only two floors or one floor. At that point, if there is only one floor we will\n return the floor or if there are two floors we can check in which floor the egg breaks and return that floor value.\n\n## A simple pseudo code to understand the logic\n\neggbreak( floor_list)\nif number of floors length is 0\nreturn 0\nif number of floor length is 1\nreturn that floor\nif number of floor length is 2\nfind which floor the egg breaks and return that floor\nfloor_list/2\nif egg breaks\nreturn eggbreak(bottom floor list)\nelse\nreturn eggbreak(top floor list)\n\nAs we can see we do not iterate through the entire array / set of floors to find which floor. Rather we work\non a subset of the floors narrowing the possibility. Hence the run time complexity will be O(log n)\n"
}
] | 2 |
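The bisection strategy described in Algorithms_Answers.md above, sketched as runnable Python (drop is a hypothetical predicate that returns True when an egg dropped from the given floor breaks):

    def find_breaking_floor(num_floors, drop):
        # Binary search for the lowest floor f at which the egg breaks;
        # uses O(log n) drops, as argued in the answers above.
        lo, hi = 1, num_floors
        answer = None
        while lo <= hi:
            mid = (lo + hi) // 2
            if drop(mid):          # egg breaks: f is mid or below
                answer = mid
                hi = mid - 1
            else:                  # egg survives: f is above mid
                lo = mid + 1
        return answer

    # Hypothetical example: threshold floor f = 37 in a 100-floor building.
    print(find_breaking_floor(100, lambda floor: floor >= 37))  # -> 37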
simon123h/housedetection | https://github.com/simon123h/housedetection | e9717b736c4acbada018581689b59da9ad009c20 | c19328ca8bd91a8bd0f1397a6162e789bb4a4e17 | 2abb77e2c4819f989647addaf4548262cc947f3f | refs/heads/master | 2020-03-12T07:53:34.613640 | 2018-04-24T22:46:34 | 2018-04-24T22:46:34 | 130,515,864 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6755218505859375,
"alphanum_fraction": 0.6925995945930481,
"avg_line_length": 24.707317352294922,
"blob_id": "270cc02bb27e07d4608c30c08fec263593e8702c",
"content_id": "2fe2df968b7811f8c468adc6e4159073175ac91d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1054,
"license_type": "no_license",
"max_line_length": 56,
"num_lines": 41,
"path": "/live/live.py",
"repo_name": "simon123h/housedetection",
"src_encoding": "UTF-8",
"text": "from __future__ import print_function\nfrom PIL import Image\nimport numpy as np\n\n\ndef save(arr, path):\n # save an image\n newarr = 0.+arr\n newarr -= min(map(min, arr))\n newarr *= 255. / (max(map(max, arr)))\n Image.fromarray(arr).convert('L').save(path)\n\n\n# load input file into numpy array\norig = Image.open(\"original.png\")\norig = np.array(orig)\nsave(orig, \"out/1_input.png\")\n\n# load target file into numpy array\ntarg = Image.open(\"target.png\")\ntarg = np.array(targ)\nsave(targ, \"out/2_target.png\")\n\n# FFT original\norigft = np.fft.fft2(orig)\norigft = np.fft.fftshift(origft)\nsave(np.log(np.abs(origft)), \"out/3_original_ft.png\")\norigft = np.fft.ifftshift(origft)\n\n# FFT target\ntargft = np.fft.fft2(targ)\ntargft = np.fft.fftshift(targft)\nsave(np.log(np.abs(targft)), \"out/4_target_ft.png\")\ntargft = np.fft.ifftshift(targft)\n\n# divide target/origin\nmaskft = targft / origft\nmaskft = np.fft.fftshift(maskft)\nsave(np.abs(maskft)*100, \"out/5_mask_ft.png\")\nsave(np.log(np.abs(maskft))*100, \"out/5_mask_ftlog.png\")\nmaskft = np.fft.ifftshift(maskft)\n"
},
{
"alpha_fraction": 0.6598639488220215,
"alphanum_fraction": 0.8503401279449463,
"avg_line_length": 72.5,
"blob_id": "461b5ef5fb41b5e882fd8a62387eb13828311193",
"content_id": "f13f0d86e955c54dcd17aaba2371e1d16a384ea3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 147,
"license_type": "no_license",
"max_line_length": 78,
"num_lines": 2,
"path": "/resources.md",
"repo_name": "simon123h/housedetection",
"src_encoding": "UTF-8",
"text": "https://pdfs.semanticscholar.org/0d04/0308f3c812fefe00f2be32859b0cee23f2d7.pdf\nhttp://www.isprs.org/proceedings/XXXV/congress/comm3/papers/395.pdf\n"
},
{
"alpha_fraction": 0.6220238208770752,
"alphanum_fraction": 0.6631944179534912,
"avg_line_length": 31,
"blob_id": "09d55135de9cb168c72ffbd222294acebb87a5e6",
"content_id": "0208598a3632f76005719403cbd4f3ffa40d7cb6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2016,
"license_type": "no_license",
"max_line_length": 79,
"num_lines": 63,
"path": "/process.py",
"repo_name": "simon123h/housedetection",
"src_encoding": "UTF-8",
"text": "from __future__ import print_function\nfrom PIL import Image\nimport numpy as np\nfrom scipy.signal import correlate2d\n\n# load input file into numpy array\nim = Image.open(\"data.tif\")\nimarray = np.array(im) / 2.\nim = Image.fromarray(imarray)\nim.convert('L').save(\"out/1_input.png\")\n\n# high pass filtering in fourier domain (edge detection)\n# filter is a rectangle, while the zero mode is kept for overall brightness\nftim = np.fft.fft2(imarray)\nsize = 50\ndc = ftim[0, 0]\nftim[0:size, 0:size] = 0\nftim[-size:, -size:] = 0\nftim[-size:, 0:size] = 0\nftim[0:size, -size:] = 0\nftim[0, 0] = dc*0.8\nftim = np.fft.fftshift(ftim)\nImage.fromarray(np.abs(ftim)/1000.).convert('L').save(\"out/6_fourierspace.png\")\nftim = np.fft.ifftshift(ftim)\nim = np.real(np.fft.ifft2(ftim))\nImage.fromarray(im).convert('L').save(\"out/2_edge_detection.png\")\n\n# normalize image\n# im -= np.mean(im)\n# im /= max(map(max, im)) - min(map(min, im))\n# im = (im + 1) * 128\n# Image.fromarray(im).convert('L').save(\"out/2_edge_detection_n.png\")\n\n# invert image\ninv = 256 - im\ninv -= np.mean(inv)\ninv = np.maximum(0, inv)\nImage.fromarray(inv*5).convert('L').save(\"out/3_inverted.png\")\n\n# cross correlate with L-shapes\ncorr = 0\nfor angle in np.linspace(0, 360, 100):\n print(\"{:5.1f} %\".format(angle / 3.6))\n shape = Image.open(\"houses/L11.png\").rotate(angle).convert('L')\n # zoom = 1\n # shape = shape.resize((int(shape.size[0]*zoom), int(shape.size[1]*zoom)))\n shape = np.array(shape)\n _corr = correlate2d(inv, shape, mode=\"same\")\n corr = np.maximum(corr, _corr)\n # _corr = corr - np.mean(corr[30:-30])\n # _corr *= 2*255 / max(map(max, _corr))\n # Image.fromarray(_corr).convert('L').save(\"out/4_xcorrelation.png\")\n\n# normalization and output\ncorr -= np.mean(corr[30:-30])\ncorr *= 2*255 / max(map(max, corr))\nImage.fromarray(corr).convert('L').save(\"out/4_xcorrelation.png\")\n\n\n# thresholding\nthreshold = 0.33\ncorr = np.maximum(0, corr - 255 * threshold) / (1-threshold)\nImage.fromarray(corr).convert('L').save(\"out/5_threshold.png\")\n"
}
] | 3 |
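The division maskft = targft / origft in live.py above is deconvolution via the convolution theorem. A self-contained demonstration of the idea on synthetic 1-D data (random signal and a known kernel; assumes the denominator spectrum has no zeros):

    import numpy as np

    # If target = kernel circularly convolved with original, then
    # fft(target) / fft(original) recovers fft(kernel).
    rng = np.random.default_rng(0)
    original = rng.standard_normal(64)
    kernel = np.zeros(64)
    kernel[:3] = [0.25, 0.5, 0.25]  # simple smoothing kernel
    target = np.real(np.fft.ifft(np.fft.fft(original) * np.fft.fft(kernel)))

    recovered = np.real(np.fft.ifft(np.fft.fft(target) / np.fft.fft(original)))
    print(np.allclose(recovered, kernel))  # True up to floating-point error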
aeberspaecher/transparent_pyfftw | https://github.com/aeberspaecher/transparent_pyfftw | a58fcb6fa4e075c368142925d2cb6e9b329d1952 | 9d9eb8109a2ab15ff96a4d5d8fb8bb18442f007f | aed25a8ad603d549935b61a91b470683ee78e143 | refs/heads/master | 2021-01-23T02:48:39.462200 | 2015-06-14T11:02:09 | 2015-06-14T11:02:09 | 20,500,911 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6329229474067688,
"alphanum_fraction": 0.6361148953437805,
"avg_line_length": 31.73134422302246,
"blob_id": "944af3fd62b1c1b6b1c3641d0ab9796d6914d718",
"content_id": "55cb0d6c3e8da4a810ac9258de01d90d6bf41cfc",
"detected_licenses": [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2193,
"license_type": "permissive",
"max_line_length": 83,
"num_lines": 67,
"path": "/transparent_pyfftw/generate_wrappers.py",
"repo_name": "aeberspaecher/transparent_pyfftw",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n\"\"\"Generate pyfftw wrapper functions by name.\n\nAdd the threads keyword to the call, do nothing else.\n\"\"\"\n\n# if the wrapper codes are removed from transparent_pyfftw_wrapper.py,\n# we can regenerate and re-add with\n# ./generate_wrappers.py >> transparent_pyfftw_wrapper.py\n\nnames = [\"fft\", \"ifft\", \"fft2\", \"ifft2\", \"fftn\", \"ifftn\", \"rfft\", \"irfft\",\n \"rfft2\", \"irfft2\", \"rfftn\", \"irfftn\", \"hfft\", \"ihfft\", \"hfft2\",\n \"ihfft2\", \"hfftn\", \"ihfftn\"]\n\ndef generate_wrapper(name, module, original_docstring, num_threads):\n \"\"\"Generate a wrapper function.\n\n Parameters\n ----------\n name : string\n Name of the function wrapped.\n module : string\n Name of the module the wrapped function is part of.\n original_docstring : string\n Docstring of the wrapped function.\n num_threads : int\n Number of threads to use.\n\n Returns\n -------\n wrapper_code : string\n A string that contains the code to the wrapper function.\n \"\"\"\n\n # create a string that informs the user about the 'threads' parameter added\n # to the call if appropriate:\n\n # check two versions of the string that triggers addition of the threads\n # keyword - this is necessary due to pyfftw documentation inconsistencies\n add_keyword_atoms = ('additional arguments docs', 'additional argument docs')\n if(any( [ keyword in original_docstring for keyword in add_keyword_atoms ] )):\n additional_arg_string = \\\n 'Arguments automatically added on call are \"threads=%s\".\\n'%num_threads\n additional_arg_code = 'kwargs[\"threads\"] = %s'%num_threads\n else:\n additional_arg_string = \\\n 'This wrapper does nothing besides calling the pyfftw function.\\n'\n additional_arg_code = ''\n\n wrapper_string = '''\ndef %(name)s(*args, **kwargs):\n \"\"\"A thin wrapper around pyfftw.interfaces.%(module)s.%(name)s.\n\n %(additional_arg_string)s\n Docstring of original pyfftw function:\n --------------------------------------\n %(original_docstring)s\n\n \"\"\"\n %(additional_arg_code)s\n\n return _%(name)s(*args, **kwargs)\n'''%locals()\n\n return wrapper_string\n"
},
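For reference, this is approximately what generate_wrapper() above emits for name='fft', module='numpy_fft' and num_threads=2, when the original docstring contains the 'additional arguments docs' marker (the pyfftw docstring body is elided here):

    def fft(*args, **kwargs):
        """A thin wrapper around pyfftw.interfaces.numpy_fft.fft.

        Arguments automatically added on call are "threads=2".

        Docstring of original pyfftw function:
        --------------------------------------
        ...

        """
        kwargs["threads"] = 2

        return _fft(*args, **kwargs)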
{
"alpha_fraction": 0.7267502546310425,
"alphanum_fraction": 0.732758641242981,
"avg_line_length": 32.87610626220703,
"blob_id": "27f159a741130b46cd5e04827aff52723557086a",
"content_id": "01ef1ebd3a48e25bc464d07868a3208de7f18b0d",
"detected_licenses": [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
],
"is_generated": false,
"is_vendor": false,
"language": "reStructuredText",
"length_bytes": 3828,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 113,
"path": "/README.rst",
"repo_name": "aeberspaecher/transparent_pyfftw",
"src_encoding": "UTF-8",
"text": "The simple minded pyfftw wrapper\n================================\n\nIntro\n-----\n\nThe `Fastet Fourier Transform in the West <http://www.fftw.org>`_ is an\nincredible library. There are at least two different Python wrappers around\nFFTW: `anfft <https://code.google.com/p/anfft/>`_ (which is declared dead) and\nthe awesome `pyfftw <http://hgomersall.github.io/pyFFTW/>`_.\n\nanfft used to be the simplest wrapper possible. It automatically took care of\nFFTW's 'wisdom' (recipes on how to compute specific transforms the fastest) and\nused threads if possible. From a user's point of view, it hid all FFTW details\na simple minded user does not want to take care of. As the only downside it\ndidn't expose the full range of possible routines, e.g. a dedicated fft2() was\nmissing.\n\nCompared to anfft, pyfftw is the more complete wrapper. These days, it even\noffers `NumPy or SciPy style interfaces\n<http://hgomersall.github.io/pyFFTW/pyfftw/interfaces/interfaces.html>`_ to\nFFTW. However, it also exposes FFTW details such as wisdom, threads and\nbuffers. transparent_pyfftw is a wrapper of a wrapper that tries to hide these\ndetails much in the spirit of anfft.\n\nConfiguring and installing\n--------------------------\n\nThe only time the user needs to think about wisdom files is when preparing to\ninstall. Configure first::\n\n ./waf configure --wisdom-file=\"/home/your_user/.pyfftw_wisdom\"\n\nThe number of threads used is determined by an environment variable.\nSet (using bash)\n\n::\n\n export -x TFFTW_NUM_THREADS=2\n\nto use two threads. If your FFTW does not support threads, do not set this\nvariable or set it to 1. If the variable is unset, a single thread is used.\n\nLast, install with\n\n::\n\n sudo ./waf install\n\n\nUsage\n-----\n\nBoth the NumPy and Scipy style interfaces from pyfttw are supported. Import one\nof those using either of those lines::\n\n import transparent_pyfftw.numpy_fft as nftt\n import transparent_pyfftw.scipy_fftpack as sftt\n\nIn each case, just use this package as if you had used the NumPy FFTs or the\nScipy ones::\n\n nfft.fft2(your_data)\n\nWhen performing new transforms, pyfftw will acquire new wisdom - to use this\nwisdom in the future, call ``transparent_pyfftw.save_wisdom()``. Wisdom is\nautomatically loaded when the wrapper is imported.\n\nTo create a byte-aligned array, call\n\n::\n\n # create an empty, byte-aligned 256 x 512 array:\n foo = transparent_pyfftw.get_empty_fftw_array([256, 512])\n\n\nOptionally making your project depend on transparent_pyfftw\n-------------------------------------------------------------------\n\nIn case you want to use transparent_pyfftw in your project without\nhaving it as a hard dependency for users, you may use the fact that pyfftw and\nthus this wrapper as well use NumPy interfaces::\n\n try:\n from transparent_pyfftw.numpy_fft import fft, ifft\n except ImportError:\n from numpy.fft import fft, ifft\n\n\nNotes\n-----\n\n- transparent_pyfftw is pure Python and thus introduces some overhead on\n function calls. Do not use for this wrapper for very small FFTs that need to\n be fast.\n- The wrapper functions are automatically created on import of one of the\n numpy_fft or scipy_fftpack modules. At that time, the keyword argument\n ``threads=x`` argument is added to pyfftw calls for pyfftw functions that\n contain certain substrings in the docstring - namely short strings indicating\n that the pyfftw function offers additional keywords compared to the Scipy or\n NumPy function. 
Although this approach minimises code length, it is fragile\n as it breaks as soon as pyfftw doctsrings change. You have been warned.\n The code is developed against pyfftw 0.9.2.\n- The number of threads used can not be changed after transparent_pyfftw is\n imported.\n\n\nLicense and Copyright\n---------------------\n\nCopyright 2014 by Alexander Eberspächer\n\nBSD license, see LICENSE file\n"
},
{
"alpha_fraction": 0.6001955270767212,
"alphanum_fraction": 0.6044313907623291,
"avg_line_length": 24.155736923217773,
"blob_id": "fba947c8a8c0049334aee1163bcc20d4897e5fd7",
"content_id": "f5d1c6e4ffeeb9a27999d9d1df70e937d20d9765",
"detected_licenses": [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3069,
"license_type": "permissive",
"max_line_length": 106,
"num_lines": 122,
"path": "/transparent_pyfftw/transparent_pyfftw.py",
"repo_name": "aeberspaecher/transparent_pyfftw",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n\"\"\"Common functions for transparent_pyfftw.\n\"\"\"\n\nimport os\n\nimport numpy as np\n\nimport pyfftw\n\nfrom .options import wisdom_file\n\n\ndef read_wisdom():\n \"\"\"Read wisdom and get it in the data structures expected by pyfftw.\n \"\"\"\n\n try:\n wisdom = file(wisdom_file, mode=\"r\").readlines()\n except IOError:\n print(\"Wisdom file not loadable. If you haven't saved any wisdom yet, try calling save_wisdom().\")\n wisdom = None\n else:\n if(len(wisdom) == 0):\n print(\"Wisdom file is empty. Try calling save_wisdom().\")\n wisdom = None\n else:\n wisdom_tuple = []\n for line in wisdom:\n # if a line starts with a space or a right paren, it belongs to\n # last list member (\"current\" element). otherwise, it starts a\n # new member.\n if(line.startswith(\" \") or line.startswith(\")\")):\n wisdom_tuple[-1] += line # append to string\n else:\n wisdom_tuple.append(line)\n\n wisdom = wisdom_tuple # override\n\n return wisdom\n\n\n# if configured to use centuries of fftw wisdom, read the fftw oracle of\n# delphi (i.e. the wisdom file) - do this on import:\nif(wisdom_file is not None):\n wisdom = read_wisdom()\n if(wisdom is not None):\n pyfftw.import_wisdom(wisdom)\n\npyfftw_simd_alignment = pyfftw.simd_alignment\npyfftw.interfaces.cache.enable()\npyfftw.interfaces.cache.set_keepalive_time(300) # keep cache alive for 300 sec\n# TODO: make this a configurable parameter?\n\n\ndef get_num_threads():\n \"\"\"Get number of threads from environment variable.\n\n Returns\n -------\n num_threads : int\n $TFFTW_NUM_THREADS if set, 1 otherwise.\n \"\"\"\n\n # set number of threads from environment variable:\n try:\n num_threads = int(os.environ[\"TFFTW_NUM_THREADS\"])\n except KeyError:\n num_threads = 1\n\n return num_threads\n\n\ndef save_wisdom():\n \"\"\"Save generated wisdom to file specified when configuring the project.\n \"\"\"\n\n if(wisdom_file is not None):\n wisdom = pyfftw.export_wisdom()\n with file(wisdom_file, mode=\"w\") as f:\n for wisdom_bit in wisdom:\n f.write(wisdom_bit)\n else:\n raise Exception(\"Configured not to use any FFTW wisdom!\")\n\n\ndef get_empty_fftw_array(shape, dtype=np.float64, **kwargs):\n \"\"\"Create memory aligned empty array.\n\n Parameters\n ----------\n shape : tuple-like\n dtype : object\n\n Returns\n -------\n aligned : array\n Empty, byte-aligned array.\n\n Notes\n -----\n Keyword arguments are passed on to pyfftw.n_byte_align_empty().\n \"\"\"\n\n return pyfftw.n_byte_align_empty(shape, pyfftw_simd_alignment, dtype, **kwargs)\n\n\ndef align_array(arr):\n \"\"\"Return memory aligned copy of arr. This may be speed up pyfftw calls.\n\n Parameters\n ----------\n arr : array\n\n Returns\n -------\n arr_aligned : array\n \"\"\"\n\n return pyfftw.n_byte_align(arr, pyfftw_simd_alignment)\n"
},
{
"alpha_fraction": 0.6335403919219971,
"alphanum_fraction": 0.6372670531272888,
"avg_line_length": 27.75,
"blob_id": "72fb5d5fcaae1c623745a32cbc61cc57eb3b7805",
"content_id": "3b7c3a8683c6fb10fb2d583e37ebc7f1854a0f1b",
"detected_licenses": [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 805,
"license_type": "permissive",
"max_line_length": 77,
"num_lines": 28,
"path": "/wscript",
"repo_name": "aeberspaecher/transparent_pyfftw",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\ntop = \".\"\nout = \"build\"\n\n\ndef options(opt):\n opt.load(\"python\")\n opt.add_option(\"--wisdom-file\", action=\"store\", dest=\"wisdom_file\",\n default=\"None\", help=\"File to load wisdom from on import\")\n\n\ndef configure(conf):\n conf.load(\"python\")\n conf.check_python_version((2, 4)) # TODO: which version do we need here?\n conf.check_python_module(\"pyfftw\")\n\n wisdom_file = conf.options.wisdom_file\n option_file_content = \"wisdom_file = '%s'\"%(wisdom_file)\n opt_file = file(\"transparent_pyfftw/options.py\", mode=\"w\")\n opt_file.write(option_file_content)\n opt_file.close()\n\n\ndef build(bld):\n bld(features=\"py\", source=bld.path.ant_glob(\"transparent_pyfftw/*.py\"),\n install_path=\"${PYTHONDIR}/transparent_pyfftw\")\n"
},
{
"alpha_fraction": 0.6184689998626709,
"alphanum_fraction": 0.6208991408348083,
"avg_line_length": 29.481481552124023,
"blob_id": "e1946e599c26cc4c735e66272da98659643f49ae",
"content_id": "16280baa8f818c03db383a16567ec9578940d620",
"detected_licenses": [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 823,
"license_type": "permissive",
"max_line_length": 81,
"num_lines": 27,
"path": "/transparent_pyfftw/scipy_fftpack.py",
"repo_name": "aeberspaecher/transparent_pyfftw",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n\"\"\"Wrappers for pyfftw's SciPy fftpack interfaces.\n\"\"\"\n\nimport pyfftw.interfaces.scipy_fftpack as sfft\n\nfrom .generate_wrappers import generate_wrapper\nfrom .transparent_pyfftw import *\n\n\n# the wrappers are generated on import:\nfunc_names = sfft.__all__\n\nfor func_name in func_names:\n num_threads = get_num_threads()\n original_docstring = sfft.__dict__[func_name].__doc__\n wrapper_func_string = generate_wrapper(func_name, \"scipy_fftpack\",\n sfft.__dict__[func_name].__doc__,\n num_threads)\n\n # import pyfftw functions and add a '_' to the name:\n exec \"from pyfftw.interfaces.scipy_fftpack import %s as _%s\"%(2*(func_name,))\n\n # define the wrapper:\n exec wrapper_func_string\n"
},
{
"alpha_fraction": 0.7635658979415894,
"alphanum_fraction": 0.7655038833618164,
"avg_line_length": 29.352941513061523,
"blob_id": "42b06c5657364fb1dfbacaf48feec5bfae1da3b8",
"content_id": "97c4f7a7d5ffe669dd3046a3907f71029af3167b",
"detected_licenses": [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 516,
"license_type": "permissive",
"max_line_length": 105,
"num_lines": 17,
"path": "/transparent_pyfftw/__init__.py",
"repo_name": "aeberspaecher/transparent_pyfftw",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n\"\"\"Transparent wrappers for pyfftw's NumPy FFT interfaces.\n\nDoes nothing more but inserting a number of threads into your FFT calls and\nhanding that parameter to pyfftw.\n\nWrappers are available for pyfftw.numpy_fft and pyfftw.scipy_fftpack.\n\nTo save acquired wisdom, call transparent_pyfftw.save_wisdom(). Wisdom is automatically loaded on import.\n\nAdditional helper functions: save_wisdom(), get_empty_fftw_array(),\nalign_array().\n\"\"\"\n\nfrom .transparent_pyfftw import *\n"
},
{
"alpha_fraction": 0.6109045743942261,
"alphanum_fraction": 0.613382875919342,
"avg_line_length": 28.88888931274414,
"blob_id": "54c2184fa9485928d2caafbd2aa03690e8907731",
"content_id": "e62531dfda697cbf4a7b99a76b4038ed322c665c",
"detected_licenses": [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 807,
"license_type": "permissive",
"max_line_length": 77,
"num_lines": 27,
"path": "/transparent_pyfftw/numpy_fft.py",
"repo_name": "aeberspaecher/transparent_pyfftw",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n\"\"\"Wrappers for pyfftw's NumPy fft interfaces.\n\"\"\"\n\nimport pyfftw.interfaces.numpy_fft as nfft\n\nfrom .generate_wrappers import generate_wrapper\nfrom .transparent_pyfftw import *\n\n\n# the wrappers are generated on import:\nfunc_names = nfft.__all__\n\nfor func_name in func_names:\n num_threads = get_num_threads()\n original_docstring = nfft.__dict__[func_name].__doc__\n wrapper_func_string = generate_wrapper(func_name, \"numpy_fft\",\n nfft.__dict__[func_name].__doc__,\n num_threads)\n\n # import pyfftw functions and add a '_' to the name:\n exec \"from pyfftw.interfaces.numpy_fft import %s as _%s\"%(2*(func_name,))\n\n # define the wrapper:\n exec wrapper_func_string\n"
}
] | 7 |
edgehub-cloud/AI-games | https://github.com/edgehub-cloud/AI-games | 3d0f11eaa3ebebbcdd35516bc06ae4c4d2c47dff | 95325a7be5a9f594f1aba6f97c157990095a3043 | 4f6bba8a56ac91645e26a7232b5d9026042f543d | refs/heads/master | 2021-05-31T06:56:35.314042 | 2016-03-20T12:48:17 | 2016-03-20T12:48:17 | null | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.5608324408531189,
"alphanum_fraction": 0.5699039697647095,
"avg_line_length": 37.224491119384766,
"blob_id": "3f5aa7fae35de31ef574bed163c525592c210ecf",
"content_id": "3d0948526f448daea14243f6ac0e5db38f773350",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1874,
"license_type": "permissive",
"max_line_length": 97,
"num_lines": 49,
"path": "/tankwars/brain.py",
"repo_name": "edgehub-cloud/AI-games",
"src_encoding": "UTF-8",
"text": "import math\nfrom constants import SCREEN_SIZE\nclass Brain(object):\n def __init__(self, body):\n self.me = body\n self.last = (None, None)\n\n def distance_to(self, other_agent):\n pass\n\n def angle_to(self, other_agent):\n xdiff = (other_agent.x - self.me.x) / SCREEN_SIZE[0]\n ydiff = (other_agent.y - self.me.y) / SCREEN_SIZE[1]\n angle = math.degrees(math.atan2(-1*ydiff, xdiff))\n converted = angle + 180\n return converted\n return math.degrees(math.atan(float(ydiff)/xdiff)) % 360\n\n def angle_decision(self, other_agent):\n if (other_agent.x, other_agent.y) == self.last:\n return\n self.last = (other_agent.x, other_agent.y)\n angle_to = self.angle_to(other_agent)\n angle_between = self.me.rotation - angle_to\n xdiff = (other_agent.x - self.me.x) / SCREEN_SIZE[0]\n ydiff = (other_agent.y - self.me.y) / SCREEN_SIZE[1]\n print(\"==================\")\n print(\"angle_between = {} | ydiff = {} | xdiff = {}\".format(angle_between, ydiff, xdiff))\n print(\"\\tratio: {} | my rotation: {}\".format(float(ydiff)/xdiff, self.me.rotation))\n print(\"\\tangle to: {}\".format(angle_to))\n if abs(angle_between) < 10:\n return\n right = other_agent.x > self.me.x\n above = other_agent.y < self.me.y\n #print(right, above)\n #print(self.me.rotation)\n self.rotation=True\n if not right and above:\n self.me.rotation -= self.me.rotation_speed\n elif right and above:\n self.me.rotation += self.me.rotation_speed\n elif not right and not above:\n self.me.rotation += self.me.rotation_speed\n elif right and not above:\n self.me.rotation -= self.me.rotation_speed\n else:\n self.rotation=False\n\n self.me.rotation %= 360\n\n"
},
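A common alternative to the quadrant-based steering in brain.py above is to turn by the signed smallest angle difference. The sketch below is not the repository's implementation, just an illustration under standard screen conventions (y grows downward, angles in degrees):

    import math

    def turn_direction(my_x, my_y, my_rotation, target_x, target_y):
        # Bearing to the target in degrees; y is negated because screen
        # coordinates grow downward.
        bearing = math.degrees(math.atan2(-(target_y - my_y), target_x - my_x)) % 360
        # Signed smallest difference, mapped into [-180, 180).
        diff = (bearing - my_rotation + 180) % 360 - 180
        if abs(diff) < 10:   # close enough; stop turning
            return 0
        return 1 if diff > 0 else -1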
{
"alpha_fraction": 0.5967366099357605,
"alphanum_fraction": 0.6060606241226196,
"avg_line_length": 17.60869598388672,
"blob_id": "695ba543c89bdb7262efd13372a2dae3a7def7c5",
"content_id": "ff13e4aec0c3f0571d067d25f34a2ccae97b6656",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 429,
"license_type": "permissive",
"max_line_length": 55,
"num_lines": 23,
"path": "/tankwars/main.py",
"repo_name": "edgehub-cloud/AI-games",
"src_encoding": "UTF-8",
"text": "\"\"\"\nRewriting the game loop for the tank game\n\n1. Poll for input\n2. Update Physics\n3. Handle Physics Events\n4. Check Conditions & Loop\n\"\"\"\n\n\n\ndef run(tanks, game):\n while check_conditions(tanks, game):\n for tank in tanks:\n tank.poll() # this allows for human players\n game.update(tank)\n if consts.USE_PYGAME:\n game.tick(tanks)\n\n\nclass Game:\n def __init__(self):\n pass\n\n"
},
{
"alpha_fraction": 0.5030885338783264,
"alphanum_fraction": 0.5573095679283142,
"avg_line_length": 25.962963104248047,
"blob_id": "71ff579d681d1fac877c6ee2186b59c9a5cc5d7f",
"content_id": "84fdd38f27e5529425acd438b5594185846fe4b9",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1457,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 54,
"path": "/wormwars/consts.py",
"repo_name": "edgehub-cloud/AI-games",
"src_encoding": "UTF-8",
"text": "from boards import GameBoard, TronGameBoard, InfiniteGameBoard\nimport logging\n\n\"\"\"\nLOGGING STUFF\n=============\n\"\"\"\nLOGGER = logging.getLogger(\"wormwars\")\nLOGGER.setLevel(logging.WARNING)\n\n\n\"\"\"\nSYNTACTIC SUGAR\n===============\n\"\"\"\nHEAD = 0 # syntactic sugar: index of the worm's head\n# R G B\nWHITE = (255, 255, 255)\nBLACK = ( 0, 0, 0)\nRED = (255, 0, 0)\nGREEN = ( 0, 255, 0)\nDARKGREEN = ( 0, 155, 0)\nBLUE = ( 0, 0, 255)\nDARKBLUE = ( 0, 0, 155)\nPURPLE = (200, 0, 200)\nDARKPURPLE = (115, 0, 115)\nDARKGRAY = ( 40, 40, 40)\nBGCOLOR = BLACK\nCOLOR_LIST = {\"Green\": (GREEN, DARKGREEN), \"Blue\":(BLUE, DARKBLUE),\n \"Purple\": (PURPLE, DARKPURPLE)}\nMOVES = {\"left\": (-1, 0), \"right\": (1,0),\n \"down\": (0, 1), \"up\": (0, -1)}\nMOVE_LOOKUP = {v:k for k,v in MOVES.items()}\n\nGAME_BOARDS = {\"original\": GameBoard,\n \"tron\": TronGameBoard,\n \"infinite\": InfiniteGameBoard}\nPAUSE_AFTER_DEATH = True\n\n\"\"\"\nPYGAME SETTINGS\n===============\n\"\"\"\n\nUSE_PYGAME = True\nSTATS_ONLY = False\nFPS = 300\nWINDOWWIDTH = 300\nWINDOWHEIGHT = 300\nCELLSIZE = 10\nassert WINDOWWIDTH % CELLSIZE == 0, \"Window width must be a multiple of cell size.\"\nassert WINDOWHEIGHT % CELLSIZE == 0, \"Window height must be a multiple of cell size.\"\nCELLWIDTH = int(WINDOWWIDTH / CELLSIZE)\nCELLHEIGHT = int(WINDOWHEIGHT / CELLSIZE)\n\n"
},
{
"alpha_fraction": 0.5549892783164978,
"alphanum_fraction": 0.5600858330726624,
"avg_line_length": 34.846153259277344,
"blob_id": "17c81ba539f59a2dab944c6fbf428531549b3f79",
"content_id": "14704c2bf9c109380574a070bad07698ff154858",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3728,
"license_type": "permissive",
"max_line_length": 93,
"num_lines": 104,
"path": "/wormwars/visualizers.py",
"repo_name": "edgehub-cloud/AI-games",
"src_encoding": "UTF-8",
"text": "\"\"\"\nPyGame source adapted from Al Sweigart's Wormy. \n\nhis original header:\n # Wormy (a Nibbles clone)\n # By Al Sweigart [email protected]\n # http://inventwithpython.com/pygame\n # Released under a \"Simplified BSD\" license\n\"\"\"\n\n\n\nimport random, pygame, sys\nfrom pygame.locals import *\nimport logging\nimport consts\n\n\nclass PygameScreen:\n def __init__(self):\n pygame.init()\n self.FPSCLOCK = pygame.time.Clock()\n self.DISPLAYSURF = pygame.display.set_mode((consts.WINDOWWIDTH,\n consts.WINDOWHEIGHT))\n font_size = min([consts.WINDOWWIDTH // 30, 18])\n self.BASICFONT = pygame.font.Font('freesansbold.ttf', font_size)\n pygame.display.set_caption('WormWars')\n\n def draw(self, food, bots, stats):\n for event in pygame.event.get():\n if event.type == QUIT:\n self.terminate()\n self.DISPLAYSURF.fill(consts.BGCOLOR)\n self.draw_grid()\n for i, bot in enumerate(bots):\n self.draw_bot(bot.body_parts, i)\n self.draw_food(food)\n self.draw_score(stats)\n pygame.display.update()\n self.FPSCLOCK.tick(consts.FPS)\n\n def terminate(self):\n pygame.quit()\n #sys.exit()\n\n def draw_bot(self, body_parts, color_index=0):\n num_colors = len(consts.COLOR_LIST)\n if color_index >= num_colors:\n consts.LOGGER.warning(\"Repeating a color\")\n cname, (part_bg, part_fg) = list(consts.COLOR_LIST.items())[color_index % num_colors]\n\n for part in body_parts:\n x = part['x'] * consts.CELLSIZE\n y = part['y'] * consts.CELLSIZE\n body_part_rect = pygame.Rect(x, y, consts.CELLSIZE, consts.CELLSIZE)\n pygame.draw.rect(self.DISPLAYSURF, part_bg, body_part_rect)\n inner_rect = pygame.Rect(x + 4, y + 4,\n consts.CELLSIZE - 8,\n consts.CELLSIZE - 8)\n pygame.draw.rect(self.DISPLAYSURF, part_fg, inner_rect)\n\n\n def draw_food(self, food):\n x = food['x'] * consts.CELLSIZE\n y = food['y'] * consts.CELLSIZE\n food_rect = pygame.Rect(x, y, consts.CELLSIZE, consts.CELLSIZE)\n pygame.draw.rect(self.DISPLAYSURF, consts.RED, food_rect)\n\n\n def draw_grid(self):\n # vertical lines\n for x in range(0, consts.WINDOWWIDTH, consts.CELLSIZE):\n pygame.draw.line(self.DISPLAYSURF, consts.DARKGRAY,\n (x, 0), (x, consts.WINDOWHEIGHT))\n\n # horizontal lines\n for y in range(0, consts.WINDOWHEIGHT, consts.CELLSIZE):\n pygame.draw.line(self.DISPLAYSURF, consts.DARKGRAY,\n (0, y), (consts.WINDOWWIDTH, y))\n\n def draw_score(self, stats):\n num_bots = len(stats)\n for i, (bot_id, stats_dict) in enumerate(stats.items(),0):\n score = stats_dict['score']\n color_name, _ = list(consts.COLOR_LIST.items())[i]\n name = \"{} [{}]\".format(bot_id, color_name)\n surface = self.BASICFONT.render('{} Score: {}'.format(name, score),\n True, consts.WHITE)\n rect = surface.get_rect()\n y_sep = consts.WINDOWHEIGHT // (num_bots+5)\n rect.topleft = (20,\n 20+y_sep*i)\n self.DISPLAYSURF.blit(surface, rect)\n\n\nclass PygameStatsOnly(PygameScreen):\n def draw(self, food, bots, stats):\n for event in pygame.event.get():\n if event.type == QUIT:\n self.terminate()\n self.DISPLAYSURF.fill(consts.BGCOLOR)\n self.draw_score(stats)\n pygame.display.update()\n #self.FPSCLOCK.tick(consts.FPS)\n"
},
{
"alpha_fraction": 0.5435286164283752,
"alphanum_fraction": 0.5557762384414673,
"avg_line_length": 32.56666564941406,
"blob_id": "e85560578d2bb3d67861a36f854f67fb31cad556",
"content_id": "ba02e373a333e0ee64cde8a867d4a6774789d699",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3021,
"license_type": "permissive",
"max_line_length": 98,
"num_lines": 90,
"path": "/tictactoe/visualizers.py",
"repo_name": "edgehub-cloud/AI-games",
"src_encoding": "UTF-8",
"text": "import sys\nimport pygame\nimport utils\nfrom utils import consts\nfrom pygame.locals import QUIT\n\nclass PygameScreen:\n def __init__(self):\n pygame.init()\n self.DISPLAYSURF = pygame.display.set_mode((consts.WINDOWWIDTH,\n consts.WINDOWHEIGHT))\n self.FPSCLOCK = pygame.time.Clock()\n\n self.compute_specs()\n font_size = min([consts.WINDOWWIDTH // 30, 18])\n self.BASICFONT = pygame.font.Font('freesansbold.ttf', font_size)\n pygame.display.set_caption('Tic-Tac-Toe')\n\n self.mark_history = []\n self.draw()\n\n def reset(self):\n self.mark_history = []\n self.draw()\n\n def _make_rect(self, i, j):\n\n x = self.CELLSIZE[0]*i + self.OFFSET[0]\n y = self.CELLSIZE[1]*j + self.OFFSET[1]\n w = self.CELLSIZE[0] - 2 * self.OFFSET[0]\n h = self.CELLSIZE[1] - 2 * self.OFFSET[1]\n\n return pygame.Rect(x, y, w, h)\n\n def compute_specs(self):\n self.CELLSIZE = (consts.WINDOWWIDTH//3, consts.WINDOWHEIGHT//3)\n self.OFFSET = (self.CELLSIZE[0]//10, self.CELLSIZE[1]//10)\n self.RECTS = [[self._make_rect(col_i, row_j) for col_i in range(3)] for row_j in range(3)]\n\n def draw(self, move=None, player=None):\n for event in pygame.event.get():\n if event.type == QUIT:\n self.terminate()\n\n self.DISPLAYSURF.fill(consts.BGCOLOR)\n self.draw_grid()\n if player and move:\n new_mark = {\"coords\":move, \"player\":str(player)}\n self.mark_history.append(new_mark)\n for mark in self.mark_history:\n self.draw_mark(mark)\n pygame.display.update()\n self.FPSCLOCK.tick(consts.FPS)\n\n def draw_grid(self):\n xthird = consts.WINDOWWIDTH // 3\n ythird = consts.WINDOWHEIGHT // 3\n for i in range(1,3):\n # vert\n pygame.draw.line(self.DISPLAYSURF, consts.DARKGRAY,\n (xthird*i, 0), (xthird*i, consts.WINDOWHEIGHT),3)\n #horiz\n pygame.draw.line(self.DISPLAYSURF, consts.DARKGRAY,\n (0, ythird*i), (consts.WINDOWWIDTH, ythird*i), 3)\n\n def draw_mark(self, mark):\n if mark['player'] == consts.X:\n self._draw_x(mark['coords'])\n else:\n self._draw_o(mark['coords'])\n\n def _draw_x(self, mark_coords):\n i, j = mark_coords\n rect = self.RECTS[i][j]\n pygame.draw.line(self.DISPLAYSURF, consts.RED,\n rect.topleft, rect.bottomright, 5)\n pygame.draw.line(self.DISPLAYSURF, consts.RED,\n rect.bottomleft, rect.topright, 5)\n\n def _draw_o(self, mark_coords):\n i, j = mark_coords\n rect = self.RECTS[i][j]\n center = rect.center\n radius = rect.width // 2\n radius -= radius//3\n pygame.draw.circle(self.DISPLAYSURF, consts.BLUE, center, radius, radius//3)\n\n def terminate(self):\n pygame.quit()\n sys.exit()\n"
},
{
"alpha_fraction": 0.537015974521637,
"alphanum_fraction": 0.5444191098213196,
"avg_line_length": 27.552845001220703,
"blob_id": "fd5b86e153b86ff37b655bb51d8222719bb230a8",
"content_id": "ff1e3cbc7457aa5f737e88f70d50db89afbee624",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3512,
"license_type": "permissive",
"max_line_length": 109,
"num_lines": 123,
"path": "/wormwars/main.py",
"repo_name": "edgehub-cloud/AI-games",
"src_encoding": "UTF-8",
"text": "from bots import AwesomeBot\nimport consts\n\n\nclass WormWars:\n def __init__(self, bots, game_type=\"original\"):\n self.game_class = consts.GAME_BOARDS[game_type]\n self.bot_classes = bots\n self.stats = {}\n self.initialize()\n\n def initialize(self):\n num_bots = len(self.bot_classes)\n\n self.game = self.game_class(num_bots)\n self.bots = BotList()\n\n for bot_id, bot_class in enumerate(self.bot_classes):\n starting_position = self.game.new_starting_position()\n fresh_bot = bot_class.new_instance(bot_id, starting_position)\n self.bots.append(fresh_bot)\n if fresh_bot.bot_id not in self.stats:\n self.stats[fresh_bot.bot_id] = {'score':0}\n\n if consts.USE_PYGAME:\n self.game.initialize_screen()\n\n self.game.tick(self.bots, self.stats)\n\n def check_conditions(self):\n for bot in self.bots:\n if self.game.outside_bounds(bot):\n bot.failed(\"out of bounds\")\n elif bot.self_collision():\n bot.failed(\"collided with self\")\n elif bot.other_collision(self.bots):\n bot.failed(\"collided with other\")\n\n if len(self.bots) == 1 and self.bots.len != 1:\n # a single winner\n return False\n elif len(self.bots) == 0:\n # no winners\n return False\n else:\n # another iteration\n return True\n\n def run(self):\n while self.check_conditions():\n for bot in self.bots:\n bot.act(self.game, self.bots)\n self.game.update(bot)\n if consts.USE_PYGAME and not consts.STATS_ONLY:\n self.game.tick(self.bots, self.stats)\n if consts.PAUSE_AFTER_DEATH:\n for bot in self.bots:\n for m in bot.last_history:\n print(m)\n input(\"<enter to continue>\")\n if consts.USE_PYGAME and consts.STATS_ONLY:\n self.game.tick(self.bots, self.stats)\n self.track_stats()\n\n def track_stats(self):\n for bot in self.bots:\n self.stats[bot.bot_id]['score'] += 0 if bot.FAILED else 1\n\nclass BotList(list):\n def __len__(self):\n return len([x for x in self if not x.FAILED])\n\n @property\n def len(self):\n return len([x for x in self])\n\n\n\n\n\n\ndef test():\n bots = [RandomWormBot, RandomWormBot]\n war = WormWars(bots)\n war.run()\n\ndef test2():\n bots = [CarefulBot, CarefulBot, CarefulBot]\n war = WormWars(bots)\n for i in range(2000):\n try:\n war.run()\n war.initialize()\n except Exception as e:\n print(\"made it to {}\".format(i))\n print(\"Turn number {}\".format(war.game.turn_number))\n print(\"Worm length: {} and {}\".format(len(war.bots[0].body_parts), len(war.bots[1].body_parts) ))\n\n raise e\n if i%10==0: print(war.stats)\n\ndef test3():\n bots = [AwesomeBot]\n war = WormWars(bots)\n for i in range(2000):\n try:\n war.run()\n war.initialize()\n except Exception as e:\n print(\"made it to {}\".format(i))\n print(\"Turn number {}\".format(war.game.turn_number))\n print(\"Worm length: {}\".format(len(war.bots[0].body_parts)))\n\n raise e\n\n if i%10==0: print(war.stats)\n\n\nif __name__ == \"__main__\":\n try:\n test3()\n except KeyboardInterrupt as e:\n print(\"Gracefully exiting\")\n"
},
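main.py above pins down the bot interface through its call sites (new_instance, act, failed/FAILED, head, body_parts, delete_tail and the collision checks). The skeleton below is inferred from those calls as a sketch only; the repository's real base class lives in bots.py, which is not shown here and may differ:

    class SkeletonBot:
        # Inferred interface sketch, not the actual base class from bots.py.

        FAILED = False

        @classmethod
        def new_instance(cls, bot_id, starting_position):
            bot = cls()
            bot.bot_id = bot_id
            x, y = starting_position
            bot.body_parts = [{'x': x, 'y': y}]  # head first (see boards.py)
            bot.last_history = []
            return bot

        @property
        def head(self):
            return self.body_parts[0]

        def act(self, game, bots):
            pass  # decide on a move and update body_parts here

        def delete_tail(self):
            return self.body_parts.pop()

        def failed(self, reason):
            self.FAILED = True
            self.last_history.append(reason)

        def self_collision(self):
            return self.head in self.body_parts[1:]

        def other_collision(self, bots):
            return any(self.head in b.body_parts for b in bots if b is not self)

        def _single_collision(self, cell):
            return self.head['x'] == cell['x'] and self.head['y'] == cell['y']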
{
"alpha_fraction": 0.5591965913772583,
"alphanum_fraction": 0.5634249448776245,
"avg_line_length": 23.256410598754883,
"blob_id": "9aa78b1960a63272bf4f07abe9f6acb7005948cf",
"content_id": "e73f499f7b3c2c89bed081c3cde2a9683c2519b4",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 946,
"license_type": "permissive",
"max_line_length": 78,
"num_lines": 39,
"path": "/tictactoe/bots.py",
"repo_name": "edgehub-cloud/AI-games",
"src_encoding": "UTF-8",
"text": "import random\nimport utils\nfrom utils import consts\n\nclass GenericT3Bot:\n def __init__(self, symbol):\n self.symbol = symbol\n\n def __str__(self):\n return self.symbol\n\n def __hash__(self):\n return hash(str(self))\n\n def __eq__(self, other):\n return hash(self) == hash(other)\n\n def open_moves(self, game):\n move_list = []\n for i in range(3):\n for j in range(3):\n if game.spaces[i][j] == consts.EMPTY:\n move_list.append((i,j))\n return move_list\n\n def move(self, game):\n if game.draw():\n return\n next_move = self.think(game)\n consts.LOGGER.debug(\"{} has chosen {}\".format(self.symbol, next_move))\n game.update(next_move, self)\n\n def think(self, game):\n raise NotImplementedError\n\n\nclass RandomBot(GenericT3Bot):\n def think(self, game):\n return random.choice(self.open_moves(game))\n"
},
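Building on GenericT3Bot above, a slightly stronger baseline than RandomBot that prefers the centre, then a corner, then any open square (a sketch using only the open_moves helper defined above):

    import random

    class CenterFirstBot(GenericT3Bot):
        """Prefers the centre, then a corner, then any open square."""

        def think(self, game):
            moves = self.open_moves(game)
            if (1, 1) in moves:  # centre first
                return (1, 1)
            corners = [m for m in moves if m in ((0, 0), (0, 2), (2, 0), (2, 2))]
            if corners:
                return random.choice(corners)
            return random.choice(moves)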
{
"alpha_fraction": 0.5539073944091797,
"alphanum_fraction": 0.5615050792694092,
"avg_line_length": 31.51764678955078,
"blob_id": "c454c9191070dd0f15420a003fd0e47109945ac3",
"content_id": "1b437abcc3c3b0b960b99506faf2111f29015440",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2764,
"license_type": "permissive",
"max_line_length": 95,
"num_lines": 85,
"path": "/wormwars/boards.py",
"repo_name": "edgehub-cloud/AI-games",
"src_encoding": "UTF-8",
"text": "import random\nimport visualizers\nimport consts\n\nclass GameBoard:\n def __init__(self, num_bots):\n self.right_edge = consts.WINDOWWIDTH // consts.CELLSIZE\n self.bottom_edge = consts.WINDOWHEIGHT // consts.CELLSIZE\n self.no_food = set()\n self.food = self.new_food()\n self.turn_number = 0\n self.deleted_cells = []\n self.screen = None\n\n xdist, ydist = self.right_edge//(num_bots+1), self.bottom_edge//(num_bots+1)\n self.starting_positions = [(i*xdist, i*ydist) for i in range(1,1+num_bots)]\n\n\n def new_starting_position(self):\n pos = self.starting_positions.pop()\n self.no_food.add(pos)\n return pos\n\n def outside_bounds(self, bot):\n head = bot.head\n return (head['x'] < 0 or head['y'] < 0 or\n head['x'] > self.right_edge or head['y'] > self.bottom_edge)\n\n def update(self, bot):\n self.turn_number += 1\n head = (bot.head['x'], bot.head['y'])\n self.no_food.add(head)\n if bot._single_collision(self.food):\n self.food = self.new_food()\n else:\n cell = bot.delete_tail()\n cell = (cell['x'], cell['y'])\n if cell not in self.no_food:\n consts.LOGGER.debug(\"cell ({}) not in no_food ({})\".format(cell, self.no_food))\n consts.LOGGER.debug(\"Worm body: {}\".format(bot.body_parts))\n consts.LOGGER.debug(\"Deleted cells: {}\".format(self.deleted_cells[-5:]))\n else:\n self.deleted_cells.append(cell)\n self.no_food.remove(cell)\n\n def initialize_screen(self):\n if consts.STATS_ONLY:\n self.screen = visualizers.PygameStatsOnly()\n else:\n self.screen = visualizers.PygameScreen()\n\n def tick(self, bots, stats):\n \"\"\" if drawing or reporting, do so here \"\"\"\n if self.screen:\n self.screen.draw(self.food, bots, stats)\n\n def _food_helper(self):\n x = random.randint(0, self.right_edge-1)\n y = random.randint(0, self.bottom_edge-1)\n return x,y\n\n def new_food(self):\n x,y = self._food_helper()\n while (x,y) in self.no_food:\n x,y = self._food_helper()\n return {'x':x,'y':y}\n\n\nclass TronGameBoard(GameBoard):\n def update(self, bot):\n\n head = (bot.head['x'], bot.head['y'])\n self.no_food.add(head)\n\n\nclass InfiniteGameBoard(GameBoard):\n def __init__(self, *args, **kwargs):\n super(InfiniteGameBoard, self).__init__(*args, **kwargs)\n self.right_edge = 10**10\n self.bottom_edge = 10**10\n\n def tick(self, bots, stats):\n \"\"\" infinite boards would have a hard time rendering without fancy\n camera shifting work\"\"\"\n pass\n"
},
{
"alpha_fraction": 0.6402116417884827,
"alphanum_fraction": 0.7129629850387573,
"avg_line_length": 19.432432174682617,
"blob_id": "e023c4892c7b97ef71acb42d12e3d182499ec11b",
"content_id": "a8475e96adc62f0da8e6a868b142e400a3c21dc9",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 756,
"license_type": "permissive",
"max_line_length": 71,
"num_lines": 37,
"path": "/tankwars/constants.py",
"repo_name": "edgehub-cloud/AI-games",
"src_encoding": "UTF-8",
"text": "'''\nThis file contains all the constant variables\nWhen adding variables, make sure to place them in an alphabetical order\n'''\nCAPTION = \"Achtung Panzer\"\nSCREEN_SIZE = (1000, 700)\nFPS = 60\nANIMATION_SPEED = 2\n\n\"\"\"Agents and Ammo\"\"\"\nEXPLOSION_SIZE = 65\nEXPLOSION_SPEED = 8\nHEALTHBAR_SIZE = (0.5, 5)\nTANK_SPEED = 3\nTANK_SPEED_BACK = 1.5\nTANK_ACCELERATION = 0.125\nTANK_ROTATION_SPEED = 2\nTANK_HEIGHT = 50\nTANK_WIDTH = 50\n\n\"\"\"Sound\"\"\"\nMUSIC_CHANNELS = 1\nMUSIC_DEFAULT_VOLUME = 0.40\nGAMEFX_CHANNELS = (2, 3)\nGAMEFX_DEFAULT_VOLUME = 0.40\nMISCFX_CHANNELS = (4, 5)\nMISCFX_DEFAULT_VOLUME = 0.40\n\n\"\"\"Game World and WorldObjects\"\"\"\nSOLID_OBJ_PUSHBACK = 0.02\nMAP_BORDER_PUSHBACK = 0.05\nDEAD_BUSH_SIZE = 80\nSTONE_MAX_SIZE = 180\nDESERT_STONE_MAX_SIZE = 90\n\n\nFASTRUN=True\n"
},
{
"alpha_fraction": 0.608458936214447,
"alphanum_fraction": 0.6235343217849731,
"avg_line_length": 40.859649658203125,
"blob_id": "ecb41efa21412369dd9df859583a2ce3e021de11",
"content_id": "634246dd14efdcfa314de33d33d9856201a4487d",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2388,
"license_type": "permissive",
"max_line_length": 321,
"num_lines": 57,
"path": "/tankwars/ai.py",
"repo_name": "edgehub-cloud/AI-games",
"src_encoding": "UTF-8",
"text": "from agent import *\nimport math\nfrom brain import Brain\n\nclass Robot(Player):\n def __init__(self, controller, color, k_right, k_backward, k_left,\n k_forward, k_weapon1, k_weapon2, x, y, rotation = 0):\n self.controller = controller\n self.screen = self.controller.screen\n self.name = \"Agent\"\n self.type = 0\n self.x, self.y = x, y\n self.health = 100\n self.max_speed = TANK_SPEED\n self.max_speed_back = TANK_SPEED_BACK\n self.acceleration = TANK_ACCELERATION\n self.rotation_speed = TANK_ROTATION_SPEED\n self.speed = 0\n self.rotation = rotation\n self.direction = None\n self.moving = False\n self.rotating = False\n self.solid = 100\n self.current_collisions = []\n self.dead = False\n\n \"\"\"Gives the player static ammo object, these objects are copied in their fire() function.\n These variables can be seen as weapons, so fiddle with these variables when adding/changing it\"\"\"\n self.ammo1, self.ammo2 = NormalShot(self), StickyBomb(self)\n\n if TANK_WIDTH > TANK_HEIGHT:\n self.radius = int(TANK_WIDTH * 0.55)\n else:\n self.radius = int(TANK_HEIGHT * 0.55)\n\n #Load and resize tank img with right color\n if color == 'green':\n self.MasterSprites = [pygame.transform.scale(pygame.image.load(\"images/tankgreen1.png\"), (TANK_WIDTH, TANK_HEIGHT)), pygame.transform.scale(pygame.image.load(\"images/tankgreen2.png\"), (TANK_WIDTH, TANK_HEIGHT)), pygame.transform.scale(pygame.image.load(\"images/tankgreen3.png\"), (TANK_WIDTH, TANK_HEIGHT))]\n else:\n self.MasterSprites = [pygame.transform.scale(pygame.image.load(\"images/tankpurple1.png\"), (TANK_WIDTH, TANK_HEIGHT)), pygame.transform.scale(pygame.image.load(\"images/tankpurple2.png\"), (TANK_WIDTH, TANK_HEIGHT)), pygame.transform.scale(pygame.image.load(\"images/tankpurple3.png\"), (TANK_WIDTH, TANK_HEIGHT))]\n\n self.sprite = self.MasterSprites[0]\n\n self.animationindex = 0\n\n self.brain = Brain(self)\n\n def update(self):\n \"\"\" 90 down, 0 left, 270 up, 180 right \"\"\"\n for agent in self.controller.agents:\n if agent.name == self.name:\n continue\n else:\n self.brain.angle_decision(agent)\n\n\n super(Robot, self).update()\n\n\n"
},
{
"alpha_fraction": 0.711904764175415,
"alphanum_fraction": 0.7166666388511658,
"avg_line_length": 27,
"blob_id": "e275b3d782af34fbed6c51220ab41b150f9098af",
"content_id": "3c727dbf39d8a07e21e5155728bb979ab859cb3f",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 420,
"license_type": "permissive",
"max_line_length": 86,
"num_lines": 15,
"path": "/README.md",
"repo_name": "edgehub-cloud/AI-games",
"src_encoding": "UTF-8",
"text": "# AI-games\nGames for making AI bots and having fun\n\nThe games are written to be modular. In other words, bots and visualizers swappable. \n\n### Games\n\n1. Tic-Tac-Toe\n - Generic tic-tac-toe\n - Only one bot implemented/uplaoded: a random bot. \n2. Wormwars\n - Snake and its variants\n + Snake w/ food, Tron, etc\n - Any number of snakes can battle\n - Only two bots implemented/uploaded: a random bot and a greedy bot\n"
},
{
"alpha_fraction": 0.6105675101280212,
"alphanum_fraction": 0.6242661476135254,
"avg_line_length": 20.29166603088379,
"blob_id": "620715383ad13416b8bcb4ec9db3eb9588da9bb4",
"content_id": "9ecd55d7b9c585b1442d8382f82137463a05857e",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 511,
"license_type": "permissive",
"max_line_length": 52,
"num_lines": 24,
"path": "/wormwars/__init__.py",
"repo_name": "edgehub-cloud/AI-games",
"src_encoding": "UTF-8",
"text": "import sys\nimport os\nsys.path.append(os.path.dirname(__file__))\n\nimport main\nimport bots\nfrom main import WormWars\nfrom bots import CarefulBot\n\ndefault_2bots = [CarefulBot, CarefulBot]\ndefault_3bots = [CarefulBot, CarefulBot, CarefulBot]\n\ndef demo():\n war = WormWars(default_3bots)\n for i in range(2000):\n try:\n war.run()\n war.initialize()\n except KeyboardInterrupt as e:\n print(\"Okay. Stopping now\")\n break\n\nif __name__ == \"__main__\":\n demo()\n"
},
{
"alpha_fraction": 0.6498906016349792,
"alphanum_fraction": 0.6542669534683228,
"avg_line_length": 23.052631378173828,
"blob_id": "491fd1f036e4d9c31c22fd0b81fce3011e7c21d3",
"content_id": "1c66d851f6d57febe4dbdc3a72512fdfb7972536",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 457,
"license_type": "permissive",
"max_line_length": 68,
"num_lines": 19,
"path": "/tankwars/fastrun.py",
"repo_name": "edgehub-cloud/AI-games",
"src_encoding": "UTF-8",
"text": "import logging\nfrom controller import Controller\n\nDEBUG = False\n\nif DEBUG:\n logging.basicConfig(filename='example.log', level=logging.DEBUG)\nelse:\n logging.basicConfig(filename='example.log', level=logging.INFO)\n\n\nif __name__ == \"__main__\":\n logging.info('Starting...')\n c = Controller(debug=DEBUG)\n c.start_game(\"grass\")\n c.all_player_names.extend(['human', 'bot'])\n c.agents[0].name = 'human'\n c.agents[1].name = 'bot'\n c.run()\n"
},
{
"alpha_fraction": 0.5820280909538269,
"alphanum_fraction": 0.5996987819671631,
"avg_line_length": 41.20338821411133,
"blob_id": "6d58b9740d41d43c77c143f7dcbae321a6e156be",
"content_id": "f6ca70e3c040851317d5a03b8e8ce369bf080310",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 9960,
"license_type": "permissive",
"max_line_length": 321,
"num_lines": 236,
"path": "/tankwars/agent.py",
"repo_name": "edgehub-cloud/AI-games",
"src_encoding": "UTF-8",
"text": "import logging\nimport pygame\nimport math\nfrom constants import *\nfrom sound import *\nfrom ammo import *\n\nclass Player(object):\n \"\"\"The tank controlled by players\"\"\"\n def __init__(self, controller, color, k_right, k_backward, k_left, k_forward, k_weapon1, k_weapon2, x, y, rotation = 0):\n self.controller = controller\n self.screen = self.controller.screen\n self.name = \"Agent\"\n self.type = 0\n self.x, self.y = x, y\n self.health = 100\n self.max_speed = TANK_SPEED\n self.max_speed_back = TANK_SPEED_BACK\n self.acceleration = TANK_ACCELERATION\n self.rotation_speed = TANK_ROTATION_SPEED\n self.speed = 0\n self.rotation = rotation\n self.direction = None\n self.moving = False\n self.rotating = False\n self.solid = 100\n self.current_collisions = []\n self.dead = False\n\n \"\"\"Gives the player static ammo object, these objects are copied in their fire() function.\n These variables can be seen as weapons, so fiddle with these variables when adding/changing it\"\"\"\n self.ammo1, self.ammo2 = NormalShot(self), StickyBomb(self)\n\n if TANK_WIDTH > TANK_HEIGHT:\n self.radius = int(TANK_WIDTH * 0.55)\n else:\n self.radius = int(TANK_HEIGHT * 0.55)\n\n #Load and resize tank img with right color\n if color == 'green':\n self.MasterSprites = [pygame.transform.scale(pygame.image.load(\"images/tankgreen1.png\"), (TANK_WIDTH, TANK_HEIGHT)), pygame.transform.scale(pygame.image.load(\"images/tankgreen2.png\"), (TANK_WIDTH, TANK_HEIGHT)), pygame.transform.scale(pygame.image.load(\"images/tankgreen3.png\"), (TANK_WIDTH, TANK_HEIGHT))]\n else:\n self.MasterSprites = [pygame.transform.scale(pygame.image.load(\"images/tankpurple1.png\"), (TANK_WIDTH, TANK_HEIGHT)), pygame.transform.scale(pygame.image.load(\"images/tankpurple2.png\"), (TANK_WIDTH, TANK_HEIGHT)), pygame.transform.scale(pygame.image.load(\"images/tankpurple3.png\"), (TANK_WIDTH, TANK_HEIGHT))]\n\n self.sprite = self.MasterSprites[0]\n\n self.animationindex = 0\n\n controller.register_key(k_right, self.keypress_right)\n controller.register_key(k_forward, self.keypress_forward)\n controller.register_key(k_left, self.keypress_left)\n controller.register_key(k_backward, self.keypress_backward)\n controller.register_key(k_weapon1, self.weapon1, singlepress=True)\n controller.register_key(k_weapon2, self.weapon2, singlepress=True)\n\n \"\"\"Keypress-functions are use to handle movement\"\"\"\n\n def keypress_right(self):\n if self.rotation == 0:\n self.rotation = 360\n\n self.rotation -= self.rotation_speed\n self.rotating = True\n\n def keypress_left(self):\n if self.rotation == 360:\n self.rotation = 0\n\n self.rotation += self.rotation_speed\n self.rotating = True\n\n def keypress_backward(self):\n self.moving = True #Set moving variable to true for the update method\n\n if self.direction == \"Forward\":\n self.speed -= self.acceleration\n else:\n self.direction = \"Backward\"\n if self.speed < self.max_speed_back: #Add acceleration to speed if max speed is not reached\n self.speed += self.acceleration\n\n def keypress_forward(self):\n self.moving = True\n\n if self.direction == \"Backward\":\n self.speed -= self.acceleration\n else:\n self.direction = \"Forward\"\n if self.speed < self.max_speed:\n self.speed += self.acceleration\n\n def weapon1(self, event):\n \"\"\"Fire weapon from slot 1\"\"\"\n if not self.dead:\n self.ammo1.fire()\n self.controller.stats.inform(self.name, shots_fired = 1)\n\n def weapon2(self, event):\n \"\"\"Fire weapon from slot 2\"\"\"\n if not self.dead:\n self.ammo2.fire()\n 
self.controller.stats.inform(self.name, stickybomb_fired = 1)\n\n def move(self):\n \"\"\"Updates posisition of player. Use different rules for movement when player is colliding\"\"\"\n\n if self.direction == \"Forward\": #If the player is moving forward, subtract from x, add to y\n self.x -= math.cos(math.radians(self.rotation)) * self.speed\n self.y += math.sin(math.radians(self.rotation)) * self.speed\n if self.speed > self.max_speed:\n self.speed -= self.acceleration\n elif self.direction == \"Backward\": #If the player is moving backward, add to x, subtract from y\n self.x += math.cos(math.radians(self.rotation)) * self.speed\n self.y -= math.sin(math.radians(self.rotation)) * self.speed\n if self.speed > self.max_speed_back:\n self.speed -= self.acceleration\n\n if self.moving == False and self.speed > 0: #Retardate if player isnt pressing keys\n self.speed -= self.acceleration\n\n if self.speed == 0: #If the players current speed is 0, set the moving direction to None\n self.direction = None\n\n self.moving = False\n self.rotating = False\n\n self.controller.stats.inform(self.name, move = self.speed)\n\n def die(self):\n \"\"\"Animate death and play sound\"\"\"\n self.dead = True\n Animation(self.screen, \"explosion\", (self.x, self.y), 9)\n Sound.Sounds[\"explosion\"].play()\n self.controller.agents.remove(self)\n\n\n def update(self):\n \"\"\"Update the player's attributes, move player, check if still alive\"\"\"\n\n if not self.dead:\n\n self.max_speed = TANK_SPEED\n self.max_speed_back = TANK_SPEED_BACK\n\n if self.moving or self.rotating:\n if self.animationindex != (len(self.MasterSprites) - 1) * ANIMATION_SPEED:\n self.animationindex += 1\n else:\n self.animationindex = 0\n\n self.detect_collisions()\n self.handle_collisions()\n self.move()\n self.sprite = pygame.transform.rotate(self.MasterSprites[self.animationindex//ANIMATION_SPEED], self.rotation)\n\n if self.health <= 0:\n self.die()\n\n \"\"\"Call the tick() function to update cooldowns\"\"\"\n for ammo in [self.ammo1, self.ammo2]:\n ammo.tick()\n\n \"\"\"Resetting some atributes each frame\"\"\"\n self.rotation_speed = TANK_ROTATION_SPEED\n self.current_collisions = []\n\n def handle_collisions(self):\n \"\"\"Handle the agent's current collisions. 
Also make sure that players can't drive through\n WorldObjects or outside of the maps borders by using a pushback\"\"\"\n for obj in self.current_collisions:\n if obj.type == 50: #if the obj is a powerup\n obj.pickup(self)\n\n if obj.solid == 100:\n self.speed = 0\n deltax = (self.x - obj.x)\n deltay = (obj.y - self.y)\n self.x += (deltax/3) * SOLID_OBJ_PUSHBACK\n self.y -= (deltay/3) * SOLID_OBJ_PUSHBACK\n\n else: #Player will lose speed depending on how solid the WorldObject is\n if (TANK_SPEED - (TANK_SPEED * (obj.solid/100.0))) < self.max_speed:\n self.max_speed = TANK_SPEED - (TANK_SPEED * (obj.solid/100.0))\n self.max_speed_back = TANK_SPEED_BACK - (TANK_SPEED_BACK * (obj.solid/100.0))\n\n if self.x > SCREEN_SIZE[0]-self.radius or self.x < self.radius or self.y > SCREEN_SIZE[1]-self.radius or self.y < self.radius:\n self.speed = 0\n\n if self.x > SCREEN_SIZE[0]-self.radius:\n self.x -= 10 * MAP_BORDER_PUSHBACK\n elif self.x < self.radius:\n self.x += 10 * MAP_BORDER_PUSHBACK\n elif self.y > SCREEN_SIZE[1]-self.radius:\n self.y -= 10 * MAP_BORDER_PUSHBACK\n else:\n self.y += 10 * MAP_BORDER_PUSHBACK\n\n def detect_collisions(self):\n \"\"\"Detect collisions between the player and WorldObjects or other player\"\"\"\n\n for obj in self.controller.map.objects:\n if detect_collision(self, obj):\n self.current_collisions.append(obj)\n\n for player in self.controller.agents:\n if player != self:\n other_player = player\n\n if len(self.controller.agents) == 2:\n if detect_collision(self, other_player):\n self.current_collisions.append(other_player)\n\n \"\"\"for obj in collisions: #Used for collision-detection-testing\n print \"collision with --> {} - {}\".format(obj.name, obj.type)\"\"\"\n\n def draw(self):\n \"\"\"Render the player and other connected graphics (like health-bar or hitbox) on the screen\"\"\"\n if self.health < 40:\n COLOR = (181, 53, 53)\n elif self.health < 60:\n COLOR = (232, 148, 14)\n else:\n COLOR = (90, 200, 100)\n\n self.screen.blit(self.sprite, (self.x - self.sprite.get_width()/2, self.y - self.sprite.get_height()/2))\n\n \"\"\"Draw the cooldown bars if any of your ammo1/ammo2 variables are more than 0. Also Drawing the health bars\"\"\"\n if not self.dead:\n pygame.draw.rect(self.screen, (COLOR), (self.x - self.sprite.get_width()/2, self.y - 50, self.health * HEALTHBAR_SIZE[0], HEALTHBAR_SIZE[1]))\n if self.ammo1.cd_time > 0:\n pygame.draw.rect(self.screen, (150,150,150), (self.x - self.sprite.get_width()/2, self.y - 52, ((self.ammo1.cd_time + 0.0) / self.ammo1.cooldown) * 50, 2))\n if self.ammo2.cd_time > 0:\n pygame.draw.rect(self.screen, (255,100,100), (self.x - self.sprite.get_width()/2, self.y - 45, ((self.ammo2.cd_time + 0.0) / self.ammo2.cooldown) * 50, 2))\n\n if self.controller.debug: #Collision-detection-testing\n pygame.draw.circle(self.screen, (255,0,0), (int(self.x), int(self.y)), self.radius, 2)\n"
},
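The `move()` method in the agent.py entry above turns the tank's rotation angle into x/y displacement with cosine and sine. A minimal standalone sketch of that same convention (the `step` helper name is ours, not the repo's):

```python
import math

def step(x, y, rotation_deg, speed):
    """Advance one frame 'Forward' using the same convention as Player.move()."""
    x -= math.cos(math.radians(rotation_deg)) * speed
    y += math.sin(math.radians(rotation_deg)) * speed
    return x, y

# At rotation 0 the tank moves in the -x direction; at 90 it moves in +y.
print(step(100, 100, 0, 3))   # -> (97.0, 100.0)
print(step(100, 100, 90, 3))  # -> (100.0, 103.0)
```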
{
"alpha_fraction": 0.7046818733215332,
"alphanum_fraction": 0.7106842994689941,
"avg_line_length": 35,
"blob_id": "d0c1c81049499bc9ec60e5a4c2b481c2677ca13e",
"content_id": "58eca57ae3c176f110499a6485524adc78c3d9aa",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 833,
"license_type": "permissive",
"max_line_length": 159,
"num_lines": 23,
"path": "/wormwars/README.md",
"repo_name": "edgehub-cloud/AI-games",
"src_encoding": "UTF-8",
"text": "# Worm Wars\n\n## Description\n\nAn implementation of Snake with multiple snakes battling over the food. Meant for an Intro to AI course. \n\nNote: turn on PyGame with `USE_PYGAME` variable in consts.py make pygame window \nstats only with `STATS_ONLY` variable in consts.py. If `USE_PYGAME=True` and `STATS_ONLY=False`, then the pygame window will render each frame of every game. \n\n## Files\n 1. main.py \n - main game runner\n - does the game loop logic\n 2. visualizers.py\n - implemented a pygame visualizing interface\n + Adapted from [Al Sweigart](https://inventwithpython.com/)'s implementation\n 3. consts.py\n - all parameters set here\n - turns on PyGame, sets framerate, board size, etc.\n 4. bots.py\n - bots get defined here\n 5. boards.py\n - handles food placement, visualizer frame updates, etc.\n \n"
},
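The README above names two consts.py flags, `USE_PYGAME` and `STATS_ONLY`. A hedged sketch of how they combine per that description (the helper `should_render_frames` is hypothetical; only the flag names and semantics come from the README):

```python
# Hypothetical excerpt illustrating the README's two flags.
USE_PYGAME = True    # master switch for the pygame window
STATS_ONLY = False   # when True, show stats only and skip per-frame rendering

def should_render_frames():
    # Per the README: frames render when the window is on AND stats-only is off.
    return USE_PYGAME and not STATS_ONLY

print(should_render_frames())  # True with the values above
```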
{
"alpha_fraction": 0.5842382907867432,
"alphanum_fraction": 0.6010760068893433,
"avg_line_length": 38.82936477661133,
"blob_id": "091b7de1ca223885a41c0cfd8c2d9ec70be93b63",
"content_id": "26f1c8f76ad14912e14fe5f392ea799da1562f64",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 10037,
"license_type": "permissive",
"max_line_length": 176,
"num_lines": 252,
"path": "/tankwars/map.py",
"repo_name": "edgehub-cloud/AI-games",
"src_encoding": "UTF-8",
"text": "import logging\nimport pygame\nimport random\nfrom pygame.locals import *\nfrom menu import MainMenu, PreGameMenu\nfrom powerup import *\nimport math\nfrom agent import Player\nfrom constants import *\nfrom functions import *\n\n\nclass World():\n \"\"\"The game world. Connects the logic of all WorldObjects with controller and the rest of the game\"\"\"\n def __init__(self, controller, map_type):\n self.screen = controller.screen\n self.controller = controller\n self.objects = []\t# Collection of all current World Objects\n self.map_type = map_type\n self.powerups = []\n self.font = pygame.font.Font(\"fonts/8bitwonder.ttf\", 14)\n self.update_world_objs = True\n\n for pup in range(0, random.randint(0, 10)):\n self.objects.append(Health(self, \"random\", \"random\"))\n\n def generate(self):\n \"\"\"Generate the game world and it's objects\"\"\"\n\n self.bg_surface = pygame.Surface(SCREEN_SIZE, flags=pygame.HWSURFACE)\n self.world_objs_surface = pygame.Surface(SCREEN_SIZE, flags=pygame.HWSURFACE + pygame.SRCALPHA)\n\n if self.map_type == \"grass\":\n self.ground_sprite = pygame.image.load(\"images/grass.png\")\n if not FASTRUN:\n for i in range(random.randint(1, 5)):\n self.objects.append(Water(self, self.world_objs_surface))\n\n for i in range(random.randint(2, 10)):\n self.objects.append(Bush(self, self.world_objs_surface))\n\n for i in range(random.randint(3, 15)):\n self.objects.append(Stone(self, self.world_objs_surface))\n\n elif self.map_type == \"sand\":\n self.ground_sprite = pygame.image.load(\"images/sand.png\")\n if not FASTRUN:\n for i in range(random.randint(1, 3)):\n self.objects.append(Water(self, self.world_objs_surface))\n\n for i in range(random.randint(1, 2)):\n self.objects.append(Bush(self, self.world_objs_surface))\n\n for i in range(random.randint(1, 10)):\n self.objects.append(DeadBush(self, self.world_objs_surface))\n\n for i in range(random.randint(3, 10)):\n self.objects.append(Stone(self, self.world_objs_surface))\n\n for i in range(random.randint(3, 12)):\n self.objects.append(DesertStone(self, self.world_objs_surface))\n\n self.ground_sprite_width = self.ground_sprite.get_width()\n self.ground_sprite_height = self.ground_sprite.get_height()\n\n for x in range(0, SCREEN_SIZE[0], self.ground_sprite_width):\n for y in range(0, SCREEN_SIZE[1], self.ground_sprite_height):\n self.bg_surface.blit(self.ground_sprite,(x,y))\n\n\n def draw(self):\n \"\"\"Draw the game-world and all it's objects\"\"\"\n self.screen.blit(self.bg_surface, (0, 0))\n self.screen.blit(self.world_objs_surface, (0, 0))\n\n if self.update_world_objs:\n self.world_objs_surface.fill((0, 0, 0, 0))\n\n for obj in self.objects:\n obj.draw()\n\n self.update_world_objs = False\n\n\nclass WorldObject(object):\n \"\"\"General attributes and methods for all WorldObjects\"\"\"\n def __init__(self, world, surface):\n self.surface = surface\n self.controller = world.controller\n self.x, self.y = 0,0\n self.name = \"Undefined WorldObject\"\n self.solid = 0\n self.destroyable = False\n\n def draw(self):\n \"\"\"General drawing-function for normal objects\"\"\"\n self.surface.blit(self.image, (self.x-self.image.get_width()/2, self.y-self.image.get_height()/2))\n\n if self.controller.debug: #Collision-detection-testing\n pygame.draw.circle(self.surface, (255,0,0), (int(self.x), int(self.y)), self.radius, 2)\n\n\nclass Object(WorldObject):\n \"\"\"Normal objects -> An image that only exists on one coordinate\"\"\"\n def __init__(self, world, surface):\n WorldObject.__init__(self, world, surface)\n 
self.type = 0 #worldobject\n self.name = \"Undefined Standard-object\"\n\n def check_spawn_point(self):\n \"\"\"Makes sure that Normal Objects only spawn on the screen and not on top of other WorldObjects\"\"\"\n while True:\n self.x = random.randint(self.radius, SCREEN_SIZE[0]-self.radius)\n self.y = random.randint(self.radius, SCREEN_SIZE[1]-self.radius)\n\n for obj in self.controller.map.objects:\n if detect_collision(self, obj):\n break #exit the for-loop and get new pos for self\n\n else: #object IS spawning on screen and NOT on top of other WorldObject\n break\n\n def get_random_sprite(self, sprites_list, folder, width, height):\n \"\"\"Pick a random sprite from sprites_list and set it as the objects sprite.\n\n Inputs:\n * List with strings of filenames of images\n * String with folder-path\n * Width and height of sprite\n Result:\n Set a random sprite for the object\"\"\"\n image = random.choice(sprites_list)\n full_path = folder + image\n return pygame.transform.scale(pygame.image.load(full_path), (width, height))\n\n def get_shot(self, damage):\n \"\"\"Update health of WorldObject. Remove if it gets destroyed\"\"\"\n self.health -= damage\n if self.health <= 0:\n self.controller.map.objects.remove(self)\n #update map here\n self.controller.map.update_world_objs = True\n\n\nclass Area(WorldObject):\n \"\"\"Area Objects that is made out of several smaller circle-objets to take up an area\"\"\"\n def __init__(self, world, surface):\n WorldObject.__init__(self, world, surface)\n self.type = 1 #area\n self.circles = []\n self.name = \"Undefined Area-object\"\n self.area()\n self.solid = 50\n\n def area(self):\n \"\"\"Generate area and only do so on screen\"\"\"\n radius = 40\n x = random.randint(radius, SCREEN_SIZE[0]-radius)\n y = random.randint(radius, SCREEN_SIZE[1]-radius)\n phi = random.randint(1,10) * 2 * math.pi\n circle = Circle(x, y, phi, radius)\n self.circles.append(circle)\n\n for i in range(0, random.randint(15, 30)):\n phi = random.randint(int((self.circles[-1].phi - math.radians(random.randint(1, 360)))), int((self.circles[-1].phi + math.radians(random.randint(1, 360)))))\n x = self.circles[-1].x + math.sin(phi) * radius\n y = self.circles[-1].y + math.cos(phi) * radius\n\n while True: #Only spawn next circle on screen\n if x > SCREEN_SIZE[0]-radius or x < radius or y > SCREEN_SIZE[1]-radius or y < radius:\n phi = random.randint(int((self.circles[-1].phi - math.radians(random.randint(1, 360)))), int((self.circles[-1].phi + math.radians(random.randint(1, 360)))))\n x = self.circles[-1].x + math.sin(phi) * radius\n y = self.circles[-1].y + math.cos(phi) * radius\n else: #next pos for circle is on the screen, continue\n break\n circle = Circle(x, y, phi, radius)\n self.circles.append(circle)\n\n\nclass Circle():\n \"\"\"These circle-objects make up Area-objects\"\"\"\n def __init__(self, x, y, phi, radius):\n self.x = x\n self.y = y\n self.phi = phi\n self.radius = radius\n\n\nclass Water(Area):\n \"\"\"Spawns in various sizes, shapes and colors\"\"\"\n def __init__(self, world, surface):\n Area.__init__(self, world, surface)\n self.color = (0, random.randint(0, 100), random.randint(110, 255))\n self.name = \"Water\"\n self.solid = 50\n\n def draw(self):\n for circle in self.circles:\n pygame.draw.circle(self.surface, self.color, (int(circle.x), int(circle.y)), int(circle.radius), 0)\n\n\nclass DeadBush(Object):\n \"\"\"Only spawning on sand-maps\"\"\"\n def __init__(self, world, surface):\n Object.__init__(self, world, surface)\n self.name = \"DeadBush\"\n self.solid = 
20\n self.image = pygame.transform.scale(pygame.image.load(\"images/deadtree.png\"), (DEAD_BUSH_SIZE, DEAD_BUSH_SIZE))\n self.radius = self.image.get_width()//3\n self.check_spawn_point()\n\n\nclass Bush(DeadBush):\n \"\"\"Spawning on grass- and sand-maps\"\"\"\n def __init__(self, world, surface):\n DeadBush.__init__(self, world, surface)\n self.name = 'Bush'\n self.solid = 100\n self.image = pygame.image.load('images/busksten.png')\n self.radius = self.image.get_width()//2\n self.check_spawn_point()\n\n\nclass Stone(Object):\n \"\"\"Spawning in various shapes, sizes and with randomized sprites depending on world.map_type.\n Is completely solid --> Can't be driven through\"\"\"\n def __init__(self, world, surface):\n Object.__init__(self, world, surface)\n self.name = \"Stone\"\n self.solid = 100\n self.width = random.randint(80, STONE_MAX_SIZE)\n self.height = self.width #values are the same to not trash image quality or collisions\n self.health = self.width * 2.5\n sprites_list = ['a10010.png', 'a10011.png', 'a10015.png', 'a10002.png']\n folder = 'images/stones/'\n self.image = self.get_random_sprite(sprites_list, folder, self.width, self.height)\n self.radius = self.image.get_height()//4\n self.check_spawn_point()\n\n\nclass DesertStone(Stone):\n \"\"\"Much like a Stone, but a different sprite and size\"\"\"\n def __init__(self, world, surface):\n Stone.__init__(self, world, surface)\n self.name = \"DesertStone\"\n self.width = random.randint(50, DESERT_STONE_MAX_SIZE)\n self.height = self.width\n sprites_list = ['c40007.png', 'c30011.png', 'c40000.png', 'c40010.png']\n folder = 'images/stones/'\n self.image = self.get_random_sprite(sprites_list, folder, self.width, self.height)\n self.radius = self.image.get_width()//4\n self.check_spawn_point()\n"
},
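`check_spawn_point()` in the map.py entry above uses Python's `for`/`else`: the `else` branch runs only when the loop finished without hitting `break`. A self-contained demo of that rejection-sampling pattern on toy data (not the game's `detect_collision`):

```python
import random

def pick_free_spot(taken, size=10):
    """Keep sampling until a spot collides with nothing in `taken`."""
    while True:
        spot = (random.randrange(size), random.randrange(size))
        for other in taken:
            if spot == other:
                break          # collision: resample
        else:                  # no break -> spot is free
            return spot

print(pick_free_spot({(0, 0), (1, 1)}))
```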
{
"alpha_fraction": 0.47355082631111145,
"alphanum_fraction": 0.4813113808631897,
"avg_line_length": 36.80838394165039,
"blob_id": "2578ea7f8b02bb71cd14b5c31a194783908b426a",
"content_id": "be6825a36a3d3dd4a64bf2b141c67ec789c3afc0",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 12628,
"license_type": "permissive",
"max_line_length": 170,
"num_lines": 334,
"path": "/tankwars/controller.py",
"repo_name": "edgehub-cloud/AI-games",
"src_encoding": "UTF-8",
"text": "import logging\nimport sys\nimport pygame\nimport random\nfrom pygame.locals import *\nfrom constants import * # constants are CAPITALIZED\nimport os\nfrom agent import Player\nfrom ai import Robot\nimport map\nfrom menu import MainMenu, PreGameMenu, BetweenGameMenu, AfterGameMenu\nfrom sound import *\nfrom animation import *\nfrom functions import *\n\n# Game States\nS_MENU = 1\nS_GAME = 2\nS_UPGRADES = 3\nS_ABOUT = 4\nS_SETTINGS = 5\nS_PREGAME = 6\nS_BETWEENGAME = 7\nS_AFTERGAME = 8\n\nclass Controller():\n \"\"\"The core game logic that switches states and connects all other internal modules\"\"\"\n def __init__(self, debug=False):\n\n self.debug = debug\n\n ## Centers game window, needs to be before pygame.init()\n os.environ['SDL_VIDEO_CENTERED'] = '1'\n\n\n self.state = S_MENU\n self.fps = FPS\n self.paused = False\n self.all_player_names = []\n\n self.wait = 1500 #WAITING TIME AFTER FIRST PLAYER DIES, BEFORE MENU SHOWS UP. IN MILLISECONDS.\n\n self.keymap = {} #REGISTER KEPRESS CONSTANTLY\n self.keymap_singlepress = {} #REGISTER KEYPRESS ONE TIME\n self.events = {} #REGISTER EVENT\n\n #PYGAME INIT REQUIRED\n pygame.init()\n self.screen = pygame.display.set_mode(SCREEN_SIZE)\n pygame.display.set_caption(CAPTION)\n self.font = pygame.font.Font(\"fonts/04b.ttf\", 18)\n self.scorefont = pygame.font.Font(\"fonts/04b.ttf\", 42)\n self.keys = pygame.key.get_pressed()\n self.clock = pygame.time.Clock()\n\n self.register_eventhandler(pygame.QUIT, self.quit)\n self.register_key(pygame.K_ESCAPE, self.quit, singlepress = True)\n\n #self.menu = MainMenu(self)\n self.pregame_menu = False\n self.betweengame_menu = False\n self.aftergame_menu = False\n #Sound.sounds_init()\n #Sound.Sounds[\"menumusic\"].play()\n\n self.displaytime = False\n self.ammo = []\n\n self.maps = [\"grass\", \"sand\"]\n\n if self.debug:\n self.displaytime = True\n\n def run(self):\n \"\"\"The main game loop\"\"\"\n while True:\n\n \"\"\"-------------------------------MENU------------------------------------\"\"\"\n\n if self.state == S_MENU:\n self.menu.draw()\n\n self.keys = pygame.key.get_pressed()\n\n if self.state == S_MENU:\n for event in pygame.event.get():\n # Handle generic events\n for event_type, callback in self.events.items():\n if event.type == event_type:\n callback(event)\n\n # Handle keyboard events\n if event.type == pygame.KEYDOWN:\n for event_key in self.keymap_singlepress.keys():\n if event.key == event_key:\n self.keymap_singlepress[(event_key)](event)\n\n \"\"\"-------------------------------PREGAME MENU-----------------------------\"\"\"\n\n self.keys = pygame.key.get_pressed()\n\n if self.state == S_PREGAME:\n if self.pregame_menu == False:\n del(self.menu)\n self.menu = False\n self.pregame_menu = PreGameMenu(self)\n\n self.pregame_menu.draw()\n\n for event in pygame.event.get():\n # Handle generic events\n for event_type, callback in self.events.items():\n if event.type == event_type:\n callback(event)\n\n # Handle keyboard events\n if event.type == pygame.KEYDOWN:\n for event_key in self.keymap_singlepress.keys():\n if event.key == event_key:\n self.keymap_singlepress[(event_key)](event)\n\n \"\"\"-------------------------------GAME------------------------------------\"\"\"\n\n if self.state == S_GAME:\n if not self.paused:\n for event in pygame.event.get():\n for event_type, callback in self.events.items():\n if event.type == event_type:\n callback(event)\n\n if event.type == pygame.KEYDOWN:\n for event_key in self.keymap_singlepress.keys():\n if event.key == event_key:\n 
self.keymap_singlepress[(event_key)](event)\n\n for event_key in self.keymap.keys():\n if self.keys[event_key]:\n self.keymap[(event_key)]()\n\n else:\n pass\n\n self.map.draw()\n\n for bullet in self.ammo:\n bullet.update()\n bullet.draw()\n\n for player in self.agents:\n player.update()\n player.draw()\n\n for animation in Animation.List:\n animation.animate()\n animation.draw()\n\n if len(self.agents) < 2:\n if self.wait > 0:\n self.wait -= self.clock.get_time()\n else:\n self.wait = 1500\n if len(self.agents) == 1:\n self.stats.inform(self.agents[0].name, score = 1)\n logging.debug(self.stats.data)\n self.agents[0].dead = True\n else:\n print(\"draw\")\n if self.debug:\n print(str(self.stats.data[self.all_player_names[0]].get('score', 0)) + \" - \" + str(self.stats.data[self.all_player_names[1]].get('score', 0)))\n print('Distance: {}, Distance: {}'.format(self.stats.data[self.all_player_names[0]].get('move', '0'),\n self.stats.data[self.all_player_names[1]].get('move', '0')))\n print('Shots: {}, Shots: {}'.format(self.stats.data[self.all_player_names[0]].get('shots_fired', '0'),\n self.stats.data[self.all_player_names[1]].get('shots_fired', '0')))\n print('Stickybombs: {}, Stickybombs: {}'.format(self.stats.data[self.all_player_names[0]].get('stickybomb_fired', '0'),\n self.stats.data[self.all_player_names[1]].get('stickybomb_fired', '0')))\n\n self.agents = []\n self.ammo = []\n Animation.List = []\n\n self.betweengame_menu = False\n self.state = S_BETWEENGAME\n\n\n \"\"\"------------------------------BETWEEN-----------------------------------\"\"\"\n if self.state == S_BETWEENGAME:\n if self.betweengame_menu == False:\n del(self.menu)\n self.menu = False\n self.betweengame_menu = BetweenGameMenu(self)\n\n self.betweengame_menu.draw()\n\n self.keys = pygame.key.get_pressed()\n\n if self.state == S_BETWEENGAME:\n for event in pygame.event.get():\n for event_type, callback in self.events.items():\n if event.type == event_type:\n callback(event)\n\n\n if event.type == pygame.KEYDOWN:\n for event_key in self.keymap_singlepress.keys():\n if event.key == event_key:\n self.keymap_singlepress[(event_key)](event)\n\n \"\"\"-------------------------------AFTER---------------------------------------\"\"\"\n if self.state == S_AFTERGAME:\n if self.aftergame_menu == False:\n del(self.menu)\n self.menu = False\n self.aftergame_menu = AfterGameMenu(self)\n\n self.aftergame_menu.draw()\n\n self.keys = pygame.key.get_pressed()\n\n if self.state == S_AFTERGAME:\n for event in pygame.event.get():\n for event_type, callback in self.events.items():\n if event.type == event_type:\n callback(event)\n\n if event.type == pygame.KEYDOWN:\n for event_key in self.keymap_singlepress.keys():\n if event.key == event_key:\n self.keymap_singlepress[(event_key)](event)\n\n \"\"\"-------------------------------UPGRADES------------------------------------\"\"\"\n\n if self.state == S_UPGRADES:\n pygame.quit()\n sys.exit()\n\n \"\"\"-------------------------------ABOUT------------------------------------\"\"\"\n\n if self.state == S_ABOUT:\n self.menu = Menu()\n self.menu.about()\n\n \"\"\"-------------------------------SETTINGS------------------------------------\"\"\"\n\n if self.state == S_SETTINGS:\n self.menu = Menu()\n self.menu.settings()\n\n if self.displaytime:\n self.screen.blit(self.font.render(str(int(self.clock.get_fps())), True, (255,255,255)), (10,10))\n\n pygame.display.flip()\n self.clock.tick(self.fps)\n\n\n def quit(self, event):\n logging.info('Quitting!')\n pygame.quit()\n sys.exit()\n\n def 
start_game(self, map_type):\n self.agents = [Player(self, 'green', pygame.K_d, pygame.K_s, pygame.K_a, pygame.K_w, pygame.K_f, pygame.K_g, 100, 100, 180),\n Robot(self, 'purple', pygame.K_RIGHT, pygame.K_DOWN, pygame.K_LEFT, pygame.K_UP, pygame.K_k, pygame.K_l, 900, 600)]\n self.map = map.World(self, map_type)\n self.map.generate()\n self.stats = Stats(*self.agents)\n self.state = S_GAME\n\n def continue_game(self):\n self.agents = [Player(self, 'green', pygame.K_d, pygame.K_s, pygame.K_a, pygame.K_w, pygame.K_f, pygame.K_g, 100, 100),\n Player(self, 'purple', pygame.K_RIGHT, pygame.K_DOWN, pygame.K_LEFT, pygame.K_UP, pygame.K_k, pygame.K_l, 900, 600)]\n self.agents[0].name = self.all_player_names[0]\n self.agents[1].name = self.all_player_names[1]\n self.map = map.World(self, random.choice(self.maps))\n self.map.generate()\n self.state = S_GAME\n\n def start_pregame(self):\n self.pregame_menu = False # Make sure there's no old menu\n self.state = S_PREGAME\n\n def register_key(self, event_key, callback, singlepress = False):\n \"\"\"Binds keys to callback-functions\"\"\"\n if singlepress == False:\n self.keymap[(event_key)] = callback\n else:\n self.keymap_singlepress[(event_key)] = callback\n\n\n def register_eventhandler(self, event_type, callback):\n \"\"\"Binds events to callback-functions\"\"\"\n logging.debug('{}: Registering eventhandler ({}, {})'.format(self.__class__.__name__, event_type, callback))\n self.events[event_type] = callback\n\n def unregister_eventhandler(self, event_type, callback):\n value = self.events.get(event_type)\n if value is not None and value == callback:\n logging.debug('{}: Unregistering eventhandler ({}, {})'.format(self.__class__.__name__, event_type, callback))\n del(self.events[event_type])\n\nclass UnknownStatError(Exception):\n pass\n\nclass Stats():\n VALID_STATS = ('shots_fired', 'move', 'score', 'stickybomb_fired')\n\n def __init__(self, *args):\n self.data = {}\n\n for player in args:\n self.data[player] = {}\n\n\n def inform(self, player, **kwargs):\n # Increments stat with given number.\n #\n # Example:\n # inform(player, score = 7)\n #\n # Will increase score stat with 7.\n #\n\n if not (len(kwargs) == 1 and list(kwargs.keys())[0] in Stats.VALID_STATS):\n raise UnknownStatError('Unknown keyword argument to stat.')\n\n # Fetch the dictionary associated with player, create a new if it doesn't exist.\n stats = self.data.get(player)\n if stats == None:\n self.data[player] = {}\n stats = self.data[player]\n\n key, value = list(kwargs.keys())[0], list(kwargs.values())[0]\n\n # Register the stat\n stats[key] = value + stats.get(key, 0)\n"
},
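`Stats.inform()` above accepts exactly one whitelisted keyword argument and adds its value to a per-player running total. The same accumulation idiom in isolation (a standalone re-implementation for illustration, not an import of the game code):

```python
VALID_STATS = ('shots_fired', 'move', 'score', 'stickybomb_fired')

def inform(data, player, **kwargs):
    """Increment a single whitelisted stat for `player` inside `data`."""
    if not (len(kwargs) == 1 and next(iter(kwargs)) in VALID_STATS):
        raise ValueError('Unknown keyword argument to stat.')
    stats = data.setdefault(player, {})   # create the player's dict on first use
    (key, value), = kwargs.items()        # unpack the single keyword pair
    stats[key] = stats.get(key, 0) + value

data = {}
inform(data, 'human', score=7)
inform(data, 'human', score=1)
print(data)  # {'human': {'score': 8}}
```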
{
"alpha_fraction": 0.5469411611557007,
"alphanum_fraction": 0.5581836700439453,
"avg_line_length": 29.990949630737305,
"blob_id": "5d94917f6b5a579c71cc3f7b0315a2f5f514cd9e",
"content_id": "83131f6c39366822e67e83231b6b7dfecb9edb0f",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 6849,
"license_type": "permissive",
"max_line_length": 88,
"num_lines": 221,
"path": "/wormwars/bots.py",
"repo_name": "edgehub-cloud/AI-games",
"src_encoding": "UTF-8",
"text": "from __future__ import print_function\nimport random\nimport consts\nimport itertools\nimport heapq\n\nclass GenericWormBot:\n def __init__(self, bot_id, initial_position=(0,0)):\n self.bot_id = bot_id\n\n x,y = initial_position\n self.body_parts = [{'x':x, 'y':y}]\n self.FAILED = False\n self.FAILURE_REASON = \"Hasn't Failed\"\n self.last_history = []\n\n\n @classmethod\n def new_instance(cls, bot_id, starting_position):\n bot_id = \"<{}>.{}\".format(cls.__name__, bot_id)\n return cls(bot_id, starting_position)\n\n def act(self, game, bots):\n t = self.think(game, bots)\n print(t)\n move_x, move_y = t\n new_head_x = self.body_parts[consts.HEAD]['x'] + move_x\n new_head_y = self.body_parts[consts.HEAD]['y'] + move_y\n\n self.body_parts.insert(consts.HEAD, {'x':new_head_x, 'y':new_head_y})\n\n def delete_tail(self):\n cell = self.body_parts[-1]\n del self.body_parts[-1]\n return cell\n\n def think(self, game, bots):\n raise NotImplementedError\n\n @property\n def head(self):\n return self.body_parts[consts.HEAD]\n\n def _single_collision(self, part):\n head = self.head\n return head['x'] == part['x'] and head['y'] == part['y']\n\n def _collision_helper(self, body):\n return any([self._single_collision(part) for part in body])\n\n def self_collision(self):\n return self._collision_helper(self.body_parts[1:])\n\n def other_collision(self, bots):\n for bot in bots:\n if bot.bot_id == self.bot_id:\n continue\n if self._collision_helper(bot.body_parts):\n return True\n return False\n\n def failed(self, reason):\n self.FAILED = True\n self.FAILURE_REASON = reason\n\n def bad_move(self, new_coords, game=None):\n if game:\n if new_coords[0] < 0 or new_coords[1] < 0:\n return True\n if new_coords[0] > game.right_edge or new_coords[1] > game.bottom_edge:\n return True\n if len(self.body_parts) > 1:\n for part in self.body_parts[1:]:\n if part['x'] == new_coords[0] and part['y'] == new_coords[1]:\n return True\n\n return False\n\nclass PriorityQueue:\n def __init__(self):\n self.items = []\n\n def push(self, item):\n heapq.heappush(self.items, item)\n\n def push_many(self, items):\n for item in items:\n self.push(item)\n\n def pop(self):\n return heapq.heappop(self.items)\n\n def not_empty(self):\n return len(self.items) > 0\n\nclass AwesomeBot(GenericWormBot):\n def calc_dist(self, coord1, coord2):\n ## coord1 = (x1,y1)\n ## coord2 = (x2,y2)\n xsq = (coord1[0]-coord2[0])**2\n ysq = (coord1[1]-coord2[1])**2\n return (xsq + ysq)**0.5\n\n def apply_move(self, move, xy):\n ## move = (-1,0)\n ## xy = (curx, cury)\n new_coord = (move[0]+xy[0], move[1]+xy[1])\n return new_coord\n\n\n def apply_moves(self, xy, foodxy, game):\n all_moves = []\n for move_name, move_value in consts.MOVES.items():\n new_coord = self.apply_move(move_value, xy)\n move_dist = self.calc_dist(new_coord, foodxy)\n if not self.bad_move(new_coord, game):\n all_moves.append((move_dist, new_coord, move_value))\n return all_moves\n\n def apply_moves_bare(self, xy, game):\n all_moves = []\n for move_name, move_value in consts.MOVES.items():\n new_coord = self.apply_move(move_value, xy)\n if not self.bad_move(new_coord, game):\n all_moves.append((new_coord, move_value))\n return all_moves\n\n def think(self, game, bots):\n n = len(self.last_history)\n # this version makes it crazy\n #if n > 0 and n % 20 != 0:\n if n > 0:\n return self.last_history.pop()\n head = self.body_parts[consts.HEAD]\n curxy = (head['x'], head['y'])\n move = astar(game, self)\n return move\n\ndef astar(game, bot):\n foodxy = (game.food['x'], game.food['y'])\n 
head = bot.body_parts[consts.HEAD]\n curxy = (head['x'], head['y'])\n starting_point = (0, 0,curxy, None, 0)\n\n frontier = PriorityQueue()\n frontier.push(starting_point)\n came_from = dict()\n graveyard = set()\n best_move = None\n\n\n while frontier.not_empty():\n astarval, move_dist, next_move, move_value, move_num = frontier.pop()\n if next_move == foodxy:\n best_move = (astarval, move_dist, next_move, move_value, move_num)\n break\n moves = bot.apply_moves(next_move, foodxy, game)\n for move in moves:\n move = move + (move_num+1, )\n move_coord = move[1]\n next_astarval = move[0] + move[-1]\n move = (next_astarval, ) + move\n if move_coord not in graveyard:\n came_from[move] = (astarval, move_dist, next_move, move_value, move_num)\n frontier.push(move)\n graveyard.add(move_coord)\n\n if best_move is None:\n return astar_dontdie(game, bot)\n\n\n bot.last_version = \"normal\"\n #astarval, move_dist, next_move, move_value, move_num = \\\n get_best(bot, best_move, came_from, pos=3)\n return bot.last_history.pop()\n\ndef astar_dontdie(game, bot):\n print('in dont die')\n head = bot.body_parts[consts.HEAD]\n curxy = (head['x'], head['y'])\n butt = bot.body_parts[-1]\n buttxy = (butt['x'], butt['y'])\n starting_point = (0, curxy, None)\n\n frontier = PriorityQueue()\n frontier.push(starting_point)\n came_from = dict()\n graveyard = set()\n best_move = None\n\n\n while frontier.not_empty():\n move_num, next_move, move_value = frontier.pop()\n moves = bot.apply_moves_bare(next_move, game)\n for move in moves:\n move = (move_num-1, move[0], move[1])\n move_coord = move[1]\n if move_coord not in graveyard:\n came_from[move] = (move_num, next_move, move_value)\n frontier.push(move)\n graveyard.add(move_coord)\n if best_move == None:\n print(\"this is none\")\n if len(came_from) == 0:\n print('we are screwed and dead')\n return list(consts.MOVES.values())[1]\n best_move = min(came_from.items(), key=lambda x: x[0][0])[1]\n\n bot.last_version = \"dontdie\"\n get_best(bot, best_move, came_from, -1, 2)\n return bot.last_history.pop()\n\ndef get_best(bot, best_move, came_from, mod=1, pos=1):\n bot.last_history = []\n justincase = 0\n print(pos)\n bot.last_history.append(best_move[pos])\n while mod*came_from[best_move][0] > 0 and justincase < 10**4:\n best_move = came_from[best_move]\n justincase += 1\n bot.last_history.append(best_move[pos])\n"
},
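The A* search in bots.py above relies on `heapq` ordering tuples by their first element, so the cheapest frontier entry pops first. The trick in miniature:

```python
import heapq

frontier = []
# (priority, payload) tuples: heapq pops the smallest priority first.
heapq.heappush(frontier, (3.0, 'slow route'))
heapq.heappush(frontier, (1.5, 'short route'))
heapq.heappush(frontier, (2.2, 'medium route'))

while frontier:
    cost, route = heapq.heappop(frontier)
    print(cost, route)
# 1.5 short route / 2.2 medium route / 3.0 slow route
```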
{
"alpha_fraction": 0.7985074520111084,
"alphanum_fraction": 0.7985074520111084,
"avg_line_length": 25.600000381469727,
"blob_id": "03461c32db20fdce0626c4bfcd5e3c33b93661c8",
"content_id": "7c1e2da6e5fe2d76d5bcc761a37810d995a4135e",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 134,
"license_type": "permissive",
"max_line_length": 83,
"num_lines": 5,
"path": "/pacman/README.md",
"repo_name": "edgehub-cloud/AI-games",
"src_encoding": "UTF-8",
"text": "# README\n\nThese files come from ai.berkeley.edu\n\nI have modified them to suit the purposes of my particular educational environment. \n"
},
{
"alpha_fraction": 0.4442906677722931,
"alphanum_fraction": 0.4906574487686157,
"avg_line_length": 22.30645179748535,
"blob_id": "f3c967db5ed1dbb8965a618fd66070ca22568990",
"content_id": "00c49d62b43592fde8e7a92b39cdb39a17fe3cbf",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1445,
"license_type": "permissive",
"max_line_length": 73,
"num_lines": 62,
"path": "/tictactoe/utils.py",
"repo_name": "edgehub-cloud/AI-games",
"src_encoding": "UTF-8",
"text": "import logging\n\nclass consts:\n \"\"\"\n SYNTACTIC SUGAR\n ===============\n \"\"\"\n\n EMPTY = \"-\"\n X = \"X\"\n O = \"O\"\n LOGGER = logging.getLogger(\"tictactoe\")\n # R G B\n WHITE = (255, 255, 255)\n BLACK = ( 0, 0, 0)\n RED = (255, 0, 0)\n GREEN = ( 0, 255, 0)\n DARKGREEN = ( 0, 155, 0)\n BLUE = ( 0, 0, 255)\n DARKBLUE = ( 0, 0, 155)\n PURPLE = (200, 0, 200)\n DARKPURPLE = (115, 0, 115)\n DARKGRAY = ( 40, 40, 40)\n BGCOLOR = WHITE\n\n \"\"\"\n PYGAME SETTINGS\n ===============\n \"\"\"\n\n USE_PYGAME = False\n WINDOWWIDTH = 500\n WINDOWHEIGHT = 300\n FPS = 5\n\n \"\"\"\n GAME SETTINGS\n =============\n \"\"\"\n\n # this will randomize the order, so that X doesn't always start first\n MAKE_IT_FAIR = False\n\ndef pretty_board(spaces):\n return \"\\n\".join(\"|\".join(row) for row in spaces)\n\ndef debug(use_console=True, filename=None):\n if use_console:\n ch = logging.StreamHandler()\n else:\n assert filename is not None\n ch = logging.FileHandler(filename)\n\n ch.setLevel(logging.DEBUG)\n consts.LOGGER.addHandler(ch)\n consts.LOGGER.setLevel(logging.DEBUG)\n\ndef other_player(player):\n return consts.X if player == consts.O else consts.O\n\ndef all_same(vec):\n return vec[0] == vec[1] == vec[2] != consts.EMPTY\n"
},
{
"alpha_fraction": 0.5149760246276855,
"alphanum_fraction": 0.5279552936553955,
"avg_line_length": 28.45294189453125,
"blob_id": "8276de4c6b3d4efab8d357fae8b87ae7ef266a5f",
"content_id": "32d941b3da5bcfaea4ea5663a996693c3ec8c817",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5008,
"license_type": "permissive",
"max_line_length": 84,
"num_lines": 170,
"path": "/tictactoe/main.py",
"repo_name": "edgehub-cloud/AI-games",
"src_encoding": "UTF-8",
"text": "\nimport sys\nimport bots\nimport visualizers\nimport utils\nfrom utils import consts\n\nclass TicTacToe:\n def __init__(self, p1, p2):\n self.game = None\n self.p1 = p1\n self.p2 = p2\n\n def run(self, num_games=100):\n #### set everything up\n stats = {'draws': 0, 'total_games': num_games}\n stats[self.p1] = {'wins': 0, 'losses': 0}\n stats[self.p2] = {'wins': 0, 'losses': 0}\n consts.LOGGER.debug(\"Game initialized to run {} times\".format(num_games))\n\n\n self.game = GameBoard.NewBoard()\n self.game.turn_on_screen()\n\n for game_index in range(num_games):\n consts.LOGGER.debug(\"New game beginning\")\n\n\n first = self.p1\n other = self.p2\n\n if consts.MAKE_IT_FAIR:\n if game_index % 2 == 1:\n first = self.p2\n other = self.p1\n\n #### this the actual game loop\n while self.game.no_winner():\n first.move(self.game)\n first, other = other, first\n\n #### game has ended\n if not self.game.draw():\n consts.LOGGER.debug(\"{} has won\".format(self.game.winner))\n stats[self.game.winner]['wins'] += 1\n stats[self.game.loser]['losses'] += 1\n else:\n consts.LOGGER.debug(\"Game is a graw\")\n stats['draws']+=1\n\n #### start new game\n self.game = GameBoard.NewBoard(screen=self.game.screen)\n\n if game_index % 100 == 0 and consts.USE_PYGAME:\n print(self.pretty_stats(stats))\n\n #### done running simulations\n print(self.pretty_stats(stats))\n\n\n def pretty_stats(self, stats):\n pretty_string = \"\"\"\n After {} games, the results are:\n {} draws\n {} has won {} times\n {} has won {} times\n \"\"\".format(stats['total_games'], stats['draws'],\n consts.X, stats[consts.X]['wins'],\n consts.O, stats[consts.O]['wins'])\n return pretty_string\n\nclass GameBoard:\n def __init__(self, spaces):\n self.spaces = spaces\n self.screen = None\n self.winner = None\n self.loser = None\n\n def turn_on_screen(self):\n if consts.USE_PYGAME:\n self.screen = visualizers.PygameScreen()\n\n @classmethod\n def NewBoard(cls, spaces = None, screen=None):\n \"\"\" reset the board.\n\n This uses row-major indexing.\n In other words, the first index is row, the second is column.\n \"\"\"\n\n spaces = spaces or [[consts.EMPTY for _ in range(3)] for _ in range(3)]\n board = cls(spaces)\n if screen:\n board.screen = screen\n screen.reset()\n return board\n\n\n\n def update(self, move, player):\n i, j = move\n self.spaces[i][j] = player.symbol\n if self.screen:\n self.screen.draw(move, player)\n\n\n\n def draw(self):\n game_over = all([cell!=consts.EMPTY for row in self.spaces for cell in row])\n no_winner = self.winner is None\n return game_over and no_winner\n\n def set_winner(self, winner):\n self.winner = winner\n self.loser = utils.other_player(self.winner)\n\n def vert_winner(self):\n spaces_by_col = [[self.spaces[i][j] for i in range(3)] for j in range(3)]\n for col in spaces_by_col:\n if utils.all_same(col):\n self.set_winner(col[0])\n return True\n return False\n\n def row_winner(self):\n for row in self.spaces:\n if utils.all_same(row):\n self.set_winner(row[0])\n return True\n return False\n\n def diag_winner(self):\n diag1 = [(0,0), (1,1), (2,2)]\n diag2 = [(0,2), (1,1), (2,0)]\n diags = [diag1, diag2]\n for diag in diags:\n if utils.all_same([self.spaces[i][j] for i,j in diag]):\n self.set_winner(self.spaces[1][1])\n return True\n return False\n\n def no_winner(self):\n if self.vert_winner() or self.row_winner() or self.diag_winner():\n consts.LOGGER.debug(\"Found a winner\")\n consts.LOGGER.debug(utils.pretty_board(self.spaces))\n return False\n elif self.draw():\n consts.LOGGER.debug(\"Draw 
game\")\n return False\n consts.LOGGER.debug(\"No winner yet\")\n consts.LOGGER.debug(utils.pretty_board(self.spaces))\n return True\n\n def hypothetical(self, move, player):\n new_spaces = deepcopy(self.spaces)\n newgame = GameBoard.NewBoard(new_spaces)\n newgame.update(move, player)\n return newgame\n\n\ndef test():\n p1 = bots.RandomBot(consts.X)\n p2 = bots.RandomBot(consts.O)\n game = TicTacToe(p1,p2)\n game.run(100000)\n\nif __name__ == \"__main__\":\n #utils.debug()\n if len(sys.argv) > 1:\n consts.FPS = int(sys.argv[1])\n test()\n"
}
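`vert_winner()` above transposes the row-major board with a nested comprehension, then tests each column with `utils.all_same`. That step on its own (with a local copy of `all_same` so the snippet runs standalone):

```python
EMPTY = "-"

def all_same(vec):
    # A line wins only if all three cells match and none is empty.
    return vec[0] == vec[1] == vec[2] != EMPTY

spaces = [["X", "O", "-"],
          ["X", "O", "-"],
          ["X", "-", "-"]]

# Same comprehension as vert_winner(): flip row-major into column-major.
columns = [[spaces[i][j] for i in range(3)] for j in range(3)]
print([all_same(col) for col in columns])  # [True, False, False]
```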
] | 21 |
revolc/euler | https://github.com/revolc/euler | 901a70ad85458cf9da0c832bff3581dfe6d6deb0 | 1afebe4852f39a83cbb77d79814bbd941e40d280 | 62d23376158246693f4ddc3a275edfd225847429 | refs/heads/master | 2021-01-01T18:01:51.719680 | 2012-12-13T12:05:01 | 2012-12-13T12:05:01 | null | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.5417867302894592,
"alphanum_fraction": 0.5878962278366089,
"avg_line_length": 20.6875,
"blob_id": "0c8339dbb7ddbd9a0fd3cce0759415129d0e67f2",
"content_id": "6c18b49bf51f3d82e7d0eb72e73906a11494c300",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 347,
"license_type": "no_license",
"max_line_length": 39,
"num_lines": 16,
"path": "/problem4.py",
"repo_name": "revolc/euler",
"src_encoding": "UTF-8",
"text": "def is_palindromic(str):\n\tfor i in range(0, len(str)/2):\n\t\tif (str[i] != str[len(str) - 1 - i]):\n\t\t\treturn False\n\treturn True\n\ndef solution():\n\tresult=0\n\tfor i in range (100, 1000):\n\t\tfor j in range (i, 1000):\n\t\t\ttmp = i * j\n\t\t\tif is_palindromic(repr(tmp)):\n\t\t\t\tif result == 0 or tmp > result:\n\t\t\t\t\t#print i,j, tmp\n\t\t\t\t\tresult = i*j\n\tprint result\n"
},
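The `is_palindromic()` loop above compares characters from both ends inward; an equivalent Python shorthand compares the string to its reverse slice. A quick check against 9009 = 91 × 99, the example from the problem 4 statement:

```python
def is_palindromic(s):
    # Reverse-slice comparison, equivalent to the two-pointer loop above.
    return s == s[::-1]

print(is_palindromic("9009"))  # True  (91 * 99)
print(is_palindromic("9019"))  # False
```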
{
"alpha_fraction": 0.49235475063323975,
"alphanum_fraction": 0.5321100950241089,
"avg_line_length": 13.217391014099121,
"blob_id": "49a7ad67bf9c17123fa1aafe05df4335e59ca005",
"content_id": "0a03496a09eb7aa4ec8a0cc5000b47f26ca03239",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 327,
"license_type": "no_license",
"max_line_length": 25,
"num_lines": 23,
"path": "/problem7.py",
"repo_name": "revolc/euler",
"src_encoding": "UTF-8",
"text": "import math\ndef is_prime(n):\n\tif (n == 2):\n\t\treturn True\n\ti = 2\n\twhile i <= math.sqrt(n):\n\t\tif n % i == 0:\n\t\t\treturn False\n\t\ti += 1\n\treturn True\n\ndef solve():\n\torder = 0\n\tnum = 2\n\tN = 10001\n\twhile order < N:\n\t\t#print num, order\n\t\tif is_prime(num):\n\t\t\torder += 1\n\t\t\tif (order == N):\n\t\t\t\tprint N,\"th: \", num\n\t\t\t\tbreak\n\t\tnum += 1\n"
},
{
"alpha_fraction": 0.7894737124443054,
"alphanum_fraction": 0.7894737124443054,
"avg_line_length": 55,
"blob_id": "1fe3d99b7a7e49af3cda2ec02367f0b444f2b775",
"content_id": "d65e46dfb063b6a7099e08436b9cf851d48001db",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 57,
"license_type": "no_license",
"max_line_length": 55,
"num_lines": 1,
"path": "/README.md",
"repo_name": "revolc/euler",
"src_encoding": "UTF-8",
"text": "\nThis is my personal repo for problems in Project Euler.\n"
}
] | 3 |
gminator/weather | https://github.com/gminator/weather | 2af41b4c4232d3554e31092a0bcf8a83d640b6cc | 3ffaf5b0a171de93b4e6d1f894a9fc0a20e5ed7a | 491ad9ae1d27499518de2850748cbe0840d9fb1f | refs/heads/master | 2023-01-01T03:52:22.890713 | 2020-10-23T06:14:56 | 2020-10-23T06:14:56 | 295,025,053 | 0 | 1 | null | null | null | null | null | [
{
"alpha_fraction": 0.6316070556640625,
"alphanum_fraction": 0.6406834125518799,
"avg_line_length": 22.566038131713867,
"blob_id": "ffc61e8358f3392918033d0d1638a852f38c686b",
"content_id": "9acfb7f925b4a4dea4057084663ff8d6ce9ae0d5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3746,
"license_type": "no_license",
"max_line_length": 116,
"num_lines": 159,
"path": "/openweather/models.py",
"repo_name": "gminator/weather",
"src_encoding": "UTF-8",
"text": "from django.db import models\nimport requests\nfrom datetime import datetime, timedelta\nimport math\n\nclass Day(object): \n\n\tdef unit(self,temp):\n\t\treturn round({\n\t\t\t\"c\" : temp-273.15,\n\t\t\t\"k\" : temp,\n\t\t\t\"f\" : (((temp-274.15)/5) * 9) + 32,\n\t\t}[self.units], 2)\n\n\tdef __init__(self, **kwargs):\n\t\tself.lat = kwargs[\"lat\"] if \"lat\" in kwargs else None\n\t\tself.lng = kwargs[\"lon\"] if \"lon\" in kwargs else None\n\t\tself.tz = kwargs[\"timezone\"] if \"timezone\" in kwargs else None\n\t\tself.hourly = kwargs[\"hourly\"] if \"hourly\" in kwargs else []\n\t\tself.units = kwargs[\"units\"] if \"units\" in kwargs else \"c\"\n\n\n\t\tself.tmps = [self.unit(hour[\"temp\"]) if \"temp\" in hour else None for hour in kwargs[\"hourly\"]]\n\t\tself.humids = [hour[\"humidity\"] if \"humidity\" in hour else None for hour in kwargs[\"hourly\"]]\n\t\tself.winds = [round(hour[\"wind_speed\"] /1.60934,2) if \"wind_speed\" in hour else None for hour in kwargs[\"hourly\"]]\n\n\t\n\t@property\n\tdef median_humidity(self,):\n\t\treturn self.median(self.humids)\n\n\t@property\n\tdef min_humidity(self,):\n\t\treturn self.min(self.humids)\n\n\t@property\n\tdef max_humidity(self,):\n\t\treturn self.max(self.humids)\n\n\n\t@property\n\tdef avg_humidity(self,):\n\t\treturn self.average(self.humids)\n\n\t@property\n\tdef avg_tmp(self,):\n\t\treturn self.average(self.tmps)\n\n\t@property\n\tdef median_tmp(self,):\n\t\treturn self.median(self.tmps)\n\n\t@property\n\tdef min_tmp(self,):\n\t\treturn self.min(self.tmps)\n\n\t@property\n\tdef max_tmp(self,):\n\t\treturn self.max(self.tmps)\n\n\tdef min(self,data):\n\t\treturn min(data)\n\n\tdef max(self,data):\n\t\treturn max(data)\n\n\tdef median(self,data):\n\t\tdata.sort()\n\t\tl = len(data) \n\t\t#Event Numbers\n\t\tif l % 2 == 0:\n\t\t\tf = int((l/2) - 1)\n\t\t\treturn round((data[f] + data[f + 1])/2,2)\n\n\t\ti = l//2\n\t\treturn data[i]\n\n\tdef average(sefl,data):\n\t\treturn round(sum(data)/len(data),2)\n\n\tdef serialize(self,):\n\t\treturn {\n\t\t\t\"temp\" : {\n\t\t\t\t\"min\" : self.min_tmp,\n\t\t\t\t\"max\" : self.max_tmp,\n\t\t\t\t\"median\" : self.median_tmp,\n\t\t\t\t\"avg\" : self.avg_tmp,\n\t\t\t},\n\t\t\t\"humidity\" : {\n\t\t\t\t\"min\" : self.min_humidity,\n\t\t\t\t\"max\" : self.max_humidity,\n\t\t\t\t\"median\" : self.median_humidity,\n\t\t\t\t\"avg\" : self.avg_humidity,\n\t\t\t},\n\t\t\t\"graph\" : self.graph()\n\t\t}\n\n\tdef graph(self,):\n\t\tdata = {}\n\t\tfor row in self.hourly:\n\t\t\tkey = datetime.fromtimestamp(row[\"dt\"]).strftime(\"%Y-%m-%d %H:%M\")\n\t\t\tif key not in data:\n\t\t\t\tdata[key] = [self.unit(row[\"temp\"]), row[\"humidity\"]]\n\t\tgraph = [[\"Date\", \"Temp\", \"Humidity\"]]\n\n\t\tfor date,values in data.items():\n\t\t\tgraph.append([date] + values) \n\t\treturn graph\n\n\n\n# Create your models here.\nclass OpenWeather(object):\n\tdef __init__(self,):\n\t\tself.uri = \"https://community-open-weather-map.p.rapidapi.com/\"\n\t\tself.key = \"06eafc15dbmsh348f712812a3bf8p136ec0jsn8b48402aa070\"\n\n\tdef headers(self,):\n\t\treturn {\n\t\t\t#\"x-rapidapi-host\": \"community-open-weather-map.p.rapidapi.com\",\n\t\t\t\"x-rapidapi-key\": self.key,\n\t\t\t#\"useQueryString\": 'true'\n\t\t}\n\n\tdef weather_on(self,**kwargs): \n\t\t\"\"\"\n\t\tPast Data\n\t\tGet pass Weather Data \n\t\tUp to 5 Days\n\n\t\t@param dt int Unix Time Stamp \n\t\t\"\"\"\n\t\tcurrent_date = datetime.now()\n\n\t\tif \"stub\" in kwargs:\n\t\t\tcurrent_date = kwargs[\"stub\"] \n\n\t\tlimit = current_date - 
timedelta(days=5) \n\t\tif kwargs[\"dt\"] < limit.timestamp():\n\t\t\traise BadDateException(\"Your date exceeds the the 5 day limit\")\n\t\t\t\n\t\tif \"dt\" not in kwargs or kwargs[\"dt\"] == None: raise BadRequest(\"Please set a time\")\n\t\tif \"lat\" not in kwargs or kwargs[\"lat\"] == None: raise BadRequest(\"Please set a lattitude\")\n\t\tif \"lon\" not in kwargs or kwargs[\"lon\"] == None: raise BadRequest(\"Please set a longtitude\")\n\n\t\t\n\n\t\t#raise Exception(kwargs)\n\t\tresponse = requests.request(\"GET\", self.uri + \"onecall/timemachine\", \n\t\t\t\t\t\t\t\t\theaders=self.headers(), \n\t\t\t\t\t\t\t\t\tparams=kwargs)\n\n\t\treturn Day(units=kwargs[\"units\"], **response.json())\n\n\n\n\nclass BadDateException(Exception): pass\nclass BadRequest(Exception): pass"
},
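`Day.unit()` above dispatches Kelvin readings through a dict of conversions (note that 0 °C is 273.15 K, which the Fahrenheit branch must also use). The conversions as a standalone function:

```python
def unit(temp_kelvin, units="c"):
    """Convert a Kelvin reading to the requested scale, rounded to 2 places."""
    return round({
        "c": temp_kelvin - 273.15,
        "k": temp_kelvin,
        "f": (temp_kelvin - 273.15) * 9 / 5 + 32,
    }[units], 2)

print(unit(273.15, "c"))  # 0.0  (freezing point)
print(unit(273.15, "f"))  # 32.0
print(unit(300, "k"))     # 300
```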
{
"alpha_fraction": 0.728122889995575,
"alphanum_fraction": 0.7501670122146606,
"avg_line_length": 34.595237731933594,
"blob_id": "f6c74e3b1cb0f91cee083044a34b82bb08325ade",
"content_id": "23ba049d6a8cf408c439c61ab8ec726d011080be",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1499,
"license_type": "no_license",
"max_line_length": 279,
"num_lines": 42,
"path": "/README.md",
"repo_name": "gminator/weather",
"src_encoding": "UTF-8",
"text": "# Weather App\nA basic weather app that returns Min, Max, Median, and Average for a Given Day (Up to 5 Days In The Past). \n\nIt backs off the OpenWeatherMaps API and makes use of Python3 with Django & Django-rest-framework. \n\nI did not make use of any of the Auth Backends on the FrontEnd or API for ease of use.\n\n# Front End \n\nIt makes use of Vali Admin for the Front-end Libs, this package gives out of the box bootstrap and mobile responsiveness support. The application is hosted on an EC2 instance running Gunicorn and Nginx. It has Cloudflare in front of it for caching, SSL, and additional security. \n\nThe front end can be found at: \n\n```\nhttps://weather.statnav.co.za/weather/\n```\n\n# Rest API \n\nThe API Can be found here, I did not make use of the serializers and ModelViewSets since I’m not writing anything into a DB.\n\nTha API Accepts a date (up to 5 days in the past), a Lat, Lng, and a unit of measurement for the Temperature. \n\nK (Kelvin)\nC (Celsius)\nF (Fahrenheit)\n\n```\ncurl -X GET \\\n 'https://weather.statnav.co.za/api/weather/?unit=k&date=2020%2F09%2F12&lat=-33.8767921&lng=18.5311788'\n ```\n \nThis API is also consumed by the FrontEnd, it will return the ranges as well data suitable for plotting on Google Charts. \n\n# Unit Tests\n\nThe application supports unit tests for basic scenarios for the calculation of Min, Max, Median & Average. \nIt also tests the API Integration through the use of python Mock Framework \n\n# Hosting\n\nHosted on EC2, caching & ssl fullfiled by Cloudflare \n\n"
},
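The curl example above translates directly to Python. This sketch assumes only the endpoint, query parameters, and `temp` response key documented in this repo, and that the service is still reachable:

```python
import requests

params = {
    "unit": "k",
    "date": "2020/09/12",
    "lat": "-33.8767921",
    "lng": "18.5311788",
}
resp = requests.get("https://weather.statnav.co.za/api/weather/", params=params)
resp.raise_for_status()
# Per Day.serialize(), the payload carries "temp", "humidity", and "graph".
print(resp.json()["temp"])  # min/max/median/avg for the day
```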
{
"alpha_fraction": 0.717825710773468,
"alphanum_fraction": 0.7250199913978577,
"avg_line_length": 45.14814758300781,
"blob_id": "46b94738b7ef3e05b40d5f8f2bb1b44054979f12",
"content_id": "9ed375b51346503edc7de0f7641cac8d1bbdf001",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1251,
"license_type": "no_license",
"max_line_length": 111,
"num_lines": 27,
"path": "/openweather/api.py",
"repo_name": "gminator/weather",
"src_encoding": "UTF-8",
"text": "from django.shortcuts import render\nfrom rest_framework.authentication import SessionAuthentication, BasicAuthentication, TokenAuthentication \nfrom rest_framework import viewsets\nfrom rest_framework.permissions import IsAuthenticated\nfrom rest_framework.response import Response\nfrom openweather.models import OpenWeather, BadRequest, BadDateException\nfrom datetime import datetime\nfrom rest_framework import status\n\nclass WeatherViewSet(viewsets.ViewSet): \n def list(self, request):\n\n \tweather = OpenWeather()\n \ttry: \n \t\tday = weather.weather_on(\n\t\t\t\tdt=int(datetime.strptime(request.GET[\"date\"], \"%Y/%m/%d\").timestamp()) if \"date\" in request.GET else None, \n\t\t\t\tlat=request.GET[\"lat\"] if \"lat\" in request.GET else None,\n\t \t\tlon=request.GET[\"lng\"] if \"lng\" in request.GET else None,\n\t \t\tunits=request.GET[\"unit\"] if \"unit\" in request.GET else \"c\"\n\t\t\t)\n \texcept BadRequest as e:\n \t\treturn Response({\"error\" : str(e)}, status=status.HTTP_400_BAD_REQUEST) \n \texcept BadDateException as e:\n \t\treturn Response({\"error\" : str(e)}, status=status.HTTP_400_BAD_REQUEST)\n \texcept Exception as e:\n \t\treturn Response({\"error\" : str(e)}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)\n \treturn Response(day.serialize())\n \t"
},
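The viewset above turns the `date` query parameter into the unix timestamp `weather_on` expects via `strptime(...).timestamp()`. The same conversion in isolation; note the result is interpreted in the server's local timezone:

```
from datetime import datetime

# "YYYY/MM/DD" -> unix timestamp, as the viewset does before calling weather_on.
dt = int(datetime.strptime("2020/09/12", "%Y/%m/%d").timestamp())
print(dt)  # midnight of that day, in the server's local timezone
```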
{
"alpha_fraction": 0.821052610874176,
"alphanum_fraction": 0.821052610874176,
"avg_line_length": 36.79999923706055,
"blob_id": "6b3d4fbc60ee796cedc5059b32239b69900aec24",
"content_id": "d3f7b1c71726ee5bbb4c50a2b754c7a5e13e6618",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 190,
"license_type": "no_license",
"max_line_length": 82,
"num_lines": 5,
"path": "/openweather/views.py",
"repo_name": "gminator/weather",
"src_encoding": "UTF-8",
"text": "from django.shortcuts import render\nfrom django.views.generic import TemplateView, ListView,ListView, DetailView,View \n\nclass OpenWeatherView(TemplateView):\n\ttemplate_name = \"weather.html\"\n\n"
},
{
"alpha_fraction": 0.5029729604721069,
"alphanum_fraction": 0.6229729652404785,
"avg_line_length": 31.752212524414062,
"blob_id": "5a7018afadcb819662c23837008faf6a20549066",
"content_id": "500852dda2cd1f1cf8f4f533fe02d279684ac724",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3700,
"license_type": "no_license",
"max_line_length": 94,
"num_lines": 113,
"path": "/openweather/tests.py",
"repo_name": "gminator/weather",
"src_encoding": "UTF-8",
"text": "from django.test import TestCase\nfrom openweather.models import OpenWeather,BadDateException, Day\nfrom unittest.mock import patch\nfrom datetime import datetime\n\n# Create your tests here.\nclass OpenWeatherTests(TestCase):\n\n\tdef test_test_median_humidity(self,):\n\n\t\tday = Day(hourly=[\n\t\t\t{\"temp\": 274.15,\"humidity\": 10, \"wind_speed\" : 1.60934}, \n\t\t\t{\"temp\": 275.15,\"humidity\": 50, \"wind_speed\" : 3.21868},\n\t\t\t {\"temp\": 276.15, \"humidity\": 100, \"wind_speed\": 4.82802}\n\t\t]) \n\t\tself.assertEquals(day.median_humidity, 50) \n\n\tdef test_test_max_humidity(self,):\n\t\tday = Day(hourly=[\n\t\t\t{\"temp\": 274.15,\"humidity\": 10, \"wind_speed\" : 1.60934}, \n\t\t\t{\"temp\": 275.15,\"humidity\": 50, \"wind_speed\" : 3.21868},\n\t\t\t {\"temp\": 276.15, \"humidity\": 100, \"wind_speed\": 4.82802}\n\t\t])\n\n\t\tself.assertEquals(day.max_humidity, 100) \n\n\tdef test_test_min_humidity(self,):\n\t\tday = Day(hourly=[\n\t\t\t{\"temp\": 274.15,\"humidity\": 10, \"wind_speed\" : 1.60934}, \n\t\t\t{\"temp\": 275.15,\"humidity\": 50, \"wind_speed\" : 3.21868},\n\t\t\t {\"temp\": 276.15, \"humidity\": 100, \"wind_speed\": 4.82802}\n\t\t])\n\n\t\tself.assertEquals(day.min_humidity, 10) \n\n\n\tdef test_day_average(self,):\n\t\tday = Day(hourly=[\n\t\t\t{\"temp\": 274.15,\"humidity\": 10, \"wind_speed\" : 1.60934}, \n\t\t\t{\"temp\": 275.15,\"humidity\": 50, \"wind_speed\" : 3.21868},\n\t\t\t {\"temp\": 276.15, \"humidity\": 100, \"wind_speed\": 4.82802},\n\t\t\t {\"temp\": 277.15, \"humidity\": 100, \"wind_speed\": 4.82802}\n\t\t]) \n\t\tself.assertEquals(day.median_tmp, 2.5) \n\n\tdef test_test_median_tmp_no_middle(self,):\n\t\tday = Day(hourly=[\n\t\t\t{\"temp\": 274.15,\"humidity\": 10, \"wind_speed\" : 1.60934}, \n\t\t\t{\"temp\": 275.15,\"humidity\": 50, \"wind_speed\" : 3.21868},\n\t\t\t {\"temp\": 276.15, \"humidity\": 100, \"wind_speed\": 4.82802},\n\t\t\t {\"temp\": 277.15, \"humidity\": 100, \"wind_speed\": 4.82802}\n\t\t]) \n\t\tself.assertEquals(day.median_tmp, 2.5) \n\n\tdef test_test_median_tmp(self,):\n\t\tday = Day(hourly=[\n\t\t\t{\"temp\": 274.15,\"humidity\": 10, \"wind_speed\" : 1.60934}, \n\t\t\t{\"temp\": 275.15,\"humidity\": 50, \"wind_speed\" : 3.21868},\n\t\t\t {\"temp\": 276.15, \"humidity\": 100, \"wind_speed\": 4.82802}\n\t\t]) \n\t\tself.assertEquals(day.median_tmp, 2.0) \n\n\tdef test_test_max_tmp(self,):\n\t\tday = Day(hourly=[\n\t\t\t{\"temp\": 274.15,\"humidity\": 10, \"wind_speed\" : 1.60934}, \n\t\t\t{\"temp\": 275.15,\"humidity\": 50, \"wind_speed\" : 3.21868},\n\t\t\t {\"temp\": 276.15, \"humidity\": 100, \"wind_speed\": 4.82802}\n\t\t])\n\n\t\tself.assertEquals(day.max_tmp, 3.0) \n\n\tdef test_test_min_tmp(self,):\n\t\tday = Day(hourly=[\n\t\t\t{\"temp\": 274.15,\"humidity\": 10, \"wind_speed\" : 1.60934}, \n\t\t\t{\"temp\": 275.15,\"humidity\": 50, \"wind_speed\" : 3.21868},\n\t\t\t {\"temp\": 276.15, \"humidity\": 100, \"wind_speed\": 4.82802}\n\t\t])\n\n\t\tself.assertEquals(day.min_tmp, 1.0) \n\n\tdef test_celius_constructor(self,):\n\t\tday = Day(hourly=[\n\t\t\t{\"temp\": 274.15,\"humidity\": 10, \"wind_speed\" : 1.60934}, \n\t\t\t{\"temp\": 275.15,\"humidity\": 50, \"wind_speed\" : 3.21868},\n\t\t\t {\"temp\": 276.15, \"humidity\": 100, \"wind_speed\": 4.82802}\n\t\t])\n\t\tself.assertEquals(day.tmps, [1.0,2.0,3.0]) \n\t\tself.assertEquals(day.humids, [10,50,100]) \n\t\tself.assertEquals(day.winds, [1,2,3]) \n\n\tdef test_past_days_invalid_date(self,):\n\t\tweather = OpenWeather() \n\t\twith 
self.assertRaises(BadDateException) as context:\n\t\t\tweather.weather_on(units=\"c\",lat=0, lon=0,dt=123453)\t\n\n\tdef test_past_days(self,):\n\t\tnow = datetime.now()\n\t\tweather = OpenWeather() \n\t\t\n\t\tdef api_response(*args, **kwargs):\n\t\t\treturn FakeResponse({\"hourly\" : [{\"temp\": 274.15,\"humidity\": 10, \"wind_speed\" : 1.60934}]})\n\n\t\twith patch('requests.request', api_response): \n\t\t\tday = weather.weather_on(units=\"c\",lat=0, lon=0,dt=now.timestamp(), stub=now)\n\t\t\tself.assertIsInstance(day, Day)\n\n\t\t\nclass FakeResponse(object):\n\tdef __init__(self,json):\n\t\tself.data = json\n\n\tdef json(self,):\n\t\treturn self.data"
}
] | 5 |
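`test_past_days` above avoids real HTTP by patching `requests.request` with a callable that returns a `FakeResponse`. The same pattern in isolation; `fetch_conditions` and the URL are illustrative stand-ins, not names from the repo:

```
from unittest.mock import patch
import requests

class FakeResponse:
    def __init__(self, payload):
        self._payload = payload
    def json(self):
        return self._payload

def fetch_conditions():
    # Illustrative stand-in for the code under test.
    return requests.request("GET", "https://example.invalid/api").json()

with patch("requests.request", lambda *a, **kw: FakeResponse({"hourly": []})):
    assert fetch_conditions() == {"hourly": []}  # no network traffic occurs
```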
coding-potato/gosurf | https://github.com/coding-potato/gosurf | 8c2cffae326c31dddc3f03807faf4d518d3db887 | 9539636a21d8dfb3a2192f3b3719c4caa04cc229 | 1857550660d134fc83b740661f44779c911d204e | refs/heads/master | 2022-10-18T01:36:11.767705 | 2019-08-03T18:12:10 | 2019-08-03T18:12:10 | 270,362,819 | 0 | 0 | null | 2020-06-07T16:20:29 | 2019-08-03T18:13:42 | 2019-08-26T05:02:09 | null | [
{
"alpha_fraction": 0.5440940856933594,
"alphanum_fraction": 0.555852472782135,
"avg_line_length": 27.78461456298828,
"blob_id": "87017952a6d8acd91646ed6534a023f675275652",
"content_id": "4e8dc66def7935079ce75d7929016e8e4936e507",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1871,
"license_type": "no_license",
"max_line_length": 90,
"num_lines": 65,
"path": "/gosurf/app.py",
"repo_name": "coding-potato/gosurf",
"src_encoding": "UTF-8",
"text": "import json\nfrom datetime import datetime\n\nimport grequests\nimport pandas as pd\nfrom flask import Flask, render_template\n\n\napp = Flask(__name__)\n\n\[email protected]('/')\ndef home():\n days, spots = surfline_api(days=3)\n return render_template('index.html', days=days, spots=spots)\n\n\ndef surfline_api(days=3):\n condition_to_num = {\n None: 0,\n 'FLAT': 1,\n 'VERY_POOR': 2,\n 'POOR': 3,\n 'POOR_TO_FAIR': 4,\n 'FAIR': 5,\n 'FAIR_TO_GOOD': 6,\n 'GOOD': 7,\n 'VERY_GOOD': 8,\n 'GOOD_TO_EPIC': 9,\n 'EPIC': 10\n }\n url = \"http://services.surfline.com/kbyg/spots/forecasts/conditions?spotId={}&days={}\"\n\n locations = pd.read_csv('static/locations.csv')\n\n # Surfline API\n rs = (grequests.get(url.format(_id, days)) for _id in locations._id)\n results = grequests.map(rs)\n\n forecast_list = []\n for _id, result in zip(locations._id, results):\n j = json.loads(result.text)['data']['conditions']\n for day in j:\n dt = datetime.fromtimestamp(day['timestamp'])\n am, pm = day['am']['rating'], day['pm']['rating']\n score = (condition_to_num[am] + condition_to_num[pm]) / 2 * 10\n forecast_list.append({'_id': _id, 'date': dt, 'score': int(score)})\n\n forecasts = pd.DataFrame(forecast_list)\n forecasts = forecasts.pivot_table(index='_id', values='score',\n columns='date')\n\n days = [dt.strftime('%a %-m/%-d') for dt in forecasts.columns]\n forecasts = (forecasts.apply(lambda x: list(x), axis=1)\n .rename('scores')\n .reset_index())\n\n spots = pd.merge(locations, forecasts, on='_id').drop(columns='_id')\n spot_dict = spots.to_dict('records')\n\n return days, spot_dict\n\n\nif __name__ == '__main__':\n app.run(debug=True, host='0.0.0.0')\n"
}
] | 1 |
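`surfline_api` above averages the am/pm condition ratings, scales them to 0-100, and pivots to one row per spot with one column per day. A minimal sketch of that transformation with made-up spot ids and dates (the full condition mapping is abbreviated here):

```
import pandas as pd

condition_to_num = {"POOR": 3, "FAIR": 5, "GOOD": 7}  # abbreviated mapping

rows = [
    {"_id": "spot-a", "date": "Mon 6/1",
     "score": (condition_to_num["POOR"] + condition_to_num["FAIR"]) // 2 * 10},
    {"_id": "spot-a", "date": "Tue 6/2",
     "score": (condition_to_num["GOOD"] + condition_to_num["GOOD"]) // 2 * 10},
]
forecasts = pd.DataFrame(rows).pivot_table(index="_id", values="score",
                                           columns="date")
print(forecasts)  # one row per spot, one column per day
```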
rudsonramon/machine_learning_udacity_ud120-projects | https://github.com/rudsonramon/machine_learning_udacity_ud120-projects | b9217c19bb10fcccf1f98da8d95afe3f32f8212e | b3254e3b053c84283a779005d3dcc4f84bfae4b5 | f3f4a1b325b52f991a78cfd447db2bb1c3147bf7 | refs/heads/master | 2021-03-31T01:10:42.025373 | 2018-03-18T23:34:04 | 2018-03-18T23:34:04 | 125,122,666 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6136674880981445,
"alphanum_fraction": 0.620406448841095,
"avg_line_length": 33.409420013427734,
"blob_id": "12c0dff132d24fd7c7296d22c2c9f263faaf71ad",
"content_id": "a36bac434678372d1af6a28d9d321056ae070a08",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 9507,
"license_type": "permissive",
"max_line_length": 122,
"num_lines": 276,
"path": "/datasets_questions/explore_enron_data.py",
"repo_name": "rudsonramon/machine_learning_udacity_ud120-projects",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python\n\n\"\"\" \n Starter code for exploring the Enron dataset (emails + finances);\n loads up the dataset (pickled dict of dicts).\n\n The dataset has the form:\n enron_data[\"LASTNAME FIRSTNAME MIDDLEINITIAL\"] = { features_dict }\n\n {features_dict} is a dictionary of features associated with that person.\n You should explore features_dict as part of the mini-project,\n but here's an example to get you started:\n\n enron_data[\"SKILLING JEFFREY K\"][\"bonus\"] = 5600000\n \n\"\"\"\nimport pickle\nimport pandas as pd\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport matplotlib.patches as mpatches\n\n#enron_data = pickle.load(open('../final_project/final_project_dataset_unix.pkl', 'rb'))\n\ndef open_dataset(url, param_permission='rb'):\n \"\"\"\n open the file that will be used in this project.\n \"\"\"\n enron_data = pickle.load(open(url, param_permission))\n return enron_data\n\ndef extract_data(data_file):\n \"\"\"\n TRANSFORM A PICKLE FILE IN THE PANDAS DATA FRAME\n \"\"\"\n df = pd.DataFrame.from_dict(data_file)\n # print(df)\n # df.info()\n # df.head()\n return df\n\n## OPEN THE FILE\n###### USING A PIVOT WAY TO TRY GETTING BETTER VIEW\nenron_data = open_dataset('../final_project/final_project_dataset_unix.pkl', 'rb')\n\ndf = extract_data(enron_data)\n\n\n# df = enron_data.iloc[:5 , :3]\n\n# stacked = df.stack()\n\n# print('stacked: ' + '\\r\\n', stacked)\n## unstacked = stacked.unstack()\n## print('unstacked: ' + '\\r\\n', unstacked)\n\nenron_df = pd.DataFrame.from_dict(df)\n\n###XXX: USING PANDAS DATA FRAME WITH ILOC TO ORGANIZE THE DATASET\n# enron_df.fillna(0, inplace=True) ## fill the NAN values with zero '0'\n# enron_df['BADUM JAMES P'] = enron_df['BADUM JAMES P'].fillna(0, inplace=True)\n# enron_df.fillna(0)\n\nemployee = {'ALLEN PHILLIP K': 0} ## EMPLOYEE_NAME replaces the NAN values for zero.\nenron_print = enron_df.fillna(value=employee)\nenron_print = enron_print.filter(items=['ALLEN PHILLIP K', 'BADUM JAMES P']) ## filter columns \nenron_print = enron_print.filter(like='salary', axis=0) ## filter rows\n\n# print(enron_print)\n# enron_print['BADUM JAMES P'].replace('nan', '')\n# enron_print['Max'] = df[['ALLEN PHILLIP K','BADUM JAMES P']].idxmax(axis=1)\n\nenron_df = enron_df.iloc[16:17,:]\n# print(enron_print)\n# print('\\n\\r', 'print(enron_df.describe())==>>', enron_df.describe())\n# print('\\n')\n# print('\\n\\r', 'print(enron_print.describe())==>>', enron_print.describe())\nprint(enron_df)\nenron_df.info()\n\nenron_stacked = enron_df.stack()\n# print('enron_stacked==>>', enron_stacked)\nprint(enron_stacked)\n# enron_stacked['salary'].astype(int)\nold_label = 0\nold_label = int(old_label)\n\nfor key, label in enron_stacked.items():\n if label != np.nan:\n if int(label) > int(old_label):\n big_salary = label\n print(big_salary)\n pass\n else:\n pass\n\n old_label = int(label)\n print('key==>>', key, 'label==>>', label)\n\n pass\n\n\n\"\"\"\nenron_df = df.iloc[[0], [0, 1]]\n\nfor key, label in enron_df.items():\n print('key ==>> ', key, 'label ==>> ', label)\n\n\nred_patch = mpatches.Patch(color='red', label=key)\nplt.legend(handles=[red_patch])\n## subplot(211) ==>> DEFINE THE UP POSITION OF THE PLOT\n# plt.subplot(211)\n## label=\"Line 1\", linestyle='--' DEFINE THE TYPE OF THE LINE\n# .plot(kind='bar')\nline1 = plt.plot(enron_df, linestyle='--', label=key) ## , marker='o', linestyle='--'\n\nplt.show()\n\nprint('iloc: ' + '\\r\\n', enron_df)\n\"\"\"\n\n ############# ALTERNATIVE SOLUTION TO SEE THE RESULTS\n # for feature, label in 
data_file.items():\n # # print(enron_data[\"SKILLING JEFFREY K\"])\n # # print(enron_data[label])\n # # return data_file[label][\"email_address\"]\n # # print('feature ==>> ', feature, 'label ==>> ', label)\n \n # print('feature ==>> ', feature, 'df ==>>', df)\n # for item_label, item_description in label.items():\n # return print('item_label ==>> ', item_label, 'item_description ==>> ', item_description)\n ######################################################\n\n# Start processing the data\n\n# RECEIVE THE CONTENT OF THE FILE FROM THE METHOD ==>> open_dataset(url, param_permission='rb')\n\n##################################################################\n##################################################################\n##################################################################\n# enron_data = open_dataset('../final_project/final_project_dataset_unix.pkl', 'rb')\n## from final_project.poi_email_addresses import *\n#def open_text_file(url, param_permission):\n# fp = open(url, param_permission)\n# return fp\n## How many data points (people) are in the dataset?\n#len(enron_data)\n#len(enron_data.keys())\n## print('len(enron_data)==>>', len(enron_data))\n## print('len(enron_data.keys())==>>', len(enron_data.keys()))\n\n\n## How Many POIs Exist?\n#url = 'C:/Users/rudson.r.rodrigues/PythonProjects/Udacity/Intro Machine Learn/ud120-projects/final_project/poi_names.txt'\n#poi_text = url\n#poi_names = open(poi_text, 'r')\n#fr = poi_names.readlines()\n#len(fr[2:])\n#poi_names.close()\n\n## print poi_names.read()\n\n#num_lines = sum(1 for line in open(poi_text))\n\n# def file_len(fname):\n # with open(fname) as f:\n # for i, l in enumerate(f):\n # pass\n # return i + 1\n\n# file_len(url)\n# print('file_len(url)==>>', file_len(url))\n\n\n## poi_names = open_dataset('../final_project/poi_names.txt', 'rb')\n## poi_mail_list = poiEmails()\n## print('poi_names==>', poi_names)\n## print('poi_e-mail_list==>', poi_mail_list)\n## EXTRACT ALL THE DATA FROM THE METHOD ==>> extract_data(enron_data)\n## print('enron_data ==>>', enron_data[\"SKILLING JEFFREY K\"][\"bonus\"])\n\n## feature_train, label_train = extract_data(enron_data)\n#label_train_df = extract_data(enron_data)\n## print('FILE CONTENT ==>> ', extract_data(enron_data))\n## print('feature_train ==>> ', feature_train, 'label_train==>> ', label_train)\n\n## label_train.info()\n## label_train.pivot_table(index='ALLEN PHILLIP K', columns='ALLEN PHILLIP K', values='deferred_income', aggfunc='mean')\n## label_train = label_train.iloc[0:3,:2]\n## label_train = label_train[['ALLEN PHILLIP K', 'BAZELIDES PHILIP J']]\n## print(label_train)\n## import matplotlib.pyplot as plt\n## plt.plot(label_train)\n## plt.show()\n## label_train = label_train.iloc[13:14,:]\n#label_train = label_train_df.loc['poi',:]\n#count_poi = 0\n#for poi in label_train_df:\n# if poi == True:\n# count_poi += 1\n# pass\n# # print('poi==>> ', poi, 'count_poi==>> ', count_poi)\n#print('GRAND TOTAL ==> count_poi==>> ', count_poi)\n\n#count_stock = label_train_df[['PRENTICE JAMES'][0]]\n#for stock_label, total_stock_value in count_stock.items():\n# if stock_label == 'total_stock_value':\n# print('PRENTICE JAMES:','total_stock_value==>>', total_stock_value)\n# pass\n# pass\n\n#wesley_colwell = label_train_df[['COLWELL WESLEY'][0]]\n#for stock_label, total_stock_value in wesley_colwell.items():\n# if stock_label == 'from_this_person_to_poi':\n# print('WESLEY COLWELL:','from_this_person_to_poi==>>', total_stock_value)\n# pass\n# pass\n\n#jeffrey_k_skilling = label_train_df[['SKILLING 
JEFFREY K'][0]]\n#for stock_label, total_stock_value in jeffrey_k_skilling.items():\n# if stock_label == 'exercised_stock_options':\n# print('JEFFREY K SKILLING:','exercised_stock_options==>>', total_stock_value)\n# pass\n# pass\n# How much money did that person get?\n#print(\"['SKILLING JEFFREY K']['total_payments']\", label_train_df['SKILLING JEFFREY K']['total_payments'])\n#print(\"['FASTOW ANDREW S']['total_payments']\", label_train_df['FASTOW ANDREW S']['total_payments'])\n#print(\"['LAY KENNETH L']['total_payments']\", label_train_df['LAY KENNETH L']['total_payments'])\n\n## How is an unfilled feature denoted?\n#label_train_df['FASTOW ANDREW S']['deferral_payments']\n\n## How many folks in this dataset have a quantified salary?\n## What about a known email address?\n#count_salary = 0\n#count_email = 0\n#for key in label_train_df.keys():\n# if label_train_df[key]['salary'] != 'NaN':\n# count_salary+=1\n# if label_train_df[key]['email_address'] != 'NaN':\n# count_email+=1\n#print('count_salary==>>', count_salary)\n#print('count_email==>', count_email)\n\n## How many people in the E+F dataset (as it currently exists) have “NaN” for their total payments?\n## What percentage of people in the dataset as a whole is this?\n#count_NaN_tp = 0\n#for key in label_train_df.keys():\n# if label_train_df[key]['total_payments'] == 'NaN':\n# count_NaN_tp+=1\n#print('count_NaN_tp==>>', count_NaN_tp)\n#print('float(count_NaN_tp)/len(label_train_df.keys())==>>', float(count_NaN_tp)/len(label_train_df.keys()))\n\n## How many POIs in the E+F dataset have “NaN” for their total payments? \n## What percentage of POI’s as a whole is this? \n#count_NaN_tp = 0\n#for key in label_train_df.keys():\n# if label_train_df[key]['total_payments'] == 'NaN' and label_train_df[key]['poi'] == True :\n# count_NaN_tp+=1\n# print('count_NaN_tp+=1==>>', count_NaN_tp)\n\n#print('count_NaN_tp==>>', count_NaN_tp)\n#print('float(count_NaN_tp)/len(label_train_df.keys())==>>', float(count_NaN_tp)/len(label_train_df.keys()))\n\n#print('label_train_df.iloc[:, :]==>', label_train_df.iloc[:, :])\n## print('count_stock==>>', count_stock)\n## for count_poi in label_train.iloc[0:10,]:\n# # count_poi +=1\n## print('count_poi ==>', count_poi)\n## count_poi = label_train.iloc[-8,:] == True\n## print('label_train==>> ', label_train)\n## print('count_poi ==>', count_poi)\n## print(label_train)\n## poi = []\n## def find_poi(label_train)\n"
},
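The hand-rolled stacked-frame walk above can be shortened considerably. Assuming the same project pickle, the highest quantified salary can be read off with pandas directly; 'NaN' strings are how this dataset marks missing values, and note the spreadsheet's 'TOTAL' row will win unless it is dropped first:

```
import pickle
import pandas as pd

enron_data = pickle.load(open('../final_project/final_project_dataset_unix.pkl', 'rb'))
df = pd.DataFrame.from_dict(enron_data, orient="index")
salaries = pd.to_numeric(df["salary"], errors="coerce")  # 'NaN' strings -> NaN
print(salaries.idxmax(), salaries.max())  # person with the highest salary
```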
{
"alpha_fraction": 0.6336178779602051,
"alphanum_fraction": 0.650689423084259,
"avg_line_length": 31.36170196533203,
"blob_id": "9fccfd73633e19959b0fd9a0250778cbe758f6ab",
"content_id": "4110bb0059a7e37b5ef0265ec37982da10c2eec4",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1523,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 47,
"path": "/decision_tree/dt_author_id.py",
"repo_name": "rudsonramon/machine_learning_udacity_ud120-projects",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python\n\n\"\"\" \n This is the code to accompany the Lesson 3 (decision tree) mini-project.\n\n Use a Decision Tree to identify emails from the Enron corpus by author: \n Sara has label 0\n Chris has label 1\n\"\"\"\n \nimport sys\nfrom time import time\nsys.path.append(\"../tools/\")\nimport matplotlib.pyplot as plt\nfrom email_preprocess import preprocess\nfrom sklearn import tree\nfrom sklearn.metrics import accuracy_score\n\n\n###XXX: features_train and features_test are the features for the training\n### and testing datasets, respectively\n### labels_train and labels_test are the corresponding item labels\nfeatures_train, features_test, labels_train, labels_test = preprocess()\n\n#########################################################\n### your code goes here ###\n\nprint('features_train ==> ', len(features_train[0]))\n\nclf = tree.DecisionTreeClassifier(min_samples_split=40)\nt0 = time()\nclf.fit(features_train, labels_train)\nprint(\"training time:\", round(time()-t0, 3), \"s\")\nt0 = time()\npred = clf.predict(features_test)\nprint(\"predict time:\", round(time()-t0, 3), \"s\")\n\nprint(\"DecisionTree accuracy: %r\" % accuracy_score(pred, labels_test))\nprint(\"10th: %r, 26th: %r, 50th: %r\" % (pred[10], pred[26], pred[50]))\nprint(\"No. of predicted to be in the 'Chris'(1): %r\" % sum(pred))\nacc = accuracy_score(pred, labels_test) ### you fill this in!\n### be sure to compute the accuracy on the test set\n\ndef submitAccuracies():\n return {\"acc\":round(acc,3)}\n\n#########################################################\n\n\n"
},
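The mini-project's point is that `min_samples_split` trades variance for bias. `preprocess()` is project-specific, so this self-contained comparison substitutes a synthetic set from `sklearn.datasets` to show the effect of the parameter:

```
from sklearn.datasets import make_classification
from sklearn.metrics import accuracy_score
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier

X, y = make_classification(n_samples=2000, n_features=20, random_state=0)
X_tr, X_te, y_tr, y_te = train_test_split(X, y, random_state=0)

for split in (2, 40):  # sklearn's default vs the mini-project's value
    clf = DecisionTreeClassifier(min_samples_split=split, random_state=0)
    clf.fit(X_tr, y_tr)
    print("min_samples_split=%d: %.3f"
          % (split, accuracy_score(y_te, clf.predict(X_te))))
```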
{
"alpha_fraction": 0.5832453370094299,
"alphanum_fraction": 0.6001408100128174,
"avg_line_length": 43.25,
"blob_id": "2b9611f4e8801bf37cd6a0dce44a2786fc675705",
"content_id": "0cd036f65811bb50134f170ae637adca17868066",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2841,
"license_type": "permissive",
"max_line_length": 122,
"num_lines": 64,
"path": "/choose_your_own/your_algorithm_random_forest.py",
"repo_name": "rudsonramon/machine_learning_udacity_ud120-projects",
"src_encoding": "UTF-8",
"text": " #!/usr/bin/python\n # -*- coding: utf-8 -*-\n #######\n ## ALGORITM USED: from sklearn.ensemble import RandomForestClassifier\n ## find more detail at: http://scikit-learn.org/stable/modules/generated/sklearn.ensemble.RandomForestClassifier.html\n #######\n \n import matplotlib.pyplot as plt\n from prep_terrain_data import makeTerrainData\n from class_vis import prettyPicture\n \n features_train, labels_train, features_test, labels_test = makeTerrainData()\n \n \n ### the training data (features_train, labels_train) have both \"fast\" and \"slow\"\n ### points mixed together--separate them so we can give them different colors\n ### in the scatterplot and identify them visually\n grade_fast = [features_train[ii][0] for ii in range(0, len(features_train)) if labels_train[ii]==0]\n bumpy_fast = [features_train[ii][1] for ii in range(0, len(features_train)) if labels_train[ii]==0]\n grade_slow = [features_train[ii][0] for ii in range(0, len(features_train)) if labels_train[ii]==1]\n bumpy_slow = [features_train[ii][1] for ii in range(0, len(features_train)) if labels_train[ii]==1]\n \n #### initial visualization\n plt.xlim(0.0, 1.0)\n plt.ylim(0.0, 1.0)\n plt.scatter(bumpy_fast, grade_fast, color = \"b\", label=\"fast\")\n plt.scatter(grade_slow, bumpy_slow, color = \"r\", label=\"slow\")\n plt.legend()\n plt.xlabel(\"bumpiness\")\n plt.ylabel(\"grade\")\n plt.show()\n \n ################################################################################\n \n ### your code here! name your classifier object clf if you want the \n ### visualization code (prettyPicture) to show you the decision boundary\n \n from sklearn.ensemble import RandomForestClassifier\n from sklearn.metrics import accuracy_score\n from time import time\n \n features_train, labels_train = RandomForestClassifier.make_classification(n_samples=10, n_features=1,\n n_informative=2, n_redundant=0,\n random_state=0, shuffle=False)\n clf = RandomForestClassifier(max_depth=2, random_state=0)\n t0 = time()\n clf.fit(features_train, labels_train)\n print(\"training time:\", round(time()-t0, 3), \"s\")\n t0 = time()\n pred = clf.predict(features_test)\n \n print(\"predict time:\", round(time()-t0, 3), \"s\")\n \n ## call the method to show the updated chart.\n \n print(\"Accuracy precision for Random Forest: %r\" % accuracy_score(pred, labels_test))\n print(\"10th: %r, 26th: %r, 50th: %r\" % (pred[10], pred[26], pred[50]))\n print(\"No. of predicted to be in the 'Chris'(1): %r\" % sum(pred))\n \n \n try:\n prettyPicture(clf, features_test, labels_test)\n except NameError:\n pass\n \n"
},
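For reference, `make_classification` (which an earlier draft of this file tried to call as a `RandomForestClassifier` method) lives in `sklearn.datasets`; its correct use, mirroring the parameters the draft passed, looks like this:

```
from sklearn.datasets import make_classification
from sklearn.ensemble import RandomForestClassifier

# make_classification is a dataset helper, not a classifier method.
X, y = make_classification(n_samples=100, n_features=4,
                           n_informative=2, n_redundant=0,
                           random_state=0, shuffle=False)
clf = RandomForestClassifier(max_depth=2, random_state=0)
clf.fit(X, y)
print(clf.predict([[0, 0, 0, 0]]))
```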
{
"alpha_fraction": 0.6391448378562927,
"alphanum_fraction": 0.6583118438720703,
"avg_line_length": 39.47761154174805,
"blob_id": "6320c3f880bcdd215f722f4a4cf9e0d93e118712",
"content_id": "2ab5794edaf3a684021699b0d38bc0cb6386fcae",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2713,
"license_type": "permissive",
"max_line_length": 106,
"num_lines": 67,
"path": "/choose_your_own/your_algorithm_adaboost.py",
"repo_name": "rudsonramon/machine_learning_udacity_ud120-projects",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python\n# -*- coding: utf-8 -*-\n#######\n## ALGORITM USED: from sklearn.ensemble import AdaBoostClassifier\n## find more detail at: http://scikit-learn.org/stable/modules/ensemble.html#adaboost\n#######\n\nimport matplotlib.pyplot as plt\nfrom prep_terrain_data import makeTerrainData\nfrom class_vis import prettyPicture\n\nfeatures_train, labels_train, features_test, labels_test = makeTerrainData()\n\n\n### the training data (features_train, labels_train) have both \"fast\" and \"slow\"\n### points mixed together--separate them so we can give them different colors\n### in the scatterplot and identify them visually\ngrade_fast = [features_train[ii][0] for ii in range(0, len(features_train)) if labels_train[ii]==0]\nbumpy_fast = [features_train[ii][1] for ii in range(0, len(features_train)) if labels_train[ii]==0]\ngrade_slow = [features_train[ii][0] for ii in range(0, len(features_train)) if labels_train[ii]==1]\nbumpy_slow = [features_train[ii][1] for ii in range(0, len(features_train)) if labels_train[ii]==1]\n\n#### initial visualization\nplt.xlim(0.0, 1.0)\nplt.ylim(0.0, 1.0)\nplt.scatter(bumpy_fast, grade_fast, color = \"b\", label=\"fast\")\nplt.scatter(grade_slow, bumpy_slow, color = \"r\", label=\"slow\")\nplt.legend()\nplt.xlabel(\"bumpiness\")\nplt.ylabel(\"grade\")\nplt.show()\n\n################################################################################\n\n### your code here! name your classifier object clf if you want the \n### visualization code (prettyPicture) to show you the decision boundary\nfrom sklearn.tree import DecisionTreeRegressor\nfrom sklearn.ensemble import AdaBoostRegressor\nfrom sklearn.metrics import accuracy_score\nfrom time import time\n\n#features_train, labels_train = RandomForestClassifier.make_classification(n_samples=10, n_features=1,\n# n_informative=2, n_redundant=0,\n# random_state=0, shuffle=False)\n\nclf = AdaBoostRegressor(DecisionTreeRegressor(max_depth=4),\n n_estimators=300, random_state=rng)\n#clf = RandomForestClassifier(max_depth=2, random_state=0)\nt0 = time()\nclf.fit(features_train, labels_train)\nprint(\"training time:\", round(time()-t0, 3), \"s\")\nt0 = time()\npred = clf.predict(features_test)\n\nprint(\"predict time:\", round(time()-t0, 3), \"s\")\n\n## call the method to show the updated chart.\n\nprint(\"Accuracy precision for Random Forest: %r\" % accuracy_score(pred, labels_test))\nprint(\"10th: %r, 26th: %r, 50th: %r\" % (pred[10], pred[26], pred[50]))\nprint(\"No. of predicted to be in the 'Chris'(1): %r\" % sum(pred))\n\n\ntry:\n prettyPicture(clf, features_test, labels_test)\nexcept NameError:\n pass\n\n"
},
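A self-contained sketch of the same classifier on synthetic data (parameters chosen for illustration only), useful for experimenting with `n_estimators` away from the terrain helpers:

```
from sklearn.datasets import make_classification
from sklearn.ensemble import AdaBoostClassifier
from sklearn.metrics import accuracy_score
from sklearn.model_selection import train_test_split

X, y = make_classification(n_samples=500, random_state=0)
X_tr, X_te, y_tr, y_te = train_test_split(X, y, random_state=0)
clf = AdaBoostClassifier(n_estimators=300, random_state=0)  # boosted stumps
clf.fit(X_tr, y_tr)
print(accuracy_score(y_te, clf.predict(X_te)))
```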
{
"alpha_fraction": 0.7478547692298889,
"alphanum_fraction": 0.7696369886398315,
"avg_line_length": 49.5,
"blob_id": "85686bbaec3f9a5b52f3b3386230af49ee3790d6",
"content_id": "fc4f781d4a727cfc345921d2cc95b3ceb40aa378",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1515,
"license_type": "permissive",
"max_line_length": 338,
"num_lines": 30,
"path": "/README.md",
"repo_name": "rudsonramon/machine_learning_udacity_ud120-projects",
"src_encoding": "UTF-8",
"text": "ud120-projects\n==============\n\n### This version is FULL compatible with Python 3!! ###\n### All the references in: https://github.com/udacity/ud120-projects ###\n\nStarter project code for students taking Udacity ud120\n## Intro To Machine Learning (ud120) Projects \nIntro To Machine Learning (ud120) Projects\nStarter project code for students taking Udacity \"Intro To Machine Learning\" (ud120)\n\nInstallation\nThis source code requires you to have working environment for Python 2.5 or Python 2.7. The source code will not work on Python 3.x\n\nBEFORE YOU TRY TO RUN YOUR CODE: please read the prerequisites To download this source code.\n\ngit clone https://github.com/udacity/ud120-projects\nOnce you are done open a command window and type in \"Spyder\". This will launch an IDE to run python\n\nOpen the code and hit the play button to run your code.\n\nPrerequisites\nThe source code needs a working environment for Python 2.5 / Python 2.7. The best and easiest ( Probably not the quickest ) way to do this is, to use Anaconda which is a data science platform that not only allows you to manage multiple python environments but also installs all the required packages that you need to run this source code.\n\nFor Windows Platform: To download Anaconda please go to the url https://www.continuum.io/downloads and download \"Python 2.7 version\". Once downloaded double click the .exe file to install Anaconda and follow the instructions on the screen.\n\nOnce done continue with Installation\n\nLicense\nMIT license.\n"
},
{
"alpha_fraction": 0.6245004534721375,
"alphanum_fraction": 0.6915155053138733,
"avg_line_length": 37.2529411315918,
"blob_id": "249a111ea35a4f78fd91d5d08a1465db66805f0b",
"content_id": "089c5271da5ac7895f7c37b99222448e387cf510",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 6508,
"license_type": "permissive",
"max_line_length": 113,
"num_lines": 170,
"path": "/svm/svm_author_id.py",
"repo_name": "rudsonramon/machine_learning_udacity_ud120-projects",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python\n\n\"\"\" \n This is the code to accompany the Lesson 2 (SVM) mini-project.\n\n Use a SVM to identify emails from the Enron corpus by their authors: \n Sara has label 0\n Chris has label 1\n\"\"\"\n \nimport sys\nfrom time import time\nsys.path.append(\"../tools/\")\nimport matplotlib.pyplot as plt\nfrom email_preprocess import preprocess\nfrom sklearn.svm import SVC\nfrom sklearn.metrics import accuracy_score\n\n\n#XXX: features_train and features_test are the features for the training\n### and testing datasets, respectively\n### labels_train and labels_test are the corresponding item labels\nfeatures_train, features_test, labels_train, labels_test = preprocess()\n\n#One way to speed up an algorithm is to train it on a smaller training dataset.\n#The tradeoff is that the accuracy almost always goes down when you do this.\n#Let’s explore this more concretely: add in the following two lines immediately before training your classifier. \n#features_train = features_train[:len(features_train)/100]\n#labels_train = labels_train[:len(labels_train)/100] \n\"\"\"\n REDUCING THE DATA TO IMPROVE THE PERFORMANCE\n\"\"\"\n### organize the feature to avoid any error message\n#qtd_features = len(features_train)\n#qtd_features = qtd_features/100\n#qtd_features = int(qtd_features)\n#### the same with the label\n#qtd_labels = len(labels_train)\n#qtd_labels = qtd_labels/100\n#qtd_labels = int(qtd_labels)\n##print('qtd_features: ==>> ', qtd_features)\n##print('qtd_labels : ==>> ', qtd_labels )#features_train = features_train[:qtd_features]\n#labels_train = labels_train[:qtd_labels] \n\n#print(features_train[:qtd_features])\n#########################################################\n### your code goes here ###\n# clf = SVC(kernel=\"linear\")\n\n#clf = SVC(kernel='linear')\n#clf = SVC(kernel='rbf')\n#clf = SVC(kernel='rbf', C=10.0) ## SVM accuracy: 0.6160409556313993\n#clf = SVC(kernel='rbf', C=100) ## SVM accuracy: 0.6160409556313993\n#clf = SVC(kernel='rbf', C=1000) ## SVM accuracy: 0.8213879408418657\nclf = SVC(kernel='rbf', C=10000) ## SVM accuracy: 0.8924914675767918\nt0 = time()\n\n\"\"\"\n\tOne way to speed up an algorithm is to train it on a smaller training \n\tdataset. The tradeoff is that the accuracy almost always goes down when \n\tyou do this. Let's explore this more concretely:\n\t\n\toriginal (linear):\n\ttraining time: 188.996 s \n\tpredict time: 20.275 s \n\tSVM accuracy: 0.98407281001137659 \n\tThese lines effectively slice the training dataset down to 1 percent of its \n\toriginal size, tossing out 99 percent of the training data. 
\n\tSliced (linear):\n\ttraining time: 0.09 s\n\tpredict time: 0.961 s\n\taccuracy: 0.88452787258248011\n\tIf speed is a major consideration (and for many real-time machine learning \n\tapplications, it certainly is) then you may want to sacrifice a bit of \n\taccuracy if it means you can train/predict faster.\n\tDifferent Kernel:\n\tclf = SVC(kernel=\"rbf\", C=10000)\n\tAlso, C is very effective in this assignment, try to change it and see.\n\t\tSliced data set:\n\t\ttraining time: 0.098 s\n\t\tpredict time: 0.833 s\n\t\taccuracy: 0.89249146757679176\n\t\tFull sized data set:\n\t\ttraining time: 118.729 s\n\t\tpredict time: 13.075 s\n\t\taccuracy: 0.99089874857792948 #FTW\n\"\"\"\n# comment out those two lines if you want to see original one\n#features_train = features_train[:len(features_train)/100] \n#labels_train = labels_train[:len(labels_train)/100] \n\nclf.fit(features_train, labels_train)\nprint(\"training time:\", round(time()-t0, 3), \"s\")\nt0 = time()\npred = clf.predict(features_test)\nprint(\"predict time:\", round(time()-t0, 3), \"s\")\n\n# originally: 0.98407281001137659 acc... FTW but it takes time\nprint(\"SVM accuracy: %r\" % accuracy_score(pred, labels_test))\n\n\"\"\"\n\tWhat class does your SVM (0 or 1, corresponding to Sara and Chris respectively) \n\tpredict for element 10 of the test set? The 26th? The 50th? \n\t(Use the RBF kernel, C=10000, and 1% of the training set. Normally you'd get \n\tthe best results using the full training set, but we found that using 1% sped up \n\tthe computation considerably and did not change our results--so feel free to use \n\tthat shortcut here.)\n\"\"\"\nprint(\"10th: %r, 26th: %r, 50th: %r\" % (pred[10], pred[26], pred[50]))\n\n# There are over 1700 test events, how many are predicted to be in the \"Chris\" (1) class?\nprint(\"No. of predicted to be in the 'Chris'(1): %r\" % sum(pred))\n\n\"\"\"\n\tHopefully it's becoming clearer what they told us about the Naive Bayes -- is \n\tgreat for text -- it's faster and generally gives better performance than an SVM \n\tfor this particular problem. Of course, there are plenty of other problems where \n\tan SVM might work better. Knowing which one to try when you're tackling a problem \n\tfor the first time is part of the art and science of machine learning. In addition \n\tto picking your algorithm, depending on which one you try, there are parameter \n\ttunes to worry about as well, and the possibility of overfitting (especially if \n\tyou don't have lots of training data).\n\tOur general suggestion is to try a few different algorithms for each problem. \n\tTuning the parameters can be a lot of work, but just sit tight for now--toward \n\tthe end of the class we will introduce you to GridCV, a great sklearn tool that \n\tcan find an optimal parameter tune almost automatically.\n\"\"\"\n\n#predicts = clf.predict(features_test)\n#accuracy = accuracy_score(labels_test, predicts)\n#print(\"accuracy=\", accuracy )\n\n#########################################################\n\n########## \n########## RESULTS c/ parametro c=150000\n########## \n#no. of Chris training emails: 7936\n#no. of Sara training emails: 7884\n#training time: 245.032 s\n#predict time: 22.848 s\n#SVM accuracy: 0.9943117178612059\n#10th: 1, 26th: 0, 50th: 1\n#No. of predicted to be in the 'Chris'(1): 871\n\n########## \n########## RESULTS c/ parametro c=10000\n########## \n#no. of Chris training emails: 7936\n#no. 
of Sara training emails: 7884\n#training time: 309.46 s\n#predict time: 26.414 s\n#SVM accuracy: 0.9908987485779295\n#10th: 1, 26th: 0, 50th: 1\n#No. of predicted to be in the 'Chris'(1): 877\n\n########## ANSWER ACEPTED\n########## RESULTS c/ parametro \"\"clf = SVC(kernel=\"linear\", gamma=1.0)\"\"\n########## \n#no. of Chris training emails: 7936\n#no. of Sara training emails: 7884\n#training time: 432.789 s\n#predict time: 43.84 s\n#SVM accuracy: 0.9840728100113766\n#10th: 1, 26th: 0, 50th: 1\n#No. of predicted to be in the 'Chris'(1): 881\n\n########## \n########## RESULTS c/ parametro \"\"clf = SVC(kernel='linear')\"\"\n########## \n\n"
}
] | 6 |
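The commented-out 1% slicing in the file above is the Python 2 form; in Python 3, `len(...)/100` yields a float and cannot be used as a slice index, which is why the file computes `int(...)` by hand. Integer division does the same in one step:

```
features_train = list(range(1000))  # stand-in for the real feature list
# Python 3: len(...)/100 is a float and cannot index; // keeps it an int.
features_train = features_train[: len(features_train) // 100]
print(len(features_train))  # 10, i.e. a 1% slice
```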
akshay-bahulikar/Machine_Learning | https://github.com/akshay-bahulikar/Machine_Learning | 632f30808c12309377fb3333f52bc3e6d2211cfa | 19b0cada1904ccc82d01e091b9270578fb3f8b81 | 306de4085741a002493094287f55664a1da6743e | refs/heads/master | 2022-06-22T20:29:36.923067 | 2022-05-25T10:56:30 | 2022-05-25T10:56:30 | 245,089,161 | 0 | 0 | null | 2020-03-05T06:45:42 | 2020-03-05T06:50:08 | 2020-03-05T07:02:51 | Python | [
{
"alpha_fraction": 0.75,
"alphanum_fraction": 0.75,
"avg_line_length": 7,
"blob_id": "d1e2c66ad051320e7a6937b468d6952ab4f456c9",
"content_id": "5ce0e42650faa3b122dd1dfaf6394115a885b424",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 24,
"license_type": "no_license",
"max_line_length": 11,
"num_lines": 3,
"path": "/README.md",
"repo_name": "akshay-bahulikar/Machine_Learning",
"src_encoding": "UTF-8",
"text": "# Git_Demo\n\nDescription\n"
},
{
"alpha_fraction": 0.6080489754676819,
"alphanum_fraction": 0.6137357950210571,
"avg_line_length": 29.20547866821289,
"blob_id": "1d2ce77b47af71eceb3dfd895f3b6c97b341de0f",
"content_id": "4e248bf13d64a4906fc8dc879be89891ce22766b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2286,
"license_type": "no_license",
"max_line_length": 97,
"num_lines": 73,
"path": "/KNN_from_Scratch.py",
"repo_name": "akshay-bahulikar/Machine_Learning",
"src_encoding": "UTF-8",
"text": "from scipy.spatial import distance\r\nfrom sklearn.datasets import load_iris\r\nfrom sklearn.metrics import accuracy_score\r\nfrom sklearn.model_selection import train_test_split\r\n\r\ndef euclidean_dist(a,b):\r\n return distance.euclidean(a,b)\r\n\r\nclass KNN_Scratch():\r\n\r\n def fit(self,training_data,target_data):\r\n self.training_data=training_data\r\n self.target_data=target_data\r\n\r\n def predict(self,test_data):\r\n predictions=[]\r\n for row in test_data:\r\n label=self.closest(row)\r\n predictions.append(label)\r\n return predictions\r\n\r\n def closest(self,row):\r\n best_distance=euclidean_dist(row,self.training_data[0])\r\n best_index=0\r\n for i in range(1,len(self.training_data)):\r\n dist=euclidean_dist(row,self.training_data[i])\r\n if dist<best_distance:\r\n best_distance=dist\r\n best_index=i\r\n return self.target_data[best_index]\r\n\r\ndef KNeighbor():\r\n\r\n border=\"-\"*50\r\n iris=load_iris()\r\n\r\n data=iris.data\r\n target=iris.target\r\n print(border)\r\n print('Actual data set: ')\r\n print(border)\r\n for i in range(len(iris.target)):\r\n print(\"ID: %d, Label: %s, Feature: %s\"%(i,iris.data[i],iris.target[i]))\r\n print(\"Size of Actual dataset: %d\"%(i+1))\r\n\r\n data_train,data_test,target_train,target_test=train_test_split(data,target,test_size=0.5)\r\n print(border)\r\n print(\"Training Data set\")\r\n print(border)\r\n\r\n for i in range(len(data_train)):\r\n print(\"ID: %d, Label: %s, Feature: %s\" % (i, data_train[i], target_train[i]))\r\n print(\"Size of Training dataset: %d\" % (i + 1))\r\n\r\n print(border)\r\n print(\"Target data set\")\r\n print(border)\r\n for i in range(len(data_train)):\r\n print(\"ID: %d, Label: %s, Feature: %s\" % (i, data_test[i], target_test[i]))\r\n print(\"Size of Testing dataset: %d\" % (i + 1))\r\n\r\n classifier=KNN_Scratch()\r\n classifier.fit(data_train,target_train)\r\n predictions=classifier.predict(data_test)\r\n accuracy=accuracy_score(target_test,predictions)\r\n return accuracy\r\n\r\ndef main():\r\n accuracy=KNeighbor()\r\n print(\"Accuracy of classification algorithm with K Neighbor classifier is \",accuracy*100,\"%\")\r\n\r\nif __name__==\"__main__\":\r\n main()\r\n \r\n\r\n"
}
] | 2 |
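The scratch classifier above returns the label of the single nearest neighbour, which is exactly sklearn's `KNeighborsClassifier` with `n_neighbors=1`; a quick cross-check on the same iris split (`random_state` added for repeatability):

```
from sklearn.datasets import load_iris
from sklearn.metrics import accuracy_score
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier

data, target = load_iris(return_X_y=True)
d_tr, d_te, t_tr, t_te = train_test_split(data, target, test_size=0.5,
                                          random_state=0)
clf = KNeighborsClassifier(n_neighbors=1)  # same rule as the scratch version
clf.fit(d_tr, t_tr)
print(accuracy_score(t_te, clf.predict(d_te)))
```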
nachopaank/MUS | https://github.com/nachopaank/MUS | ec4a6554fac275e194b0c1a8f17fd633c606a9fc | 6cf6afa15f40f0568c2a94d655198c30127095a8 | 0471549dfa91be08761f87f0c0807de76a5b3c7e | refs/heads/master | 2022-10-18T05:53:00.888558 | 2020-06-17T10:00:59 | 2020-06-17T10:00:59 | 259,973,581 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.5013683438301086,
"alphanum_fraction": 0.5259988903999329,
"avg_line_length": 23.70270347595215,
"blob_id": "7f5d3bf563d3a3bd00d24b1bc878c111de31953a",
"content_id": "c6f8ee90f1d40c65f2776393a0b416a9fb7e9b63",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1827,
"license_type": "no_license",
"max_line_length": 69,
"num_lines": 74,
"path": "/Jaime/Entrega2/Filtro_Low_variable.py",
"repo_name": "nachopaank/MUS",
"src_encoding": "UTF-8",
"text": "import numpy as np\nimport pyaudio, kbhit\nimport os\nfrom scipy.io import wavfile\n\nSRATE, data = wavfile.read(\"tormenta.wav\")\nCHUNK = 1024\np = pyaudio.PyAudio()\nclear = lambda: os.system('cls')\n#numbloque = 0\nbloque = np.arange(CHUNK, dtype = data.dtype)\nkb = kbhit.KBHit()\nc = ' '\n#cAux = ' '\n#multi=False\n#dataPlay = np.zeros(len(data),dtype=int)\nframe = 0\n\nif data.dtype.name =='int16':fmt = 2\nelif data.dtype.name =='int32': fmt = 4\nelif data.dtype.name =='float32': fmt = 4\nelif data.dtype.name =='uint8': fmt = 1\nelse: raise Exception('Not supported')\n\nstream = p.open(format = p.get_format_from_width(fmt),\n channels = len(data.shape),\n rate = SRATE,\n frames_per_buffer = CHUNK,\n output = True)\n # stream_callback=callback)\n\n\nalpha = 1\nprev = 0\nwhile c!= 'q':\n bloqueAux = data[frame*CHUNK:(frame+1)*CHUNK]\n bloque = np.copy(bloqueAux)\n\n bloque.setflags(write=1) # para poder escribir\n\n if len(bloque) > 0:\n bloque[0] = prev + alpha * (bloque[0]-prev)\n for i in range(1,len(bloque)):\n bloque[i] = bloque[i-1] + alpha * (bloque[i]-bloque[i-1])\n\n if kb.kbhit():\n c = kb.getch() \n if c == 'c':\n if 1 < alpha + 0.1:\n alpha = 1\n else:\n alpha+=0.1\n #frame = 0\n elif c == 'v':\n if 0 > alpha - 0.1:\n alpha = 0\n else:\n alpha-=0.1\n \n #frame = 0\n elif c == 'x':\n frame = 0\n clear()\n print(\"Alpha= \",alpha)\n if len(bloque) > 0:\n prev = bloque[len(bloque)-1]\n stream.write(bloque.astype((data.dtype)).tobytes())\n frame+=1\n \n\nkb.set_normal_term()\nstream.stop_stream()\nstream.close()\np.terminate()"
},
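The per-sample loop in the filter above is the one-pole exponential smoother y[i] = y[i-1] + alpha*(x[i] - y[i-1]): alpha near 1 passes the block through, alpha near 0 flattens it. The same recurrence, isolated from the audio plumbing:

```
import numpy as np

def one_pole_lowpass(block, alpha, prev=0.0):
    out = np.empty(len(block))
    y = prev
    for i, x in enumerate(block):
        y = y + alpha * (x - y)  # y[i] = y[i-1] + alpha*(x[i] - y[i-1])
        out[i] = y
    return out, y  # return y so the next chunk can continue from it

out, prev = one_pole_lowpass(np.array([0.0, 1.0, 1.0, 1.0]), alpha=0.5)
print(out)  # [0.    0.5   0.75  0.875] -- the step edge is smoothed in
```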
{
"alpha_fraction": 0.5333333611488342,
"alphanum_fraction": 0.553259551525116,
"avg_line_length": 23.938650131225586,
"blob_id": "a5230edf80a5e459f0371963cf6600517e7295fd",
"content_id": "909661a6aa0c047a47881dbc2e4cdd60c752189c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4066,
"license_type": "no_license",
"max_line_length": 76,
"num_lines": 163,
"path": "/Jaime/Entrega3/thereminFM.py",
"repo_name": "nachopaank/MUS",
"src_encoding": "UTF-8",
"text": "import sys\nimport pygame\nfrom pygame.locals import *\nimport numpy as np\nimport pyaudio, kbhit\nfrom scipy.io import wavfile\nimport os\nfrom ctypes import windll, Structure, c_long, byref\nimport tkinter\n\n\"\"\"\nclass POINT(Structure):\n _fields_ = [(\"x\", c_long), (\"y\", c_long)]\n\n\n\ndef queryMousePosition():\n pt = POINT()\n windll.user32.GetCursorPos(byref(pt))\n return { pt.x, pt.y}\n\nimport logging\nimport sys\n\nlogging.basicConfig(format='%(asctime)s %(levelname)s %(message)s',\n level=logging.DEBUG,\n stream=sys.stdout)\n\n\ndef get_mouse_position():\n Get the current position of the mouse.\n\n Returns\n -------\n dict :\n With keys 'x' and 'y'\n mouse_position = None\n import sys\n if sys.platform in ['linux', 'linux2']:\n pass\n elif sys.platform == 'Windows':\n try:\n import win32api\n except ImportError:\n logging.info(\"win32api not installed\")\n win32api = None\n if win32api is not None:\n x, y = win32api.GetCursorPos()\n mouse_position = {'x': x, 'y': y}\n elif sys.platform == 'Mac':\n pass\n else:\n try:\n import Tkinter # Tkinter could be supported by all systems\n except ImportError:\n logging.info(\"Tkinter not installed\")\n Tkinter = None\n if Tkinter is not None:\n p = Tkinter.Tk()\n x, y = p.winfo_pointerxy()\n mouse_position = {'x': x, 'y': y}\n print(\"sys.platform={platform} is unknown. Please report.\"\n .format(platform=sys.platform))\n print(sys.version)\n return mouse_position\n\"\"\"\n\n\nWIDTH = 480\nHEIGHT = 480\nscreen = pygame.display.set_mode((WIDTH, HEIGHT))\npygame.display.set_caption(\"Theremin\")\n\n\nRATE= 44100\nCHUNK = 1024\np = pyaudio.PyAudio()\nkb = kbhit.KBHit()\nc = ' '\nclear = lambda: os.system('cls')\n\nstream = p.open(format = pyaudio.paFloat32,\n channels = 1,\n rate = RATE,\n frames_per_buffer = CHUNK,\n output = True)\n\ndef oscFM(fc,fm,beta,vol,frame):\n interval = np.arange(CHUNK)+frame # array para el chunk\n mod = (RATE*beta)*np.sin(2*np.pi*fm*interval/RATE) # moduladora\n res = np.sin((2*np.pi*fc*interval+mod)/RATE) # portadora\n return vol*res\n# tabla de ondas -> se copia la tabla cíclicamente hasta rellenenar un CHUNK\ndef synthWaveTable(wavetable, frame):\n samples = np.zeros(CHUNK, dtype=np.float32) \n t = frame % (len(wavetable))\n #t = 0\n for i in range(CHUNK):\n samples[i] = wavetable[t]\n t = (t+1) % len(wavetable)\n \n return samples\ndef nextF(fIni,fFin):\n if (fIni < fFin):\n return fIni+1\n elif (fIni > fFin):\n return fIni-1\n return fIni\n# tabla de ondas para un seno de 800 Hz: se almacena un ciclo\nfrec = 800\nwaveTable = np.sin(2*np.pi*frec*np.arange(RATE/frec,dtype=np.float32)/RATE)\nfc = 440\nfm = 300\nbeta = 1\nvol = 1\nframe = 0\npreFrec = 440\npostFrec = 440\np=tkinter.Tk()\nwhile c != 'q': \n samples = synthWaveTable(oscFM(fc,fm,beta,vol,frame),frame)\n stream.write(samples.tobytes())\n frame += CHUNK\n \n if kb.kbhit(): \n c = kb.getch()\n if c == 'F':\n fc = fc + 1\n elif c == 'f':\n fc = fc - 1\n elif c == 'G':\n clear()\n fm = fm + 1\n elif c == 'g':\n fm = fm - 1\n elif c == 'H':\n beta = beta + 0.5\n elif c == 'h':\n beta = beta - 0.5\n elif c == 'J':\n vol = vol + 0.05\n elif c == 'j':\n vol = vol - 0.05\n clear()\n \n print(\"Fc = \", fc) \n print(\"Fm: \", fm)\n print(\"Beta: \", beta)\n print(\"Vol: \", vol)\n #p=tkinter.Tk()\n for event in pygame.event.get():\n if event.type == pygame.MOUSEMOTION:\n x, y = event.pos\n #x, y = p.winfo_pointerxy()\n fc = x/WIDTH*1000\n vol = y/HEIGHT 
\npygame.quit()\n\nkb.set_normal_term()\nstream.stop_stream()\nstream.close()\nkb.set_normal_term()\np.terminate()\n"
},
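`oscFM` above realises sin(2*pi*fc*t + beta*sin(2*pi*fm*t)); the file folds the 1/RATE into the combined phase by pre-multiplying the modulator by RATE. An algebraically equivalent, plainer form of the same oscillator (a sketch, not the repo's code):

```
import numpy as np

RATE, CHUNK = 44100, 1024

def osc_fm(fc, fm, beta, vol, frame):
    t = (np.arange(CHUNK) + frame) / RATE          # time axis in seconds
    mod = beta * np.sin(2 * np.pi * fm * t)        # modulator
    return vol * np.sin(2 * np.pi * fc * t + mod)  # modulated carrier

chunk = osc_fm(fc=440, fm=300, beta=1.0, vol=0.5, frame=0).astype(np.float32)
print(chunk[:4])
```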
{
"alpha_fraction": 0.49531397223472595,
"alphanum_fraction": 0.5149953365325928,
"avg_line_length": 24.117647171020508,
"blob_id": "7a38362bc8416d8ddb0aba709716cda914b13670",
"content_id": "789528cb268ba9bf58c3313bf30be330422846d6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2134,
"license_type": "no_license",
"max_line_length": 69,
"num_lines": 85,
"path": "/Jaime/Entrega2/Filtro_High.py",
"repo_name": "nachopaank/MUS",
"src_encoding": "UTF-8",
"text": "import numpy as np\nimport pyaudio, kbhit\nfrom scipy.io import wavfile\nimport os\n\n\nSRATE, data = wavfile.read(\"tormenta.wav\")\nCHUNK = 1024\np = pyaudio.PyAudio()\n#numbloque = 0\nbloque = np.arange(CHUNK, dtype = data.dtype)\nkb = kbhit.KBHit()\nc = ' '\nclear = lambda: os.system('cls')\nframe = 0\n\nif data.dtype.name =='int16':fmt = 2\nelif data.dtype.name =='int32': fmt = 4\nelif data.dtype.name =='float32': fmt = 4\nelif data.dtype.name =='uint8': fmt = 1\nelse: raise Exception('Not supported')\n\nstream = p.open(format = p.get_format_from_width(fmt),\n channels = len(data.shape),\n rate = SRATE,\n frames_per_buffer = CHUNK,\n output = True)\n # stream_callback=callback)\n\n\nalpha = 0.5\nprev = 0\nflag = True\nprint(\"Alpha= \",alpha)\nwhile c!= 'q':\n bloqueAux = data[frame*CHUNK:(frame+1)*CHUNK]\n bloque = np.copy(bloqueAux)\n\n bloque.setflags(write=1) # para poder escribir\n\n if len(bloque) > 0:\n bloque[0] = prev + alpha * (bloque[0]-prev)\n for i in range(1,len(bloque)):\n bloque[i] = bloque[i-1] + alpha * (bloque[i]-bloque[i-1])\n\n if kb.kbhit():\n c = kb.getch() \n if c == 'c':\n if 1 < alpha * 2:\n alpha = 1\n else:\n alpha*=2\n #frame = 0\n elif c == 'x':\n if 0 > alpha / 2:\n alpha = 0\n else:\n alpha/=2\n \n #frame = 0\n elif c == 'r':\n frame = 0\n elif c == 'a':\n if flag:\n flag = False # High-Pass\n else:\n flag = True # Low-Pass\n clear()\n print(\"Alpha= \",alpha)\n\n if len(bloque) > 0:\n prev = bloque[len(bloque)-1]\n bloqueHigh = np.copy(bloqueAux)\n bloqueHigh = np.subtract(bloqueHigh,bloque)\n \n if flag:\n stream.write(bloqueHigh.astype((data.dtype)).tobytes())\n else:\n stream.write(bloque.astype((data.dtype)).tobytes())\n frame+=1\n\nkb.set_normal_term()\nstream.stop_stream()\nstream.close()\np.terminate()"
},
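The script above obtains the high band by complement: the low-passed block is subtracted from the original (x_high = x - x_low), and the 'a' key chooses which band reaches the stream. The arithmetic in isolation; the x_low values are what the alpha=0.5 smoother yields from zero state:

```
import numpy as np

x = np.array([0.0, 1.0, 0.0, 1.0])          # input block
x_low = np.array([0.0, 0.5, 0.25, 0.625])   # alpha=0.5 smoother output
x_high = x - x_low                          # complementary high band
print(x_high)                               # low + high recombine to x
```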
{
"alpha_fraction": 0.5765224099159241,
"alphanum_fraction": 0.6101762652397156,
"avg_line_length": 27.05617904663086,
"blob_id": "e76c94c46d2a64ecd72178df2f6bb851f2d0bf05",
"content_id": "bc74524057d6b583ca53b155db296ca38d16fe3e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2496,
"license_type": "no_license",
"max_line_length": 102,
"num_lines": 89,
"path": "/Jaime/Entrega3/test.py",
"repo_name": "nachopaank/MUS",
"src_encoding": "UTF-8",
"text": "import numpy as np # arrays\nimport pyaudio, kbhit\nimport pygame\nfrom pygame.locals import *\n\nRATE = 44100; CHUNK = 1024\n\nWIDTH = 64\nHEIGHT = 480\n\n# frecuencia dada f_k, frame inicial, volumen\ndef osc(frec,vol,frame):\n return vol*np.sin(2*np.pi*(np.arange(CHUNK)+frame)*frec/RATE) \n\n# fc carrier (pitch), fm frecuencia moduladora, beta = indice de modulacion\ndef oscFM(fc,fm,beta,vol,frame):\n # sin(2*pi*fc + beta * sin(2*pi*fm))\n interval = np.arange(CHUNK)+frame # array para el chunk\n mod = (RATE*beta)*np.sin(2*np.pi*fm*interval/RATE) # moduladora\n res = np.sin((2*np.pi*fc*interval+mod)/RATE) # portadora\n return vol*res\n\ndef timeToFrame(t): return int(t*RATE) # conversion tiempo a frame\n\ndef env(lst):\n last = timeToFrame(lst[len(lst)-1][0])\n last = last + CHUNK\n samples = np.zeros(last, dtype=np.float32) # senial con ceros\n for i in range(1,len(lst)):\n f1, f2 = timeToFrame(lst[i-1][0]), timeToFrame(lst[i][0])\n v1, v2 = lst[i-1][1], lst[i][1]\n for j in range(f1,f2): # formula de interpolacion\n samples[j] = v1 + (j-f1) * (v2-v1)/(f2-f1)\n\n return samples\n\n\nfc = 300.0\nfm = 1.0\nbeta = 1.0\n\nvol = 0.0\n\nmaxFrec = 1000.0\nmaxVol = 1.0\n\nptosEnv = [(0.0,1.0),(100.0,1.0)]\nlast = len(ptosEnv)-1 # ultimo punto de la envolvente\nendFrame = timeToFrame(ptosEnv[last][0]) # ultimo frame\nprint(ptosEnv[last][0])\nenvSamples = env(ptosEnv) # generamos samples envolvente\nframe = 0\n\np = pyaudio.PyAudio()\nstream = p.open(format=pyaudio.paFloat32, channels=1, rate=RATE, frames_per_buffer=CHUNK, output=True)\nscreen = pygame.display.set_mode((WIDTH, HEIGHT))\npygame.display.set_caption(\"Theremin\")\n\nkb = kbhit.KBHit()\n\nwhile frame < endFrame:\n \n samples = np.zeros(CHUNK,dtype=np.float32)\n\n c= ' '\n if kb.kbhit():\n c = kb.getch()\n if (c=='f'): fm = max(0,fm - 5)\n elif (c=='F'): fm = fm + 5\n if (c=='b'): beta = max(0,beta - 1)\n elif (c=='B'): beta = beta + 1\n print('\\n')\n print(\"FC = \", fc)\n print(\"Vol = \", vol)\n\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n pygame.quit()\n exit()\n if event.type == pygame.MOUSEMOTION:\n mouseX, mouseY = event.pos\n fc = (mouseX*maxFrec)/WIDTH\n vol = (mouseY*maxVol)/HEIGHT\n\n samples = samples+oscFM(fc,fm,beta, vol,frame)\n \n samples = samples * envSamples[frame:frame+CHUNK]\n frame += CHUNK\n stream.write(samples.tobytes())"
},
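`env()` above linearly interpolates (time, value) breakpoints into per-sample gains; `np.interp` performs the same interpolation in one vectorized call. A sketch with illustrative breakpoints:

```
import numpy as np

RATE = 44100
pts = [(0.0, 0.0), (0.1, 1.0), (0.5, 0.0)]  # (seconds, amplitude) breakpoints
times, values = zip(*pts)
frames = np.arange(int(times[-1] * RATE))
env_samples = np.interp(frames / RATE, times, values).astype(np.float32)
print(env_samples[0], env_samples[int(0.1 * RATE)], env_samples[-1])
```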
{
"alpha_fraction": 0.5347810983657837,
"alphanum_fraction": 0.556381344795227,
"avg_line_length": 28.427562713623047,
"blob_id": "1d641666c056364f9311e796c11d4e1545db82e6",
"content_id": "51ab3e78266e2b0c38a8b259bf4d2828b9096193",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 8611,
"license_type": "no_license",
"max_line_length": 110,
"num_lines": 283,
"path": "/Proyecto Final/Builds/VisualStudio2019/Header.h",
"repo_name": "nachopaank/MUS",
"src_encoding": "UTF-8",
"text": "#pragma once\r\n\r\n\r\n//==============================================================================\r\n\r\nusing namespace dsp;\r\n\r\nclass MainContentComponent : public AudioAppComponent,\r\n public ChangeListener\r\n{\r\npublic:\r\n MainContentComponent()\r\n : state(Stopped)\r\n {\r\n addAndMakeVisible(&openButton);\r\n openButton.setButtonText(\"Open mp3/wav file\");\r\n openButton.onClick = [this] { openButtonClicked(); };\r\n\r\n addAndMakeVisible(&playButton);\r\n playButton.setButtonText(\"Play the audio\");\r\n playButton.onClick = [this] { playButtonClicked(); };\r\n playButton.setColour(TextButton::buttonColourId, Colours::green);\r\n playButton.setEnabled(false);\r\n\r\n addAndMakeVisible(&stopButton);\r\n stopButton.setButtonText(\"Stop the audio\");\r\n stopButton.onClick = [this] { stopButtonClicked(); };\r\n stopButton.setColour(TextButton::buttonColourId, Colours::red);\r\n stopButton.setEnabled(false);\r\n\r\n addAndMakeVisible(&midiButton);\r\n midiButton.setButtonText(\"Convert to midi\");\r\n midiButton.onClick = [this] { midiButtonClicked(); };\r\n midiButton.setColour(TextButton::buttonColourId, Colours::blue);\r\n midiButton.setEnabled(false);\r\n setSize(800, 600);\r\n\r\n formatManager.registerBasicFormats(); // [1]\r\n transportSource.addChangeListener(this); // [2]\r\n\r\n setAudioChannels(0, 2);\r\n }\r\n\r\n ~MainContentComponent() override\r\n {\r\n shutdownAudio();\r\n }\r\n\r\n void prepareToPlay(int samplesPerBlockExpected, double sampleRate) override\r\n {\r\n transportSource.prepareToPlay(samplesPerBlockExpected, sampleRate);\r\n }\r\n\r\n void getNextAudioBlock(const AudioSourceChannelInfo& bufferToFill) override\r\n {\r\n if (readerSource.get() == nullptr)\r\n {\r\n bufferToFill.clearActiveBufferRegion();\r\n return;\r\n }\r\n transportSource.getNextAudioBlock(bufferToFill);\r\n\r\n FFT wow = STFT(bufferToFill, 256);\r\n\r\n\r\n\r\n for (auto channel = 0; channel < 1; ++channel)\r\n {\r\n auto* inBuffer = bufferToFill.buffer->getReadPointer(channel,\r\n bufferToFill.startSample);\r\n auto* outBuffer = bufferToFill.buffer->getWritePointer(channel, bufferToFill.startSample);\r\n for (auto sample = 0; sample < bufferToFill.numSamples; ++sample) {\r\n //outBuffer[sample] = inBuffer[sample] * random.nextFloat() * level;\r\n }\r\n }\r\n ///\r\n //analyze();\r\n ///\r\n\r\n }\r\n dsp::FFT STFT(const AudioSourceChannelInfo& bufferToFill, size_t hop) {\r\n const float* data = bufferToFill.buffer->getReadPointer(0);\r\n const size_t dataCount = bufferToFill.buffer->getNumSamples();\r\n\r\n // fftSize will be the number of bins we used to initialize the ASpectroMaker.\r\n ptrdiff_t fftSize = forwardFFT.getSize();\r\n\r\n // forwardFFT works on the data in place, and needs twice as much space as the input size.\r\n std::vector<float> fftBuffer(fftSize * 2UL);\r\n\r\n // While data remains\r\n\r\n {\r\n std::memcpy(fftBuffer.data(), data, fftSize * sizeof(float));\r\n\r\n // prepare fft data...\r\n\r\n forwardFFT.performFrequencyOnlyForwardTransform(fftBuffer.data());\r\n\r\n // ...copy the frequency information from fftBuffer to the spectrum\r\n\r\n // Next chunk\r\n data += hop;\r\n }\r\n\r\n return forwardFFT;\r\n //...\r\n }\r\n void analyze() {\r\n int cuando;\r\n int nota;\r\n int velocity = 100;\r\n //aqui esto\r\n MidiMessage m(MidiMessage::noteOn(1, nota, (uint8)velocity));\r\n m.setTimeStamp(cuando);\r\n sequence.addEvent(m);\r\n }\r\n\r\n void releaseResources() override\r\n {\r\n transportSource.releaseResources();\r\n }\r\n\r\n void 
resized() override\r\n {\r\n openButton.setBounds(20, 20, 760, 40);\r\n playButton.setBounds(20, 80, 760, 40);\r\n stopButton.setBounds(20, 140, 760, 40);\r\n midiButton.setBounds(20, 200, 760, 40);\r\n }\r\n\r\n void changeListenerCallback(ChangeBroadcaster* source) override\r\n {\r\n if (source == &transportSource)\r\n {\r\n if (transportSource.isPlaying())\r\n changeState(Playing);\r\n else\r\n changeState(Stopped);\r\n }\r\n }\r\n\r\nprivate:\r\n enum TransportState\r\n {\r\n Stopped,\r\n Starting,\r\n Playing,\r\n Stopping,\r\n Noising\r\n };\r\n\r\n void changeState(TransportState newState)\r\n {\r\n if (state != newState)\r\n {\r\n state = newState;\r\n\r\n switch (state)\r\n {\r\n case Stopped:\r\n stopButton.setEnabled(false);\r\n playButton.setEnabled(true);\r\n transportSource.setPosition(0.0);\r\n break;\r\n\r\n case Starting:\r\n playButton.setEnabled(false);\r\n transportSource.start();\r\n break;\r\n\r\n case Playing:\r\n stopButton.setEnabled(true);\r\n break;\r\n\r\n case Stopping:\r\n transportSource.stop();\r\n break;\r\n }\r\n }\r\n }\r\n\r\n void openButtonClicked()\r\n {\r\n FileChooser chooser(\"Select a mp3 file to play...\",\r\n {},\r\n \"*.mp3\");\r\n\r\n if (chooser.browseForFileToOpen())\r\n {\r\n auto file = chooser.getResult();\r\n auto* reader = formatManager.createReaderFor(file);\r\n\r\n if (reader != nullptr)\r\n {\r\n std::unique_ptr<AudioFormatReaderSource> newSource(new AudioFormatReaderSource(reader, true));\r\n transportSource.setSource(newSource.get(), 0, nullptr, reader->sampleRate);\r\n playButton.setEnabled(true);\r\n midiButton.setEnabled(true);\r\n readerSource.reset(newSource.release());\r\n }\r\n }\r\n }\r\n\r\n void playButtonClicked()\r\n {\r\n changeState(Starting);\r\n }\r\n void midiButtonClicked()\r\n {\r\n File fi;\r\n FileChooser chooser(\"Select a name...\",\r\n {},\r\n \"*.mid\");\r\n\r\n if (chooser.browseForFileToSave(true))\r\n {\r\n fi = chooser.getResult();\r\n auto* reader = formatManager.createReaderFor(fi);\r\n }\r\n\r\n MidiFile mfile;\r\n FileOutputStream stream = fi;\r\n /*\r\n MidiMessage m(MidiMessage::noteOn(1, 30, (uint8)100));\r\n m.setTimeStamp(0);\r\n sequence.addEvent(m);\r\n MidiMessage m2(MidiMessage::noteOn(1, 5, (uint8)100));\r\n m2.setTimeStamp(0);\r\n sequence.addEvent(m2);\r\n MidiMessage m3(MidiMessage::noteOn(1, 10, (uint8)50));\r\n m3.setTimeStamp(0);\r\n sequence.addEvent(m3);\r\n sequence.updateMatchedPairs();\r\n MidiMessage m4(MidiMessage::noteOn(1, 15, (uint8)100));\r\n m4.setTimeStamp(0);\r\n sequence.addEvent(m4);\r\n MidiMessage m5(MidiMessage::noteOn(1, 25, (uint8)100));\r\n m5.setTimeStamp(0);\r\n sequence.addEvent(m5);\r\n MidiMessage m6(MidiMessage::noteOff(1, 30, (uint8)100));\r\n m6.setTimeStamp(300);\r\n sequence.addEvent(m6);\r\n MidiMessage m7(MidiMessage::noteOff(1, 5, (uint8)100));\r\n m7.setTimeStamp(300);\r\n sequence.addEvent(m7);\r\n MidiMessage m8(MidiMessage::noteOff(1, 10, (uint8)50));\r\n m8.setTimeStamp(400);\r\n sequence.addEvent(m8);\r\n MidiMessage m9(MidiMessage::noteOff(1, 15, (uint8)100));\r\n m9.setTimeStamp(400);\r\n sequence.addEvent(m9);\r\n MidiMessage m10(MidiMessage::noteOff(1, 25, (uint8)100));\r\n m10.setTimeStamp(400);\r\n sequence.addEvent(m10);\r\n sequence.updateMatchedPairs();\r\n */\r\n mfile.setTicksPerQuarterNote(100);\r\n mfile.addTrack(sequence);\r\n mfile.writeTo(stream);\r\n\r\n }\r\n void stopButtonClicked()\r\n {\r\n changeState(Stopping);\r\n }\r\n\r\n //==========================================================================\r\n TextButton 
openButton;\r\n    TextButton playButton;\r\n    TextButton stopButton;\r\n    TextButton midiButton;\r\n    Label levelLabel;\r\n    Random random;\r\n    AudioFormatManager formatManager;\r\n    std::unique_ptr<AudioFormatReaderSource> readerSource;\r\n    AudioTransportSource transportSource;\r\n    TransportState state;\r\n    MidiMessageSequence sequence;\r\n    dsp::FFT forwardFFT; // was missing: STFT() above uses it; initialized in the constructor init list\r\n\r\n    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR(MainContentComponent)\r\n};\r\n"
},
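The draft component above slides an FFT across the incoming audio buffer in fixed hops of 256 samples. A minimal NumPy sketch of that short-time-transform idea, outside the JUCE API (the 2048-sample frame, the hop and the Hann window are my choices):

```python
import numpy as np

def stft_magnitudes(signal, fft_size=2048, hop=256):
    """Slide a Hann-windowed frame across `signal`; return one magnitude spectrum per hop."""
    window = np.hanning(fft_size)
    frames = []
    for start in range(0, len(signal) - fft_size + 1, hop):
        frame = signal[start:start + fft_size] * window
        frames.append(np.abs(np.fft.rfft(frame)))
    return np.array(frames)

# A 440 Hz tone should peak near bin 440 * fft_size / srate ~= 20
srate = 44100
t = np.arange(srate) / srate
mags = stft_magnitudes(np.sin(2 * np.pi * 440 * t))
print(mags.shape, int(mags[0].argmax()))
```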
{
"alpha_fraction": 0.7783595323562622,
"alphanum_fraction": 0.7923211455345154,
"avg_line_length": 80.14286041259766,
"blob_id": "3ec0c7600fd5fb3d02893bc7d2f5dffb39baa910",
"content_id": "34d6c5555d5044ab1f55edb93e974d8f9ce29b41",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 575,
"license_type": "no_license",
"max_line_length": 168,
"num_lines": 7,
"path": "/Reaper/Entrega1/README.txt",
"repo_name": "nachopaank/MUS",
"src_encoding": "UTF-8",
"text": "Nombres: Ignacio de la Cruz Crespo / Sergio José Gómez Cortés\r\n\r\nHemos creado el happy birthday en la que hay un canal para la melodia y otro para la melodia\r\nLa melodia lleva el efecto ReaDelay con wet y dry disminuidos\r\ndry es el volumen del sonido inicial (0-100) y wet el volumen de cada repeticion (0-100)\r\nNo se distingue demasiado pero hay sustain de piano, se sobrecarga demasiado el sonido si añado mas, en el Happy birthday no tiene mucho sentido usar excesivamente esto\r\nHemos creado una pista de percusion con bucle y síncrona aunque no cuadra del todo porque tenemos conocimientos nulos de bateria"
},
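The README describes ReaDelay's dry control (level of the original signal) and wet control (level of each repetition). A small NumPy sketch of that kind of mix, to make the two knobs concrete (the function and parameter names are mine, not ReaDelay's):

```python
import numpy as np

def delay_mix(signal, srate, delay_s=0.25, repeats=3, dry=0.8, wet=0.3):
    """Overlay `repeats` progressively quieter echoes on top of the dry signal."""
    shift = int(delay_s * srate)
    out = np.zeros(len(signal) + shift * repeats)
    out[:len(signal)] += dry * signal                                  # dry: the untouched input
    for i in range(1, repeats + 1):
        out[i * shift:i * shift + len(signal)] += (wet ** i) * signal  # wet: each repetition
    return out

srate = 44100
tone = np.sin(2 * np.pi * 440 * np.arange(srate // 2) / srate)
print(len(tone), len(delay_mix(tone, srate)))  # output is longer by three echo tails
```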
{
"alpha_fraction": 0.5069573521614075,
"alphanum_fraction": 0.5746753215789795,
"avg_line_length": 24.678571701049805,
"blob_id": "8be9c85a2d80a5e708b81960534ee99340f9f575",
"content_id": "8709eb3a3f3f1fd63fbbb8b9aeec8ad738646684",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2156,
"license_type": "no_license",
"max_line_length": 76,
"num_lines": 84,
"path": "/Jaime/Entrega1/HappyEntregable.py",
"repo_name": "nachopaank/MUS",
"src_encoding": "UTF-8",
"text": "import numpy as np \nimport pyaudio, wave, kbhit\n\nCHUNK = 1024\nSRATE = 44100\nframe = 0\n\nkb = kbhit.KBHit()\np = pyaudio.PyAudio()\ndata = []\nvol = 1.0\nc = ' '\nnumBloque = 0\n\n# Escribo la cancion Happy Birthday de forma que sea facilmente leible\nhappy = [('G',0.5),('G',0.5),('A',1),('G',1),('c',1),('B',2),\n('G',0.5),('G',0.5),('A',1),('G',1),('d',1),('c',2),\n('G',0.5),('G',0.5),('g',1),('e',1),('c',1),('B',1),('A',1),\n('f',0.5),('f',0.5),('e',1),('c',1),('d',1),('c',2)]\n\nFORMAT = pyaudio.paFloat32; CHANNELS = 1\nstream = p.open(format=FORMAT, channels=CHANNELS,\nrate=SRATE, output=True,\nframes_per_buffer=CHUNK) \n\n\n#Genero un oscilador con todos los parametros necesarios\ndef oscC2(frec, frame, vol, dur):\n data = vol*np.sin(2*np.pi*frec*(np.arange(CHUNK*dur)+frame*CHUNK)/SRATE)\n frame += CHUNK\n return data\n\n# Me defino una funcion switch que no existe en python\ndef switch(nota):\n if nota == 'A':\n return 880\n elif nota == 'B':\n return 987.767 \n elif nota == 'C':\n return 523.251\n elif nota == 'D':\n return 587.33\n elif nota == 'E':\n return 659.255\n elif nota == 'F':\n return 698.456\n elif nota == 'G':\n return 783.991\n elif nota == 'a':\n return 880*2\n elif nota == 'b':\n return 987.767*2\n elif nota == 'c':\n return 523.251*2\n elif nota == 'd':\n return 587.33*2\n elif nota == 'e':\n return 659.255*2\n elif nota == 'f':\n return 698.456*2\n elif nota == 'g':\n return 783.991*2\n else:\n return 0\n\n# Construtyo el data con las notas de Happy y le intercalo silencios \n# para que las notas consecutivas iguales suenen bien\nfor x in happy:\n data2 = oscC2(switch(x[0]), frame, vol, x[1]*20)\n data = np.append(data,data2)\n data = np.append(data, oscC2(0, frame, vol, 1))\n\n# Y las reproduzco por bloques\nwhile c != 'q':\n bloque = data[numBloque*CHUNK : numBloque*CHUNK+CHUNK]\n stream.write(bloque.astype((data.dtype)).tobytes())\n if kb.kbhit(): c = kb.getch()\n numBloque += 1\n \nkb.set_normal_term()\nstream.stop_stream()\nstream.close()\nkb.set_normal_term()\np.terminate()"
},
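HappyEntregable.py hard-codes every pitch in its switch helper; the same table falls out of twelve-tone equal temperament, where each semitone multiplies the frequency by 2**(1/12). A sketch deriving the script's values (the helper is my own, not part of the repo):

```python
A4 = 440.0

def note_freq(semitones_from_a4):
    """Frequency of the pitch `semitones_from_a4` semitones away from A4 (440 Hz)."""
    return A4 * 2 ** (semitones_from_a4 / 12)

print(round(note_freq(3), 3))    # 523.251 -> the script's 'C' (C5)
print(round(note_freq(12), 3))   # 880.0   -> the script's 'A' (A5)
print(round(note_freq(14), 3))   # 987.767 -> the script's 'B' (B5)
```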
{
"alpha_fraction": 0.6288014054298401,
"alphanum_fraction": 0.6457960605621338,
"avg_line_length": 32.878787994384766,
"blob_id": "db2358986f0ec025b8577d36a965acbb9ec884d0",
"content_id": "2e2b255cc5b190f07bd63d504bfc5b04ac9d144e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1118,
"license_type": "no_license",
"max_line_length": 73,
"num_lines": 33,
"path": "/Jaime/Entrega1/Idiotizadorentrega.py",
"repo_name": "nachopaank/MUS",
"src_encoding": "UTF-8",
"text": "# basic/record0.py Grabacion de un archivo de audio 'q' para terminar\nimport pyaudio, wave, kbhit\nimport numpy as np\nCHUNK = 1024; FORMAT = pyaudio.paInt16; CHANNELS = 2; RATE = 44100\np = pyaudio.PyAudio()\nstream = p.open(format=FORMAT, channels=CHANNELS,\n rate=RATE, input=True, # ahora es flujo de entrada\n frames_per_buffer=CHUNK) # tamanio buffer == CHUNK !!\nstream2 = p.open(format = FORMAT,\n channels = CHANNELS,\n rate = RATE,\n frames_per_buffer = CHUNK,\n output = True)\nframes = [] # lista de samples\nkb = kbhit.KBHit()\nc = ' '\ndata = stream.read(CHUNK)\ndef delay(data,nb,dur,CHUNK):\n dataTemp=[]\n dataTemp=np.append(dataTemp,data[:nb*CHUNK+CHUNK])\n dataTemp=np.append(dataTemp,np.zeros(dur*20*CHUNK,dtype=int))\n dataTemp=np.append(dataTemp,data[nb*CHUNK+CHUNK:])\n return dataTemp\nwhile c != 'q': # grabando\n\n data = stream.read(CHUNK)\n #data = delay(data,0,1,CHUNK)\n stream2.write(data)\n if kb.kbhit(): \n c = kb.getch()\n\nkb.set_normal_term(); stream.stop_stream(); stream.close(); p.terminate()\n#guardamos wav\n"
},
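The commented-out delay() above rebuilds the whole array just to insert silence; for a live input/output loop like this one, an echo is usually kept as per-block state in a ring buffer so each chunk can be processed as it arrives. A sketch of that alternative (my own code, float blocks assumed):

```python
import numpy as np

class BlockEcho:
    """Feedback echo that works block-by-block, keeping its state between calls."""
    def __init__(self, delay_samples, feedback=0.5):
        self.buf = np.zeros(delay_samples, dtype=np.float32)  # circular delay line
        self.pos = 0
        self.feedback = feedback

    def process(self, block):
        out = np.empty_like(block)
        for i, x in enumerate(block):
            delayed = self.buf[self.pos]          # sample written delay_samples ago
            out[i] = x + self.feedback * delayed
            self.buf[self.pos] = out[i]           # feed the output back in
            self.pos = (self.pos + 1) % len(self.buf)
        return out

echo = BlockEcho(delay_samples=8000, feedback=0.4)
block = np.zeros(1024, dtype=np.float32); block[0] = 1.0   # an impulse
print(echo.process(block)[:3])
```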
{
"alpha_fraction": 0.5025193095207214,
"alphanum_fraction": 0.5277124643325806,
"avg_line_length": 26.574073791503906,
"blob_id": "8333bdbb1685c87a240dc8fc11f83383eec7b93c",
"content_id": "f8563ec299f8e6bd3ca4bc47c690ae5015c0696e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2977,
"license_type": "no_license",
"max_line_length": 84,
"num_lines": 108,
"path": "/Jaime/Entrega2/Filtro_BP.py",
"repo_name": "nachopaank/MUS",
"src_encoding": "UTF-8",
"text": "import numpy as np\nimport pyaudio, kbhit\nfrom scipy.io import wavfile\nimport os\nclear = lambda: os.system('cls')\nSRATE, data = wavfile.read(\"tormenta.wav\")\nCHUNK = 1024\np = pyaudio.PyAudio()\nbloque = np.arange(CHUNK, dtype = data.dtype)\nkb = kbhit.KBHit()\nc = ' '\nframe = 0\n\nif data.dtype.name =='int16':fmt = 2\nelif data.dtype.name =='int32': fmt = 4\nelif data.dtype.name =='float32': fmt = 4\nelif data.dtype.name =='uint8': fmt = 1\nelse: raise Exception('Not supported')\n\nstream = p.open(format = p.get_format_from_width(fmt),\n channels = len(data.shape),\n rate = SRATE,\n frames_per_buffer = CHUNK,\n output = True)\n\nlow = 0.4\nhigh = 0.6\nprev = 0\nflag = False\n\n\"\"\"alpha = 0.5\nprev = 0\nflag = True\"\"\"#Cambiar alpha y su bloque para tener la suma de los 2 filtros\n#Conseguir cambiar la posicion del ancho de banda\n#Conseguir cambiar el ancho de banda\n\nwhile c!= 'q':\n bloqueAux = data[frame*CHUNK:(frame+1)*CHUNK]\n bloqueLow = np.copy(bloqueAux)\n bloqueHigh = np.copy(bloqueAux)\n\n bloqueLow.setflags(write=1) # para poder escribir\n bloqueHigh.setflags(write=1)\n\n #Construccion del paso bajo\n if len(bloqueLow) > 0:\n bloqueLow[0] = prev + low * (bloqueLow[0]-prev)\n for i in range(1,len(bloqueLow)):\n bloqueLow[i] = bloqueLow[i-1] + low * (bloqueLow[i]-bloqueLow[i-1])\n\n #Construccion del paso alto\n if len(bloqueHigh) > 0:\n bloqueHigh[0] = prev + high * (bloqueHigh[0]-prev)\n for i in range(1,len(bloqueHigh)):\n bloqueHigh[i] = bloqueHigh[i-1] + high * (bloqueHigh[i]-bloqueHigh[i-1])\n #Contruccion del bloque final\n bloqueFin = np.subtract(bloqueHigh,bloqueLow)\n\n if kb.kbhit():\n c = kb.getch() \n #Desplazar banda a la derecha \n if c == 'c':\n if high + 0.1 <= 1:\n high += 0.1\n low += 0.1\n #Desplazar banda a la izquierda\n elif c == 'x':\n if low - 0.1 >= 0:\n low -= 0.1\n high -= 0.1\n\n #Desplazar independientemente\n elif c == 's':\n if low - 0.1 >= 0:\n low -= 0.1\n elif c == 'd':\n if low + 0.1 <= high:\n low += 0.1\n elif c == 'w':\n if high - 0.1 >= low:\n high -= 0.1\n elif c == 'e':\n if high + 0.1 <= 1:\n high += 0.1\n #Inversion de Filtro\n elif c == 'a':\n if flag:\n flag = False\n else:\n flag = True\n \n #Reset\n elif c == 'r':\n frame = 0\n clear()\n print(\"Low= \",low,\" High= \", high)\n\n if len(bloqueLow) > 0:\n if flag:\n bloqueFin = np.subtract(bloqueAux,bloqueFin)\n stream.write(bloqueFin.astype((data.dtype)).tobytes())\n frame+=1\n \n\nkb.set_normal_term()\nstream.stop_stream()\nstream.close()\np.terminate()"
},
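Filtro_BP's inner loops implement the one-pole smoother y[n] = y[n-1] + alpha*(x[n] - y[n-1]) twice, with alphas low and high, and the band-pass is their difference. The same recurrence can be expressed as an IIR filter and handed to scipy.signal.lfilter, which also returns the state needed for block-to-block continuity (a sketch of the technique, not the repo's code):

```python
import numpy as np
from scipy.signal import lfilter

def one_pole(x, alpha, zi=None):
    """y[n] = y[n-1] + alpha*(x[n] - y[n-1]); returns (y, state) so blocks can chain."""
    b, a = [alpha], [1.0, -(1.0 - alpha)]
    return lfilter(b, a, x, zi=np.zeros(1) if zi is None else zi)

srate = 8000
t = np.arange(srate) / srate
x = np.sin(2 * np.pi * 50 * t) + np.sin(2 * np.pi * 1000 * t)
lo, lo_state = one_pole(x, 0.4)
hi, hi_state = one_pole(x, 0.6)
band = hi - lo   # keeps what the wider low-pass passes but the narrower one removes
print(float(np.abs(band).max()))
```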
{
"alpha_fraction": 0.48935335874557495,
"alphanum_fraction": 0.5242325067520142,
"avg_line_length": 35.611793518066406,
"blob_id": "7be85b75be414f29ab4386f11734938a11146def",
"content_id": "5e76b69a6f7548d2dfed06d4815d80b964fab7f4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 15313,
"license_type": "no_license",
"max_line_length": 199,
"num_lines": 407,
"path": "/Proyecto Final/Source/MainComponent.h",
"repo_name": "nachopaank/MUS",
"src_encoding": "ISO-8859-10",
"text": "\r\n#pragma once\r\n\r\n//==============================================================================\r\nclass MainContentComponent : public AudioAppComponent,\r\n public ChangeListener\r\n{\r\npublic:\r\n MainContentComponent()\r\n : state(Stopped),\r\n forwardFFT(fftOrder)\r\n {\r\n //SETUP BOTONS\r\n addAndMakeVisible(&openButton);\r\n openButton.setButtonText(\"Open mp3/wav file\");\r\n openButton.onClick = [this] { openButtonClicked(); };\r\n\r\n addAndMakeVisible(&playButton);\r\n playButton.setButtonText(\"Play the audio\");\r\n playButton.onClick = [this] { playButtonClicked(); };\r\n playButton.setColour(TextButton::buttonColourId, Colours::green);\r\n playButton.setEnabled(false);\r\n\r\n addAndMakeVisible(&stopButton);\r\n stopButton.setButtonText(\"Stop the audio\");\r\n stopButton.onClick = [this] { stopButtonClicked(); };\r\n stopButton.setColour(TextButton::buttonColourId, Colours::red);\r\n stopButton.setEnabled(false);\r\n\r\n addAndMakeVisible(&midiButton);\r\n midiButton.setButtonText(\"Convert to midi\");\r\n midiButton.onClick = [this] { midiButtonClicked(); };\r\n midiButton.setColour(TextButton::buttonColourId, Colours::blue);\r\n midiButton.setEnabled(false);\r\n setSize(800, 600);\r\n\r\n addAndMakeVisible(audioSelector);\r\n\r\n //SELECCIONAR TIPO DE AUDIO\r\n audioSelector.addItem(\"mp3\", 1);\r\n audioSelector.addItem(\"wav\", 2);\r\n audioSelector.setSelectedId(1);\r\n audioSelector.onChange = [this] {\r\n switch (audioSelector.getSelectedId())\r\n {\r\n case 1: currentAudio = \"mp3\"; break;\r\n case 2: currentAudio = \"wav\"; break;\r\n }};\r\n formatManager.registerBasicFormats(); \r\n transportSource.addChangeListener(this); \r\n currentNote = 0; //al empezar no hay nota\r\n fillNotes(); //mapeamos las notas\r\n startTime = 0; //empiezo en 0\r\n setAudioChannels(0, 2);//sin canales de entrada y si de salida\r\n }\r\n\r\n ~MainContentComponent() override\r\n {\r\n shutdownAudio();\r\n }\r\n\r\n void prepareToPlay(int samplesPerBlockExpected, double sampleRate) override\r\n {\r\n transportSource.prepareToPlay(samplesPerBlockExpected, sampleRate); //para reproducir el audio\r\n }\r\n void fillNotes() {\r\n mapNotes.set(7, 155.56); //RE#3\r\n mapNotes.set(700, 164.81); //MI3 \r\n mapNotes.set(8, 174.61); //FA3\r\n mapNotes.set(800, 185); //FA#3 \r\n mapNotes.set(80000, 196); //SOL3 Cuidao\r\n mapNotes.set(9, 207.65); //SOL#3 Cuidao\r\n mapNotes.set(900, 220); //LA3 \r\n mapNotes.set(10, 233.08); //LA#3 \r\n mapNotes.set(11, 246.94); //SI3 \r\n mapNotes.set(1100, 261.63); //DO4\r\n mapNotes.set(12, 277.18); //DO#4 \r\n mapNotes.set(13, 293.66); //RE4 \r\n mapNotes.set(1300, 311.13); //RE#4 \r\n mapNotes.set(14, 329.63); //MI4\r\n mapNotes.set(15, 349.23); //FA4\r\n mapNotes.set(16, 369.99); //FA#4\r\n mapNotes.set(17, 392); //SOL4\r\n mapNotes.set(18, 415.3); //SOL#4 \r\n mapNotes.set(19, 440); //LA4\r\n mapNotes.set(20, 466.16); //LA#4\r\n mapNotes.set(21, 493.88); //SI4\r\n mapNotes.set(22, 523.25); //DO5\r\n mapNotes.set(24, 554.37); //DO#5\r\n mapNotes.set(25, 587.33); //RE5\r\n mapNotes.set(26, 622.25); //RE#5\r\n mapNotes.set(28, 659.25); //MI5\r\n mapNotes.set(30, 698.46); //FA5 \r\n mapNotes.set(32, 739.99); //FA#5 \r\n mapNotes.set(34, 783.99); //SOL5\r\n mapNotes.set(36, 830.61); //SOL#5 \r\n mapNotes.set(37, 880); //LA5\r\n mapNotes.set(38, 880); //LA5 marcado\r\n mapNotes.set(39, 880); //LA5\r\n mapNotes.set(40, 932.33); //LA#5 marcado\r\n mapNotes.set(41, 932.33); //LA#5\r\n mapNotes.set(42, 932.33); //LA#5\r\n mapNotes.set(43, 987.77); 
//SI5 \r\n mapNotes.set(44, 987.77); //SI5 \r\n mapNotes.set(45, 1046.5); //DO6 \r\n }\r\n void getNextAudioBlock(const AudioSourceChannelInfo& bufferToFill) override\r\n {\r\n if (readerSource.get() == nullptr) //si hemos acabado\r\n {\r\n bufferToFill.clearActiveBufferRegion();\r\n return;\r\n }\r\n\r\n transportSource.getNextAudioBlock(bufferToFill); //siguiente bloque\r\n\r\n for (auto channel = 0; channel < 1; ++channel)\r\n {\r\n auto* inBuffer = bufferToFill.buffer->getReadPointer(channel,\r\n bufferToFill.startSample);\r\n auto* outBuffer = bufferToFill.buffer->getWritePointer(channel, bufferToFill.startSample);\r\n for (auto sample = 0; sample < bufferToFill.numSamples; ++sample)\r\n pushNextSampleIntoFifo(inBuffer[sample]); //meto las samples del bloque al fifo\r\n }\r\n double sampleRate = 44100;\r\n double freq = mapNotes.operator[](trueMax()); //averiguamos la frecuencia del bloque a traves de un analisis nuestro que realiza trueMax()\r\n int m = round(((log(freq * 32 / 440) / log(2)) * 12) + 9); //transformo esa frecuencia al numero de nota que es para construir despues el mensaje midi (ej. do3 = 48)\r\n if (nextFFTBlockReady) //para no perder informacion\r\n { \r\n forwardFFT.performFrequencyOnlyForwardTransform(fftData); //aplico la transformada de fourier (ver memoria para explicacion)\r\n nextFFTBlockReady = false;\r\n }\r\n \r\n if (trueMax() == 0 && currentNote != 0 ) { //caso particular de nuestro analisis, que si devuelve 0 quiere decir que no hay sonido\r\n endNote(currentNote);\r\n currentNote = 0;\r\n }\r\n if (currentNote != m && freq < 1500 && m >= 0 && freq > 0) { //si tengo una nota distinta a la que habia con una frecuencia y numero de nota valido, termino la anterior y empiezo la nueva\r\n if (currentNote != 0) {\r\n endNote(currentNote); //terminar la nota\r\n }\r\n newNote(m); //empiezo nota nueva\r\n currentNote = m; //ahora la nota actual es la nueva\r\n }\r\n startTime = startTime + (bufferToFill.numSamples / sampleRate); //num_samples = 480, esto es el tiempo en segundos al que equivale un bloque\r\n\r\n }\r\n void newNote(int note) {\r\n int velocity = 100;\r\n MidiMessage m(MidiMessage::noteOn(1, note, (uint8)velocity)); //creo mensaje de tipo midi de que empieza nota\r\n\r\n m.setTimeStamp((startTime*1000)*0.2); //le asigno un lugar en el tiempo y multiplico por la constante 0.2 para transformar de milisegundos a beats\r\n sequence.addEvent(m); //aņado este mensaje a una lista\r\n }\r\n void endNote(int note) {\r\n int velocity = 100;\r\n MidiMessage m(MidiMessage::noteOff(1, note, (uint8)velocity)); //creo mensaje de tipo midi de que empieza nota\r\n m.setTimeStamp((startTime*1000)*0.2); //le asigno un lugar en el tiempo y multiplico por la constante 0.2 para transformar de milisegundos a beats\r\n sequence.addEvent(m); //aņado este mensaje a una lista\r\n\r\n }\r\n void pushNextSampleIntoFifo(float sample) noexcept //relleno fftData con politica first in first out\r\n {\r\n if (fifoIndex == fftSize)\r\n {\r\n if (!nextFFTBlockReady)\r\n {\r\n zeromem(fftData, sizeof(fftData));\r\n memcpy(fftData, fifo, sizeof(fifo));\r\n nextFFTBlockReady = true;\r\n }\r\n fifoIndex = 0;\r\n }\r\n\r\n fifo[fifoIndex++] = sample;\r\n }\r\n\r\n \r\n double trueMax() { //analisis de la transformada\r\n int umbral = 30;\r\n double empty = fftData[0];\r\n for (int i = 1; i < fftSize / 2; i++) {\r\n empty = empty + fftData[i];\r\n if (fftData[i] < 0) { //si tengo una onda y no la transformada quiere decir que sigo en la misma nota, -1 equivale a no hacer nada con el 
bloque\r\n return -1;\r\n }\r\n if (fftData[i] > fftData[i + 1] && fftData[i] > fftData[i - 1]) { //observo un pico dentro de la fft\r\n //Cuanto mas alto sea el indice en el que encuentro un pico, cuyo valor debe ser mayor que un umbral que determina dicho indice, mas alta sera la nota\r\n //Existen casos particulares (mas que particulares cuando la nota es muy grave) en los que un mismo indice de pico corresponde a varias notas, en cuyo caso lo distinguimos en funcion \r\n //de la distancia del siguiente pico, es complejo de ver en el codigo, vease NotasMapaConceptual o en la defensa de este proyecto oralmente\r\n\r\n if (i <= 11 && fftData[i] > 20) {\r\n if (i == 7) {\r\n if (fftData[14] > 120) {\r\n return i * 100;\r\n }\r\n else {\r\n return i;\r\n }\r\n }\r\n if (i == 8) {\r\n if (fftData[15] > 150) {\r\n return i;\r\n }\r\n else if (fftData[16] > 100) {\r\n return i * 100;\r\n } \r\n else{\r\n return i * 10000;\r\n }\r\n }\r\n if (i == 9) {\r\n if (fftData[18] > 150) {\r\n return i;\r\n }\r\n else {\r\n return i*100;\r\n }\r\n }\r\n if (i == 11) {\r\n if (fftData[21] > 80) {\r\n return i;\r\n }\r\n else {\r\n return i*100;\r\n }\r\n }\r\n return i;\r\n }\r\n\r\n if (i > 11 && i < 22 && fftData[i] > 42) {\r\n if (i == 13) {\r\n if (fftData[25] > 50) {\r\n return i;\r\n }\r\n else {\r\n return i * 100;\r\n }\r\n }\r\n return i;\r\n }\r\n if (i >= 22 && i < 50 && fftData[i] > 100) { //a partir de ciertas notas agudas el problema del indice no ocurre, por eso nuestro programa es mas exacto cuanto mas agudo\r\n return i;\r\n }\r\n }\r\n }\r\n if (empty < 2) { //si la suma de los valores de la transformada es <2 quiere decir que no hay sonido, o es despreciable\r\n return 0;\r\n }\r\n return -1;\r\n \r\n }\r\n\r\n\r\n void releaseResources() override\r\n {\r\n transportSource.releaseResources();\r\n }\r\n\r\n void resized() override //defino los bordes de los botones, puramente visual\r\n {\r\n openButton.setBounds(20, 20, 640, 40);\r\n audioSelector.setBounds(680, 20, 80, 40);\r\n playButton.setBounds(20, 80, 760, 40);\r\n stopButton.setBounds(20, 140, 760, 40);\r\n midiButton.setBounds(20, 200, 760, 40);\r\n }\r\n\r\n void changeListenerCallback(ChangeBroadcaster* source) override //introduzco instrucciones mientras este sonando algo, cuando pare puede escribirse a archivo\r\n {\r\n if (source == &transportSource)\r\n {\r\n if (transportSource.isPlaying())\r\n changeState(Playing);\r\n else {\r\n changeState(Stopped);\r\n midiButton.setEnabled(true);\r\n }\r\n }\r\n }\r\n\r\nprivate:\r\n enum\r\n {\r\n fftOrder =11,\r\n fftSize = 1 << fftOrder,\r\n scopeSize = 1024\r\n };\r\n enum TransportState\r\n {\r\n Stopped,\r\n Starting,\r\n Playing,\r\n Stopping\r\n };\r\n\r\n void changeState(TransportState newState) //en que estado estoy de la reproduccion, para no poder reproducir si no tengo archivo disponible, por ejemplo\r\n {\r\n if (state != newState)\r\n {\r\n state = newState;\r\n\r\n switch (state)\r\n {\r\n case Stopped:\r\n stopButton.setEnabled(false);\r\n playButton.setEnabled(true);\r\n transportSource.setPosition(0.0);\r\n break;\r\n\r\n case Starting:\r\n playButton.setEnabled(false);\r\n transportSource.start();\r\n break;\r\n\r\n case Playing:\r\n stopButton.setEnabled(true);\r\n break;\r\n\r\n case Stopping:\r\n transportSource.stop();\r\n break;\r\n }\r\n }\r\n }\r\n\r\n void openButtonClicked()\r\n {\r\n //reseteo todo lo que hubiese hecho antes porque tengo un archivo nuevo\r\n sequence.clear();\r\n zeromem(fftData, sizeof(fftData));\r\n zeromem(fifo, sizeof(fifo));\r\n 
fifoIndex = 0;\r\n startTime = 0;\r\n FileChooser chooser(\"Select an audio file to play...\",\r\n {},\r\n \"*.\"+currentAudio);\r\n\r\n if (chooser.browseForFileToOpen())\r\n {\r\n auto file = chooser.getResult();\r\n auto* reader = formatManager.createReaderFor(file);\r\n\r\n if (reader != nullptr)\r\n {\r\n std::unique_ptr<AudioFormatReaderSource> newSource(new AudioFormatReaderSource(reader, true));\r\n transportSource.setSource(newSource.get(), 0, nullptr, reader->sampleRate);\r\n playButton.setEnabled(true);\r\n readerSource.reset(newSource.release());\r\n }\r\n }\r\n }\r\n\r\n void playButtonClicked()\r\n {\r\n changeState(Starting);\r\n }\r\n void midiButtonClicked() //guardar el archivo midi\r\n {\r\n \r\n File fi;\r\n FileChooser chooser(\"Select a name...\",\r\n {},\r\n \"*.mid\");\r\n\r\n if (chooser.browseForFileToSave(true))\r\n {\r\n fi = chooser.getResult();\r\n auto* reader = formatManager.createReaderFor(fi);\r\n }\r\n\r\n MidiFile mfile;\r\n FileOutputStream stream = fi;\r\n mfile.setTicksPerQuarterNote(100);\r\n mfile.addTrack(sequence); //aņado los mensaje de la lista\r\n mfile.writeTo(stream);\r\n\r\n }\r\n void stopButtonClicked()\r\n {\r\n changeState(Stopping);\r\n }\r\n\r\n //==========================================================================\r\n TextButton openButton;\r\n TextButton playButton;\r\n TextButton stopButton;\r\n TextButton midiButton;\r\n Random random;\r\n AudioFormatManager formatManager;\r\n std::unique_ptr<AudioFormatReaderSource> readerSource;\r\n AudioTransportSource transportSource;\r\n TransportState state;\r\n MidiMessageSequence sequence;\r\n dsp::FFT forwardFFT;\r\n float fifo[fftSize];\r\n float fftData[2 * fftSize];\r\n int fifoIndex = 0;\r\n bool nextFFTBlockReady = false;\r\n int currentNote;\r\n double startTime;\r\n float high;\r\n HashMap<int, double> mapNotes;\r\n int fftIndex;\r\n String currentAudio = \"mp3\"; //por defecto supongo que el archivo es mp3\r\n ComboBox audioSelector;\r\n JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR(MainContentComponent)\r\n};\r\n"
},
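In the final component above, trueMax() returns a peak bin index, fillNotes() maps that index to a frequency, and the note number comes from m = round((log(freq * 32 / 440) / log(2)) * 12 + 9). Since 12*log2(32) + 9 = 69, that expression is algebraically the standard MIDI formula 69 + 12*log2(f/440). A quick check of both mappings (sample rate and FFT size taken from the file):

```python
import math

SRATE, FFT_SIZE = 44100, 2048   # fftOrder = 11 in the component

def bin_to_freq(k):
    """Centre frequency of FFT bin k."""
    return k * SRATE / FFT_SIZE

def freq_to_midi(f):
    # round(log2(f*32/440)*12 + 9) == round(12*log2(f/440) + 69), the usual MIDI formula
    return round(12 * math.log2(f / 440) + 69)

print(round(bin_to_freq(20), 1))   # ~430.7 Hz, near the A4 region of the peak table
print(freq_to_midi(440))           # 69 (A4)
print(freq_to_midi(261.63))        # 60 (C4), matching the comment in newNote()
```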
{
"alpha_fraction": 0.5137900114059448,
"alphanum_fraction": 0.5309163928031921,
"avg_line_length": 29.59183692932129,
"blob_id": "e55b2c9419ade22ab9fe02672e307b638231c0b3",
"content_id": "2d0271bf79aa7d4dc3d672a75fcdbcf1ee4f3ad5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4497,
"license_type": "no_license",
"max_line_length": 99,
"num_lines": 147,
"path": "/Jaime/Entrega1/PianoEntrega.py",
"repo_name": "nachopaank/MUS",
"src_encoding": "UTF-8",
"text": "import numpy as np\nimport pyaudio, kbhit\nfrom scipy.io import wavfile\n\"\"\" Iniciar con 'c' (suena un do)\n 'e' = bajar 1 semitono\n 'w' = subir 1 semitono\n entre 'c' y '-' las teclas blancas\n entre 'f' y 'ñ' las teclas negras (en sus respectivas posiciones respecto a las teclas blancas)\n 't' = activar/desactivar mas de un tono a la vez\n 'r' = delay de un segundo de lo que este sonando\n\"\"\"\nSRATE, data = wavfile.read(\"piano.wav\")\nCHUNK = 1024\np = pyaudio.PyAudio()\nnumbloque = 0\nbloque = np.arange(CHUNK, dtype = data.dtype)\nkb = kbhit.KBHit()\nc = ' '\ncAux = ' '\nmulti=False\ndataPlay = np.zeros(len(data),dtype=int)\n\nif data.dtype.name =='int16':fmt = 2\nelif data.dtype.name =='int32': fmt = 4\nelif data.dtype.name =='float32': fmt = 4\nelif data.dtype.name =='uint8': fmt = 1\nelse: raise Exception('Not supported')\n\nstream = p.open(format = p.get_format_from_width(fmt),\n channels = len(data.shape),\n rate = SRATE,\n frames_per_buffer = CHUNK,\n output = True)\n # stream_callback=callback)\ndef delay(data,nb,dur,CHUNK):\n dataTemp=[]\n dataTemp=np.append(dataTemp,data[:nb*CHUNK+CHUNK])\n dataTemp=np.append(dataTemp,np.zeros(dur*20*CHUNK,dtype=int))\n dataTemp=np.append(dataTemp,data[nb*CHUNK+CHUNK:])\n return dataTemp\n\ndef changeTone(tone,dattt):\n cont = 0\n dataPlay=np.zeros(len(dattt),dtype=int)\n for x in range(len(dattt)):\n if x % tone == 0:\n if(x!= 0):\n cont = cont + 1\n if (x+cont < len(dattt)):\n dataPlay[x] = dattt[x+cont]\n return dataPlay\n\ndef numTones(num,dataPlay):\n dat = dataPlay\n for i in range(num):\n dat = changeTone(17,dat)\n return dat\ndef changeToneDown(tone,dattt):\n cont = 0\n dataPlay=np.zeros(len(dattt)+(len(dattt)//tone),dtype=int)\n for x in range(len(dataPlay)):\n if x % tone == 0:\n if(x!= 0):\n dataPlay[x] = (dattt[x-cont] + dattt[x-cont-1])/2\n cont = cont + 1\n elif (x+cont < len(dattt)):\n dataPlay[x] = dattt[x-cont]\n return dataPlay\ndef merge(dataAux,da,currBloc):\n dataAux = dataAux[currBloc*CHUNK:]\n if (len(da) > len(dataAux)):\n da2 = da\n for x in range(len(dataAux)):\n da2[x] = (da[x] + dataAux[x]) / 2\n else:\n da2 = dataAux\n for x in range(len(da)):\n da2[x] = (da[x] + dataAux[x]) / 2\n return da2\n\nwhile c != 'q':\n bloque = dataPlay[numbloque*CHUNK:numbloque*CHUNK+CHUNK]\n stream.write(bloque.astype((data.dtype)).tobytes())\n bloqueAux = numbloque\n dataAux = dataPlay\n if kb.kbhit():\n c = kb.getch() \n if c == 'c':\n numbloque = 0\n dataPlay = data\n dataPlay = changeTone(17,dataPlay)\n dataPlay = changeToneDown(17,dataPlay)\n elif c == 'w':\n numbloque = 0\n dataPlay = changeTone(17,dataPlay)\n elif c == 'r':\n dataPlay = delay(dataPlay,numbloque,1,CHUNK)\n elif c == 'e':\n numbloque = 0\n dataPlay = changeToneDown(17,dataPlay)\n elif c == 'f':\n numbloque = 0\n dataPlay = numTones(1,data)\n elif c == 'v':\n numbloque = 0\n dataPlay = numTones(2,data)\n elif c == 'g':\n numbloque = 0\n dataPlay = numTones(3,data)\n elif c == 'b':\n numbloque = 0\n dataPlay = numTones(4,data)\n elif c == 'n':\n numbloque = 0\n dataPlay = numTones(5,data)\n elif c == 'j':\n numbloque = 0\n dataPlay = numTones(6,data)\n elif c == 'm':\n numbloque = 0\n dataPlay = numTones(7,data)\n elif c == 'k':\n numbloque = 0\n dataPlay = numTones(8,data)\n elif c == ',':\n numbloque = 0\n dataPlay = numTones(9,data)\n elif c == 'l':\n numbloque = 0\n dataPlay = numTones(10,data)\n elif c == '.':\n numbloque = 0\n dataPlay = numTones(11,data)\n elif c == 't':\n if multi == True:\n multi = False\n else:\n multi = True \n 
if(len(bloque) != 0 and cAux != c and c != ' ' and multi == True):\n dataPlay = merge(dataAux,dataPlay,bloqueAux)\n cAux = c\n numbloque += 1\n\nkb.set_normal_term()\nstream.stop_stream()\nstream.close()\np.terminate()"
},
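PianoEntrega shifts pitch by dropping or interpolating roughly every 17th sample; 1/17 ≈ 0.059 is the semitone ratio 2**(1/12) − 1 ≈ 0.0595 in disguise. The usual formulation resamples by that ratio directly, for instance with np.interp (my own sketch, not the repo's code):

```python
import numpy as np

def pitch_shift(data, semitones):
    """Resample `data` so it plays back `semitones` higher (shorter) or lower (longer)."""
    ratio = 2 ** (semitones / 12)        # +1 semitone -> ~1.0595
    n_out = int(len(data) / ratio)
    src = np.arange(n_out) * ratio       # fractional read positions in the original
    return np.interp(src, np.arange(len(data)), data)

tone = np.sin(2 * np.pi * 440 * np.arange(8000) / 8000)
up = pitch_shift(tone, +1)               # one semitone up, ~5.6% shorter
print(len(tone), len(up))
```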
{
"alpha_fraction": 0.5192691683769226,
"alphanum_fraction": 0.545246958732605,
"avg_line_length": 30,
"blob_id": "3d0c97f469c3841500642fac70c6c592dbdbc768",
"content_id": "388defaf1365139631299c61dcfb48bdb5a2a55f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3503,
"license_type": "no_license",
"max_line_length": 99,
"num_lines": 113,
"path": "/Jaime/Entrega3/Karplus.py",
"repo_name": "nachopaank/MUS",
"src_encoding": "UTF-8",
"text": "import numpy as np\nimport pyaudio, kbhit\nfrom scipy.io import wavfile\n\n\np = pyaudio.PyAudio()\nRATE = 44100\nc = ''\nCHUNK = 1024\nkb = kbhit.KBHit()\nnumbloque = 0\nfs = 8000\n\n\nstream2 = p.open(format = pyaudio.paFloat32,\n channels = 2,\n rate = fs,\n frames_per_buffer = CHUNK,\n output = True)\n\ndef karplus_strong(wavetable, n_samples):\n \"\"\"Synthesizes a new waveform from an existing wavetable, modifies last sample by averaging.\"\"\"\n samples = []\n current_sample = 0\n previous_value = 0\n while len(samples) < n_samples:\n wavetable[current_sample] = 0.5 * (wavetable[current_sample] + previous_value)\n samples.append(wavetable[current_sample])\n previous_value = samples[-1]\n current_sample += 1\n current_sample = current_sample % wavetable.size\n return np.array(samples)\n\ndataPlay = np.zeros(0,dtype=int)\nfreqs = np.logspace(0, 1, num=12, base=2) * 55\nmulti = False\n\ndef cal_wavetable(size):\n wavetable_size = fs // int(size) \n np.random.seed(1)\n return (2 * np.random.randint(0, 2, wavetable_size) - 1).astype(np.float32)\n\ndef merge(dataAux,da,currBloc):\n dataAux = dataAux[currBloc*CHUNK:]\n if (len(da) > len(dataAux)):\n da2 = da\n for x in range(len(dataAux)):\n da2[x] = (da[x] + dataAux[x]) / 2\n else:\n da2 = dataAux\n for x in range(len(da)):\n da2[x] = (da[x] + dataAux[x]) / 2\n return da2\n\n\nwhile c != 'q':\n bloque = dataPlay[numbloque*CHUNK:numbloque*CHUNK+CHUNK]\n stream2.write(bloque.astype((np.float32)).tobytes())\n bloqueAux = numbloque\n dataAux = dataPlay\n if kb.kbhit():\n c = kb.getch() \n if c == 'z':\n numbloque = 0\n dataPlay = karplus_strong(cal_wavetable(freqs[0]), 2*fs)\n elif c == 'x':\n numbloque = 0\n dataPlay = karplus_strong(cal_wavetable(freqs[2]), 2*fs)\n elif c == 'c':\n numbloque = 0\n dataPlay = karplus_strong(cal_wavetable(freqs[4]), 2*fs)\n elif c == 'v':\n numbloque = 0\n dataPlay = karplus_strong(cal_wavetable(freqs[5]), 2*fs)\n elif c == 'b':\n numbloque = 0\n dataPlay = karplus_strong(cal_wavetable(freqs[7]), 2*fs)\n elif c == 'n':\n numbloque = 0\n dataPlay = karplus_strong(cal_wavetable(freqs[9]), 2*fs)\n elif c == 'm':\n numbloque = 0\n dataPlay = karplus_strong(cal_wavetable(freqs[11]), 2*fs)\n elif c == 's':\n numbloque = 0\n dataPlay = karplus_strong(cal_wavetable(freqs[1]), 2*fs)\n elif c == 'd':\n numbloque = 0\n dataPlay = karplus_strong(cal_wavetable(freqs[3]), 2*fs)\n elif c == 'g':\n numbloque = 0\n dataPlay = karplus_strong(cal_wavetable(freqs[6]), 2*fs)\n elif c == 'h':\n numbloque = 0\n dataPlay = karplus_strong(cal_wavetable(freqs[8]), 2*fs)\n elif c == 'j':\n numbloque = 0\n dataPlay = karplus_strong(cal_wavetable(freqs[10]), 2*fs)\n\n elif c == 't':\n if multi == True:\n multi = False\n else:\n multi = True \n if(len(bloque) != 0 and cAux != c and c != ' ' and multi == True):\n dataPlay = merge(dataAux,dataPlay,bloqueAux)\n cAux = c\n numbloque += 1\n\nkb.set_normal_term()\nstream2.stop_stream()\nstream2.close()\np.terminate()\n"
},
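In Karplus-Strong the perceived pitch is the sample rate divided by the wavetable length, which is why cal_wavetable sizes its table as fs // freq. A standalone render of one pluck, written to a wav file so the pitch can be checked by ear (the file name and the 110 Hz choice are mine):

```python
import numpy as np
from scipy.io import wavfile

fs = 8000

def karplus_strong(wavetable, n_samples):
    """Same averaging loop as in the repo: each pass smooths the wavetable further."""
    samples, pos, prev = [], 0, 0.0
    while len(samples) < n_samples:
        wavetable[pos] = 0.5 * (wavetable[pos] + prev)
        samples.append(wavetable[pos])
        prev = samples[-1]
        pos = (pos + 1) % wavetable.size
    return np.array(samples, dtype=np.float32)

freq = 110.0   # roughly A2; actual pitch = fs / table length
table = (2 * np.random.randint(0, 2, fs // int(freq)) - 1).astype(np.float32)
wavfile.write('pluck.wav', fs, karplus_strong(table, 2 * fs))
```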
{
"alpha_fraction": 0.5175107717514038,
"alphanum_fraction": 0.545527994632721,
"avg_line_length": 30.19327735900879,
"blob_id": "87dfde0996546606dace1c0b5f399e24e3ae1a5a",
"content_id": "65addb74d25a835bd5060ab8da653023693c43d5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3712,
"license_type": "no_license",
"max_line_length": 105,
"num_lines": 119,
"path": "/Jaime/Entrega3/KarplusPoly.py",
"repo_name": "nachopaank/MUS",
"src_encoding": "UTF-8",
"text": "import numpy as np\nimport pyaudio, kbhit\nfrom scipy.io import wavfile\n\n\np = pyaudio.PyAudio()\nRATE = 44100\nc = ''\nCHUNK = 1024\nkb = kbhit.KBHit()\nnumbloque = 0\nfs = 8000\n\nstream = p.open(format = pyaudio.paFloat32,\n channels = 2,\n rate = fs,\n frames_per_buffer = CHUNK,\n output = True)\n\nstream2 = p.open(format = pyaudio.paFloat32,\n channels = 2,\n rate = fs,\n frames_per_buffer = CHUNK,\n output = True)\n\nstream3 = p.open(format = pyaudio.paFloat32,\n channels = 2,\n rate = fs,\n frames_per_buffer = CHUNK,\n output = True)\n\ndef karplus_strong(wavetable, n_samples):\n \"\"\"Synthesizes a new waveform from an existing wavetable, modifies last sample by averaging.\"\"\"\n samples = []\n current_sample = 0\n previous_value = 0\n while len(samples) < n_samples:\n wavetable[current_sample] = 0.5 * (wavetable[current_sample] + previous_value)\n samples.append(wavetable[current_sample])\n previous_value = samples[-1]\n current_sample += 1\n current_sample = current_sample % wavetable.size\n return np.array(samples)\n\n\nfreqs = np.logspace(0, 1, num=12, base=2) * 55\n\ndef cal_wavetable(size):\n wavetable_size = fs // int(size)\n np.random.seed(1)\n return (2 * np.random.randint(0, 2, wavetable_size) - 1).astype(np.float32)\n\narrayBool = [False,True,True]\narrayNum = [0,0,0]\narrayData = [np.zeros(0,dtype=int),np.zeros(0,dtype=int),np.zeros(0,dtype=int)]\ncAux='q'\n\nwhile c != 'q':\n i=0\n stream.write(arrayData[1][arrayNum[1]*CHUNK:arrayNum[1]*CHUNK+CHUNK].astype((np.float32)).tobytes())\n stream2.write(arrayData[0][arrayNum[0]*CHUNK:arrayNum[0]*CHUNK+CHUNK].astype((np.float32)).tobytes())\n stream3.write(arrayData[2][arrayNum[2]*CHUNK:arrayNum[2]*CHUNK+CHUNK].astype((np.float32)).tobytes())\n\n if kb.kbhit():\n c = kb.getch() \n if c == 'z':\n dataAux = karplus_strong(cal_wavetable(freqs[0]), 2*fs)\n elif c == 'x':\n dataAux = karplus_strong(cal_wavetable(freqs[2]), 2*fs)\n elif c == 'c':\n dataAux = karplus_strong(cal_wavetable(freqs[4]), 2*fs)\n elif c == 'v':\n dataAux = karplus_strong(cal_wavetable(freqs[5]), 2*fs)\n elif c == 'b':\n dataAux = karplus_strong(cal_wavetable(freqs[7]), 2*fs)\n elif c == 'n':\n dataAux = karplus_strong(cal_wavetable(freqs[9]), 2*fs)\n elif c == 'm':\n dataAux = karplus_strong(cal_wavetable(freqs[11]), 2*fs)\n elif c == 's':\n dataAux = karplus_strong(cal_wavetable(freqs[1]), 2*fs)\n elif c == 'd':\n dataAux = karplus_strong(cal_wavetable(freqs[3]), 2*fs)\n elif c == 'g':\n dataAux = karplus_strong(cal_wavetable(freqs[6]), 2*fs)\n elif c == 'h':\n dataAux = karplus_strong(cal_wavetable(freqs[8]), 2*fs)\n elif c == 'j':\n dataAux = karplus_strong(cal_wavetable(freqs[10]), 2*fs)\n\n \n for x in arrayBool:\n if not(x):\n if c!=cAux:\n arrayBool[i] = True\n arrayNum[i] = 0\n arrayData[i] = dataAux\n #print(\"Asignado a \",i)\n arrayBool[(i+1)%len(arrayBool)]=False\n break\n else:\n arrayNum[(i-1)%len(arrayNum)] = 0\n i+=1\n \n\n cAux=c\n \n for i in range(len(arrayNum)):\n arrayNum[i] += 1\n i+=1\n \n\n\nkb.set_normal_term()\nstream.stop_stream()\nstream.close()\nstream2.stop_stream()\nstream2.close()\np.terminate()\n"
}
] | 13 |
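KarplusPoly gets three-voice polyphony by opening one PyAudio stream per voice; an alternative that scales past the number of open streams is to keep a single stream and sum the active voices into one block, with a simple normalization against clipping. A sketch of that mixing step (my own code):

```python
import numpy as np

CHUNK = 1024

def mix_block(voices, block_index):
    """Sum one CHUNK from every active voice into a single output block."""
    out = np.zeros(CHUNK, dtype=np.float32)
    live = 0
    for data in voices:
        part = data[block_index * CHUNK:(block_index + 1) * CHUNK]
        out[:len(part)] += part          # finished voices contribute an empty slice
        live += bool(len(part))
    return out / max(live, 1)            # crude normalization to stay inside [-1, 1]

a = np.ones(3 * CHUNK, dtype=np.float32)
b = -np.ones(2 * CHUNK, dtype=np.float32)
print(mix_block([a, b], 0)[:3], mix_block([a, b], 2)[:3])  # cancel, then a alone
```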
lioenel/mybookshelf2 | https://github.com/lioenel/mybookshelf2 | bea277599711405aeb58bec0b193beb3c6cfa213 | f494f6d6df75eaf3a986ba4cff3bb4e36fce6934 | c5f68a570943436a47aa565fb4d3b4039f5bc871 | refs/heads/master | 2021-01-22T07:03:02.993493 | 2016-07-16T08:08:55 | 2016-07-16T08:08:55 | null | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6776729822158813,
"alphanum_fraction": 0.6776729822158813,
"avg_line_length": 24.440000534057617,
"blob_id": "4920094534a292bf89704f0ffbcbae63e70af40d",
"content_id": "da878901e11e40722f7408dc25ebac4908039c10",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 636,
"license_type": "no_license",
"max_line_length": 59,
"num_lines": 25,
"path": "/client/src/pages/ebook.js",
"repo_name": "lioenel/mybookshelf2",
"src_encoding": "UTF-8",
"text": "import {inject} from 'aurelia-framework';\nimport {ApiClient} from 'lib/api-client';\nimport {LogManager} from 'aurelia-framework';\nimport {Access} from 'lib/access';\n\nlet logger = LogManager.getLogger('ebooks');\n\n@inject(ApiClient, Access)\nexport class Ebook {\n ebook\n constructor(client, access, ) {\n this.client=client;\n this.access=access;\n this.token=access.token;\n this.canDownload=access.hasRole('user');\n this.canConvert=access.hasRole('user');\n }\n\n activate(params) {\n this.client.getOne('ebooks', params.id)\n .then(b => this.ebook=b)\n .catch(err => logger.error(`Failed to load ${err}`));\n }\n\n}\n"
},
{
"alpha_fraction": 0.6514983177185059,
"alphanum_fraction": 0.6522382497787476,
"avg_line_length": 28.07526969909668,
"blob_id": "9a141ecaf04966ecb8f31bf88ac13bfbb2869c0b",
"content_id": "ce5d033d70f67327891432e59b720a7b2dbe35e9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2703,
"license_type": "no_license",
"max_line_length": 122,
"num_lines": 93,
"path": "/manage.py",
"repo_name": "lioenel/mybookshelf2",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n\nfrom flask_script import Manager, prompt_pass, prompt_bool\nfrom flask_script.commands import InvalidCommand\nfrom app import app,db\nimport app.model as model\nfrom app.utils import hash_pwd\nimport app.schema as schema\nimport sys\nfrom sqlalchemy.orm.exc import NoResultFound, MultipleResultsFound\nfrom sqlalchemy import or_\nimport os.path\n\n\nDATA_DIR='./app/data'\n\nmanager = Manager(app)\n\n\ndef read_pwd():\n password=prompt_pass('Password')\n again=prompt_pass('Repeat password')\n if password!=again:\n raise InvalidCommand('Repeated password differs!')\n return password\n\[email protected]\ndef create_user(user, email, password=None, role='user'):\n if not password:\n password=read_pwd()\n \n data=dict(user_name=user, email=email, password=hash_pwd(password), active=True) \n errors=schema.UserSchema(exclude=('version_id',)).validate(data, db.session)\n if errors:\n raise InvalidCommand('Validation Error: %s'%errors)\n \n role=model.Role.query.filter_by(name=role).one()\n u=model.User(**data)\n u.roles.append(role)\n db.session.add(u) # @UndefinedVariable\n db.session.commit() # @UndefinedVariable\n\n \[email protected]\ndef change_password(user, password=None):\n try: \n u=model.User.query.filter(or_(model.User.user_name == user, model.User.email == user)).one() # @UndefinedVariable\n except NoResultFound:\n raise InvalidCommand('No such User')\n if not password:\n password=read_pwd()\n \n u.password=hash_pwd(password)\n db.session.commit() # @UndefinedVariable\n\[email protected]\ndef create_tables():\n db.create_all()\n connection = db.engine.raw_connection() # @UndefinedVariable\n try:\n c=connection.cursor()\n for fname in ('create_ts.sql', 'create_functions.sql'):\n script=open(os.path.join(os.path.dirname(__file__), DATA_DIR, fname), 'rt', encoding='utf-8-sig').read()\n #print(script)\n res=c.execute(script)\n connection.commit()\n finally:\n connection.close()\n \[email protected]\ndef update_fulltext():\n \n connection = db.engine.raw_connection() # @UndefinedVariable\n \n try:\n c=connection.cursor()\n res=c.execute(\"update ebook set full_text=to_tsvector('custom', ebook_full_title(id))\")\n connection.commit()\n finally:\n connection.close()\n \n # \[email protected] \ndef drop_tables():\n if prompt_bool('Are you REALLY sure? You will loose all data!'):\n db.drop_all() \n\nif __name__ == \"__main__\":\n try:\n manager.run()\n except InvalidCommand as err:\n print(err, file=sys.stderr)\n sys.exit(1)"
},
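create_user and change_password store credentials through hash_pwd, the bcrypt wrapper defined in app/utils.py later in this dump. The underlying round trip is short; note that checkpw is the usual verification call, while utils.py compares hashpw output directly (both approaches are valid):

```python
import bcrypt

hashed = bcrypt.hashpw(b'hunter2', bcrypt.gensalt())  # the salt is embedded in the hash
print(bcrypt.checkpw(b'hunter2', hashed))             # True
print(bcrypt.checkpw(b'wr0ng', hashed))               # False
```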
{
"alpha_fraction": 0.6497710943222046,
"alphanum_fraction": 0.6559842824935913,
"avg_line_length": 28.83414649963379,
"blob_id": "9527b386d3f7d061dce91ee71ae395ec8efdc2a7",
"content_id": "a8eab3204986c08d2783042e5da2fde5974c6eb6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 6116,
"license_type": "no_license",
"max_line_length": 96,
"num_lines": 205,
"path": "/app/api.py",
"repo_name": "lioenel/mybookshelf2",
"src_encoding": "UTF-8",
"text": "from flask import Blueprint, request, abort, current_app, jsonify\nfrom flask_restful import Resource as BaseResource, Api\nimport app.model as model\nimport app.schema as schema\nimport app.logic as logic\nfrom app.utils import success_error, mimetype_from_file_name\nfrom app import db\nfrom app.cors import add_cors_headers\nfrom app.access import role_required\nfrom werkzeug import secure_filename\nimport os.path\nimport logging\nimport tempfile\n\nlogger = logging.getLogger('api')\n\n\nbp = Blueprint('api', __name__)\napi = Api(bp)\n\n\[email protected]_request\ndef after_request(response):\n return add_cors_headers(response)\n\n\nclass Resource(BaseResource):\n decorators = [role_required('user')]\n pass\n\n\nclass Ebooks(Resource):\n\n @logic.paginated(sortings=model.sortings['ebook'])\n def get(self, page=1, page_size=20, sort=None, **kwargs):\n q = model.Ebook.query\n return logic.paginate(q, page, page_size, sort, schema.ebooks_list_serializer())\n\n def post(self):\n pass\n\n\nclass Authors(Resource):\n\n @logic.paginated(sortings=model.sortings['author'])\n def get(self, page=1, page_size=20, sort=None, **kwargs):\n q = model.Author.query\n return logic.paginate(q, page, page_size, sort, schema.authors_list_serializer())\n\n\nclass Series(Resource):\n\n @logic.paginated(sortings=model.sortings['series'])\n def get(self, page=1, page_size=20, sort=None, **kwargs):\n q = model.Series.query\n return logic.paginate(q, page, page_size, sort, schema.series_list_serializer())\n\n\nclass Ebook(Resource):\n\n def get(self, id):\n b = model.Ebook.query.get_or_404(id)\n return schema.ebook_serializer().dump(b).data # @UndefinedVariable\n\n @role_required('superuser')\n def delete(self, id):\n b = model.Ebook.query.get_or_404(id)\n r = db.session.delete(b) # @UndefinedVariable\n db.session.commit()\n\n def put(self, id):\n pass\n\n\nclass Author(Resource):\n\n def get(self, id):\n a = model.Author.query.get_or_404(id)\n return schema.author_serializer().dump(a).data\n\n\nclass Search(Resource):\n\n @logic.paginated()\n def get(self, search, page=1, page_size=20, **kwargs):\n q = logic.search_query(model.Ebook.query, search)\n return logic.paginate(q, page, page_size, None, schema.ebooks_list_serializer())\n\n\nclass AuthorEbooks(Resource):\n\n @logic.paginated(sortings=model.sortings['ebook'])\n def get(self, id, page=1, page_size=20, sort=None):\n q = model.Ebook.query.join(\n model.Author, model.Ebook.authors).filter(model.Author.id == id)\n if request.args.get('filter'):\n q = logic.filter_ebooks(q, request.args.get('filter'))\n return logic.paginate(q, page, page_size, sort, schema.ebooks_list_serializer())\n\n\nclass UploadMeta(Resource):\n\n def get(self, id):\n upload = model.Upload.query.get_or_404(id)\n data = schema.upload_serializer().dump(upload).data\n# search = upload.meta.get('title', '') + ' ' + ' '.join(upload.meta.get('authors', []))\n# search = search.strip()\n# if search:\n# q = logic.search_query(model.Ebook.query, search)\n# if q:\n# data['proposed_ebook'] = schema.ebook_serializer().dump(q[0]).data\n\n return data\n\n\[email protected]('/upload', methods=['POST'])\n@role_required('user')\ndef upload():\n file = request.files['file']\n if file:\n filename = secure_filename(file.filename)\n temp_dir = tempfile.mkdtemp(dir=current_app.config['UPLOAD_DIR'])\n tdir = os.path.split(temp_dir)[1]\n full_name = os.path.join(temp_dir, filename)\n file.save(full_name)\n result = logic.check_uploaded_file(file.mimetype, full_name)\n if result:\n os.remove(full_name)\n 
return jsonify(**result)\n        return jsonify(result='ok', file=os.path.join(tdir, filename))\n    return jsonify(error='no file')\n\n\[email protected]('/upload/check', methods=['POST'])\n@role_required('user')\ndef check_upload():\n    file_info = request.json\n    logger.debug('File info %s' % file_info)\n    err = schema.FileInfoSchema().validate(file_info)\n    if err:\n        logger.warn('Invalid file info: %s', err)\n        return jsonify(error='Invalid schema')\n    r = logic.check_file(**file_info)\n    if r:\n        return jsonify(**r)\n    return jsonify(result='ok')\n\n\[email protected]('/download/<int:id>')\n@role_required('user')\ndef download(id):\n    return logic.download(id)\n\n\[email protected]('/cover-meta/<int:id>/<string:size>')\n@role_required('user')\ndef cover_meta(id, size='normal'):\n    upload = model.Upload.query.get_or_404(id)\n    if not upload.cover:\n        abort(404, 'No cover')\n\n    fname = os.path.join(current_app.config['UPLOAD_DIR'], upload.cover)\n    mimetype = mimetype_from_file_name(fname)\n    if not mimetype:\n        abort(500, 'Invalid cover file')\n\n    return logic.stream_response(fname, mimetype)\n\n\[email protected]('/series/index/<string:start>')\n@role_required('user')\ndef series_index(start):\n    total, items = logic.series_index(start)\n    serializer = schema.series_list_serializer()\n    return jsonify(total=total,\n                   items=serializer.dump(items).data)\n\n\[email protected]('/authors/index/<string:start>')\n@role_required('user')\ndef authors_index(start):\n    total, items = logic.authors_index(start)\n    serializer = schema.authors_list_serializer()\n    return jsonify(total=total,\n                   items=serializer.dump(items).data)\n\n\[email protected]('/ebooks/index/<string:start>')\n@role_required('user')\ndef ebooks_index(start):\n    total, items = logic.ebooks_index(start)\n    serializer = schema.ebooks_list_serializer()\n    return jsonify(total=total,\n                   items=serializer.dump(items).data)\n\n\napi.add_resource(Ebooks, '/ebooks')\napi.add_resource(Ebook, '/ebooks/<int:id>')\napi.add_resource(AuthorEbooks, '/ebooks/author/<int:id>')\napi.add_resource(Authors, '/authors')\napi.add_resource(Author, '/authors/<int:id>')\napi.add_resource(Series, '/series')\napi.add_resource(Search, '/search/<string:search>')\napi.add_resource(UploadMeta, '/uploads-meta/<int:id>')\n"
},
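The resources in api.py delegate to logic.paginate, which is not shown in this dump. A minimal sketch of what such a helper typically does with a SQLAlchemy query and a marshmallow serializer (the names mirror the call sites above, but the body is my assumption, not the repo's implementation):

```python
def paginate(query, page, page_size, sort, serializer):
    """Order, slice and serialize a SQLAlchemy query into a page envelope.

    Sketch only: `sort` is assumed to be an ORDER BY clause looked up from
    model.sortings; the real logic.paginate may differ.
    """
    if sort is not None:
        query = query.order_by(sort)
    total = query.count()                       # total rows before slicing
    items = query.offset((page - 1) * page_size).limit(page_size).all()
    return {'page': page,
            'page_size': page_size,
            'total': total,
            'items': serializer.dump(items).data}
```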
{
"alpha_fraction": 0.554430365562439,
"alphanum_fraction": 0.5586497783660889,
"avg_line_length": 36.03125,
"blob_id": "ca3079a38d01072efa167012da43d445b464d8b8",
"content_id": "a002b332baafcb44189575fc32ad4d88697cd043",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3555,
"license_type": "no_license",
"max_line_length": 94,
"num_lines": 96,
"path": "/engine/tasks.py",
"repo_name": "lioenel/mybookshelf2",
"src_encoding": "UTF-8",
"text": "import os.path\nimport re\nimport asyncio\nfrom asexor.task import BaseTask, TaskError\nfrom settings import UPLOAD_DIR, IMAGE_MAGIC\nfrom app.utils import file_hash\nimport logging\nimport engine.dal as dal\n\nlogger = logging.getLogger('tasks')\n\n\nclass MetadataTask(BaseTask):\n NAME = 'metadata'\n COMMAND = 'ebook-meta'\n MAX_TIME = 60\n\n async def validate_args(self, *args, **kwargs):\n f = args[0]\n fname = os.path.join(UPLOAD_DIR, f)\n base_name = os.path.splitext(f)[0]\n self.cover_name = base_name + '_tmp.jpg'\n if not os.access(fname, os.R_OK):\n raise TaskError('File %s does not exists or is not readable')\n self.fname = f\n self.fname_full = fname\n return ('--get-cover=%s' % os.path.join(UPLOAD_DIR, self.cover_name), fname)\n\n AUTHORS_RE = re.compile(\n r'^Author\\(s\\)\\s*:\\s*(.+)', re.UNICODE | re.MULTILINE)\n TITLE_RE = re.compile(r'^Title\\s*:\\s*(.+)', re.UNICODE | re.MULTILINE)\n TAGS_RE = re.compile(r'^Tags\\s*:\\s*(.+)', re.UNICODE | re.MULTILINE)\n SERIES_RE = re.compile(r'^Series\\s*:\\s*(.+)', re.UNICODE | re.MULTILINE)\n LANGUAGES_RE = re.compile(\n r'^Languages\\s*:\\s*(.+)', re.UNICODE | re.MULTILINE)\n\n async def _parse_data(self, data):\n def strip(l):\n return list(map(lambda x: x.strip(), filter(None, l)))\n meta = {}\n title = self.TITLE_RE.search(data)\n if title:\n meta['title'] = title.group(1).strip()\n authors = self.AUTHORS_RE.search(data)\n if authors:\n authors = re.sub(r'\\[[^\\]]+\\]', '', authors.group(1))\n meta['authors'] = strip(authors.split('&'))\n\n tags = self.TAGS_RE.search(data)\n if tags:\n meta['tags'] = strip(tags.group(1).split(','))\n\n languages = self.LANGUAGES_RE.search(data)\n if languages:\n meta['language'] = languages.group(1).split(',')[0].strip()\n\n series = self.SERIES_RE.search(data)\n if series:\n series = re.match(r'(.*) #(\\d+)', series.group(1))\n if series:\n meta['series'] = series.group(1)\n meta['series_index'] = int(series.group(2))\n\n return meta\n\n async def parse_result(self, data):\n\n data = data.decode(self.output_encoding)\n meta = await self._parse_data(data)\n \n cover_in = os.path.join(UPLOAD_DIR, self.cover_name)\n loop = asyncio.get_event_loop()\n hash = await loop.run_in_executor(None, file_hash, self.fname_full)\n size = await loop.run_in_executor(None, lambda f: os.stat(f).st_size, self.fname_full)\n cover = None\n if os.path.exists(cover_in):\n cover_file = self.cover_name[:-8] + '_cover.jpg'\n cover_file_full = os.path.join(UPLOAD_DIR, cover_file)\n\n proc = await asyncio.create_subprocess_exec(IMAGE_MAGIC, cover_in, '-fuzz', '7%',\n '-trim', cover_file_full)\n return_code = await proc.wait()\n\n if return_code == 0 and os.path.exists(cover_file_full):\n os.remove(cover_in)\n else:\n logger.warn(\n 'Image Magic failed triming file %s with code %d', cover_in, return_code)\n os.rename(cover_in, cover_file_full)\n cover = cover_file\n else:\n logger.warn('Cannot get cover image')\n \n upload_id = await dal.add_upload(self.fname, cover, meta, size, hash, self.user)\n \n return upload_id\n"
},
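MetadataTask._parse_data scrapes ebook-meta's line-oriented output with the regexes above; their behaviour is easy to check offline (the sample text below is fabricated for illustration, not real tool output):

```python
import re

sample = """Title               : Roky v Bilem dome
Author(s)           : Henry Kissinger [Kissinger, Henry]
Tags                : history, politics
Languages           : ces
Series              : Memoirs #1
"""

AUTHORS_RE = re.compile(r'^Author\(s\)\s*:\s*(.+)', re.UNICODE | re.MULTILINE)
SERIES_RE = re.compile(r'^Series\s*:\s*(.+)', re.UNICODE | re.MULTILINE)

# strip the bracketed sort-name, as the task does, then split on '&'
authors = re.sub(r'\[[^\]]+\]', '', AUTHORS_RE.search(sample).group(1))
series = re.match(r'(.*) #(\d+)', SERIES_RE.search(sample).group(1))
print([a.strip() for a in authors.split('&')])   # ['Henry Kissinger']
print(series.group(1), int(series.group(2)))     # Memoirs 1
```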
{
"alpha_fraction": 0.6245933771133423,
"alphanum_fraction": 0.6499674916267395,
"avg_line_length": 34.74418640136719,
"blob_id": "b7e5b7e666f2e6fb9596f56a1340f6ceb46933c9",
"content_id": "6cea6074b903435d3021e3852836c8bf8bc1233c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1537,
"license_type": "no_license",
"max_line_length": 179,
"num_lines": 43,
"path": "/app/tests/test_logic.py",
"repo_name": "lioenel/mybookshelf2",
"src_encoding": "UTF-8",
"text": "from app import db, app\nimport app.model as model\nimport app.logic as logic\nfrom .basecase import TestCase\nimport os.path\n\n\nebook_file = os.path.join(os.path.dirname(\n __file__), '../data/books/Kissinger, Henry/Roky v Bilem dome/Kissinger, Henry - Roky v Bilem dome.epub')\n\n\nclass TestLogic(TestCase):\n\n def test_logic(self):\n\n b1 = model.Ebook.query.get(33837)\n self.assertEqual(b1.authors_str, 'Crichton Michael')\n\n b2 = model.Ebook.query.get(37157)\n self.assertEqual(b2.authors_str, 'Strugackij A N, Strugackij B N')\n\n b3 = model.Ebook.query.get(62546)\n self.assertEqual(b3.authors_str, 'Wilkins G, Dalton M, Young K')\n b3.authors.append(b1.authors[0])\n\n self.assertEqual(\n b3.authors_str, 'Wilkins G, Dalton M, Young K and others')\n\n b1.authors = []\n self.assertEqual(b1.authors_str, 'No Authors')\n\n source = model.Source.query.get(46519)\n name = logic.norm_file_name(source)\n self.assertEqual(\n name, 'Strugackij A N, Strugackij B N/Noc na Marse/Strugackij A N, Strugackij B N - Noc na Marse.doc')\n\n source = model.Source.query.get(63546)\n name = logic.norm_file_name(source)\n self.assertEqual(\n name, 'Monroe Lucy/Nevesty od Stredozemniho more/Nevesty od Stredozemniho more 2 - Spanelova milenka/Monroe Lucy - Nevesty od Stredozemniho more 2 - Spanelova milenka.doc')\n\n res = logic.check_uploaded_file('application/epub+zip', ebook_file)\n self.assertEqual(res['error'], 'file already exists')\n"
},
{
"alpha_fraction": 0.6458852887153625,
"alphanum_fraction": 0.6483790278434753,
"avg_line_length": 35.45454406738281,
"blob_id": "d4a0633a5b68cc657a465d8ab610012b248d97e6",
"content_id": "e1ea78d741f38f2df9dce7fecf7e6ad0dc54dffb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 401,
"license_type": "no_license",
"max_line_length": 85,
"num_lines": 11,
"path": "/start_terminal",
"repo_name": "lioenel/mybookshelf2",
"src_encoding": "UTF-8",
"text": "#!/bin/bash\nVENV=\"venv/bin/activate\"\n\nexport PYTHONPATH=~/workspace/asexor:~/workspace/mybookshelf2\n\ngnome-terminal \\\n--tab --title \"crossbar\" -e \"bash --rcfile $VENV -ci 'cd engine && crossbar start'\" \\\n--tab --title \"engine\" -e \"bash --rcfile $VENV\" \\\n--tab --title \"web\" -e \"bash --rcfile $VENV\" \\\n--tab --title \"client\" -e \"bash --rcfile $VENV\" \\\n--tab --title \"terminal\" -e \"bash --rcfile $VENV\"\n"
},
{
"alpha_fraction": 0.685164213180542,
"alphanum_fraction": 0.685164213180542,
"avg_line_length": 22.864864349365234,
"blob_id": "9227ce73d518fab8fee0c958ebf7a2805bd02fbc",
"content_id": "713c3797a85db87e894ebc76bcec07be72941617",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 883,
"license_type": "no_license",
"max_line_length": 130,
"num_lines": 37,
"path": "/client/src/pages/author.js",
"repo_name": "lioenel/mybookshelf2",
"src_encoding": "UTF-8",
"text": "import {inject, bindable, computedFrom, LogManager} from 'aurelia-framework';\nimport {ApiClient} from 'lib/api-client';\nconst logger = LogManager.getLogger('search');\n\n@inject(ApiClient)\nexport class Author {\n _loader;\n @bindable filter;\n author;\n\n constructor(client) {\n this.client=client;\n }\n\n activate(params) {\n logger.debug('Author activated with '+JSON.stringify(params));\n this.id=decodeURIComponent(params.id);\n this.client.getOne('authors', params.id). then(data => {this.author=data; logger.debug('Loaded author'+JSON.stringify(data))})\n\n this.updateLoader()\n }\n\n filterChanged() {\n logger.debug('Filter changed to '+ this.filter);\n this.updateLoader()\n }\n\n updateLoader() {\n this._loader = this.client.authorBooks.bind(this.client, this.id, this.filter);\n }\n\n @computedFrom('_loader')\n get loader() {\n return this._loader;\n }\n\n}\n"
},
{
"alpha_fraction": 0.6311625242233276,
"alphanum_fraction": 0.632988452911377,
"avg_line_length": 26.383333206176758,
"blob_id": "ddb2ad9f8b62667fa5d09b30187434260dd80f64",
"content_id": "49844922848c280a5c5c7806e9d56ecb4c9d9591",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 1643,
"license_type": "no_license",
"max_line_length": 68,
"num_lines": 60,
"path": "/client/src/pages/upload-result.js",
"repo_name": "lioenel/mybookshelf2",
"src_encoding": "UTF-8",
"text": "import {inject, LogManager, computedFrom} from 'aurelia-framework';\nimport {ApiClient} from 'lib/api-client';\n\nlet logger = LogManager.getLogger('upload-result');\n\n@inject(ApiClient)\nexport class UploadResult {\n meta;\n file;\n ebook;\n ebookCandidates=[];\n cover=new Image();\n constructor(client) {\n this.client=client;\n // has to revoke object URL to release blob\n this.cover.onload = function() {\n URL.revokeObjectURL(this.src);\n }\n\n }\n\n activate(model) {\n logger.debug(`Activated with ${JSON.stringify(model)}`);\n this.client.getOne('uploads-meta', model.id)\n .then(meta => {\n this.meta = meta.meta;\n this.file = meta.load_source;\n logger.debug(`Got meta ${JSON.stringify(meta)}`);\n this.client.getCoverMeta(model.id)\n .then(blob => this.cover.src = URL.createObjectURL(blob))\n\n return meta.meta\n })\n .catch(err => logger.error(`Upload meta error ${err}`))\n .then(meta => {\n var authors = meta.authors ? meta.authors.join(' ') : null;\n var search=meta.title ? meta.title : '';\n search = meta.series ? search + ' '+ meta.series : search\n search = authors ? authors + ' ' + search : search;\n logger.debug(`Searching for ebooks: ${search}`);\n return this.client.search(search, 1, 5);\n })\n .then(result => {\n logger.debug(`Found ${result.data}`);\n this.ebookCandidates=result.data;\n\n })\n\n }\n\n attached() {\n document.getElementById('cover-holder').appendChild(this.cover);\n }\n\n @computedFrom('ebookCandidates')\n get hasCandidates() {\n return this.ebookCandidates && this.ebookCandidates.length>0;\n }\n\n}\n"
},
{
"alpha_fraction": 0.5702680945396423,
"alphanum_fraction": 0.575412929058075,
"avg_line_length": 26.977272033691406,
"blob_id": "7f58808ff7d6219e299a6f141cf0c0774c34c726",
"content_id": "9b0c2cd9b46e36f8277d6127c2016f6bcb9134b4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3693,
"license_type": "no_license",
"max_line_length": 113,
"num_lines": 132,
"path": "/app/utils.py",
"repo_name": "lioenel/mybookshelf2",
"src_encoding": "UTF-8",
"text": "from functools import wraps\nimport bcrypt\nimport jwt\nfrom datetime import datetime, timedelta\nimport unicodedata\nimport hashlib\nimport logging\nimport mimetypes\n\nlogger = logging.getLogger('utils')\n\n\ndef mimetype_from_file_name(fname):\n return mimetypes.guess_type(fname, False)[0]\n\n\ndef success_error(fn):\n @wraps(fn)\n def inner(*args, **kwargs):\n try:\n fn(*args, **kwargs)\n return {'success': True}\n except Exception as e:\n return {'error': str(e)}\n return inner\n\nREAD_BLOCK = 8192\n\n\ndef file_hash(fname):\n h = hashlib.sha1()\n with open(fname, 'rb') as f:\n s = f.read(READ_BLOCK)\n if not s:\n raise ValueError(\"Empty file!\")\n while s:\n h.update(s)\n s = f.read(READ_BLOCK)\n return h.hexdigest()\n\n\ndef hash_pwd(p):\n if isinstance(p, str):\n p = p.encode('utf-8')\n return bcrypt.hashpw(p, bcrypt.gensalt()).decode('ascii')\n\n\ndef check_pwd(p, hash):\n if isinstance(p, str):\n p = p.encode('utf-8')\n if isinstance(hash, str):\n hash = hash.encode('ascii')\n return hash == bcrypt.hashpw(p, hash)\n\n\ndef create_token(user, secret, valid_minutes=24 * 60):\n token = jwt.encode({'id': user.id,\n 'user_name': user.user_name,\n 'email': user.email,\n 'roles': list(user.all_roles),\n 'exp': datetime.utcnow() + timedelta(hours=valid_minutes)}, secret, algorithm='HS256')\n \n return token.decode('ascii')\n\n\ndef verify_token(token, secret):\n try:\n token = token.encode('ascii')\n except UnicodeEncodeError:\n logger.exception('Invalid token - char encoding')\n return\n try:\n claim = jwt.decode(token, secret)\n except jwt.InvalidTokenError:\n logger.exception('Invalid token')\n return None\n return claim\n\n\ndef extract_token(token):\n try:\n return jwt.decode(token, verify=False)\n except jwt.InvalidTokenError as e:\n raise ValueError('Invalid token %s' % e)\n\n\ndef initials(name):\n names = name.split()\n return ' '.join(map(lambda n: n[0].upper(), names))\n\nnd_charmap = {\n u'\\N{Latin capital letter AE}': 'AE',\n u'\\N{Latin small letter ae}': 'ae',\n u'\\N{Latin capital letter Eth}': 'D',\n u'\\N{Latin small letter eth}': 'd',\n u'\\N{Latin capital letter O with stroke}': 'O',\n u'\\N{Latin small letter o with stroke}': 'o', #\n u'\\N{Latin capital letter Thorn}': 'Th',\n u'\\N{Latin small letter thorn}': 'th',\n u'\\N{Latin small letter sharp s}': 's',\n u'\\N{Latin capital letter D with stroke}': 'D',\n u'\\N{Latin small letter d with stroke}': 'd',\n u'\\N{Latin capital letter H with stroke}': 'H',\n u'\\N{Latin small letter h with stroke}': 'h',\n u'\\N{Latin small letter dotless i}': 'i',\n u'\\N{Latin small letter kra}': 'k',\n u'\\N{Latin capital letter L with stroke}': 'L',\n u'\\N{Latin small letter l with stroke}': 'l',\n u'\\N{Latin capital letter Eng}': 'N',\n u'\\N{Latin small letter eng}': 'n',\n u'\\N{Latin capital ligature OE}': 'Oe',\n u'\\N{Latin small ligature oe}': 'oe',\n u'\\N{Latin capital letter T with stroke}': 'T',\n u'\\N{Latin small letter t with stroke}': 't',\n}\n\n\ndef remove_diacritics(text):\n \"Removes diacritics from the string\"\n if not text:\n return text\n s = unicodedata.normalize('NFKD', text)\n b = []\n for ch in s:\n if unicodedata.category(ch) != 'Mn':\n if ch in nd_charmap:\n b.append(nd_charmap[ch])\n elif ord(ch) < 128:\n b.append(ch)\n else:\n b.append(' ')\n return ''.join(b)\n"
},
{
"alpha_fraction": 0.5823336839675903,
"alphanum_fraction": 0.5839694738388062,
"avg_line_length": 22.512821197509766,
"blob_id": "0eb9413451c35ebd5c755bc75d883258b87f11ba",
"content_id": "5213f3a90f1a27e8511e36a2d1e03afdc510b211",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 1834,
"license_type": "no_license",
"max_line_length": 106,
"num_lines": 78,
"path": "/client/src/lib/notification.js",
"repo_name": "lioenel/mybookshelf2",
"src_encoding": "UTF-8",
"text": "import {LogManager, inject} from 'aurelia-framework';\nimport {Configure} from 'lib/config/index'\nimport {EventAggregator} from 'aurelia-event-aggregator';\n\nlet logger=LogManager.getLogger('notifications');\nconst MAX_SIZE=20;\n\n@inject(Configure, EventAggregator)\nexport class Notification {\n _dirty = false;\n _ns=[];\n _details=new Map()\n\n constructor(config, event) {\n this.event = event;\n if (config.get('debug')) {\n this._ns=['aaaaa'];\n this._details.set('aaaaa',{text:'Extra metadata from file xxxx',\n 'task': 'metadata',\n 'start': new Date(),\n 'status':'success',\n 'result': 3});\n }\n }\n start(taskId, taskInfo) {\n\n this._ns.unshift(taskId);\n taskInfo.start=new Date();\n this._details.set(taskId, taskInfo);\n if (this._ns.length > MAX_SIZE) {\n let k = this._ns.pop();\n this._details.delete(k);\n }\n this._dirty=true;\n }\n\n update(taskId, obj) {\n if (this._details.has(taskId)) {\n Object.assign(this._details.get(taskId), obj);\n this._dirty = true;\n logger.debug(`Task updated ${taskId}`);\n if (this._details.get(taskId).task === 'metadata') {\n if (obj.status === 'success') this.event.publish('metadata-ready', {taskId, result: obj.result});\n else if (obj.status === 'error') this.event.publish('metadata-error', {taskId, error: obj.error});\n }\n\n } else {\n logger.warn(`Update for uknown task ${taskId}`);\n }\n\n }\n\n markDone(taskId) {\n if (this._details.has(taskId)) {\n this._details.get(taskId).done = true;\n }\n }\n\n get items() {\n let a = [];\n for (let taskId of this._ns) {\n let notif = this._details.get(taskId);\n a.push(notif);\n }\n return a;\n }\n\n get dirty() {\n return this._dirty;\n }\n\n resetDirty() {\n this._dirty=false;\n }\n\n\n\n}\n"
},
{
"alpha_fraction": 0.5497382283210754,
"alphanum_fraction": 0.5607329607009888,
"avg_line_length": 31.94827651977539,
"blob_id": "4b05c43221bbe001110f3be9ff93e4d1791fe483",
"content_id": "3a5def4104c8cf322c021dc0ae8a3f9302fa865d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1910,
"license_type": "no_license",
"max_line_length": 115,
"num_lines": 58,
"path": "/app/tests/test_schema.py",
"repo_name": "lioenel/mybookshelf2",
"src_encoding": "UTF-8",
"text": "'''\nCreated on Apr 21, 2016\n\n@author: ivan\n'''\nimport unittest\nfrom app import app,db\nimport app.schema as schema\nimport app.model as model\nfrom .basecase import TestCase\n\n#app.config['SQLALCHEMY_ECHO']=True\nclass Test(TestCase):\n\n\n def test_schema_ebook(self):\n \n ebook_data={'title': 'Sedm lumpu slohlo pumpu'}\n errors=schema.ebook_deserializer_insert().validate(ebook_data)\n self.assertFalse(errors)\n errors=schema.ebook_deserializer_insert().validate({})\n self.assertTrue(errors)\n \n \n ebook_data={'title':'Povidky o nicem', 'language':'cs', \n 'rating':100,\n 'authors':[{'first_name':'Jan', 'last_name': 'Kan'}],\n 'series':{'id':5, 'title':'Za co'},\n 'series_index':1,\n 'genres':[ {'id':23,'name':'Romance'}, {'id':9, 'name':'Fantasy'}, {'id':13, 'name':'Horror'}],\n 'version_id':1\n }\n eb,errors=schema.ebook_deserializer_update().load(ebook_data)\n print (errors)\n self.assertFalse(errors)\n \n self.assertEqual(eb.title, ebook_data['title'])\n db.session.flush()\n self.assertFalse(db.session.new)\n self.assertFalse(db.session.dirty)\n db.session.add(eb)\n db.session.commit()\n \n self.assertEqual(eb.authors[0].last_name, ebook_data['authors'][0]['last_name'] )\n self.assertEqual(eb.series.title, ebook_data['series']['title'])\n \n db.session.remove()\n \n eb=model.Ebook.query.filter_by(title='Povidky o nicem').one()\n self.assertEqual(eb.authors[0].last_name, ebook_data['authors'][0]['last_name'] )\n self.assertEqual(eb.series.title, ebook_data['series']['title'])\n \n \n\n\nif __name__ == \"__main__\":\n #import sys;sys.argv = ['', 'Test.testName']\n unittest.main()"
},
{
"alpha_fraction": 0.44771528244018555,
"alphanum_fraction": 0.4512302279472351,
"avg_line_length": 40.38181686401367,
"blob_id": "9e76cd6e31dacb2b2a748a3ed37e1462fe6ad76b",
"content_id": "c9f2dec92e067ae9bed2e22975db7d1b70730163",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2276,
"license_type": "no_license",
"max_line_length": 100,
"num_lines": 55,
"path": "/engine/dal.py",
"repo_name": "lioenel/mybookshelf2",
"src_encoding": "UTF-8",
"text": "import aiopg\nfrom aiopg.sa import create_engine\nimport settings\nimport asyncio\nimport os\nimport sys\nimport app.model as model\nfrom psycopg2.extras import Json\nfrom sqlalchemy.sql import select, or_\n\nDSN = 'dbname={db} user={user} password={password} host={host}'.format(db=settings.DB_NAME,\n host=settings.DB_HOST,\n user=settings.DB_USER,\n password=settings.DB_PASSWORD\n )\n\n\nengine = None\n\n\ndef init():\n global engine\n loop = asyncio.get_event_loop()\n engine = loop.run_until_complete(create_engine(DSN))\n\n\ndef close(wait=False):\n engine.close()\n if wait:\n loop = asyncio.get_event_loop()\n loop.run_until_complete(engine.wait_closed())\n\n\nasync def add_upload(fname, cover, meta, size, hash, user_email):\n async with engine.acquire() as conn:\n format = model.Format.__table__\n ext = os.path.splitext(fname)[1].lower()[1:]\n res = await conn.execute(select([format.c.id]).where(format.c.extension == ext))\n format_id = (await res.fetchone())[0]\n user = model.User.__table__\n res = await conn.execute(select([user.c.id]).where(user.c.email == user_email))\n user_id = (await res.fetchone())[0]\n upload = model.Upload.__table__\n source_name = os.path.split(fname)[-1]\n res = await conn.execute(upload.insert().values(file=fname, cover=cover,\n load_source=source_name,\n size=size, hash=hash,\n format_id=format_id,\n version_id=1,\n created_by_id=user_id,\n modified_by_id=user_id,\n meta=meta\n ))\n new_id = (await res.fetchone())[0]\n return new_id\n"
},
{
"alpha_fraction": 0.5780245661735535,
"alphanum_fraction": 0.6031560301780701,
"avg_line_length": 33.551021575927734,
"blob_id": "e585d80d960cbbb17488f0547e221ac1178fd594",
"content_id": "efacae97af19a8c51821ef7eb7ba36504d21a6cb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1714,
"license_type": "no_license",
"max_line_length": 208,
"num_lines": 49,
"path": "/engine/tests/test_meta.py",
"repo_name": "lioenel/mybookshelf2",
"src_encoding": "UTF-8",
"text": "import unittest\nfrom unittest.mock import Mock\nimport engine.dal as dal\nimport shutil\nimport os.path\nimport asyncio\n\nfrom settings import UPLOAD_DIR\nfrom engine.tasks import MetadataTask\n\n\nclass TestMeta(unittest.TestCase):\n\n def setUp(self):\n fname = os.path.join(os.path.dirname(__file__),\n '../../app/data/books/Adams, Douglas/Stoparuv pruvodce/Stoparuv pruvodce 1 - Stoparuv pruvodce po Galaxii/Adams, Douglas - Stoparuv pruvodce 1 - Stoparuv pruvodce po Galaxii.epub'\n )\n shutil.copy(fname, UPLOAD_DIR)\n self.fname = os.path.split(fname)[1]\n\n def tearDown(self):\n try:\n os.remove(os.path.join(UPLOAD_DIR, self.fname))\n except IOError:\n pass\n\n def test_meta1(self):\n result=[]\n upload = Mock(side_effect=lambda *args: result.extend(args) or 1)\n async def dummy(*args, **kwargs):\n return upload(*args, **kwargs)\n dal.add_upload = dummy\n \n t = MetadataTask(user='ivan')\n loop = asyncio.get_event_loop()\n res = loop.run_until_complete(t.run(self.fname))\n self.assertEqual(res, 1)\n \n \n meta = result[2]\n self.assertEqual(meta['authors'], ['Douglas Adams'])\n self.assertEqual(meta['title'], 'Stopařův průvodce po Galaxii')\n self.assertEqual(meta['tags'], ['Fantasy', 'Humor', 'Sci-fi'])\n self.assertEqual(result[3], 200134)\n self.assertEqual(result[4], 'f75a07f5ad1da3a27035742eb978868b1a912a1a')\n self.assertEqual(result[5], 'ivan')\n cover = result[1]\n self.assertTrue(cover)\n os.remove(os.path.join(UPLOAD_DIR, cover))\n \n \n"
},
{
"alpha_fraction": 0.6576354503631592,
"alphanum_fraction": 0.6600984930992126,
"avg_line_length": 26.03333282470703,
"blob_id": "46e1b0d7c521c1ee0a87393aee3a3777815dba95",
"content_id": "2879887d06d767cb591a579dacc4c853d858d22e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 812,
"license_type": "no_license",
"max_line_length": 82,
"num_lines": 30,
"path": "/app/minimal.py",
"repo_name": "lioenel/mybookshelf2",
"src_encoding": "UTF-8",
"text": "from flask import Blueprint, render_template, request, flash\nfrom flask_login import login_required\nimport app.model as model\nimport app.logic as logic\nimport app.access as access\n\nbp = Blueprint('minimal', __name__)\n\n\[email protected]('/search', methods=['GET'])\n@login_required\ndef search():\n search = ''\n ebooks = None\n if request.args.get('search'):\n search = request.args['search'].strip()\n\n if search:\n ebooks = logic.search_query(model.Ebook.query, search).limit(50).all()\n if not ebooks:\n flash('No ebooks found!')\n\n return render_template('search.html', search=search, ebooks=ebooks)\n\n\[email protected]('/ebooks/<int:id>')\n@login_required\ndef ebook_detail(id):\n ebook = model.Ebook.query.get(id)\n return render_template('ebook.html', ebook=ebook)\n\n"
},
{
"alpha_fraction": 0.680311918258667,
"alphanum_fraction": 0.6816114187240601,
"avg_line_length": 26.9818172454834,
"blob_id": "e50be3570041f6c90f8138f1db73cec19cedfec1",
"content_id": "fd67957edbe6bf6e4cc5f574ae0ee947bb58be41",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1539,
"license_type": "no_license",
"max_line_length": 83,
"num_lines": 55,
"path": "/engine/backend.py",
"repo_name": "lioenel/mybookshelf2",
"src_encoding": "UTF-8",
"text": "import os.path\nimport sys\nimport asyncio\nimport logging\nfrom asexor.runner import ApplicationRunnerRawSocket\nfrom asexor.executor import Executor\nfrom asexor.config import Config\nfrom asexor.task import load_tasks_from\nfrom autobahn.wamp.exception import ApplicationError\nimport time\nimport engine.dal as dal\n\n# sys.path.append(os.path.join(os.path.dirname(__file__),'..'))\nfrom app.utils import verify_token\nimport settings\n\nlog = logging.getLogger('engine')\n\n\ndef authenticate(realm, user_id, details):\n log.debug('Got auth request for %s')\n token = details.get('ticket')\n payload = verify_token(token, settings.SECRET_KEY)\n if payload and user_id == payload['email']:\n if 'user' in payload['roles']:\n log.debug('Authenticaticated user %s to role user', user_id)\n return 'user'\n return 'anonymous'\n\n\nif __name__ == '__main__':\n import argparse\n\n # for testing\n load_tasks_from('engine.tasks')\n\n parser = argparse.ArgumentParser()\n parser.add_argument(\n '-d', '--debug', action='store_true', help='enable debug')\n opts = parser.parse_args()\n level = 'info'\n if opts.debug:\n level = 'debug'\n\n Config.AUTHENTICATION_PROCEDUTE = authenticate\n Config.AUTHENTICATION_PROCEDURE_NAME = \"eu.zderadicka.mybookshelf.authenticate\"\n\n path = os.path.join(os.path.dirname(__file__), '.crossbar/socket1')\n runner = ApplicationRunnerRawSocket(\n path,\n u\"realm1\",\n )\n dal.init()\n runner.run(Executor, logging_level=level)\n dal.close()\n"
},
{
"alpha_fraction": 0.6960486173629761,
"alphanum_fraction": 0.6960486173629761,
"avg_line_length": 28.81818199157715,
"blob_id": "41e1461ee0bc169702017c9101fafd5db2302bcd",
"content_id": "15d2ebb523f9b8088efbaa148e794edca71790d0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 329,
"license_type": "no_license",
"max_line_length": 88,
"num_lines": 11,
"path": "/client/src/components/ebook-panel.js",
"repo_name": "lioenel/mybookshelf2",
"src_encoding": "UTF-8",
"text": "\nimport {inject, bindable, LogManager} from 'aurelia-framework';\nconst logger=LogManager.getLogger('ebooks-panel');\n\nexport class EbookPanel {\n @bindable sortings=[{name:'Title A-Z', key:'title'}, {name:'Title Z-A',key:'-title'}];\n @bindable loader;\n\n loaderChanged() {\n logger.debug('Loader changed in EbookPanel');\n }\n}\n"
},
{
"alpha_fraction": 0.6526610851287842,
"alphanum_fraction": 0.6526610851287842,
"avg_line_length": 25.44444465637207,
"blob_id": "f224fec1ff88c1bcdcc683a3fc57f29f118caf6b",
"content_id": "5681f3f4857b603fbd89d4f18089cd2d331ea1fc",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 714,
"license_type": "no_license",
"max_line_length": 76,
"num_lines": 27,
"path": "/client/test/unit/config.spec.js",
"repo_name": "lioenel/mybookshelf2",
"src_encoding": "UTF-8",
"text": "import {Configure} from 'lib/config/index';\nimport {DefaultLoader} from 'aurelia-loader-default';\n\n\ndescribe('When using Configure', () => {\n\nlet loader = new DefaultLoader();\nvar conf = null;\n\nbeforeEach(() => {\n conf = new Configure(loader);\n})\nit('it should contain default instance properties', () => {\nexpect(conf._config).toBeDefined();\nexpect(conf.loader).not.toBeNull();\nexpect(conf.loader).not.toBeNull();\n});\n\nit('it should load config with values', (done) =>{\n conf.loadFile().then(() => {\n expect(conf.get('version')).toEqual(jasmine.stringMatching(/^[\\d.]+$/));\n expect(conf.get('api.host')).toBeDefined();\n expect(typeof(conf.get('api.host'))).toEqual('string');\n done();\n });\n});\n});\n"
},
{
"alpha_fraction": 0.49845200777053833,
"alphanum_fraction": 0.5541795492172241,
"avg_line_length": 20.46666717529297,
"blob_id": "abea5710c72697a0cd3bcd7faae0e12a44399456",
"content_id": "a2cfdb45c4faf2cb757c94831fb537be5c9115ed",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 323,
"license_type": "no_license",
"max_line_length": 54,
"num_lines": 15,
"path": "/server.py",
"repo_name": "lioenel/mybookshelf2",
"src_encoding": "UTF-8",
"text": "#! /usr/bin/env python\n\nimport sys\nfrom app import app\n\n\nif __name__ == \"__main__\":\n dbg=True\n host='127.0.0.1'\n if len(sys.argv)>1 and 'NO_DEBUG' in sys.argv[1:]:\n dbg=False\n if len(sys.argv)>1 and 'VISIBLE' in sys.argv[1:]:\n host='0.0.0.0'\n \n app.run(debug=dbg, host=host, port=6006)\n\n"
},
{
"alpha_fraction": 0.4678456485271454,
"alphanum_fraction": 0.4678456485271454,
"avg_line_length": 26.66666603088379,
"blob_id": "9883325d7055385ff647c0d5f0a618b968bf3a16",
"content_id": "207ce53d4bbf6b1bfe0be2585f345555571c741c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1244,
"license_type": "no_license",
"max_line_length": 100,
"num_lines": 45,
"path": "/engine/dal2.py",
"repo_name": "lioenel/mybookshelf2",
"src_encoding": "UTF-8",
"text": "import aiopg\nimport aiopg.sa\nimport settings\nimport asyncio\nimport os\nimport sys\n\nDSN = 'dbname={db} user={user} password={password} host={host}'.format(db=settings.DB_NAME,\n host=settings.DB_HOST,\n user=settings.DB_USER,\n password=settings.DB_PASSWORD\n )\n\n\npool = None\n\n\ndef init():\n global pool\n loop = asyncio.get_event_loop()\n pool = loop.run_until_complete(aiopg.create_pool(DSN))\n\n\ndef close(wait=False):\n pool.terminate()\n if wait:\n loop = asyncio.get_event_loop()\n loop.run_until_complete(pool.wait_closed())\n\n\nif __name__ == '__main__':\n print (sys.version)\n init()\n assert pool\n async def do():\n async with pool.acquire() as conn:\n async with conn.cursor() as cur:\n await cur.execute('select * from format')\n async for row in cur:\n print(row)\n \n loop=asyncio.get_event_loop()\n loop.run_until_complete(do())\n \n close()"
},
{
"alpha_fraction": 0.5905511975288391,
"alphanum_fraction": 0.5905511975288391,
"avg_line_length": 20.16666603088379,
"blob_id": "8b86d74007c63fb1dac1d8d51bda34e44175f7f6",
"content_id": "1bb6d473d8a74c4573ff9aac581b2bae8e558e75",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 127,
"license_type": "no_license",
"max_line_length": 41,
"num_lines": 6,
"path": "/client/src/components/list-converter.js",
"repo_name": "lioenel/mybookshelf2",
"src_encoding": "UTF-8",
"text": "export class ListValueConverter {\n toView(list) {\n if (!list || !list.length) return '';\n return list.join(', ');\n }\n}\n"
},
{
"alpha_fraction": 0.7558139562606812,
"alphanum_fraction": 0.7596899271011353,
"avg_line_length": 31.375,
"blob_id": "a1b57f10850b3cb9fefb0d298abc9769330ae39a",
"content_id": "0d35f6be1a60c5666295922fd8144f648e8e0fd4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 258,
"license_type": "no_license",
"max_line_length": 96,
"num_lines": 8,
"path": "/README.md",
"repo_name": "lioenel/mybookshelf2",
"src_encoding": "UTF-8",
"text": "MyBookshelf2\n============\n\nExperimental project to test new web technologies for next generation of MyBookshelf.\n\nTesting now Aurelia and Eve (but will prelace Eve with something else Hug? or some Node stuff).\n\n**DEFINITELLY NOT READY FOR ANY EXTERNAL USE**"
},
{
"alpha_fraction": 0.5971435308456421,
"alphanum_fraction": 0.5998640060424805,
"avg_line_length": 35.16393280029297,
"blob_id": "88d3d5e454bee0806871af7315f43ded2a1416dc",
"content_id": "a9f70330f4d6b5792d2a4a6c581c4e80fa4182cb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4411,
"license_type": "no_license",
"max_line_length": 121,
"num_lines": 122,
"path": "/app/access.py",
"repo_name": "lioenel/mybookshelf2",
"src_encoding": "UTF-8",
"text": "from flask import request, redirect, flash, render_template, url_for, jsonify, Blueprint, abort\nfrom flask_login import LoginManager, login_user, logout_user, current_user, login_required\nfrom functools import wraps\nfrom app.utils import check_pwd, create_token, verify_token\nimport app.model as model\nfrom sqlalchemy import or_\nfrom sqlalchemy.orm.exc import NoResultFound\nimport logging\nlogger=logging.getLogger('access')\nfrom app.cors import cors_enabled\n\n\nbp=Blueprint('access', __name__)\nlm=LoginManager()\n\n\nSECRET_KEY=''\nTOKEN_VALIDITY_HOURS=4\[email protected]_once\ndef on_load(state):\n global SECRET_KEY, TOKEN_VALIDITY_HOURS\n lm.init_app(state.app)\n SECRET_KEY=state.app.config.get('SECRET_KEY')\n TOKEN_VALIDITY_HOURS=state.app.config.get('TOKEN_VALIDITY_HOURS')\n \[email protected]_loader\ndef load_user_from_request(request):\n user_token = request.args.get('bearer_token')\n if not user_token:\n token=request.headers.get('Authorization')\n if token and token.lower().startswith('bearer '):\n user_token=token[7:].strip() \n if not user_token:\n return\n \n claim=verify_token(user_token, SECRET_KEY)\n if claim:\n user=model.User.query.get(claim['id']) # @UndefinedVariable\n if user and user.is_active:\n return user\n \n # finally, return None if both methods did not login the user\n return None\n\[email protected]_loader\ndef load_user(user_id):\n return model.User.query.get(int(user_id)) # @UndefinedVariable\n\ndef role_required(*roles): \n def wrapper(fn):\n @wraps(fn)\n def inner(*args,**kwargs):\n user=current_user\n if not(user.is_authenticated and user.has_role(*roles)):\n abort(401, 'Access denied')\n return fn(*args, **kwargs)\n return inner\n return wrapper\n\ndef role_required_owning(*roles, if_own=None, owning_role=None):\n assert roles or (if_own and owning_role)\n user=current_user\n if user.is_authenticated and user.has_role(*roles):\n return\n if if_own and if_own.created_by == user and user.is_authenticated and user.has_role(owning_role):\n return\n abort(401, 'Access denied')\n\[email protected]('/login', methods=['GET', 'POST'])\n@cors_enabled\ndef login():\n def check_user(username, pwd):\n user= model.User.query.filter(or_(model.User.user_name == username, # @UndefinedVariable\n model.User.email ==username)).one_or_none() # @UndefinedVariable\n if user and check_pwd(pwd, user.password):\n return user\n \n username=\"\"\n if request.method=='POST':\n if request.mimetype == 'application/json':\n credentials=request.get_json()\n \n if credentials and (credentials.get('email') or credentials.get('username')) and credentials.get('password'):\n q=model.User.query\n if 'username' in credentials:\n q=q.filter(model.User.user_name == credentials['username'])\n else:\n q=q.filter(model.User.email == credentials['email'] )\n \n try:\n user=q.one()\n except NoResultFound:\n return jsonify(error= 'Invalid Login')\n if not user.is_active:\n return jsonify(error= 'Invalid Login')\n if not check_pwd(credentials['password'], user.password):\n return jsonify(error= 'Invalid Login')\n resp= jsonify(access_token=create_token(user, SECRET_KEY, TOKEN_VALIDITY_HOURS or 4))\n #resp.headers.extend(cors_headers)\n return resp\n else:\n logger.info('Failed JSON login with %s', credentials)\n abort(400,'Provide credentials')\n else:\n \n user=check_user(request.form['username'], request.form['password'] )\n \n if user:\n logger.info('User logged in %s ', user.user_name)\n login_user(user)\n #request.args.get(\"next\")\n return redirect('/')\n else:\n flash('Invalid user 
name or password!')\n \n return render_template('login.html', username=username)\n\[email protected]('/logoff')\n@login_required\ndef logoff():\n logout_user()\n return redirect(url_for('access.login'))"
},
{
"alpha_fraction": 0.7336448431015015,
"alphanum_fraction": 0.7336448431015015,
"avg_line_length": 18.454545974731445,
"blob_id": "0d0214b98455decfa8875b23b405e64e7cd647e7",
"content_id": "9b8256293a4da3cd4c1c5c389adfbfc52fa6417b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 214,
"license_type": "no_license",
"max_line_length": 53,
"num_lines": 11,
"path": "/client/src/components/notifications-drawer.js",
"repo_name": "lioenel/mybookshelf2",
"src_encoding": "UTF-8",
"text": "import {Notification} from 'lib/notification';\nimport {inject, LogManager} from 'aurelia-framework';\n\n@inject(Notification)\nexport class NotificationsDrawer {\n\n constructor(notif) {\n this.notif=notif;\n }\n\n}\n"
},
{
"alpha_fraction": 0.46262189745903015,
"alphanum_fraction": 0.48591548204421997,
"avg_line_length": 37.45833206176758,
"blob_id": "4d9b72f2c3a6364d0ff7023062d17afdd7714462",
"content_id": "1bdd7d4845d97c140f4c15dd10a4e16d005b529a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1846,
"license_type": "no_license",
"max_line_length": 121,
"num_lines": 48,
"path": "/engine/tests/test_dal.py",
"repo_name": "lioenel/mybookshelf2",
"src_encoding": "UTF-8",
"text": "import unittest\nimport asyncio\nimport engine.dal as dal\nfrom settings import Testing\n\nfrom app.tests.basecase import TestCase\nimport app.model as model\n\ndal.DSN = 'dbname={db} user={user} password={password} host={host}'.format(db=Testing.DB_NAME,\n host=Testing.DB_HOST,\n user=Testing.DB_USER,\n password=Testing.DB_PASSWORD\n )\n\n\nclass TestDAL(TestCase):\n\n def test_dal1(self):\n from app.model import Format\n dal.init()\n assert dal.engine\n fmt_table = Format.__table__\n formats = []\n async def do():\n async with dal.engine.acquire() as conn:\n async for row in conn.execute(fmt_table.select(fmt_table)):\n formats.append(row.mime_type)\n\n loop = asyncio.get_event_loop()\n loop.run_until_complete(do())\n self.assertEqual(len(formats), 17)\n self.assertTrue('application/x-aportisdoc' in formats)\n\n # ----\n insert = dal.add_upload(fname='ebook.epub', cover='cover.jpg', meta={'authors': ['Douglas Adams'],\n 'title': 'Stoparuv pruvodce'}, size=1233455,\n hash='12345678901234567890123456789010',\n user_email='[email protected]')\n\n new_id = loop.run_until_complete(insert)\n self.assertEqual(new_id, 1)\n dal.close()\n\n upload = model.Upload.query.get(new_id)\n\n self.assertEqual(upload.created_by.email, '[email protected]')\n\n self.assertEqual(upload.meta['title'], 'Stoparuv pruvodce')\n"
},
{
"alpha_fraction": 0.5445082783699036,
"alphanum_fraction": 0.5459440350532532,
"avg_line_length": 24.099098205566406,
"blob_id": "5dbe16c61bd99e80e57a5f6cd305cbd8e983e1c8",
"content_id": "cbfff3ca661080e8c458054f8c2828f1af1d134d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 2786,
"license_type": "no_license",
"max_line_length": 119,
"num_lines": 111,
"path": "/client/src/app.js",
"repo_name": "lioenel/mybookshelf2",
"src_encoding": "UTF-8",
"text": "import { FetchConfig, AuthorizeStep} from 'aurelia-auth';\nimport {inject,LogManager,bindable} from 'aurelia-framework';\nimport { HttpClient} from 'aurelia-fetch-client';\nimport {Configure} from 'lib/config/index';\nimport {WSClient} from 'lib/ws-client';\nimport {Access} from 'lib/access';\n\nconst logger = LogManager.getLogger('app');\n@inject(Configure, FetchConfig, HttpClient, WSClient, Access)\nexport class App {\n constructor(config, fetchConfig, client, wsClient, access) {\n this.config = config;\n this.access=access;\n fetchConfig.configure();\n client.configure(conf => conf\n .withBaseUrl(`http://${this.config.get('api.hostname',window.location.hostname)}:${this.config.get('api.port')}`)\n\n .withInterceptor({\n response: response => {\n if (response && response.status == 401) {\n logger.warn('Not authenticated!');\n this.router.navigateToRoute('login');\n throw new Error('Not autherticated!');\n\n }\n return response;\n }\n })\n );\n\n }\n\n configureRouter(config, router) {\n config.title = 'MyBookshelf2';\n config.addPipelineStep('authorize', AuthorizeStep);\n config.map([{\n route: ['', 'welcome'],\n name: 'welcome',\n moduleId: 'pages/welcome',\n nav: true,\n title: 'Welcome'\n }, {\n route: 'ebooks',\n name: 'ebooks',\n moduleId: 'pages/ebooks',\n nav: true,\n title: 'Ebooks',\n auth: true\n }, {\n route: 'login',\n name: 'login',\n moduleId: 'pages/login',\n title: 'Login'\n }, {\n route: 'ebook/:id',\n name: 'ebook',\n moduleId: 'pages/ebook',\n title: 'Ebook',\n auth: true\n }, {\n route: 'search/:query',\n name: 'search',\n moduleId: 'pages/search',\n title: 'Search Results',\n auth: true\n }, {\n route: ['author/:id'],\n name: 'author',\n moduleId: 'pages/author',\n title: 'Authors books',\n auth: true\n }, {\n route: 'upload',\n name: 'upload',\n moduleId: 'pages/upload',\n title: 'Upload Ebook',\n nav: true,\n auth: true\n },\n {\n route: 'upload-result/:id',\n name : 'upload-result',\n moduleId: 'pages/upload-result',\n title: 'Upload results',\n auth: true\n },\n {\n route: 'test',\n name : 'test',\n moduleId: 'test/test-page',\n title: 'Just testing'\n }\n ]);\n\n this.router = router;\n }\n\n activate() {\n this.access.signalState();\n }\n\n isAuthenticated() {\n return this.access.authenticated;\n }\n\n doSearch(query) {\n this.router.navigateToRoute('search', {\n query: encodeURIComponent(query)\n });\n }\n}\n"
},
{
"alpha_fraction": 0.6272421479225159,
"alphanum_fraction": 0.6328475475311279,
"avg_line_length": 30.875,
"blob_id": "e4024d5d8fc2c1f3a02cd29b0b71463215cf0b94",
"content_id": "e4a1b04a022d10a62a6970acf04df60d0f625598",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1784,
"license_type": "no_license",
"max_line_length": 105,
"num_lines": 56,
"path": "/engine/client.py",
"repo_name": "lioenel/mybookshelf2",
"src_encoding": "UTF-8",
"text": "from autobahn.asyncio.wamp import ApplicationSession\nfrom asyncio import coroutine, get_event_loop\nimport logging\nimport requests\nimport os.path\nimport sys\n\nsys.path.append(os.path.join(os.path.dirname(__file__),'..'))\nfrom app.utils import extract_token\n\nlog=logging.getLogger('client')\n\nclass ClientSession(ApplicationSession):\n # must be set before session\n TOKEN=''\n USER=''\n \n def onConnect(self):\n log.debug('Connected')\n self.join(self.config.realm, [u\"ticket\"], self.USER)\n \n def onChallenge(self, ch):\n if ch.method=='ticket':\n log.debug('Got challenge %s', ch)\n return self.TOKEN\n else:\n raise Exception('Invalid authentication method')\n \n @ coroutine\n def onJoin(self, details):\n log.debug('Session joined %s', details)\n \n self.subscribe(lambda t: print('#Notification', t), 'eu.zderadicka.mybookshelf2.heartbeat')\n \n def onLeave(self, details):\n log.debug(\"Leaving session %s\", details)\n self.disconnect()\n \n def onDisconnect(self):\n log.debug('Disconnected')\n get_event_loop().stop()\n \n \n \nif __name__=='__main__':\n #logging.basicConfig(level=logging.DEBUG)\n log.setLevel(logging.DEBUG)\n resp=requests.post('http://localhost:6006/login', json={'username':'admin', 'password':'admin'})\n token = resp.json().get('access_token')\n if token:\n ClientSession.TOKEN=token\n ClientSession.USER=extract_token(token)['email']\n print('Starting client for user %s'% (ClientSession.USER))\n from autobahn.asyncio.wamp import ApplicationRunner\n app=ApplicationRunner(url='ws://localhost:8080/ws', realm='realm1')\n app.run(ClientSession)"
},
{
"alpha_fraction": 0.601123571395874,
"alphanum_fraction": 0.601123571395874,
"avg_line_length": 28.66666603088379,
"blob_id": "b5ed6aae9567d18f0042b7275dcecf18dfa3ba6c",
"content_id": "61f886b10a3e064f95ee99a49abe50685df40e64",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 178,
"license_type": "no_license",
"max_line_length": 93,
"num_lines": 6,
"path": "/client/src/components/authors-converter.js",
"repo_name": "lioenel/mybookshelf2",
"src_encoding": "UTF-8",
"text": "export class AuthorsValueConverter {\n toView(val) {\n if (!val) return '';\n return val.map(a => a.firstname ? `${a.firstname} ${a.lastname}`: a.lastname).join(', ');\n }\n}\n"
},
{
"alpha_fraction": 0.519133985042572,
"alphanum_fraction": 0.5395639538764954,
"avg_line_length": 36.056495666503906,
"blob_id": "e838649771daad2c7545ac037e20397404e6e9f4",
"content_id": "39669194bc89da8e8e1802e566770667e2e637d2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 6579,
"license_type": "no_license",
"max_line_length": 111,
"num_lines": 177,
"path": "/app/tests/test_api.py",
"repo_name": "lioenel/mybookshelf2",
"src_encoding": "UTF-8",
"text": "from .basecase import TestCase\nfrom urllib.parse import quote\nimport flask\nfrom flask_login import current_user\nimport app\n\n\nclass TestApi(TestCase):\n\n def __init__(self, *args, **kwargs):\n super(TestApi, self).__init__(*args, **kwargs)\n self.headers = None\n self.token = None\n\n def login(self, user='admin', pwd='admin'):\n res = self.client.post('/login', data='{\"username\":\"%s\", \"password\":\"%s\"}' % (user, pwd),\n content_type='application/json')\n token = res.json.get('access_token')\n self.assertTrue(token)\n self.headers = {'Authorization': 'Bearer %s' % token}\n self.token = token\n\n def __getattr__(self, name):\n name = name.upper()\n if name in ('GET', 'POST', 'DELETE', 'PUT', 'OPTIONS'):\n def req(*args, **kwargs):\n kwargs['method'] = name\n if 'headers' in kwargs:\n kwargs['headers'].update(self.headers)\n else:\n kwargs['headers'] = self.headers\n failure = kwargs.pop('failure', False)\n not_json = kwargs.pop('not_json', False)\n resp = self.client.open(*args, **kwargs)\n if not failure:\n self.assert200(resp)\n if not not_json and not failure:\n return resp.json\n return resp\n return req\n raise AttributeError('Attribute %s not found' % name)\n\n def test_api(self):\n res = self.get('/api/ebooks', failure=True)\n self.assert401(res)\n self.login()\n res = self.get('/api/ebooks')\n\n self.assertTrue(len(res['items']) > 5)\n self.assertEqual(res['page'], 1)\n # current user is anonymous outside of request\n self.assertFalse(current_user.is_authenticated)\n\n for b in res['items']:\n self.assertTrue(b['title'] and b['id'], 'Invalid ebook %s' % b)\n\n res = self.get(\n '/api/ebooks', query_string={'page': 1, 'page_size': 12, 'sort': 'title'})\n self.assertEqual(res['page'], 1)\n self.assertEqual(res['total'], 100)\n self.assertEqual(res['page_size'], 12)\n self.assertEqual(len(res['items']), 12)\n self.assertEqual(\n res['items'][0]['title'], 'Alenka v říši kvant - Alegorie kvantové fyziky')\n\n first_book = res['items'][0]\n\n res = self.get(\n '/api/ebooks', query_string={'page': 9, 'page_size': 12, 'sort': '-title'})\n self.assertEqual(res['page'], 9)\n self.assertEqual(len(res['items']), 4)\n self.assertEqual(res['total'], 100)\n self.assertEqual(res['page_size'], 12)\n self.assertEqual(\n res['items'][-1]['title'], 'Alenka v říši kvant - Alegorie kvantové fyziky')\n last_book = res['items'][-1]\n\n self.assertEqual(first_book, last_book)\n\n res = self.get(\n '/api/ebooks', query_string={'page': 9, 'page_size': 12, 'sort': 'blba'}, failure=True)\n self.assert400(res)\n\n res = self.get(\n '/api/ebooks', query_string={'page': 0, 'page_size': 12, 'sort': '-title'}, failure=True)\n self.assert400(res)\n\n res = self.get(\n '/api/ebooks', query_string={'page': 1, 'page_size': -1, 'sort': '-title'}, failure=True)\n self.assert400(res)\n\n res = self.get(\n '/api/ebooks', query_string={'page': 1, 'page_size': 101, 'sort': '-title'}, failure=True)\n self.assert400(res)\n\n res = self.get(\n '/api/ebooks', query_string={'page': 10, 'page_size': 12, 'sort': 'title'}, failure=True)\n self.assert404(res)\n\n id = first_book['id']\n\n res = self.get('/api/ebooks/%s' % id)\n self.assertEqual(res['id'], id)\n self.assertEqual(\n res['title'], 'Alenka v říši kvant - Alegorie kvantové fyziky')\n\n #------------\n res = self.get(\n '/api/authors', query_string={'page': 1, 'page_size': 50, 'sort': 'name'})\n self.assertEqual(len(res['items']), 50)\n self.assertEqual(res['total'], 102)\n self.assertEqual(res['items'][0]['last_name'], 'Adornetto')\n\n res = 
self.get(\n '/api/series', query_string={'page': 2, 'page_size': 14, 'sort': 'title'})\n self.assertEqual(res['total'], 28)\n self.assertEqual(res['items'][-1]['title'], 'Zář')\n\n res = self.get('/api/search/%s' % quote('Zápas boh'))\n self.assertEqual(res['total'], 1)\n self.assertEqual(res['items'][0]['title'], 'Podobni bohům')\n\n res = self.get('/api/search/%s' % quote('prip'))\n self.assertEqual(res['total'], 4)\n\n res = self.get('/api/search/%s' % quote('henry dome'))\n self.assertEqual(res['total'], 1)\n self.assertEqual(res['items'][0]['title'], 'Roky v Bílém domě')\n\n res = self.get('/api/ebooks/author/8015')\n self.assertEqual(res['total'], 4)\n self.assertEqual(len(res['items']), 4)\n\n def test_api2(self):\n #---------------\n self.login('guest', 'guest')\n res = self.get('/api/ebooks', failure=True)\n self.assert401(res)\n\n self.login('user', 'user')\n res = self.get('/api/ebooks')\n\n id = 62241\n res = self.delete('/api/ebooks/%d' % id, failure=True)\n self.assert401(res)\n\n self.login('superuser', 'superuser')\n\n res = self.delete('/api/ebooks/%d' % id)\n\n res = self.get('/api/ebooks/%s' % id, failure=True)\n print (res.json)\n self.assert404(res)\n\n res = self.get(\n '/api/download/86060', query_string={'bearer_token': self.token}, not_json=True)\n self.assertEqual(int(res.headers['Content-Length']), 3147900)\n self.assertEqual(res.headers['Content-Type'], 'application/epub+zip')\n self.assertEqual(len(res.data), 3147900)\n\n res = self.post('/api/upload/check', data='{\"mime_type\":\"application/pdf\", \"size\":10000, \"hash\":null}',\n content_type='application/json')\n self.assertEqual(res['result'], 'ok')\n\n # indexes\n res = self.get('/api/series/index/a')\n self.assertEqual(res['total'], 3)\n res = self.get('/api/series/index/á')\n self.assertEqual(res['total'], 3)\n res = self.get('/api/authors/index/c')\n self.assertEqual(res['total'], 4)\n res = self.get('/api/authors/index/č')\n self.assertEqual(res['total'], 4)\n\n res = self.get('/api/ebooks/index/r')\n self.assertEqual(res['total'], 1)\n print(res)\n"
},
{
"alpha_fraction": 0.5711501240730286,
"alphanum_fraction": 0.719298243522644,
"avg_line_length": 17.35714340209961,
"blob_id": "fcc538693fe1e31534207eb1c307ed3e92fbc0cb",
"content_id": "77ee3e9712840365dfb831d9da3fb3c5bb8c4c5a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 513,
"license_type": "no_license",
"max_line_length": 56,
"num_lines": 28,
"path": "/requirements.txt",
"repo_name": "lioenel/mybookshelf2",
"src_encoding": "UTF-8",
"text": "autobahn>=0.14.0\nbcrypt>=2.0.0\nblinker>=1.4\ncbor>=1.0.0\ncrossbar>=0.13.2\nFlask>=0.10.1\nFlask-Login>=0.3.2\nflask-marshmallow>=0.6.2\nFlask-RESTful>=0.3.5\nFlask-Script>=2.0.5\nFlask-SQLAlchemy>=2.1\nmarshmallow>=2.7.1\nmarshmallow-sqlalchemy>=0.8.1\nmsgpack-python>=0.4.7\npsycopg2>=2.6.1\nrequests>=2.9.1\nSQLAlchemy>=1.0.12\nSQLAlchemy-Utils>=0.32.2\nPyJWT>=1.4.0\naiopg>=0.9.2\n\n# development \npytest\ncoverage>=4.0.3\nipython>=4.1.2\njupyter>=1.0.0\nFlask-Testing>=0.4.2\n#git+https://github.com/mysql/mysql-connector-python.git"
},
{
"alpha_fraction": 0.5063725709915161,
"alphanum_fraction": 0.5147058963775635,
"avg_line_length": 37.45283126831055,
"blob_id": "d60d236d821e1d564cb4e1b0e5e9cc74a88ed8be",
"content_id": "ec86acbf931885db9dc803de48dd65f7aaac2357",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2040,
"license_type": "no_license",
"max_line_length": 138,
"num_lines": 53,
"path": "/settings.py",
"repo_name": "lioenel/mybookshelf2",
"src_encoding": "UTF-8",
"text": "import os.path\n\n_base_dir = os.path.dirname(__file__)\n\n\nDB_NAME = 'ebooks'\nDB_HOST = 'localhost'\nDB_USER = 'ebooks'\nDB_PASSWORD = 'ebooks'\n\nSQLALCHEMY_DATABASE_URI = 'postgresql://{user}:{password}@{host}/{db}'.format(db=DB_NAME,\n host=DB_HOST,\n user=DB_USER,\n password=DB_PASSWORD)\nSQLALCHEMY_TRACK_MODIFICATIONS = False\nSECRET_KEY = 'Pjk5EzGOcCOG5Rf1deqpZAvz17uUdZmWxJa3X/izSns'\nMAX_CONTENT_LENGTH = 100 * 1024 * 1024\n\nCACHE_CONTROL = 'max-age=20'\nCACHE_EXPIRES = 20\n\n# CORS\nCORS_HOSTS = '*' # or list of hosts ['localhost']\nCORS_PORTS = None # or (lower, upper) or []\nCORS_SECURE = False # if cors is allowed only for https\n\nUPLOAD_DIR = os.path.join(_base_dir, 'app/data/uploads')\nBOOKS_BASE_DIR = os.path.join(_base_dir, 'app/data/books')\nBOOKS_FILE_SCHEMA = \"%(author)s/%(title)s/%(author)s - %(title)s\"\n\nBOOKS_FILE_SCHEMA_SERIE = \"%(author)s/%(serie)s/%(serie)s %(serie_index)d - %(title)s/%(author)s - %(serie)s %(serie_index)d - %(title)s\"\nBOOKS_RECON_DIR = \"/books/books_recon\"\nBOOKS_DIR_UMASK = 0 # umask for uploaded files and their directories - consider that it should be RW for both web server and console user\n# Conversion related\nBOOKS_CONVERTED_DIR = \"/books/books_converted\"\nCONVERSION_FORMATS = ['epub', 'mobi', 'fb2']\n\n\nIMAGE_MAGIC = '/usr/bin/convert'\n\n\nclass Testing:\n DB_NAME = 'ebooks_test'\n DB_HOST = 'localhost'\n DB_USER = 'ebooks_test'\n DB_PASSWORD = 'ebooks'\n \n SQLALCHEMY_DATABASE_URI = 'postgresql://{user}:{password}@{host}/{db}'.format(db=DB_NAME,\n host=DB_HOST,\n user=DB_USER,\n password=DB_PASSWORD)\n TESTING = True\n DEBUG = True\n\n\n"
},
{
"alpha_fraction": 0.5713381767272949,
"alphanum_fraction": 0.5779816508293152,
"avg_line_length": 32.80748748779297,
"blob_id": "9560407dcc2442f9572b0ab51cb4dd8277fb388f",
"content_id": "452e8b6725db596ad6245d4e6735c5639c20d35a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 6322,
"license_type": "no_license",
"max_line_length": 110,
"num_lines": 187,
"path": "/app/logic.py",
"repo_name": "lioenel/mybookshelf2",
"src_encoding": "UTF-8",
"text": "from flask import abort, request, current_app, Response, jsonify\nfrom sqlalchemy.sql import text, desc, func\nfrom functools import wraps\nimport app.model as model\nimport app.schema as schema\nfrom sqlalchemy.orm.exc import NoResultFound\nfrom app.utils import remove_diacritics, file_hash\nimport os.path\n\nimport logging\nimport re\nlogger = logging.getLogger('logic')\n\n\ndef safe_int(v, for_=''):\n if v is None or v == ' ':\n return\n try:\n v = int(v)\n if v <= 0:\n abort(400, 'Not positive number %s' % for_)\n return v\n except ValueError:\n abort(400, 'Invalid number for %s' % for_)\n\n\ndef preprocess_search_query(text):\n tokens = text.split()\n tokens = map(lambda t: re.sub('[:.;]', ' ', t, re.UNICODE).strip(), tokens)\n return ' & '.join(['%s:*' % t for t in tokens])\n\n\ndef search_query(q, search):\n # works only for pg backend\n search = preprocess_search_query(search)\n return q.filter(model.Ebook.full_text.match(search))\\\n .order_by(desc(func.ts_rank_cd(model.Ebook.full_text, func.to_tsquery(text(\"'custom'\"), search))))\n\n\ndef filter_ebooks(q, filter):\n return q.filter(func.unaccent(model.Ebook.title).ilike(func.unaccent(text(\"'%%%s%%'\" % filter))))\n\n\ndef paginated(default_page_size=10, max_page_size=100, sortings=None):\n def wrapper(fn):\n @wraps(fn)\n def inner(*args, **kwargs):\n page_size = safe_int(\n request.args.get('page_size'), 'page_size') or default_page_size\n if page_size > max_page_size:\n abort(400, 'Page size bigger then maximum')\n kwargs['page_size'] = page_size\n kwargs['page'] = safe_int(request.args.get('page'), 'page') or 1\n sort_in = request.args.get('sort')\n if sortings:\n sort = sortings.get(sort_in)\n if sort_in and not sort:\n abort(400, 'Invalid sort key %s' % sort_in)\n kwargs['sort'] = sortings.get(request.args.get('sort'))\n else:\n if sort_in:\n abort(400, 'Sorting not supported')\n return fn(*args, **kwargs)\n return inner\n return wrapper\n\n\ndef paginate(q, page, page_size, sort, serializer):\n if sort:\n q = q.order_by(*sort)\n pager = q.paginate(page, page_size)\n return {'page': pager.page,\n 'page_size': pager.per_page,\n 'total': pager.total,\n 'items': serializer.dump(pager.items).data}\n\n\ndef norm_file_name(source):\n new_name_rel = norm_file_name_base(source.ebook)\n for ch in [':', '*', '%', '|', '\"', '<', '>', '?', '\\\\']:\n new_name_rel = new_name_rel.replace(ch, '')\n new_name_rel += '.' 
+ source.format.extension\n\n return new_name_rel\n\n\ndef norm_file_name_base(ebook):\n config = current_app.config\n if ebook.series and config.get('BOOKS_FILE_SCHEMA_SERIE'):\n new_name_rel = config.get('BOOKS_FILE_SCHEMA_SERIE') % {'author': ebook.authors_str,\n 'title': ebook.title,\n 'serie': ebook.series.title,\n 'serie_index': ebook.series_index or 0}\n # TODO: might need to spplit base part\n else:\n new_name_rel = config.get('BOOKS_FILE_SCHEMA') % {'author': ebook.authors_str,\n 'title': ebook.title}\n new_name_rel = remove_diacritics(new_name_rel)\n assert(len(new_name_rel) < 4096)\n return new_name_rel\n\n\ndef stream_response(fname, mimetype, headers={}):\n def stream_file(f):\n buf_size = 8192\n try:\n while True:\n data = f.read(buf_size)\n if not data:\n break\n yield data\n finally:\n f.close()\n\n try:\n outfile = open(fname, 'rb')\n except IOError as e:\n logger.exception('File %s error %s', fname, e)\n abort(404, 'File not found')\n\n headers['Content-Length'] = os.stat(fname).st_size\n response = Response(\n stream_file(outfile), mimetype=mimetype, headers=headers)\n return response\n\n\ndef download(id):\n try:\n source = model.Source.query.get(id)\n except NoResultFound:\n abort(404, 'Source not found')\n fname = os.path.join(current_app.config['BOOKS_BASE_DIR'], source.location)\n\n down_name = norm_file_name(source)\n down_name = os.path.split(fname)[-1]\n response = stream_response(fname, mimetype=source.format.mime_type,\n headers={'Content-Disposition': 'attachment; filename=\"%s\"' % down_name})\n\n return response\n\n\ndef check_file(mime_type, size, hash):\n if size > current_app.config['MAX_CONTENT_LENGTH']:\n logger.warn('File too big %d (limit is %d)', size,\n current_app.config['MAX_CONTENT_LENGTH'])\n return {'error': 'file too big'}\n\n t = model.Format.query.filter_by(mime_type=mime_type.lower()).all()\n if not t:\n logger.warn('Unsupported mime type %s', mime_type)\n return {'error': 'unsupported file type'}\n\n sources = model.Source.query.filter_by(size=size, hash=hash).all()\n if sources:\n logger.warn('File already exists - %s', sources[0])\n return {'error': 'file already exists'}\n\n\ndef check_uploaded_file(mime_type, fname):\n size = os.stat(fname).st_size\n hash = file_hash(fname)\n return check_file(mime_type, size, hash)\n\n\ndef _run_query(q):\n return q.count(), q.limit(current_app.config.get('MAX_INDEX_SIZE', 100)).all()\n\n\ndef series_index(start):\n q = model.Series.query\n q = q.filter(func.unaccent(model.Series.title).ilike(\n func.unaccent(start + '%'))).order_by(model.Series.title)\n return _run_query(q)\n\n\ndef ebooks_index(start):\n q = model.Ebook.query\n q = q.filter(func.unaccent(model.Ebook.title).ilike(\n func.unaccent(start + '%'))).order_by(model.Ebook.title)\n return _run_query(q)\n\n\ndef authors_index(start):\n q = model.Author.query\n q = q.filter(func.unaccent(model.Author.last_name + ', ' + model.Author.first_name)\n .ilike(func.unaccent(start + '%'))).order_by(model.Author.last_name, model.Author.first_name)\n return _run_query(q)\n"
},
{
"alpha_fraction": 0.6772486567497253,
"alphanum_fraction": 0.6772486567497253,
"avg_line_length": 16.18181800842285,
"blob_id": "5958abe07623aef8e2023882f29cbd8e9d6a98da",
"content_id": "d7d9f08e2bb44c4b77016a34da73543378c45b93",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 189,
"license_type": "no_license",
"max_line_length": 53,
"num_lines": 11,
"path": "/client/src/components/search.js",
"repo_name": "lioenel/mybookshelf2",
"src_encoding": "UTF-8",
"text": "import {bindable} from 'aurelia-framework';\n\nexport class Search {\n @bindable query;\n @bindable execute;\n\n executeSearch() {\n\n if (this.query) this.execute({query:this.query});\n }\n}\n"
},
{
"alpha_fraction": 0.6721991896629333,
"alphanum_fraction": 0.6753910183906555,
"avg_line_length": 23.66929054260254,
"blob_id": "d7cb02b8948f3971c4cca6f5075d3c59382badf9",
"content_id": "5289e2700f8b9109b4ec271f24ab705361815b0a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3133,
"license_type": "no_license",
"max_line_length": 79,
"num_lines": 127,
"path": "/app/schema.py",
"repo_name": "lioenel/mybookshelf2",
"src_encoding": "UTF-8",
"text": "import app\nfrom flask_marshmallow import Marshmallow\nfrom marshmallow import fields, post_dump, validate, Schema, post_load\nimport app.model as model\nfrom sqlalchemy import desc\nfrom collections import namedtuple\n\nschema = Marshmallow(app.app)\n\nBaseModelSchema = schema.ModelSchema\n\n\nclass ModelSchema(BaseModelSchema):\n\n @post_dump\n def remove_nones(self, data):\n return {\n key: value for key, value in data.items()\n if value is not None\n }\n\n class Meta:\n sqla_session = app.db.session\n\n\nclass AuthorSchema(ModelSchema):\n\n class Meta:\n model = model.Author\n\n\nclass SeriesSchema(ModelSchema):\n\n class Meta:\n model = model.Series\n\n\nclass LanguageSchema(ModelSchema):\n\n class Meta:\n model = model.Language\n exclude = ('version_id',)\n\n\nclass GenreSchema(ModelSchema):\n\n class Meta:\n model = model.Genre\n exclude = ('version_id',)\n\n\nclass FormatSchema(ModelSchema):\n\n class Meta:\n model = model.Format\n exclude = ('version_id',)\n\n\nclass UserSchema(ModelSchema):\n email = fields.Email(validate=validate.Length(max=256))\n\n class Meta:\n model = model.User\n\n\nclass RoleSchema(ModelSchema):\n\n class Meta:\n model = model.Role\n exclude = ('version_id',)\n\n\nclass UploadSchema(ModelSchema):\n\n class Meta:\n model = model.Upload\n exclude = ('version_id',)\n\n\nclass SourceSchema(ModelSchema):\n format = fields.Function(serialize=lambda o: o.format.extension)\n\n class Meta:\n model = model.Source\n\n\ndef lang_from_code(c):\n return model.Language.query.filter_by(code=c).one()\n\n\nclass EbookSchema(ModelSchema):\n authors = fields.Nested(\n AuthorSchema, many=True, only=('id', 'first_name', 'last_name'))\n series = fields.Nested(SeriesSchema, only=('id', 'title'))\n language = fields.Function(\n serialize=lambda o: o.language.name, deserialize=lang_from_code)\n genres = fields.Nested(GenreSchema, many=True)\n sources = fields.Nested(SourceSchema, many=True, only=(\n 'id', 'format', 'location', 'quality', 'modified', 'size'))\n full_text = None\n\n class Meta:\n model = model.Ebook\n exclude = ('full_text',)\n\n\nclass FileInfoSchema(Schema):\n mime_type = fields.String(required=True, validate=validate.Length(max=255))\n size = fields.Integer(required=True, validate=validate.Range(min=1))\n # hash = fields.String(validate=validate.Length(max=128))\n\n\n# schemas are probably not thread safe, better to have new instance per\n# each use\nebook_serializer = lambda: EbookSchema()\nebook_deserializer_update = lambda: EbookSchema()\nebook_deserializer_insert = lambda: EbookSchema(exclude=('version_id',))\nebooks_list_serializer = lambda: EbookSchema(many=True, only=(\n 'id', 'title', 'authors', 'series', 'series_index', 'language', 'genres'))\n\nauthors_list_serializer = lambda: AuthorSchema(\n many=True, only=('id', 'first_name', 'last_name'))\nauthor_serializer = lambda: AuthorSchema()\n\nseries_list_serializer = lambda: SeriesSchema(many=True, only=('id', 'title'))\n\nupload_serializer = lambda: UploadSchema()\n"
}
] | 33 |
Qzgfather/TensorFlow2_CNN_Models | https://github.com/Qzgfather/TensorFlow2_CNN_Models | 06b8a6bdee92f8661aed437a59e732b4ab300f0b | 50c92729c18bd43ae8eee336b43576bd130635f8 | 73fc9c37dca5d4b1ff1d3503320c16cae7209503 | refs/heads/master | 2021-06-28T21:53:26.211691 | 2021-02-09T04:45:52 | 2021-02-09T04:45:52 | 219,124,125 | 1 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.5498891472816467,
"alphanum_fraction": 0.5620842576026917,
"avg_line_length": 29.066667556762695,
"blob_id": "d6caaeefb46c2080d47510aa1ced1963455ed02a",
"content_id": "7f8ebd639d2e3f93776824add0006732e6b7f240",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1804,
"license_type": "no_license",
"max_line_length": 78,
"num_lines": 60,
"path": "/03-Image Segmentation/SegNet/generator.py",
"repo_name": "Qzgfather/TensorFlow2_CNN_Models",
"src_encoding": "UTF-8",
"text": "import cv2\nimport numpy as np\nfrom PIL import Image\n\n\ndef category_label(labels, dims, n_labels):\n x = np.zeros([dims[0], dims[1], n_labels])\n for i in range(dims[0]):\n for j in range(dims[1]):\n x[i, j, labels[i][j]] = 1\n x = x.reshape(dims[0] * dims[1], n_labels)\n return x\n\n\ndef data_gen_small(img_dir, mask_dir, lists, batch_size, dims, n_labels):\n while True:\n ix = np.random.choice(np.arange(len(lists)), batch_size)\n imgs = []\n labels = []\n for i in ix:\n # images\n img_path = img_dir + lists.iloc[i, 0] + \".jpg\"\n\n original_img = Image.open(img_path)\n original_img = original_img.resize((dims[0], dims[0]))\n array_img = np.array(original_img) / 255\n imgs.append(array_img)\n # masks\n original_mask = cv2.imread(mask_dir + lists.iloc[i, 0] + \".png\")\n resized_mask = cv2.resize(original_mask, (dims[0], dims[1]))\n array_mask = category_label(resized_mask[:, :, 0], dims, n_labels)\n labels.append(array_mask)\n imgs = np.array(imgs)\n labels = np.array(labels)\n yield imgs, labels\n\n\nif __name__ == '__main__':\n from train import argparser\n import pandas as pd\n\n args = argparser()\n train_list = pd.read_csv(args.train_list, header=None)\n val_list = pd.read_csv(args.val_list, header=None)\n trainimg_dir = args.trainimg_dir\n trainmsk_dir = args.trainmsk_dir\n valimg_dir = args.valimg_dir\n valmsk_dir = args.valmsk_dir\n train_gen = data_gen_small(\n trainimg_dir,\n trainmsk_dir,\n train_list,\n args.batch_size,\n [args.input_shape[0], args.input_shape[1]],\n args.n_labels,\n )\n\n for i in train_gen:\n print(i)\n break\n"
},
{
"alpha_fraction": 0.4052900969982147,
"alphanum_fraction": 0.44368600845336914,
"avg_line_length": 36.36065673828125,
"blob_id": "6dd43ca979075a2689e0d1496f05afbe757885f0",
"content_id": "fd8f40964748aa73facb2385caca057703155eda",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2414,
"license_type": "no_license",
"max_line_length": 99,
"num_lines": 61,
"path": "/01-Image Classification/resnet/resnet.py",
"repo_name": "Qzgfather/TensorFlow2_CNN_Models",
"src_encoding": "UTF-8",
"text": "# ================================================================\r\n# 日期 : 2019年11月2日12:34:43\r\n# 建模类型 : 自定义型\r\n# 改进人 : 亓志国\r\n# 模型 : ResNet\r\n# 文件数量 : 2\r\n# 依赖文件 : residual_block.py\r\n# ================================================================\r\n\r\nimport tensorflow as tf\r\nfrom residual_block import build_res_block_1\r\n\r\n\r\nclass ResNet34(tf.keras.Model):\r\n\r\n def __init__(self, num_classes=16):\r\n super(ResNet34, self).__init__()\r\n self.pre1 = tf.keras.layers.Conv2D(filters=64,\r\n kernel_size=(7, 7),\r\n strides=2,\r\n padding='same'\r\n )\r\n self.pre2 = tf.keras.layers.BatchNormalization()\r\n self.pre3 = tf.keras.layers.Activation(tf.keras.activations.relu)\r\n self.pre4 = tf.keras.layers.MaxPool2D(pool_size=(3, 3),\r\n strides=2)\r\n\r\n self.layer1 = build_res_block_1(filter_num=64,\r\n blocks=3)\r\n self.layer2 = build_res_block_1(filter_num=128,\r\n blocks=4,\r\n stride=2)\r\n self.layer3 = build_res_block_1(filter_num=256,\r\n blocks=6,\r\n stride=2)\r\n self.layer4 = build_res_block_1(filter_num=512,\r\n blocks=3,\r\n stride=2)\r\n\r\n self.avgpool = tf.keras.layers.GlobalAveragePooling2D()\r\n self.fc = tf.keras.layers.Dense(units=num_classes, activation=tf.keras.activations.softmax)\r\n\r\n def call(self, inputs, training=None, mask=None):\r\n pre0 = self.pre0\r\n pre1 = self.pre1(inputs)\r\n pre2 = self.pre2(pre1, training=training)\r\n pre3 = self.pre3(pre2)\r\n pre4 = self.pre4(pre3)\r\n l1 = self.layer1(pre4, training=training)\r\n l2 = self.layer2(l1, training=training)\r\n l3 = self.layer3(l2, training=training)\r\n l4 = self.layer4(l3, training=training)\r\n avgpool = self.avgpool(l4)\r\n out = self.fc(avgpool)\r\n\r\n return out\r\n\r\n\r\ndef resnet():\r\n model = ResNet34()\r\n return model\r\n\r\n\r\n"
},
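resnet.py imports `build_res_block_1` from residual_block.py, which is not included in this listing. A minimal sketch of what such a helper conventionally looks like for ResNet-34's basic blocks; the class name and internals here are assumptions, not the repository's actual file:

```python
import tensorflow as tf


class BasicBlock(tf.keras.layers.Layer):
    """Two 3x3 convs plus a shortcut; the standard ResNet basic block (assumed layout)."""

    def __init__(self, filter_num, stride=1):
        super(BasicBlock, self).__init__()
        self.conv1 = tf.keras.layers.Conv2D(filter_num, (3, 3), strides=stride, padding='same')
        self.bn1 = tf.keras.layers.BatchNormalization()
        self.conv2 = tf.keras.layers.Conv2D(filter_num, (3, 3), strides=1, padding='same')
        self.bn2 = tf.keras.layers.BatchNormalization()
        if stride != 1:
            # 1x1 projection so the shortcut matches the downsampled shape.
            self.shortcut = tf.keras.Sequential([
                tf.keras.layers.Conv2D(filter_num, (1, 1), strides=stride),
                tf.keras.layers.BatchNormalization()])
        else:
            # Identity shortcut; assumes input channels already equal filter_num.
            self.shortcut = lambda x, training=None: x

    def call(self, inputs, training=None):
        x = tf.nn.relu(self.bn1(self.conv1(inputs), training=training))
        x = self.bn2(self.conv2(x), training=training)
        return tf.nn.relu(x + self.shortcut(inputs, training=training))


def build_res_block_1(filter_num, blocks, stride=1):
    # Only the first block may downsample; the rest keep stride 1.
    block = tf.keras.Sequential()
    block.add(BasicBlock(filter_num, stride=stride))
    for _ in range(1, blocks):
        block.add(BasicBlock(filter_num, stride=1))
    return block
```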
{
"alpha_fraction": 0.47145670652389526,
"alphanum_fraction": 0.5457677245140076,
"avg_line_length": 57.02857208251953,
"blob_id": "7dbb5f7da9c2dd63837a14322f3e95df529ee54b",
"content_id": "6658aa65e9542b7db57189b89cb5e884f9dda2a3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4138,
"license_type": "no_license",
"max_line_length": 111,
"num_lines": 70,
"path": "/01-Image Classification/VGG/vgg16.py",
"repo_name": "Qzgfather/TensorFlow2_CNN_Models",
"src_encoding": "UTF-8",
"text": "#================================================================\n# 日期 : 2019年11月2日12:17:53\n# 建模类型 : 堆叠型\n# 改进人 : 亓志国\n# 模型 : VGG16\n# 文件数量 : 1\n#================================================================\n\n\nimport numpy as np\nimport tensorflow as tf\nimport random, cv2\n\n\ndef VGG16():\n # define input layer 可以整个模型保存\n input_layer = tf.keras.layers.Input([224, 224, 3])\n\n # Block 1\n conv1_1 = tf.keras.layers.Conv2D(filters=64, kernel_size=[3, 3], strides=[1, 1], padding='same',\n use_bias=True, activation='relu', name='conv1_1')(input_layer)\n conv1_2 = tf.keras.layers.Conv2D(filters=64, kernel_size=[3, 3], strides=[1, 1], padding='same',\n use_bias=True, activation='relu', name='conv1_2')(conv1_1)\n pool1_1 = tf.nn.max_pool(conv1_2, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME', name='pool1_1')\n\n # Block 2\n conv2_1 = tf.keras.layers.Conv2D(filters=128, kernel_size=[3, 3], strides=[1, 1], padding='same',\n use_bias=True, activation='relu', name='conv2_1')(pool1_1)\n conv2_2 = tf.keras.layers.Conv2D(filters=128, kernel_size=[3, 3], strides=[1, 1], padding='same',\n use_bias=True, activation='relu', name='conv2_2')(conv2_1)\n pool2_1 = tf.nn.max_pool(conv2_2, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME', name='pool2_1')\n\n # Block 3\n conv3_1 = tf.keras.layers.Conv2D(filters=256, kernel_size=[3, 3], strides=[1, 1], padding='same',\n use_bias=True, activation='relu', name='conv3_1')(pool2_1)\n conv3_2 = tf.keras.layers.Conv2D(filters=256, kernel_size=[3, 3], strides=[1, 1], padding='same',\n use_bias=True, activation='relu', name='conv3_2')(conv3_1)\n conv3_3 = tf.keras.layers.Conv2D(filters=256, kernel_size=[3, 3], strides=[1, 1], padding='same',\n use_bias=True, activation='relu', name='conv3_3')(conv3_2)\n pool3_1 = tf.nn.max_pool(conv3_3, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME', name='pool3_1')\n\n # Block 4\n conv4_1 = tf.keras.layers.Conv2D(filters=512, kernel_size=[3, 3], strides=[1, 1], padding='same',\n use_bias=True, activation='relu', name='conv4_1')(pool3_1)\n conv4_2 = tf.keras.layers.Conv2D(filters=512, kernel_size=[3, 3], strides=[1, 1], padding='same',\n use_bias=True, activation='relu', name='conv4_2')(conv4_1)\n conv4_3 = tf.keras.layers.Conv2D(filters=512, kernel_size=[3, 3], strides=[1, 1], padding='same',\n use_bias=True, activation='relu', name='conv4_3')(conv4_2)\n pool4_1 = tf.nn.max_pool(conv4_3, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME', name='pool4_1')\n\n # Block 4\n conv5_1 = tf.keras.layers.Conv2D(filters=512, kernel_size=[3, 3], strides=[1, 1], padding='same',\n use_bias=True, activation='relu', name='conv5_1')(pool4_1)\n conv5_2 = tf.keras.layers.Conv2D(filters=512, kernel_size=[3, 3], strides=[1, 1], padding='same',\n use_bias=True, activation='relu', name='conv5_2')(conv5_1)\n conv5_3 = tf.keras.layers.Conv2D(filters=512, kernel_size=[3, 3], strides=[1, 1], padding='same',\n use_bias=True, activation='relu', name='conv5_3')(conv5_2)\n pool5_1 = tf.nn.max_pool(conv5_3, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME', name='pool5_1')\n\n\n flatten = tf.keras.layers.Flatten()(pool5_1)\n fc6 = tf.keras.layers.Dense(units=4096, use_bias=True, name='fc6', activation='relu')(flatten)\n fc7 = tf.keras.layers.Dense(units=4096, use_bias=True, name='fc7', activation='relu')(fc6)\n fc8 = tf.keras.layers.Dense(units=1000, use_bias=True, name='fc8', activation=None)(fc7)\n\n prob = tf.nn.softmax(fc8)\n\n # Build model\n model = tf.keras.Model(input_layer, prob)\n return model\n\n\n"
},
{
"alpha_fraction": 0.5881374478340149,
"alphanum_fraction": 0.635254979133606,
"avg_line_length": 48,
"blob_id": "fd95f1e4423aa541b5e80dee36960e1d867d30da",
"content_id": "8b3d64b23a78eb9142db631f30c5674b042c7168",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1848,
"license_type": "no_license",
"max_line_length": 116,
"num_lines": 36,
"path": "/01-Image Classification/Alexnet/AlexNet.py",
"repo_name": "Qzgfather/TensorFlow2_CNN_Models",
"src_encoding": "UTF-8",
"text": "import tensorflow as tf\r\nfrom tensorflow import keras\r\nfrom tensorflow.keras import layers\r\n\r\n\r\ndef AlexNet_inference(in_shape):\r\n model = keras.Sequential(name='AlexNet')\r\n\r\n # model.add(layers.Conv2D(96,(11,11),strides=(4,4),input_shape=(in_shape[1],in_shape[2],in_shape[3]),\r\n # padding='same',activation='relu',kernel_initializer='uniform'))\r\n\r\n model.add(layers.Conv2D(96, (11, 11), strides=(2, 2), input_shape=(in_shape[1], in_shape[2], in_shape[3]),\r\n padding='same', activation='relu', kernel_initializer='uniform'))\r\n model.add(layers.MaxPooling2D(pool_size=(3, 3), strides=(2, 2)))\r\n model.add(\r\n layers.Conv2D(256, (5, 5), strides=(1, 1), padding='same', activation='relu', kernel_initializer='uniform'))\r\n model.add(layers.MaxPooling2D(pool_size=(3, 3), strides=(2, 2)))\r\n model.add(\r\n layers.Conv2D(384, (3, 3), strides=(1, 1), padding='same', activation='relu', kernel_initializer='uniform'))\r\n model.add(\r\n layers.Conv2D(384, (3, 3), strides=(1, 1), padding='same', activation='relu', kernel_initializer='uniform'))\r\n model.add(\r\n layers.Conv2D(256, (3, 3), strides=(1, 1), padding='same', activation='relu', kernel_initializer='uniform'))\r\n model.add(layers.MaxPooling2D(pool_size=(2, 2), strides=(2, 2)))\r\n model.add(layers.Flatten())\r\n model.add(layers.Dense(2048, activation='relu'))\r\n model.add(layers.Dropout(0.5))\r\n model.add(layers.Dense(2048, activation='relu'))\r\n model.add(layers.Dropout(0.5))\r\n model.add(layers.Dense(10, activation='softmax'))\r\n model.compile(optimizer=keras.optimizers.Adam(),\r\n loss='sparse_categorical_crossentropy', # 不能直接用函数,否则在与测试加载模型不成功!\r\n metrics=['accuracy'])\r\n model.summary()\r\n\r\n return model\r\n\r\n\r\n"
},
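`AlexNet_inference` indexes `in_shape` starting at position 1, so it expects the full 4-D batch shape (batch, height, width, channels) rather than a single-image shape. A minimal usage sketch, assuming the function from AlexNet.py above is importable and using random 227x227 RGB data:

```python
import numpy as np
# from AlexNet import AlexNet_inference  # assumed module layout

x_train = np.random.rand(8, 227, 227, 3).astype("float32")
y_train = np.random.randint(0, 10, size=(8,))

model = AlexNet_inference(x_train.shape)  # pass the 4-D batch shape
model.fit(x_train, y_train, epochs=1, batch_size=4)
```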
{
"alpha_fraction": 0.5352112650871277,
"alphanum_fraction": 0.7112675905227661,
"avg_line_length": 10.833333015441895,
"blob_id": "eaafbe90eef6a0cc526f7b31e5def193e37ab330",
"content_id": "000555b0785761551fd795832ab243b50f4a955b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 208,
"license_type": "no_license",
"max_line_length": 40,
"num_lines": 12,
"path": "/readme.md",
"repo_name": "Qzgfather/TensorFlow2_CNN_Models",
"src_encoding": "UTF-8",
"text": "版本 V1.0\n\n时间 2019年11月2日16:19:59\n\n上传内容:使用TensorFlow2.0 对常用模型进行了复现,实现的模型包括:\n\n- 1 Lenet \n- 2 AlexNet\n- 3 VGG16\n- 4 ResNet\n- 5 SeNet\n- 6 MobileNet\n"
},
{
"alpha_fraction": 0.7662337422370911,
"alphanum_fraction": 0.7662337422370911,
"avg_line_length": 24.66666603088379,
"blob_id": "40c0b43f983f416a5af20a19056fd9f4284ec6e7",
"content_id": "6ef15be4748bc0dcf0a3f6fcbc1ab1cfcdd6fad4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 77,
"license_type": "no_license",
"max_line_length": 26,
"num_lines": 3,
"path": "/03-Image Segmentation/SegNet/mian.py",
"repo_name": "Qzgfather/TensorFlow2_CNN_Models",
"src_encoding": "UTF-8",
"text": "import tensorflow as tf\ntf.test.is_gpu_available()\ntf.test.gpu_device_name()\n"
},
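`tf.test.is_gpu_available()` in mian.py is deprecated in TensorFlow 2.x in favor of `tf.config.list_physical_devices`; the same check with the supported API:

```python
import tensorflow as tf

gpus = tf.config.list_physical_devices("GPU")
print("GPU available:", len(gpus) > 0)
for gpu in gpus:
    print(gpu.name)  # e.g. /physical_device:GPU:0
```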
{
"alpha_fraction": 0.7628458738327026,
"alphanum_fraction": 0.8023715615272522,
"avg_line_length": 35.10714340209961,
"blob_id": "2b19e9f8f481e5fb9b44cb396d9018bba64c32d9",
"content_id": "ac03b14d4faadc29111f41adf213cbf14f95449c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1012,
"license_type": "no_license",
"max_line_length": 185,
"num_lines": 28,
"path": "/03-Image Segmentation/SegNet/README.md",
"repo_name": "Qzgfather/TensorFlow2_CNN_Models",
"src_encoding": "UTF-8",
"text": "# SegNet\n\nSegNet is a model of semantic segmentation based on Fully Comvolutional Network.\n\nThis repository contains the implementation of learning and testing in keras and tensorflow.\nAlso included is a custom layer implementation of index pooling, a new property of segnet.\n\n## architecture\n- encoder decoder architecture\n- fully convolutional network\n- indices pooling\n\n \n\n## description\nThis repository is SegNet architecture for Semantic Segmentation.\nThe repository of other people's segmentation, pooling with indices not implemented.But In this repository we implemented pooling layer and unpooling layer with indices at MyLayers.py.\n\nSegnet architecture is early Semantic Segmentation model,so acccuracy is low but fast.\nIn the future, we plan to implement models with high accuracy.(UNet,PSPNet,Pix2Pix ect..)\n\n\n\n## Usage\n\n### train\n\n`python SegNet.py [--options your dataset]`\n\n"
},
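The "indices pooling" bullet above is the part that distinguishes SegNet: the encoder's max-pooling layers record the argmax position of every pooling window, and the decoder unpools each value back into exactly that position instead of learning an upsampling. A minimal sketch of the mechanism using `tf.nn.max_pool_with_argmax` (the repository has its own layer classes for this; batch size 1 keeps the flat indices valid under the default `include_batch_in_index=False`):

```python
import tensorflow as tf

x = tf.random.normal((1, 4, 4, 1))

# Pool, remembering which flat position in x held each 2x2 window's max.
pooled, argmax = tf.nn.max_pool_with_argmax(
    x, ksize=(1, 2, 2, 1), strides=(1, 2, 2, 1), padding="SAME")

# Unpool: scatter the pooled values back into their recorded positions.
flat_size = tf.cast(tf.reshape(tf.size(x), [1]), tf.int64)
unpooled = tf.scatter_nd(tf.reshape(argmax, (-1, 1)),
                         tf.reshape(pooled, (-1,)),
                         flat_size)
unpooled = tf.reshape(unpooled, tf.shape(x))
print(pooled.shape, unpooled.shape)  # (1, 2, 2, 1) (1, 4, 4, 1)
```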
{
"alpha_fraction": 0.6307692527770996,
"alphanum_fraction": 0.6402159333229065,
"avg_line_length": 34.97087478637695,
"blob_id": "b12da4f13447ac5fcc5e8754f5f5af34a9c6441d",
"content_id": "aa60db10fcd0e2215add6c55f4fef0d147e394a5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3705,
"license_type": "no_license",
"max_line_length": 117,
"num_lines": 103,
"path": "/03-Image Segmentation/SegNet/train.py",
"repo_name": "Qzgfather/TensorFlow2_CNN_Models",
"src_encoding": "UTF-8",
"text": "import argparse\n\nimport pandas as pd\nfrom tensorflow.python.keras.callbacks import TensorBoard\n\nfrom generator import data_gen_small\nfrom model import SegNet\nimport os\nimport tensorflow as tf\n\n\ndef argparser():\n # command line argments\n parser = argparse.ArgumentParser(description=\"SegNet LIP dataset\")\n parser.add_argument(\"--save_dir\", help=\"output directory\")\n parser.add_argument(\"--train_list\", default=\"dataset/coins_seg/ImageSets/Segmentation/train.txt\",\n help=\"train list path\")\n parser.add_argument(\"--trainimg_dir\", default=\"dataset/coins_seg/JPEGImages/\",\n help=\"D:/dataset/coins_seg/JPEGImages/\")\n parser.add_argument(\"--trainmsk_dir\", default=\"dataset/coins_seg/SegmentationClass/\", help=\"train mask dir path\")\n parser.add_argument(\"--val_list\", default=\"dataset/coins_seg/ImageSets/Segmentation/trainval.txt\",\n help=\"val list path\")\n parser.add_argument(\"--valimg_dir\", default=\"dataset/coins_seg/JPEGImages/\", help=\"val image dir path\")\n parser.add_argument(\"--valmsk_dir\", default=\"dataset/coins_seg/SegmentationClass/\", help=\"val mask dir path\")\n parser.add_argument(\"--batch_size\", default=10, type=int, help=\"batch size\")\n parser.add_argument(\"--n_epochs\", default=10, type=int, help=\"number of epoch\")\n parser.add_argument(\n \"--epoch_steps\", default=100, type=int, help=\"number of epoch step\"\n )\n parser.add_argument(\n \"--val_steps\", default=10, type=int, help=\"number of valdation step\"\n )\n parser.add_argument(\"--n_labels\", default=21, type=int, help=\"Number of label\")\n parser.add_argument(\n \"--input_shape\", default=(256, 256, 3), help=\"Input images shape\"\n )\n parser.add_argument(\"--kernel\", default=3, type=int, help=\"Kernel size\")\n parser.add_argument(\n \"--pool_size\", default=(2, 2), help=\"pooling and unpooling size\"\n )\n parser.add_argument(\n \"--output_mode\", default=\"softmax\", type=str, help=\"output activation\"\n )\n parser.add_argument(\n \"--loss\", default=\"categorical_crossentropy\", type=str, help=\"loss function\"\n )\n parser.add_argument(\"--optimizer\", default=\"adadelta\", type=str, help=\"oprimizer\")\n args = parser.parse_args()\n\n return args\n\n\ndef main(args):\n # set the necessary list\n train_list = pd.read_csv(args.train_list, header=None)\n val_list = pd.read_csv(args.val_list, header=None)\n\n # set the necessary directories\n trainimg_dir = args.trainimg_dir\n trainmsk_dir = args.trainmsk_dir\n valimg_dir = args.valimg_dir\n valmsk_dir = args.valmsk_dir\n\n train_gen = data_gen_small(\n trainimg_dir,\n trainmsk_dir,\n train_list,\n args.batch_size,\n [args.input_shape[0], args.input_shape[1]],\n args.n_labels,\n )\n val_gen = data_gen_small(\n valimg_dir,\n valmsk_dir,\n val_list,\n args.batch_size,\n [args.input_shape[0], args.input_shape[1]],\n args.n_labels,\n )\n\n model = SegNet(\n args.n_labels, args.kernel, args.pool_size, args.output_mode\n )\n model.build(input_shape=(1, 256, 256, 3))\n print(model.summary())\n\n model.compile(loss=args.loss, optimizer=args.optimizer, metrics=[\"accuracy\"])\n model.fit_generator(\n train_gen,\n steps_per_epoch=args.epoch_steps,\n epochs=args.n_epochs,\n validation_data=val_gen,\n validation_steps=args.val_steps,\n callbacks=[TensorBoard(log_dir=\"./run\")]\n )\n model.save_weights(args.save_dir + str(args.n_epochs) + \".hdf5\")\n print(\"sava weight done..\")\n\n\nif __name__ == \"__main__\":\n args = argparser()\n with tf.device(\"/gpu:0\"):\n main(args)\n"
},
{
"alpha_fraction": 0.5322203636169434,
"alphanum_fraction": 0.5945742726325989,
"avg_line_length": 47.91666793823242,
"blob_id": "fc2889306f0cc73a6db7a2f261b0e28cb7bbba2d",
"content_id": "c3266902790d186cc4f5977bb7386705a2fa1c9e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 11980,
"license_type": "no_license",
"max_line_length": 179,
"num_lines": 240,
"path": "/01-Image Classification/googlenet/inceptionv3.py",
"repo_name": "Qzgfather/TensorFlow2_CNN_Models",
"src_encoding": "UTF-8",
"text": "from tensorflow import keras\r\nfrom tensorflow.keras.layers import Conv2D, MaxPooling2D, AveragePooling2D\r\nfrom tensorflow.keras.layers import Dense, BatchNormalization, Activation\r\nfrom tensorflow.keras.layers import Flatten, Input, concatenate, Dropout\r\nfrom tensorflow.keras.regularizers import l2\r\nfrom tensorflow.keras import backend as K\r\nfrom tensorflow.keras.models import Model\r\nclass Inceptionv3_builder():\r\n\r\n def __init__(self, input_shape = (244,244,3), output_units = 10, init_strides = (2,2),\r\n regularizer = l2(1e-4), initializer = \"he_normal\", init_maxpooling = True):\r\n\r\n '''\r\n :param input_shape: input shape of dataset\r\n :param output_units: output result dimension\r\n :param init_strides: The strides for first convolution layer\r\n :param regularizer: regularizer for all the convolution layers in whole NN\r\n :param initializer: weight/parameters initializer for all convolution & fc layers in whole NN\r\n :param init_maxpooling: Do the maxpooling after first two convolution layers or not\r\n '''\r\n self.input_shape = input_shape\r\n self.output_units = output_units\r\n self.init_strides = init_strides\r\n self.regularizer = regularizer\r\n self.initializer = initializer\r\n self.init_maxpooling = init_maxpooling\r\n self.row_axis = 1\r\n self.col_axis = 2\r\n self.channel_axis = 3\r\n def _cn_bn_relu(self, filters = 32, kernel_size = (3,3), strides = (1,1), padding = \"same\"):\r\n '''\r\n convenient function to build convolution -> batch_nromalization -> relu activation layers\r\n '''\r\n def f(input_x):\r\n\r\n x = Conv2D(filters=filters, kernel_size=kernel_size, strides=strides, padding=padding,kernel_regularizer=self.regularizer,kernel_initializer=self.initializer)(input_x)\r\n #x = BatchNormalization(axis=self.channel_axis)(x)\r\n #x = Activation(\"relu\")(x)\r\n\r\n return x\r\n\r\n return f\r\n\r\n def _auxiliary(self, name = \"auxiliary_1\"):\r\n '''\r\n In author's explanation:\r\n \" The auxiliary classifier will encourage discrimination in lower stages in the classifier,\r\n increase the gradient signal that gets propagated back, and provide additional regularization\"\r\n :return: An output layer of auxiliary classifier\r\n '''\r\n def f(input_x):\r\n\r\n x = AveragePooling2D(pool_size=(5,5), strides = (3,3), padding = \"same\")(input_x)\r\n x = self._cn_bn_relu(filters = 128, kernel_size = (5,5), strides = (1,1), padding = \"same\")(x)\r\n x = Flatten()(x)\r\n x = Dense(units = 1024, kernel_initializer = self.initializer)(x)\r\n x = BatchNormalization(axis = 1)(x)\r\n x = Activation(\"relu\")(x)\r\n x = Dropout(0.7)(x)\r\n\r\n return Dense(units = self.output_units , activation = \"softmax\", kernel_initializer=self.initializer, name = name)(x)\r\n\r\n return f\r\n\r\n\r\n def _inception_block35x35(self,_1x1 = 64, _3x3r = 48, _3x3 = 64, _d3x3r = 64, _d3x3 = 96, _pool = 64, name = \"inception_fig5_1\"):\r\n '''\r\n A function for building inception block of figure5 in original article,\r\n '''\r\n def f(input_x):\r\n\r\n branch1x1 = self._cn_bn_relu(filters = _1x1, kernel_size = (1,1))(input_x)\r\n\r\n branchpooling = AveragePooling2D(pool_size=(3,3), strides = (1,1), padding = \"same\")(input_x)\r\n branchpooling = self._cn_bn_relu(filters = _pool, kernel_size = (1,1))(branchpooling)\r\n\r\n branch3x3 = self._cn_bn_relu(filters = _3x3r, kernel_size = (1,1))(input_x)\r\n branch3x3 = self._cn_bn_relu(filters = _3x3, kernel_size = (3,3))(branch3x3)\r\n\r\n dbranch3x3 = self._cn_bn_relu(filters = _d3x3r, kernel_size = 
(1,1))(input_x)\r\n dbranch3x3 = self._cn_bn_relu(filters = _d3x3, kernel_size = (3,3))(dbranch3x3)\r\n dbranch3x3 = self._cn_bn_relu(filters = _d3x3, kernel_size = (3,3))(dbranch3x3)\r\n\r\n return concatenate([branch1x1, branchpooling, branch3x3, dbranch3x3], axis = self.channel_axis, name = name)\r\n\r\n return f\r\n\r\n def _GridSizeReduction35x35(self, _3x3r = 288, _3x3 = 384, _d3x3r = 64, _d3x3 = 96):\r\n '''\r\n A function for dimension reducing from 35x35 -> 17x17\r\n '''\r\n def f(input_x):\r\n\r\n branchpool = AveragePooling2D(pool_size=(3,3), strides = (2,2), padding = \"valid\")(input_x)\r\n\r\n branch3x3 = self._cn_bn_relu(filters = _3x3r, kernel_size = (1,1))(input_x)\r\n branch3x3 = self._cn_bn_relu(filters = _3x3, kernel_size = (3,3), strides = (2,2), padding = \"valid\")(branch3x3)\r\n\r\n dbranch3x3 = self._cn_bn_relu(filters = _d3x3r, kernel_size = (1,1))(input_x)\r\n dbranch3x3 = self._cn_bn_relu(filters = _d3x3, kernel_size = (3,3))(dbranch3x3)\r\n dbranch3x3 = self._cn_bn_relu(filters = _d3x3, kernel_size = (3,3), strides = (2,2), padding = \"valid\")(dbranch3x3)\r\n\r\n return concatenate([branchpool, branch3x3, dbranch3x3], axis = self.channel_axis)\r\n\r\n return f\r\n\r\n def _inception_block17x17(self, _1x1 = 192, _7x7r = 128, _7x7 = 192, _d7x7r = 128, _d7x7 = 192, _pool = 192, name = \"inception_fig6_1\"):\r\n '''\r\n A function for building inception block of figure6 in original article,\r\n '''\r\n def f(input_x):\r\n\r\n branch1x1 = self._cn_bn_relu(filters=_1x1, kernel_size=(1, 1))(input_x)\r\n\r\n branchpooling = AveragePooling2D(pool_size = (3,3), strides = (1,1), padding = \"same\")(input_x)\r\n branchpooling = self._cn_bn_relu(filters = _pool, kernel_size = (1,1))(branchpooling)\r\n\r\n branch7x7 = self._cn_bn_relu(filters = _7x7r, kernel_size = (1,1))(input_x)\r\n branch7x7 = self._cn_bn_relu(filters = _7x7r, kernel_size = (7,1))(branch7x7)\r\n branch7x7 = self._cn_bn_relu(filters = _7x7, kernel_size=(1, 7))(branch7x7)\r\n\r\n dbranch7x7 = self._cn_bn_relu(filters = _d7x7r, kernel_size = (1,1))(input_x)\r\n for i in range(2):\r\n dbranch7x7 = self._cn_bn_relu(filters = _d7x7r, kernel_size=(7, 1))(branch7x7)\r\n if i == 0:\r\n dbranch7x7 = self._cn_bn_relu(filters=_d7x7r, kernel_size=(1, 7))(branch7x7)\r\n else :\r\n dbranch7x7 = self._cn_bn_relu(filters=_d7x7, kernel_size=(1, 7))(branch7x7)\r\n\r\n\r\n return concatenate([branch1x1, branchpooling, branch7x7, dbranch7x7], axis = self.channel_axis, name = name)\r\n\r\n return f\r\n\r\n\r\n def _GridSizeReduction17x17(self, _3x3r = 192, _3x3 =320, _d7x7x3r = 192, _d7x7x3 = 192):\r\n '''\r\n A function for dimension reducing from 17x17 -> 8x8\r\n '''\r\n def f(input_x):\r\n\r\n branchpool = AveragePooling2D(pool_size = (3,3), strides = (2,2), padding = \"valid\")(input_x)\r\n\r\n branch7x7 = self._cn_bn_relu(filters = _3x3r, kernel_size = (1,1))(input_x)\r\n branch7x7 = self._cn_bn_relu(filters = _3x3 , kernel_size = (3,3), strides = (2,2), padding = \"valid\")(branch7x7)\r\n\r\n dbranch7x7 = self._cn_bn_relu(filters = _d7x7x3r, kernel_size = (1, 1))(input_x)\r\n dbranch7x7 = self._cn_bn_relu(filters = _d7x7x3, kernel_size = (7, 1))(dbranch7x7)\r\n dbranch7x7 = self._cn_bn_relu(filters = _d7x7x3, kernel_size = (1, 7))(dbranch7x7)\r\n dbranch7x7 = self._cn_bn_relu(filters = _d7x7x3, kernel_size = (3, 3), strides = (2, 2), padding = \"valid\")(dbranch7x7)\r\n\r\n return concatenate([branchpool, branch7x7, dbranch7x7], axis = self.channel_axis)\r\n\r\n return f\r\n\r\n def _inception_block8x8(self, _1x1 = 320, 
_pool = 192, _3x3r = 384, _3x3 = 384, _d3x3r = 448, _d3x3 = 384, name = \"inception_fig7_1\"):\r\n '''\r\n A function for building inception block of figure7 in original article,\r\n '''\r\n\r\n def f(input_x):\r\n\r\n branch1x1 = self._cn_bn_relu(filters = _1x1, kernel_size = (1,1))(input_x)\r\n\r\n branchpool = AveragePooling2D(pool_size = (3,3), strides = (1,1), padding = \"same\")(input_x)\r\n branchpool = self._cn_bn_relu(filters = _pool, kernel_size = (1,1))(branchpool)\r\n\r\n branch3x3 = self._cn_bn_relu(filters = _3x3r, kernel_size = (1,1))(input_x)\r\n branch3x3_1 = self._cn_bn_relu(filters = _3x3, kernel_size = (3,1))(branch3x3)\r\n branch3x3_2 = self._cn_bn_relu(filters = _3x3, kernel_size = (1,3))(branch3x3)\r\n\r\n dbranch3x3 = self._cn_bn_relu(filters = _d3x3r, kernel_size = (1,1))(input_x)\r\n dbranch3x3 = self._cn_bn_relu(filters = _d3x3, kernel_size = (3,3))(dbranch3x3)\r\n dbranch3x3_1 = self._cn_bn_relu(filters = _d3x3, kernel_size = (3,1))(dbranch3x3)\r\n dbranch3x3_2 = self._cn_bn_relu(filters = _d3x3, kernel_size = (1,3))(dbranch3x3)\r\n\r\n return concatenate([branch1x1, branchpool, branch3x3_1, branch3x3_2, dbranch3x3_1, dbranch3x3_2], axis = self.channel_axis, name = name)\r\n\r\n return f\r\n\r\n\r\n def build_inception(self):\r\n\r\n '''\r\n Main function for building inceptionV3 nn\r\n :return: An inceptionV3 nn\r\n '''\r\n\r\n input_x = Input(self.input_shape)\r\n\r\n\r\n #Few traditional convolutional layers at lower layers\r\n #Which are factorized by original 7x7 convolution layer\r\n x = self._cn_bn_relu(filters = 32, kernel_size = (3,3), strides = self.init_strides, padding = \"valid\")(input_x)\r\n x = self._cn_bn_relu(filters = 32, kernel_size = (3,3), strides = (1,1), padding = \"valid\")(x)\r\n x = self._cn_bn_relu(filters = 64, kernel_size = (3,3), strides=(1,1), padding=\"same\")(x)\r\n\r\n if self.init_maxpooling:\r\n x = MaxPooling2D(pool_size = (3,3), strides = (2,2), padding = \"valid\")(x)\r\n\r\n x = self._cn_bn_relu(filters = 80, kernel_size = (3,3), strides=(1,1), padding = \"valid\")(x)\r\n x = self._cn_bn_relu(filters = 192, kernel_size = (3,3), strides = self.init_strides, padding = \"valid\")(x)\r\n x = self._cn_bn_relu(filters = 288, kernel_size = (3,3), strides = (1,1), padding = \"same\")(x)\r\n\r\n #First 3 inception block, which are using architecture of figure5 in original article\r\n for i in range(3):\r\n x = self._inception_block35x35(_1x1=64,_3x3r=48,_3x3=64,_d3x3r=64,_d3x3=96, name = \"inception_fig5_\"+str(i+1))(x)\r\n\r\n #Dimension reducing #1 (from 35x35 -> 17x17 in original article)\r\n x = self._GridSizeReduction35x35( _3x3r = 288, _3x3 = 384, _d3x3r = 64, _d3x3 = 96)(x)\r\n\r\n # 5 inception block, which are using architecture of figure6 in original article\r\n for i in range(5):\r\n x = self._inception_block17x17(_1x1=192,_7x7r=128,_7x7=192,_d7x7r=128,_d7x7=192,_pool=192, name = \"inception_fig6_\"+str(i+1))(x)\r\n\r\n\r\n\r\n #Dimension reducing #2 (from 17x17 -> 8x8 in original article)\r\n x = self._GridSizeReduction17x17(_3x3r=192,_3x3=320,_d7x7x3r=192,_d7x7x3=192)(x)\r\n\r\n for i in range(2):\r\n x = self._inception_block8x8(_1x1 = 320, _pool = 192, _3x3r = 384, _3x3 = 384, _d3x3r = 448, _d3x3 = 384, name = \"inception_fig7_\"+str(i+1))(x)\r\n # auxiliary classifier\r\n auxiliary = self._auxiliary(name=\"auxiliary_1\")(x)\r\n x_shape = K.int_shape(x)\r\n\r\n x = AveragePooling2D(pool_size = (x_shape[self.row_axis], x_shape[self.col_axis]), strides = (1,1))(x)\r\n x = Flatten()(x)\r\n x = Dense(units = 2048, 
kernel_initializer=self.initializer)(x)\r\n x = BatchNormalization(axis = 1)(x)\r\n x = Activation(\"relu\")(x)\r\n\r\n output_x = Dense(units = self.output_units, activation = \"softmax\", kernel_initializer=self.initializer,name = \"main_output\")(x)\r\n\r\n inceptionv3_model = Model(inputs = [input_x], outputs = [output_x,auxiliary])\r\n return inceptionv3_model\r\n\r\ninception_builder = Inceptionv3_builder()\r\nmodel = inception_builder.build_inception()\r\nmodel.summary()\r\n"
},
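The builder above returns a two-output Model (`main_output` plus the `auxiliary_1` classifier), so compiling it needs a loss for each head. A minimal sketch, assuming the module-level `model` from inceptionv3.py and the 0.3 auxiliary-loss weight used in the original GoogLeNet paper:

```python
# Both heads are softmax classifiers over the same label set, so they can
# share the loss; the auxiliary head is down-weighted (0.3, as in GoogLeNet).
model.compile(
    optimizer="adam",
    loss={"main_output": "categorical_crossentropy",
          "auxiliary_1": "categorical_crossentropy"},
    loss_weights={"main_output": 1.0, "auxiliary_1": 0.3},
    metrics=["accuracy"],
)
# model.fit(x, {"main_output": y, "auxiliary_1": y}, ...)  # same targets for both heads
```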
{
"alpha_fraction": 0.5882093906402588,
"alphanum_fraction": 0.6062472462654114,
"avg_line_length": 32.43939208984375,
"blob_id": "244f162b7474154b713a0e61b5449dbe354f6d8c",
"content_id": "1fec2575dcda318768891861ecff6ec0fb946b5f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2273,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 66,
"path": "/01-Image Classification/googlenet/inceptionv4.py",
"repo_name": "Qzgfather/TensorFlow2_CNN_Models",
"src_encoding": "UTF-8",
"text": "import tensorflow as tf\r\nfrom inception_modules import Stem, InceptionBlockA, InceptionBlockB, \\\r\n InceptionBlockC, ReductionA, ReductionB\r\n\r\nNUM_CLASSES = 10\r\n\r\n\r\ndef build_inception_block_a(n):\r\n block = tf.keras.Sequential()\r\n for _ in range(n):\r\n block.add(InceptionBlockA())\r\n return block\r\n\r\n\r\ndef build_inception_block_b(n):\r\n block = tf.keras.Sequential()\r\n for _ in range(n):\r\n block.add(InceptionBlockB())\r\n return block\r\n\r\n\r\ndef build_inception_block_c(n):\r\n block = tf.keras.Sequential()\r\n for _ in range(n):\r\n block.add(InceptionBlockC())\r\n return block\r\n\r\n\r\nclass InceptionV4(tf.keras.Model):\r\n def __init__(self):\r\n super(InceptionV4, self).__init__()\r\n self.stem = Stem()\r\n self.inception_a = build_inception_block_a(4)\r\n self.reduction_a = ReductionA(k=192, l=224, m=256, n=384)\r\n self.inception_b = build_inception_block_b(7)\r\n self.reduction_b = ReductionB()\r\n self.inception_c = build_inception_block_c(3)\r\n self.avgpool = tf.keras.layers.AveragePooling2D(pool_size=(8, 8))\r\n self.dropout = tf.keras.layers.Dropout(rate=0.2)\r\n self.flat = tf.keras.layers.Flatten()\r\n self.fc = tf.keras.layers.Dense(units=NUM_CLASSES,\r\n activation=tf.keras.activations.softmax)\r\n\r\n def call(self, inputs, training=True, mask=None):\r\n x = self.stem(inputs, training=training)\r\n x = self.inception_a(x, training=training)\r\n x = self.reduction_a(x, training=training)\r\n x = self.inception_b(x, training=training)\r\n x = self.reduction_b(x, training=training)\r\n x = self.inception_c(x, training=training)\r\n x = self.avgpool(x)\r\n x = self.dropout(x, training=training)\r\n x = self.flat(x)\r\n x = self.fc(x)\r\n\r\n return x\r\n\r\n\r\nmodel = InceptionV4()\r\n(x_train, y_train), (x_test, y_test) = tf.keras.datasets.mnist.load_data()\r\nx_train = x_train.reshape((-1, 28, 28, 1)).astype(\"float32\")\r\nx_test = x_test.reshape((-1, 28, 28, 1)).astype(\"float32\")\r\nmodel.compile(optimizer=tf.keras.optimizers.Adam(),\r\n loss='sparse_categorical_crossentropy',\r\n metrics=['accuracy'])\r\nmodel.fit(x_train, y_train)\r\n"
},
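A caveat on the training snippet at the bottom of inceptionv4.py: the stem and the two reduction stages only shrink the feature map down to the 8x8 grid that the final AveragePooling2D expects for the canonical 299x299 input, so raw 28x28 MNIST images are too small to make it through. A minimal smoke-test sketch, assuming the `InceptionV4` class above and its `inception_modules` dependency are importable, using random 299x299 inputs:

```python
import numpy as np
import tensorflow as tf
# from inceptionv4 import InceptionV4  # assumed module layout

model = InceptionV4()
x = np.random.rand(2, 299, 299, 3).astype("float32")  # canonical Inception-v4 input size
y = np.random.randint(0, 10, size=(2,))
model.compile(optimizer=tf.keras.optimizers.Adam(),
              loss="sparse_categorical_crossentropy",
              metrics=["accuracy"])
model.fit(x, y, epochs=1, batch_size=2)
```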
{
"alpha_fraction": 0.4766690731048584,
"alphanum_fraction": 0.5039483308792114,
"avg_line_length": 26.701030731201172,
"blob_id": "a90ee20bde642af8675be682428a767d99d9581d",
"content_id": "66c1aa82d27a848cc6eb21d5e9084cea5bc1968e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2846,
"license_type": "no_license",
"max_line_length": 78,
"num_lines": 97,
"path": "/01-Image Classification/senet/senet.py",
"repo_name": "Qzgfather/TensorFlow2_CNN_Models",
"src_encoding": "UTF-8",
"text": "#================================================================\r\n# 日期 : 2019年11月2日12:34:43\r\n# 建模类型 : 自定义型\r\n# 改进人 : 亓志国\r\n# 模型 : ResNet\r\n# 文件数量 : 1\r\n#================================================================\r\nimport numpy as np\r\nimport tensorflow as tf\r\nimport cv2, random\r\nfrom tensorflow.keras.models import Model, Sequential\r\nfrom tensorflow.keras.layers import Conv2D, Dense, Activation, InputLayer\r\nfrom tensorflow.keras.layers import GlobalAveragePooling2D, BatchNormalization\r\nfrom tensorflow.keras.layers import LeakyReLU, Multiply, Dropout\r\n\r\n\r\nclass SELayer(Model):\r\n def __init__(self, filters, reduction=16):\r\n super(SELayer, self).__init__()\r\n self.gap = GlobalAveragePooling2D()\r\n self.fc = Sequential([\r\n # use_bias???\r\n Dense(filters // reduction,\r\n input_shape=(filters, ),\r\n use_bias=False),\r\n Dropout(0.5),\r\n BatchNormalization(),\r\n Activation('relu'),\r\n Dense(filters, use_bias=False),\r\n Dropout(0.5),\r\n BatchNormalization(),\r\n Activation('sigmoid')\r\n ])\r\n self.mul = Multiply()\r\n\r\n def call(self, input_tensor):\r\n weights = self.gap(input_tensor)\r\n weights = self.fc(weights)\r\n return self.mul([input_tensor, weights])\r\n\r\n\r\ndef DBL(filters, ksize, strides=1):\r\n layers = [\r\n BatchNormalization(),\r\n LeakyReLU(),\r\n Conv2D(filters, (ksize, ksize),\r\n strides=strides,\r\n padding='same',\r\n use_bias=False)\r\n ]\r\n return Sequential(layers)\r\n\r\n\r\nclass ResUnit(Model):\r\n def __init__(self, filters):\r\n super(ResUnit, self).__init__()\r\n self.dbl1 = DBL(filters // 2, 1)\r\n self.dbl2 = DBL(filters, 3)\r\n self.se = SELayer(filters, 1)\r\n\r\n def call(self, input_tensor):\r\n x = self.dbl1(input_tensor)\r\n x = self.dbl2(x)\r\n x = self.se(x)\r\n x += input_tensor\r\n return x\r\n\r\n\r\ndef SENet(input_shape,\r\n output_filters,\r\n filters=[64, 128, 256, 512, 1024],\r\n res_n=[1, 2, 8, 8, 4]):\r\n layers = []\r\n layers += [\r\n Conv2D(32, (7, 7),\r\n input_shape=input_shape,\r\n padding='same',\r\n use_bias=False)\r\n ]\r\n for fi, f in enumerate(filters):\r\n layers += [DBL(f, 3, 2)] + [ResUnit(f)] * res_n[fi]\r\n layers += [\r\n Dropout(0.5),\r\n BatchNormalization(),\r\n LeakyReLU(),\r\n Conv2D(output_filters, (7, 7), padding='same'),\r\n GlobalAveragePooling2D(),\r\n Activation('softmax')\r\n ]\r\n return Sequential(layers)\r\n\r\n\r\n\r\n\r\ndef senet():\r\n model = SENet((224, 224, 3), 10)\r\n return model\r\n\r\n"
},
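The `SELayer` above is the squeeze-and-excitation step: global average pooling squeezes each channel to one scalar, the bottleneck Dense pair (with `reduction` controlling the bottleneck width) emits a sigmoid weight per channel, and `Multiply` rescales the input feature map channel-wise. A minimal sketch of the recalibration, assuming `SELayer` from senet.py is importable:

```python
import tensorflow as tf
# from senet import SELayer  # as defined above

se = SELayer(filters=64, reduction=16)
x = tf.random.normal((2, 32, 32, 64))
y = se(x)          # same shape as x; each channel scaled by a learned (0, 1) weight
print(y.shape)     # (2, 32, 32, 64)
```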
{
"alpha_fraction": 0.551561176776886,
"alphanum_fraction": 0.595780611038208,
"avg_line_length": 43.2164192199707,
"blob_id": "9bbf63191b25edc979a4803777085d31a986111c",
"content_id": "a4b0fdf0f03ef8fafb5ea129915540d159019f6c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5925,
"license_type": "no_license",
"max_line_length": 86,
"num_lines": 134,
"path": "/03-Image Segmentation/SegNet/model.py",
"repo_name": "Qzgfather/TensorFlow2_CNN_Models",
"src_encoding": "UTF-8",
"text": "from tensorflow.keras.layers import Activation, Reshape\nfrom tensorflow.keras.layers import BatchNormalization\nfrom tensorflow.keras.models import Model\nfrom tensorflow.keras import layers\nfrom tensorflow.keras.models import Sequential\nfrom layers import MaxPoolingWithArgmax2D, MaxUnpooling2D\n\n\nclass SegNet(Model):\n def get_config(self):\n pass\n\n def __init__(self, n_labels=0, kernel=3, pool_size=(2, 2), output_mode=\"softmax\"):\n super(SegNet, self).__init__(name='')\n self.n_labels = n_labels\n self.output_mode = output_mode\n self.pool_size = pool_size\n self.unit01 = Sequential([\n layers.Conv2D(64, (kernel, kernel), padding=\"same\", activation=\"relu\"),\n BatchNormalization(),\n layers.Conv2D(64, (kernel, kernel), padding=\"same\", activation=\"relu\"),\n BatchNormalization(),\n ])\n self.unit02 = Sequential([\n layers.Conv2D(128, (kernel, kernel), padding=\"same\", activation=\"relu\"),\n BatchNormalization(),\n layers.Conv2D(128, (kernel, kernel), padding=\"same\"),\n BatchNormalization()\n ])\n self.unit03 = Sequential([\n layers.Conv2D(256, (kernel, kernel), padding=\"same\", activation=\"relu\"),\n BatchNormalization(),\n layers.Conv2D(256, (kernel, kernel), padding=\"same\", activation=\"relu\"),\n BatchNormalization(),\n layers.Conv2D(256, (kernel, kernel), padding=\"same\", activation=\"relu\"),\n BatchNormalization(),\n ])\n self.unit04 = Sequential([\n layers.Conv2D(512, (kernel, kernel), padding=\"same\", activation=\"relu\"),\n BatchNormalization(),\n layers.Conv2D(512, (kernel, kernel), padding=\"same\", activation=\"relu\"),\n BatchNormalization(),\n layers.Conv2D(512, (kernel, kernel), padding=\"same\", activation=\"relu\"),\n BatchNormalization(),\n ])\n self.unit05 = Sequential([\n layers.Conv2D(512, (kernel, kernel), padding=\"same\", activation=\"relu\"),\n BatchNormalization(),\n layers.Conv2D(512, (kernel, kernel), padding=\"same\", activation=\"relu\"),\n BatchNormalization(),\n layers.Conv2D(512, (kernel, kernel), padding=\"same\", activation=\"relu\"),\n BatchNormalization(),\n ])\n self.unit06 = Sequential([\n layers.Conv2D(512, (kernel, kernel), padding=\"same\", activation=\"relu\"),\n BatchNormalization(),\n layers.Conv2D(512, (kernel, kernel), padding=\"same\", activation=\"relu\"),\n BatchNormalization(),\n layers.Conv2D(512, (kernel, kernel), padding=\"same\", activation=\"relu\"),\n BatchNormalization(),\n ])\n self.unit07 = Sequential([\n layers.Conv2D(512, (kernel, kernel), padding=\"same\", activation=\"relu\"),\n BatchNormalization(),\n layers.Conv2D(512, (kernel, kernel), padding=\"same\", activation=\"relu\"),\n BatchNormalization(),\n layers.Conv2D(256, (kernel, kernel), padding=\"same\", activation=\"relu\"),\n BatchNormalization(),\n ])\n self.unit08 = Sequential([\n layers.Conv2D(256, (kernel, kernel), padding=\"same\", activation=\"relu\"),\n BatchNormalization(),\n layers.Conv2D(256, (kernel, kernel), padding=\"same\", activation=\"relu\"),\n BatchNormalization(),\n layers.Conv2D(128, (kernel, kernel), padding=\"same\", activation=\"relu\"),\n BatchNormalization()\n ])\n self.unit09 = Sequential([\n layers.Conv2D(128, (kernel, kernel), padding=\"same\", activation=\"relu\"),\n BatchNormalization(),\n layers.Conv2D(64, (kernel, kernel), padding=\"same\", activation=\"relu\"),\n BatchNormalization()\n ])\n self.unit10 = Sequential([\n layers.Conv2D(64, (kernel, kernel), padding=\"same\", activation=\"relu\"),\n BatchNormalization(),\n layers.Conv2D(n_labels, (1, 1), padding=\"valid\"),\n BatchNormalization()\n ])\n\n def 
call(self, inputs, training=None, mask=None):\n # encoder\n output_01 = self.unit01(inputs)\n pool_1, mask_1 = MaxPoolingWithArgmax2D(self.pool_size)(output_01)\n output_02 = self.unit02(pool_1)\n pool_2, mask_2 = MaxPoolingWithArgmax2D(self.pool_size)(output_02)\n output_03 = self.unit03(pool_2)\n pool_3, mask_3 = MaxPoolingWithArgmax2D(self.pool_size)(output_03)\n output_04 = self.unit04(pool_3)\n pool_4, mask_4 = MaxPoolingWithArgmax2D(self.pool_size)(output_04)\n output_05 = self.unit05(pool_4)\n pool_5, mask_5 = MaxPoolingWithArgmax2D(self.pool_size)(output_05)\n print(\"Build enceder done..\")\n\n # decoder\n unpool_1 = MaxUnpooling2D(self.pool_size)([pool_5, mask_5])\n output_06 = self.unit06(unpool_1)\n unpool_2 = MaxUnpooling2D(self.pool_size)([output_06, mask_4])\n output_07 = self.unit07(unpool_2)\n unpool_3 = MaxUnpooling2D(self.pool_size)([output_07, mask_3])\n output_08 = self.unit08(unpool_3)\n unpool_4 = MaxUnpooling2D(self.pool_size)([output_08, mask_2])\n output_09 = self.unit09(unpool_4)\n unpool_5 = MaxUnpooling2D(self.pool_size)([output_09, mask_1])\n output_10 = self.unit10(unpool_5)\n output_11 = Reshape(\n (256 * 256, self.n_labels),\n input_shape=(256, 256, self.n_labels),\n )(output_10)\n\n outputs = Activation(self.output_mode)(output_11)\n print(\"Build decoder done..\")\n return outputs\n\n\nif __name__ == '__main__':\n import tensorflow as tf\n import numpy as np\n\n model = SegNet(n_labels=21, kernel=3, pool_size=(2, 2), output_mode=\"softmax\")\n model.build(input_shape=(1, 256, 256, 3))\n data = np.ones((1, 256, 256, 3))\n result = model(data)\n print(result.shape)\n"
}
] | 12 |
gitter-badger/pandas_degreedays | https://github.com/gitter-badger/pandas_degreedays | 64768843378c75a3beacf301d95251910663875d | 97b9035260f57f440995b0bc1f0e75b8b3dae1d6 | 3af6b37100d9ec22d42c1daad364c12188c890f8 | refs/heads/master | 2020-06-25T22:35:30.864831 | 2014-12-21T16:16:44 | 2014-12-21T16:16:44 | null | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.4287916123867035,
"alphanum_fraction": 0.6211467385292053,
"avg_line_length": 29.885713577270508,
"blob_id": "79ddb670f372cb38797402066783a57c8a77565f",
"content_id": "b84779594aa3370207fc2c174828b2ab3bba23c4",
"detected_licenses": [
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "reStructuredText",
"length_bytes": 3244,
"license_type": "permissive",
"max_line_length": 90,
"num_lines": 105,
"path": "/README.rst",
"repo_name": "gitter-badger/pandas_degreedays",
"src_encoding": "UTF-8",
"text": "pandas\\_degreedays\n==================\n\nPandas Degree Days (``pandas_degreedays``) is a Python package to\ncalculate `degree days <http://en.wikipedia.org/wiki/Degree_day>`__.\n\n.. image:: https://travis-ci.org/scls19fr/pandas_degreedays.svg?branch=master\n :target: https://travis-ci.org/scls19fr/pandas_degreedays\n\n.. image:: https://pypip.in/version/pandas_degreedays/badge.svg\n :target: https://pypi.python.org/pypi/pandas_degreedays/\n :alt: Latest Version\n\nYou must provide a `Pandas\nSeries <http://pandas.pydata.org/pandas-docs/stable/generated/pandas.Series.html>`__\nwith temperature values.\n\nLet's call ``ts_temp`` this Serie which looks like:\n\n::\n\n datetime\n 2014-03-20 23:00:00 11\n 2014-03-20 23:30:00 11\n 2014-03-21 00:00:00 11\n 2014-03-21 00:30:00 11\n 2014-03-21 01:00:00 11\n 2014-03-21 01:30:00 11\n ...\n 2014-11-01 20:00:00 12\n 2014-11-01 20:30:00 12\n 2014-11-01 21:00:00 12\n 2014-11-01 21:30:00 12\n 2014-11-01 22:00:00 12\n 2014-11-01 22:30:00 12\n Name: temp, Length: 10757\n\nYou can get a time serie with temperature in ``sample`` folder and read\nit using:\n\n::\n\n import pandas as pd\n filename = 'temperature_sample.xls'\n df_temp = pd.read_excel(filename)\n df_temp = df_temp.set_index('datetime')\n ts_temp = df_temp['temp']\n\nWe can calculate degree days using:\n\n::\n\n from pandas_degreedays import calculate_dd\n df_degreedays = calculate_dd(ts_temp, method='pro', typ='heating', Tref=18.0)\n\n\n``method`` can be :\n\n - ``'pro'`` (energy professionals) - this is default calculation method)\n - ``'meteo'``\n\n``typ`` can be :\n\n - ``'heating'``\n - ``'cooling'``\n\nIt outputs a `Pandas\nDataFrame <http://pandas.pydata.org/pandas-docs/stable/generated/pandas.DataFrame.html>`__\nwith degree days like:\n\n::\n\n Tmin Tmax Tavg Tref DD DD_cum\n 2014-03-22 7.0 11.0 9.00 18 9.000000 9.000000\n 2014-03-23 3.0 12.0 7.50 18 10.500000 19.500000\n 2014-03-24 0.0 10.0 5.00 18 13.000000 32.500000\n 2014-03-25 6.0 10.0 8.00 18 10.000000 42.500000\n 2014-03-26 5.0 12.0 8.50 18 9.500000 52.000000\n 2014-03-27 2.0 8.0 5.00 18 13.000000 65.000000\n ... ... ... ... ... ... ...\n 2014-10-26 5.0 17.0 11.00 18 7.000000 653.547663\n 2014-10-27 9.0 22.0 15.50 18 3.336923 656.884586\n 2014-10-28 7.5 20.0 13.75 18 4.544400 661.428986\n 2014-10-29 8.0 19.0 13.50 18 4.618182 666.047168\n 2014-10-30 12.0 22.0 17.00 18 1.992000 668.039168\n 2014-10-31 11.0 24.0 17.50 18 2.143077 670.182245\n\n [224 rows x 6 columns]\n\n.. figure:: https://github.com/scls19fr/pandas_degreedays/blob/master/sample/figure.png\n :alt: figure\n\nAbout Pandas\n------------\n\n`pandas <http://pandas.pydata.org/>`__ is a Python package providing\nfast, flexible, and expressive data structures designed to make working\nwith \"relational\" or \"labeled\" data both easy and intuitive. It's a very\nconvenient library to work with time series.\n\nLinks\n-----\n\n- Source code and issue tracking can be found at\n `GitHub <https://github.com/scls19fr/pandas_degreedays>`__.\n\n"
},
{
"alpha_fraction": 0.6235077977180481,
"alphanum_fraction": 0.6372818946838379,
"avg_line_length": 24.928571701049805,
"blob_id": "2781e8de62f88662e290907381f03fa283d3e296",
"content_id": "9aa77144fa80c6a4e56a492488637818ba90bf65",
"detected_licenses": [
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1089,
"license_type": "permissive",
"max_line_length": 82,
"num_lines": 42,
"path": "/sample/calc_degreedays.py",
"repo_name": "gitter-badger/pandas_degreedays",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\n\"\"\"\nReading XLS file with temperature (sample)\nCalculating degree days\nPlotting\n\"\"\"\n\nimport os\nimport pandas as pd\nimport matplotlib.pyplot as plt\n\nfrom pandas_degreedays import calculate_dd\n\ndef main():\n basepath = os.path.dirname(__file__)\n filename = os.path.join(basepath, 'temperature_sample.xls')\n df_temp = pd.read_excel(filename)\n df_temp = df_temp.set_index('datetime')\n\n ts_temp = df_temp['temp']\n\n print(ts_temp)\n #print(ts_temp.dtypes)\n #print(ts_temp.index)\n\n df_degreedays = calculate_dd(ts_temp, method='pro', typ='heating', Tref=18.0)\n print(df_degreedays)\n\n #df_degreedays['DD_7'] = pd.rolling_mean(df_degreedays['DD'], 7)\n\n fig, axes = plt.subplots(nrows=4, ncols=1)\n ts_temp.resample('1H').plot(ax=axes[0])\n df_degreedays[['Tmin', 'Tavg', 'Tmax', 'Tref']].plot(ax=axes[1], legend=False)\n df_degreedays['DD'].plot(ax=axes[2])\n #df_degreedays[['DJU', 'DJU_7']].plot(ax=axes[2])\n df_degreedays['DD_cum'].plot(ax=axes[3])\n plt.show()\n\nif __name__ == \"__main__\":\n main()\n"
}
] | 2 |
jitpavi/Virat-Kohli_Data-Analysis_ODI-runs | https://github.com/jitpavi/Virat-Kohli_Data-Analysis_ODI-runs | 3d68e6308e6ec87adb5128ab1dedcfdf53edd57a | 55cc9d57d45b6e74787d754a668a619bdbf2ab66 | 0c7840d7d82ba1046ec365f6da173a764bdc43cf | refs/heads/master | 2022-07-08T23:41:21.864426 | 2020-05-11T18:19:05 | 2020-05-11T18:19:05 | 263,107,871 | 1 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.7563391327857971,
"alphanum_fraction": 0.7702060341835022,
"avg_line_length": 42.895652770996094,
"blob_id": "a15c663fe49b23e28c586b590a21ad03364ce15e",
"content_id": "19092d0a98cb95245a148aefe2bdfd1335da241d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 5048,
"license_type": "no_license",
"max_line_length": 312,
"num_lines": 115,
"path": "/README.md",
"repo_name": "jitpavi/Virat-Kohli_Data-Analysis_ODI-runs",
"src_encoding": "UTF-8",
"text": "# Project Name - Virat-Kohli_Data-Analysis_ODI-runs\n\n#### -- Project Status: [Active]\nNeed to add annotations in all the bar plot and explore more datasets to understand better which other factos can influence the batting performance of Virat Kohli\n\n## Project Intro/Objective:\nThe purpose of the project is to perform an in-depth analysis of Virat Kohli's ODI career statistics and understand what all factors are crucial which influences stellar performance when it comes to batting.I have taken into account all the important data which can be helpful in deriving the critical variables.\n\n### Methods Used:\n* Web Scraping\n* Data Exploration\n* Data Wrangling\n* Data Visualization\n\n### Technologies Used:\n* Python\n* Pandas\n* Pycharm\n* Seaborn\n* Requests\n* BeautifulSoup\n* Matplotlib\n* Numpy\n\n## Project Description:\n\n### Prerequisites\n ### -> Dataset:\n * The data set was downloaded from ESPN cric info website through web scraping. (https://www.espncricinfo.com/india/content/player/253802.html)\n \n ### -> Python Libraries:\n * Pandas\n * Seaborn\n * Requests\n * BeautifulSoup\n * Matplotlib\n * Numpy\n\n### Workflow:\nWe are going to divide the workflow in 2 sections , one dealing the ODI data on calendar year basis and 2nd one based on the Venue:\n\n## Calender Year Basis:\n1. Through web scraping we access and download the required data of Kohli from the ESPN website\n2. Then massive amount of Data Wrangling is performed which will be further used for analysis.\n3. Create a Dataframe holding the RAW data in a groupby form based on calander year.\n4. Perform an extensive Explorative Data Analysis considering variables like Runs, Centuries, Strike-Rate and AverageRuns\n5. Develop a correlation table understanding the impacting of each variable on the effective variable which Total runs scored yearly.\n6. Parse the Date columns into Datetime object and convert the format into \"Year-Week\" type.\n\n## Expected Output:\n### As you can observe here, for calendar year when Kohli scored more than 1000 runs below listed are the crucial factors which helps him achieving the same \n### No of runs for each calender year increase if below variables increases:\n\n### 1.No of Matches played in each calendar year.\n\n### 2.No. of 4s hit in every ODI match.\n\n### 3.No. of Centuries hit in a calendar year.\n\n### 4.Every time he involved in a chase his run tally increases.\n\n### His Strike rate has the least impact on the total runs scored in an year however no of sixes or batting in first innings do leave an impact to some extent.\n\n \n\n\n \n \n \n \n\n\n \n \n \n \n\n## Venue Basis:\n1. Through web scraping we access and download the required data of Kohli from the ESPN website.\n2. Then massive amount of Data Wrangling is performed which will be further used for analysis.\n3. Create a Dataframe holding the RAW data in a groupby form based on matches played (Home and Away) on each venue throughout his career.\n4. Perform an extensive Explorative Data Analysis considering variables like Runs, Centuries, 4s,6s and batting 1st or 2nd.\n5. Develop pie chart showing the relationship between all variables divided in 2 slices Home and Away data respectively.\n\n## Expected Output:\n### As you can observe here, for matches played in Home and Away there is massive difference between amongst few variables as listed below:\n### 1.Kohli played more No of Matches outside India.\n\n### 2.Kohli scored more No of Runs outside India.\n\n### 3.Kohli has hit more No. 
of Centuries outside India.\n\n### 4.Kohli has hit more No. of 4s outside India.\n\n### This exaclty confirms with our observation found on the earlier section on pre calendar year basis although Strike rate,6s and Avg there is not much of a difference observed here\n\n \n\n## Featured Notebooks/Analysis/Deliverables\n* [ViratKohli_ODI Analysis v1.0.py](https://github.com/jitpavi/Virat-Kohli_Data-Analysis_ODI-runs/blob/master/ViratKohli_ODI%20Analysis%20v1.0.py)\n\n## Versioning\nCode version - v1.0\n\n## Author:\n\n* **Jitin Pavithran** - [jitpavi](https://github.com/jitpavi)\n\n## Acknowledgments:\n\n* https://www.espncricinfo.com/india/content/player/253802.html\n\n## References:\n\n* https://www.kaggle.com/vijaydwivedi052/analysis-of-virat-kohli-in-test-matches\n"
},
{
"alpha_fraction": 0.6645895838737488,
"alphanum_fraction": 0.6845552325248718,
"avg_line_length": 53.57243728637695,
"blob_id": "39c6b299a38c5da373018d49b8083d997881b64f",
"content_id": "275d72febeb5e05475ba0f690612f4a8d513d601",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 15727,
"license_type": "no_license",
"max_line_length": 221,
"num_lines": 283,
"path": "/ViratKohli_ODI Analysis v1.0.py",
"repo_name": "jitpavi/Virat-Kohli_Data-Analysis_ODI-runs",
"src_encoding": "UTF-8",
"text": "\"\"\"\r\nCode Name: Data Analysis of Virat Kohli's ODI batting career\r\nCode Author: Jitin Pavithran\r\nCode Version: v1.0\r\nCode Description: The purpose of the code is to perform an in-depth analysis of Virat Kohli's ODI career statistics and understand what all factors are crucial which influences stellar performance when it comes to batting\r\n\"\"\"\r\n\r\n# Import all the important libraries required for this code\r\n\r\nfrom bs4 import BeautifulSoup\r\nimport pandas as pd\r\nimport requests\r\nimport re\r\nimport numpy as np\r\nimport matplotlib.pyplot as plt\r\nimport seaborn as sns\r\n\r\n# Perform webscraping using BeautifulSoup method and download the data for ODI statistics for each Calendar year\r\n\r\nespncricinfo_url = \"https://stats.espncricinfo.com/ci/engine/player/253802.html?class=2;template=results;type=batting;view=innings\"\r\n\r\nresponse_vkohli = requests.request(\"GET\",url=espncricinfo_url)\r\n\r\nsoup_vkohli = BeautifulSoup(response_vkohli.content,'html.parser')\r\n\r\nsoup_body = soup_vkohli.findAll('tr',class_='data1')\r\n\r\nfinal_list=[]\r\nfor body in soup_body[1:]:\r\n data = body.findAll('td')\r\n summary_list = [data[len].text.strip() for len in range(len(data))]\r\n final_list.append(tuple(summary_list))\r\n\r\nhead_list=[]\r\nsoup_heading = soup_vkohli.findAll('tr',class_='headlinks')\r\n\r\ncol_head = soup_heading[0].findAll('th')\r\nhead_list = [col.text.strip() for col in col_head]\r\n\r\n# Create a Raw Data Dataframe\r\n\r\nkohli_dict= pd.DataFrame(final_list,columns=head_list,index=pd.Series(np.arange(1,249)))\r\n\r\n# This data required massive amount of Data Wrangling hence we proceed first with the the removal of unwanted columns\r\n\r\nkohli_dict.drop(columns=[\"\"],axis=1,inplace=True)\r\n\r\nkohli_dict['ODI Match No.'] = kohli_dict.index.values\r\nkohli_dict.set_index('ODI Match No.',drop=True,inplace=True)\r\nkohli_dict.reset_index(inplace=True)\r\nkohli_dict.rename(columns={'Runs':'Innings Runs Scored','Mins':'Innings Minutes Batted','BF':'Count of Balls Faced','4s':'Count of 4s',\r\n '6s':'Count of 6s','SR':'Strike Rate','Pos':'Batting Position','Inns':'Innings','Opposition':'Opposition Team','Ground':'Venue','Start Date':'Match Date'\r\n },inplace=True)\r\n\r\nkohli_dict['Innings Runs Scored'] = kohli_dict['Innings Runs Scored'].str.replace('*','')\r\nkohli_dict['Opposition Team'] = kohli_dict['Opposition Team'].str.replace('v ','')\r\nkohli_dict.replace('TDNB',0,inplace=True)\r\nkohli_dict.replace('DNB',0,inplace=True)\r\nkohli_dict.replace('-',0,inplace=True)\r\nkohli_dict[['Innings Runs Scored','Innings Minutes Batted','Count of Balls Faced','Count of 4s','Count of 6s','Batting Position','Innings']] = kohli_dict[['Innings Runs Scored','Innings Minutes Batted',\r\n 'Count of Balls Faced','Count of 4s','Count of 6s',\r\n 'Batting Position','Innings']].astype('int')\r\n\r\nkohli_dict['Dismissal'] = kohli_dict['Dismissal'].str.replace('0','')\r\nkohli_dict['Dismissal'].fillna('Not Played',inplace=True)\r\nkohli_dict[['Strike Rate']] = kohli_dict[['Strike Rate']].astype(float)\r\nkohli_dict['Match Date'] = pd.to_datetime(kohli_dict['Match Date'])\r\nkohli_dict['Year Played'] = kohli_dict['Match Date'].dt.year\r\n\r\n\r\nkohli_dict.to_csv(r\"C:\\Users\\jpavithr\\OneDrive - Capgemini\\Desktop\\Automation Drive - Python training\\Pandas\\real python\\Virat_kohli_statistics\\Virat_kohli_statistics_filterdata.csv\")\r\n\r\n# Perform webscraping using BeautifulSoup method and download the data for ODI statistics for each Venue 
basis\r\n\r\nespncricinfo_url_ground = \"https://stats.espncricinfo.com/ci/engine/player/253802.html?class=2;orderby=runs;template=results;type=batting;view=ground\"\r\n\r\nresponse_vkohli_ground = requests.request(\"GET\",url=espncricinfo_url_ground)\r\n\r\nsoup_vkohli_ground = BeautifulSoup(response_vkohli_ground.content,'html.parser')\r\n\r\nsoup_body_ground = soup_vkohli_ground.findAll('tr',class_='title')\r\n\r\nfinal_list_ground=[]\r\nfor body in soup_body_ground:\r\n    data = body.findAll('td')\r\n    summary_list = [data[idx].text.strip() for idx in range(len(data))]\r\n    final_list_ground.extend((summary_list))\r\n\r\nsoup_body_ground_data = soup_vkohli_ground.findAll('tr',class_='data1')\r\n\r\nfinal_list_ground_data=[]\r\nfor body in soup_body_ground_data[1:]:\r\n    data = body.findAll('td')\r\n    summary_list = [data[idx].text.strip() for idx in range(len(data))]\r\n    final_list_ground_data.append(tuple(summary_list))\r\n\r\nhead_list=[]\r\nsoup_heading_ground_head = soup_vkohli_ground.findAll('tr',class_='headlinks')\r\n\r\ncol_head = soup_heading_ground_head[0].findAll('th')\r\nhead_list = [col.text.strip() for col in col_head]\r\n\r\n# Create RAW Data Dataframe holding information on Venue basis\r\n\r\nkohli_dict_ground= pd.DataFrame(final_list_ground_data,columns=head_list)\r\n\r\n\r\n# This data required a massive amount of data wrangling, hence we proceed first with the removal of unwanted columns\r\n\r\nkohli_dict_ground.drop(columns=[\"\"],axis=1,inplace=True)\r\nkohli_dict_ground['Ground'] = final_list_ground\r\n\r\nkohli_dict_ground['Venue_location'] = kohli_dict_ground['Ground'].apply(lambda val: ('Home' if re.search('.*India$',val) else 'Away'))\r\n\r\nkohli_dict_ground.replace('-',0,inplace=True)\r\n\r\nkohli_dict_ground[['Mat','Inns','NO','Runs','BF','100','50','0','4s','6s']] = kohli_dict_ground[['Mat','Inns','NO','Runs','BF','100','50','0','4s','6s']].astype('int')\r\n\r\nkohli_dict_ground[['Ave','SR']] = kohli_dict_ground[['Ave','SR']].astype(float)\r\n\r\nkohli_dict_ground.to_csv(r\"C:\\Users\\jpavithr\\OneDrive - Capgemini\\Desktop\\Automation Drive - Python training\\Pandas\\real python\\Virat_kohli_statistics\\Virat_kohli_grounddata.csv\",index=False)\r\n\r\n# Exploratory Data Analysis of data on the basis of matches played at each Venue\r\n\r\nVenuewise_Record = kohli_dict_ground.groupby('Venue_location',as_index=False).agg({'Mat':'sum','Inns':'sum','Runs':'sum','SR':'mean',\r\n                                                                                    'Ave':'mean','4s':'sum','6s':'sum','100':'sum','50':'sum'\r\n                                                                                    })\r\n\r\n\r\nVenuewise_Record.rename(columns={'Runs':'Venuewise_Runs','SR':'Venuewise_SR','Mat':'Venuewise_Matchcount',\r\n                                 '4s':'Venuewise_4s','6s':'Venuewise_6s','Ave':'Venuewise_Avg','100':'Venuewise_Centuries'},inplace=True)\r\n\r\nVenuewise_Record.to_csv(r\"C:\\Users\\jpavithr\\OneDrive - Capgemini\\Desktop\\Automation Drive - Python training\\Pandas\\real python\\Virat_kohli_statistics\\Virat_kohli_statistics_venuewisedata.csv\",index=False)\r\n\r\n# Exploratory Data Analysis based on the Runs scored in ODI for each calendar year\r\n\r\n# Data Analysis of Runs scored against each variable\r\n\r\nVenue_runs = kohli_dict.groupby('Venue').agg({'Innings Runs Scored':'sum'}).sort_values('Innings Runs Scored',ascending=False)\r\nOppTeam_runs = kohli_dict.groupby('Opposition Team').agg({'Innings Runs Scored':'sum'}).sort_values('Innings Runs Scored',ascending=False)\r\nBatpos_runs = kohli_dict.groupby('Batting Position').agg({'Innings Runs Scored':'sum'}).sort_values('Innings Runs Scored',ascending=False)\r\nInnings_runs= kohli_dict.groupby('Innings').agg({'Innings Runs Scored':'sum'}).sort_values('Innings Runs Scored',ascending=False)\r\nYear_Runs = kohli_dict.groupby('Year Played').agg({'Innings Runs Scored':'sum'}).sort_values('Innings Runs Scored',ascending=False)\r\n\r\n\r\n# Data Analysis of Centuries against each variable\r\n\r\nOppTeam_cent = kohli_dict[kohli_dict['Innings Runs Scored']>=100].groupby('Opposition Team').agg({'Innings Runs Scored':'count'}).sort_values('Innings Runs Scored',ascending=False)\r\nYear_cent = kohli_dict[kohli_dict['Innings Runs Scored']>=100].groupby('Year Played').agg({'Innings Runs Scored':'count'}).sort_values('Innings Runs Scored',ascending=False)\r\nInnings_cent = kohli_dict[kohli_dict['Innings Runs Scored']>=100].groupby('Innings').agg({'Innings Runs Scored':'count'}).sort_values('Innings Runs Scored',ascending=False)\r\nVenue_cent = kohli_dict[kohli_dict['Innings Runs Scored']>=100].groupby('Venue').agg({'Innings Runs Scored':'count'}).sort_values('Innings Runs Scored',ascending=False)\r\n\r\n# Data Analysis of Strike Rate and Average runs against each variable\r\n\r\nYear_SR_unsort = kohli_dict.groupby('Year Played').agg({'Strike Rate':'mean'})\r\nYear_avg_unsort = kohli_dict.groupby('Year Played').agg({'Innings Runs Scored':'mean'})\r\n\r\n# Data Analysis of Runs Scored in each Year of his ODI Career\r\nYearwise_Record = kohli_dict.groupby('Year Played',as_index=False).agg({'Strike Rate':'mean','ODI Match No.':'size','Count of 4s':'sum',\r\n                                                                        'Count of 6s':'sum','Innings Runs Scored':[np.sum,lambda val: (val>=100).sum()],\r\n                                                                        'Innings':[lambda val: (val == 1).sum(),lambda val: (val == 2).sum()]})\r\n\r\nYearwise_Record.rename(columns={'Innings Runs Scored':'Yearwise_Runs','Strike Rate':'Yearwise_SR','ODI Match No.':'Yearwise_Matchcount',\r\n                                'Count of 4s':'Yearwise_4s','Count of 6s':'Yearwise_6s','<lambda_0>':'Yearwise_Centuries',},inplace=True)\r\nYearwise_Record.columns = Yearwise_Record.columns.droplevel(1)\r\nYearwise_Record.columns = ['Year Played', 'Yearwise_SR', 'Yearwise_Matchcount', 'Yearwise_4s','Yearwise_6s', 'Yearwise_Runs', 'Yearwise_Centuries','Batting 1st', 'Batting 2nd']\r\n\r\nYearwise_Record['Yearwise_Avg'] = Yearwise_Record['Yearwise_Runs'] / Yearwise_Record['Yearwise_Matchcount']\r\n\r\nYearwise_Record_corr = Yearwise_Record[['Yearwise_Runs','Yearwise_4s', 'Yearwise_Centuries', 'Batting 2nd','Yearwise_Matchcount','Batting 1st','Yearwise_6s','Yearwise_SR']].corr()\r\n\r\nYearwise_Record_corr.to_csv(r\"C:\\Users\\jpavithr\\OneDrive - Capgemini\\Desktop\\Automation Drive - Python training\\Pandas\\real python\\Virat_kohli_statistics\\Virat_kohli_statistics_yearwise_corr.csv\")\r\nYearwise_Record.to_csv(r\"C:\\Users\\jpavithr\\OneDrive - Capgemini\\Desktop\\Automation Drive - Python training\\Pandas\\real python\\Virat_kohli_statistics\\Virat_kohli_statistics_yearwisedata.csv\",index=False)\r\n\r\n\r\n# Data Visualisation\r\n\r\n# Data Visualisation Plot of Kohli's run tally against each variable\r\n\r\nfig1,ax1 = plt.subplots(nrows = 2,ncols = 2,figsize=(18,7))\r\nfig1.canvas.set_window_title(\"Plotting of Runs Scored against each variable\")\r\nfig1.tight_layout(pad = 3.0)\r\n\r\nInnings_runs[:5].plot(kind = 'bar',ax = ax1[0,0],rot = 0,colormap = 'bwr_r',width = 0.3)\r\nYear_Runs[:5].plot(kind = 'bar',ax = ax1[0,1],rot = 0,colormap = 'seismic',width = 0.3)\r\nBatpos_runs[:5].plot(kind = 'bar',ax = ax1[1,0],rot = 0,colormap = 'RdBu',width = 0.2)\r\nOppTeam_runs[:5].plot(kind = 'bar',ax = ax1[1,1],rot = 0,colormap = 'coolwarm',width = 0.2)\r\n\r\nplt.savefig(r\"C:\\Users\\jpavithr\\OneDrive - Capgemini\\Desktop\\Automation Drive - Python training\\Pandas\\real python\\Virat_kohli_statistics\\VK_TotalRuns_Barplot.jpg\")\r\n\r\n\r\n# Data Visualisation Plot of Kohli's Centuries against each variable\r\n\r\nfig3,ax3 = plt.subplots(nrows = 2,ncols = 2,figsize=(18,7))\r\nfig3.canvas.set_window_title(\"Plotting of Centuries against each variable\")\r\nfig3.tight_layout(pad = 3.0)\r\n\r\nInnings_cent[:5].plot(kind = 'bar',ax = ax3[0,0],rot = 0,colormap = 'Wistia',width = 0.3)\r\nYear_cent[:5].plot(kind = 'bar',ax = ax3[0,1],rot = 0,colormap = 'bwr_r',width = 0.3)\r\nVenue_cent[:5].plot(kind = 'bar',ax = ax3[1,0],rot = 0,colormap = 'copper_r',width = 0.2)\r\nOppTeam_cent[:5].plot(kind = 'bar',ax = ax3[1,1],rot = 0,colormap = 'seismic',width = 0.2)\r\n\r\nax3[0,0].legend(labels = [\"Count of Centuries\"],loc='upper center',fontsize=12)\r\nax3[0,1].legend(labels = [\"Count of Centuries\"],loc='upper center',fontsize=12)\r\nax3[1,0].legend(labels = [\"Count of Centuries\"],loc='upper center',fontsize=12)\r\nax3[1,1].legend(labels = [\"Count of Centuries\"],loc='upper center',fontsize=12)\r\nplt.savefig(r\"C:\\Users\\jpavithr\\OneDrive - Capgemini\\Desktop\\Automation Drive - Python training\\Pandas\\real python\\Virat_kohli_statistics\\VK_Centuries_barplot.jpg\")\r\n\r\n\r\n# Define the input variables for Pie chart plot\r\n\r\nX = ['Venuewise_Runs', 'Venuewise_SR', 'Venuewise_Matchcount', 'Venuewise_4s', 'Venuewise_6s','Venuewise_Centuries']\r\nY = Venuewise_Record['Venuewise_Runs']\r\n\r\n# Create a Figure object with dimensions 2 rows and 3 columns for showing the plot for each variable\r\n\r\nfig4,ax4 = plt.subplots(nrows=2,ncols=3,figsize=(14,7))\r\nfig4.canvas.set_window_title(\"Pie Chart Plot showing relation between 6 Variables for Venue in Home and Away\")\r\nfig4.tight_layout(pad=3.0)\r\n\r\n# Plot a Pie Chart for each of the 6 variables\r\n\r\ndef Create_pie(size,labels,i,j):\r\n    ax = ax4[i,j]\r\n    d = 20 *(i+j)\r\n    explode = (0.1,0)\r\n    total = sum(size)\r\n    colors = ['#ff9999','#66b3ff','#99ff99','#ffcc99','#66b3ff','#99ff99']\r\n    ax.pie(size,explode=explode,colors=colors,autopct = lambda p: '{:.0f}'.format(p*total/100),shadow = True, startangle = d,labeldistance=1.0)\r\n    ax.legend(labels,loc = 'lower left',bbox_to_anchor=(0.0, 0.95))\r\n\r\nfor i in range(2):\r\n    for j in range(3):\r\n        k= j + i*3\r\n        Create_pie([Venuewise_Record[X[k]][0],Venuewise_Record[X[k]][1]],[f\"Away - {(re.search('.*$',X[k])).group()}\",f\"Home - {(re.search('.*$',X[k])).group()}\"],i,j)\r\n\r\nfig4.subplots_adjust(wspace = 0.25)\r\nplt.savefig(r\"C:\\Users\\jpavithr\\OneDrive - Capgemini\\Desktop\\Automation Drive - Python training\\Pandas\\real python\\Virat_kohli_statistics\\VK_Venuewise_Piechartplot.jpg\")\r\n\r\n\r\n# Data Visualisation of Strike Rate and Avg Run for each year of his career\r\nfig5,ax5 = plt.subplots(figsize=(18,7))\r\nfig5.canvas.set_window_title(\"Plotting of Strike Rate and Avg Run for each year of his career\")\r\nfig5.tight_layout(pad = 3.0)\r\n\r\nYear_avg_unsort.plot(kind='line',ax=ax5,marker='o')\r\nYear_SR_unsort.plot(kind='line',ax=ax5,marker='o')\r\nax5.legend(labels = [\"Avg. no. of Runs\",\"Strike Rate\"],loc='upper center',fontsize=12)\r\nplt.savefig(r\"C:\\Users\\jpavithr\\OneDrive - Capgemini\\Desktop\\Automation Drive - Python training\\Pandas\\real python\\Virat_kohli_statistics\\VK_SRvsAvg_lineplot.jpg\")\r\n\r\n\r\n# Define the input variables for Regression plot\r\n\r\nX = ['Yearwise_Matchcount', 'Yearwise_4s', 'Yearwise_Centuries', 'Batting 2nd', 'Batting 1st', 'Yearwise_6s', 'Yearwise_SR','Yearwise_Avg']\r\nY = Yearwise_Record['Yearwise_Runs']\r\n\r\n# Create a Figure object with dimensions 2 rows and 4 columns for showing the plot for each variable\r\n\r\nfig6,ax6 = plt.subplots(nrows=2,ncols=4,figsize=(14,7))\r\nfig6.canvas.set_window_title(\"Regression Plot showing Correlation between 8 Variables and the Runs Scored in each Year\")\r\nfig6.tight_layout(pad=3.0)\r\n\r\n# Plot regression plot for each of the 8 variables\r\n\r\nc = Yearwise_Record_corr.columns.values.tolist()\r\nfor i in range(2):\r\n    for j in range(4):\r\n        k= j + i*4\r\n        sns.regplot(x=Yearwise_Record[X[k]], y=Y, data=Yearwise_Record, ax=ax6[i, j])\r\n\r\nplt.savefig(r\"C:\\Users\\jpavithr\\OneDrive - Capgemini\\Desktop\\Automation Drive - Python training\\Pandas\\real python\\Virat_kohli_statistics\\VK_Yearwise_RegressionPlot.jpg\")\r\n\r\n# Plot a HeatMap object displaying the correlation between the 8 variables and the Total Runs Scored in each Year\r\n\r\nfig7,ax7 = plt.subplots(figsize=(14,7))\r\nplt.title(\"Heatmap displaying the Correlation mapping between 8 variables and Total Runs Scored in each Year\")\r\nsns.heatmap(Yearwise_Record_corr,vmin=-1,vmax=1,cmap=\"Accent\",annot=True)\r\nax7.figure.subplots_adjust(bottom = 0.3)\r\nplt.savefig(r\"C:\\Users\\jpavithr\\OneDrive - Capgemini\\Desktop\\Automation Drive - Python training\\Pandas\\real python\\Virat_kohli_statistics\\VK_Yearwise_HeatMap.jpg\")\r\n\r\n\r\nplt.show()\r\nplt.close()\r\n"
}
] | 2 |
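For reference, a minimal, self-contained sketch of the groupby/agg pattern the Virat_kohli_statistics script above relies on: per-innings rows are aggregated into per-year totals, with a lambda inside .agg() counting centuries (scores of 100 or more). The toy DataFrame and its values below are invented purely for illustration.

    import pandas as pd

    toy = pd.DataFrame({
        'Year Played':         [2017, 2017, 2018, 2018, 2018],
        'Innings Runs Scored': [112, 35, 160, 45, 104],
    })

    yearly = toy.groupby('Year Played')['Innings Runs Scored'].agg(
        Yearwise_Runs='sum',
        Yearwise_Centuries=lambda s: (s >= 100).sum(),  # count of 100+ scores
    )

    print(yearly)
    #              Yearwise_Runs  Yearwise_Centuries
    # Year Played
    # 2017                   147                   1
    # 2018                   309                   2

Named aggregation like this sidesteps the '<lambda_0>' multi-level column renaming that the original script has to clean up with droplevel().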
Ninja116/PyScripts | https://github.com/Ninja116/PyScripts | 13d2b5758fc3a2be9d8c5dfa73b0304728bb4699 | a064500d9cc8ad62e49d8f64cb76fa54759dabd0 | a077c027ffcd86aebcf4315db19525055c042c7a | refs/heads/master | 2020-04-06T04:10:26.994387 | 2013-11-02T08:54:29 | 2013-11-02T08:54:29 | null | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.7441673278808594,
"alphanum_fraction": 0.7506033778190613,
"avg_line_length": 34.400001525878906,
"blob_id": "87b96f11aa5a2984a36eab05f7695bf6cfd17822",
"content_id": "ea2cd75aa21b84f572ff0f4f86523c26c8b6841c",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1243,
"license_type": "permissive",
"max_line_length": 132,
"num_lines": 35,
"path": "/FbScripts/README.md",
"repo_name": "Ninja116/PyScripts",
"src_encoding": "UTF-8",
"text": "Facebook-Scripts\n================\n\n##Overview\nPython scripts to experiment with Facebook graph search.\nWe use Graph API provided by Facebook for the same. To use the Graph API, it is necessary for the user to generate an Access Token. \n* * *\nSteps to use the Access Token in the script:\n\n1) Go to [Graph API Explorer](https://developers.facebook.com/tools/explorer/)\n\n2) Login with your regular Facebook ID.\n\n3) In the Access Token Edit Box, you might see some existing token already. Press \"x\" near the Edit Box to clear the Access Token.\n\n4) Now select \"Get Access Token\" to get a new Access Token.\n\n5) Select the appropriate permissions.\n\n6) Click \"Get Access Token\".\n\n7) In The Access Token Edit Box, you will now see the Access Token generated with new permissions.\n\n***\n###Objective\nTo make a script which can be used to do most of the facebook actions using graph API.\n\nFew operations which can be planned to do from this script are like Multiple group post, \nDownloading Profile pics of friends etc.\n\nOperations what are to be implemented will be put up in [Todo](https://github.com/sant0sh/Facebook-Scripts/blob/master/Todo.md)\n\n\n##Contributions\nNo code is perfect, so feel free to send me a pull request or create a git issue :)\n\n\n\n\n"
},
{
"alpha_fraction": 0.7454545497894287,
"alphanum_fraction": 0.7454545497894287,
"avg_line_length": 15.5,
"blob_id": "04ac68a71b533d683ba9ecc8527f300655ecc9e8",
"content_id": "32a0378906d14061af072d93c613b0c02a61fb63",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 165,
"license_type": "permissive",
"max_line_length": 47,
"num_lines": 10,
"path": "/FbScripts/Todo.md",
"repo_name": "Ninja116/PyScripts",
"src_encoding": "UTF-8",
"text": "##Todo list\n\nFunctions for following need to be implemented:\n\n* Updating Status\n* Downloading Profile picture of Friends\n\n\n* * *\nFeel free to add to this todo list.\n"
},
{
"alpha_fraction": 0.6044039726257324,
"alphanum_fraction": 0.6249051094055176,
"avg_line_length": 28.288888931274414,
"blob_id": "fa83a4d4510e36a5facb723f3d844d8524c08634",
"content_id": "3e7e8a1bd324b2d8a60a4b7624d8813b03082f58",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1317,
"license_type": "no_license",
"max_line_length": 90,
"num_lines": 45,
"path": "/Funniest/Funniest.py",
"repo_name": "Ninja116/PyScripts",
"src_encoding": "UTF-8",
"text": "//This script is to download pics from funnie.st site\n\nimport requests\nfrom bs4 import BeautifulSoup\nfrom progressbar import *\n\nlink=raw_input('Paste the link here: ')\nres=requests.get(link)\ndata=BeautifulSoup(res.content)\nTotal_image=int(data.findChild('h3').string[-2:])\n\nfor pic in range(Total_image):\n\tres=requests.get(link)\n\tdata=BeautifulSoup(res.content)\n\tnext_image=data.findChildren('link')\n\timage_name=link[24:-3]+link[-2:-1]\n\tfor img in next_image:\n\t\tif 'next' in img['rel']:\n\t\t\tlink=img['href']\n\timage=data.findChildren('img')\n\n\tfor i in image:\n\t\tif 'jpg' in i['src']:\n\t\t\tr=requests.get(i['src'])\n\t\t\tsize=float(r.headers['content-length'])\n\t\t\tmbSize = 1024*1024\t#used for conversion to Mb\n\t\t\tTotalSize = (size)/mbSize\n\t\t\twidgets = ['Test: ', Percentage(), ' ', Bar(\">\"), ' ', ETA(), ' ', FileTransferSpeed()]\n\t\t\tprogress = ProgressBar(widgets=widgets,maxval=TotalSize)\n\t\t\tprogress.start()\n\n\t\t\tcount=0\n\t\t\tif r.status_code == 200:\n\t\t\t try:\n\t\t\t\t f=open(image_name+'.jpg', 'wb')\n\t\t\t\t print \"%s downloaded\" % image_name\n\t\t\t except Exception, e:\n\t\t\t print \"Error occured:\", e\n\t\t\t sys.exit(1)\n\t\t for chunk in r.iter_content(256):\n\t\t f.write(chunk)\t\n\t\t count=256\n\t\t progress.update(count/mbSize)\n \t\tf.close()\n \t\tprogress.finish()"
},
{
"alpha_fraction": 0.7491081953048706,
"alphanum_fraction": 0.7568370699882507,
"avg_line_length": 37.227272033691406,
"blob_id": "99b23c9e782e2b91d33277b2e7db5a0923c0c0db",
"content_id": "e24680cbd06981cf49f9398f5a3bfcbbea10cb7e",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1682,
"license_type": "permissive",
"max_line_length": 150,
"num_lines": 44,
"path": "/FbScripts/Scripts/ScriptREADME.md",
"repo_name": "Ninja116/PyScripts",
"src_encoding": "UTF-8",
"text": "Facebook-Scripts\n================\n\nPlaying around with Graph API using python\n\nThis script is used to update selected Facebook groups of the user with any specific message. We use Graph API provided by Facebook for the same.\nTo use the Graph API, it is necessary for the user to generate an Access Token. \n\n\nThe script depends on the following packages:\n\n1) requests module. Do check this page for installing requests module (http://docs.python-requests.org/en/latest/user/install/)\n\nSteps to use the Access Token in the script:\n\n1) Go to the following page, https://developers.facebook.com/tools/explorer/\n\n2) Login with your regular Facebook ID.\n\n3) In the Access Token Edit Box, you might see some existing token already. Press \"x\" near the Edit Box to clear the Access Token.\n\n4) Now select \"Get Access Token\" to get a new Access Token.\n\n5) In the \"Select Permissions\" pop-up, under \"User Data Permissions\" tab, select \"user_group\" and in \"Friends Data Permissions\" tab, \nselect \"Friends_groups\".\n\n6) Click \"Get Access Token\".\n\n7) In The Access Token Edit Box, you will now see the Access Token generated with new permissions.\n\n\n\nHow to use the script:\n\n1) Once you have generated the access token, please edit the script, group_post.py and add the access token in the line:\nTOKEN=\"\";\n\n2) Now run the script from the command line. \n\n3) The script will ask to input the message that you want to post in different groups. The current script only supports single post to all the groups.\n\n4) After this, the script will go through all the groups that you are part of and ask you if you want to post in them one by one.\n\n5) Script will end after it has iterated over all the groups.\n"
}
] | 4 |
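As a companion to the READMEs above, a minimal sketch of the Graph API call a group-posting script like group_post.py would make once TOKEN holds the generated access token. GROUP_ID and the message text are placeholders, and the endpoint shape follows the standard Graph API pattern of that era; treat it as illustrative, not as the repo's exact code.

    import requests

    TOKEN = "paste-your-access-token-here"  # generated via the Graph API Explorer steps
    GROUP_ID = "1234567890"                 # hypothetical group id

    resp = requests.post(
        "https://graph.facebook.com/%s/feed" % GROUP_ID,
        data={"message": "Hello from the script!", "access_token": TOKEN},
    )
    print(resp.status_code, resp.json())    # on success, the id of the new post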
romasku/RTS_pygame | https://github.com/romasku/RTS_pygame | bff656e4f8c211e975dcbf1d484477702566c74a | 80984a13f4ac0c307c54a8daa14b5da213c38ee4 | 9d15e1fe5d41fc8cba002482491da4eab21a155c | refs/heads/master | 2019-07-14T09:58:50.927932 | 2016-10-16T09:43:59 | 2016-10-16T09:43:59 | 71,038,491 | 2 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6067073345184326,
"alphanum_fraction": 0.6138211488723755,
"avg_line_length": 40,
"blob_id": "c6f0e7ded9a656083ec659b0488d0588ee1ca373",
"content_id": "7a9f19ac97606ce3a0ed841e8fe10838585707ef",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 984,
"license_type": "no_license",
"max_line_length": 136,
"num_lines": 24,
"path": "/RTS_game/on_map_object.py",
"repo_name": "romasku/RTS_pygame",
"src_encoding": "UTF-8",
"text": "from pygame import image, transform\nfrom pygame.rect import Rect\n\n\nclass OnMapObject:\n def __init__(self, start_position=(0, 0), hit_box=(0, 0, 0, 0), max_health=1, image_size=None, current_health=None, spite_src=None):\n self.position = start_position\n self.hit_box = hit_box\n self.max_health = max_health\n self.image_size = image_size\n if current_health is None:\n self.current_health = self.max_health\n else:\n self.current_health = current_health\n if spite_src is not None:\n if image_size is None:\n self.sprite = image.load(spite_src)\n self.image_size = self.sprite.get_rect().size\n else:\n self.sprite = transform.scale(image.load(spite_src), image_size)\n\n def draw(self, screen, camera):\n rect = camera.rect_to_camera_cords(Rect((self.position, self.image_size)))\n screen.blit(transform.scale(self.sprite, rect.size), rect)\n"
},
{
"alpha_fraction": 0.5663006901741028,
"alphanum_fraction": 0.5808699131011963,
"avg_line_length": 39.82758712768555,
"blob_id": "64430d787e7b2f7297b332a5bc037181398599d7",
"content_id": "b0207c0cb7fa04c1a762ae7b6789e44b84b7abbb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4736,
"license_type": "no_license",
"max_line_length": 121,
"num_lines": 116,
"path": "/RTS_game/game.py",
"repo_name": "romasku/RTS_pygame",
"src_encoding": "UTF-8",
"text": "import pygame\n\nfrom RTS_game.camera import Camera\nfrom RTS_game.button import Button\nfrom RTS_game.on_map_object import OnMapObject\n\nNEAR_BORDER_SCALE = 0.05\nNO_MODE = 0\nRECT_SELECT_MODE = 1\nBUILD_MODE = 2\n\n\nclass Game:\n def __init__(self, screen_width, screen_height):\n self.screen_width = screen_width\n self.screen_height = screen_height\n self.quit_pressed = False\n self.left_mbutton_pressed = False\n self.right_mbutton_pressed = False\n self.mpos = (0, 0)\n self.select_start_position = None\n self.mode = NO_MODE\n self.background = pygame.transform.smoothscale(pygame.image.load(\"RTS_game/images/background.jpg\"), (8000, 8000))\n self.camera = Camera(8000, 8000, screen_width, screen_height)\n self.__init_buttons()\n self.__init_object()\n\n def __init_buttons(self):\n self.buttons = []\n self.buttons.append(Button(\"Build square\", (0, 0, 400, 100), self.__build_button_pressed))\n\n def __init_object(self):\n self.on_map_objects = []\n self.on_map_objects.append(OnMapObject(start_position=(200, 200), spite_src=\"RTS_game/images/test.jpg\"))\n\n def update(self, time_delta):\n self.__move_camera_if_needed(time_delta)\n\n def redraw(self, screen):\n background_rect = self.camera.backgound_rect_to_camera()\n screen.blit(pygame.transform.scale(self.background, background_rect.size), background_rect)\n for on_map_object in self.on_map_objects:\n on_map_object.draw(screen, self.camera)\n for button in self.buttons:\n button.draw(screen)\n if self.mode == RECT_SELECT_MODE:\n cursor_pos_in_world = self.camera.point_to_world_cords(self.mpos)\n rect = (self.select_start_position,\n (cursor_pos_in_world[0] - self.select_start_position[0],\n cursor_pos_in_world[1] - self.select_start_position[1]))\n rect = pygame.Rect(rect)\n pygame.draw.rect(screen, (0, 0, 0), self.camera.rect_to_camera_cords(rect), 5)\n\n # event callbacks\n def __build_button_pressed(self):\n if self.mode == BUILD_MODE:\n self.mode = NO_MODE\n else:\n self.mode = BUILD_MODE\n\n def __build_object(self, pos):\n self.on_map_objects.append(OnMapObject(start_position=self.camera.point_to_world_cords(pos),\n image_size=(100, 100), spite_src=\"RTS_game/images/test.jpg\"))\n\n def __start_selecting(self, pos):\n self.mode = RECT_SELECT_MODE\n self.select_start_position = self.camera.point_to_world_cords(pos)\n\n def __end_selecting(self):\n self.mode = NO_MODE\n\n # different private methods\n def __move_camera_if_needed(self, time_delta):\n if self.mpos[0] <= NEAR_BORDER_SCALE * self.screen_width:\n self.camera.move_left(time_delta)\n if (1 - NEAR_BORDER_SCALE) * self.screen_width <= self.mpos[0]:\n self.camera.move_right(time_delta)\n if self.mpos[1] <= NEAR_BORDER_SCALE * self.screen_height:\n self.camera.move_up(time_delta)\n if (1 - NEAR_BORDER_SCALE) * self.screen_height <= self.mpos[1]:\n self.camera.move_down(time_delta)\n if (NEAR_BORDER_SCALE * self.screen_width < self.mpos[0] < (1 - NEAR_BORDER_SCALE) * self.screen_width) and \\\n (NEAR_BORDER_SCALE * self.screen_height < self.mpos[1] < (1 - NEAR_BORDER_SCALE) * self.screen_height):\n self.camera.stop_move()\n\n # Method for getting events from outside\n def process_event(self, e):\n if e.type == pygame.QUIT:\n self.quit_pressed = True\n if e.type == pygame.MOUSEMOTION:\n self.mpos = e.pos\n if e.type == pygame.MOUSEBUTTONDOWN:\n if e.button == 1:\n self.left_mbutton_pressed = True\n if e.button == 3:\n self.right_mbutton_pressed = True\n # Doesn't work for now\n if e.button == 4:\n self.camera.decrease_camera_size()\n if e.button == 5:\n 
self.camera.increase_camera_size()\n\n was_button_press = False\n if e.button == 1:\n for button in self.buttons:\n if button.check_for_press(e.pos):\n was_button_press = True\n break\n if not was_button_press:\n if self.mode == BUILD_MODE and e.button == 1:\n self.__build_object(e.pos)\n elif e.button == 1:\n self.__start_selecting(e.pos)\n if e.type == pygame.MOUSEBUTTONUP:\n if self.mode == RECT_SELECT_MODE:\n self.__end_selecting()\n"
},
{
"alpha_fraction": 0.598351001739502,
"alphanum_fraction": 0.6171966791152954,
"avg_line_length": 32.959999084472656,
"blob_id": "35e1b2cd487edfa8d6a271ef20e37da7a0ad8955",
"content_id": "188f31bc718ff87fd582f0176304be21b3ba5631",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 849,
"license_type": "no_license",
"max_line_length": 120,
"num_lines": 25,
"path": "/RTS_game/button.py",
"repo_name": "romasku/RTS_pygame",
"src_encoding": "UTF-8",
"text": "import pygame\n\nBUTTON_COLOR = (0, 0, 0)\nBUTTON_WIDTH = 2\nBUTTON_TEXT_SIZE_SCALE = 0.8\n\n\nclass Button:\n def __init__(self, text, rect, func_on_press):\n self.rect = pygame.Rect(rect)\n self.func_on_press = func_on_press\n self.text = text\n self.font = pygame.font.SysFont(\"Arial\", int(rect[3] * BUTTON_TEXT_SIZE_SCALE))\n self.text_pos = (rect[0] + (1-BUTTON_TEXT_SIZE_SCALE)/2*rect[2], rect[1] + (1-BUTTON_TEXT_SIZE_SCALE)/2*rect[3])\n\n def draw(self, screen):\n pygame.draw.rect(screen, BUTTON_COLOR, self.rect, BUTTON_WIDTH)\n text_surf = self.font.render(self.text, 1, BUTTON_COLOR)\n screen.blit(text_surf, self.text_pos)\n\n def check_for_press(self, cursor_pos):\n if self.rect.collidepoint(cursor_pos):\n self.func_on_press()\n return True\n return False\n"
},
{
"alpha_fraction": 0.738095223903656,
"alphanum_fraction": 0.738095223903656,
"avg_line_length": 13,
"blob_id": "54dfc5a9465bef94e3b0e472651bac94f48fc147",
"content_id": "7276b9bde73afa37a6bf3381d96654322b8f3ad6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 42,
"license_type": "no_license",
"max_line_length": 29,
"num_lines": 3,
"path": "/run_game.py",
"repo_name": "romasku/RTS_pygame",
"src_encoding": "UTF-8",
"text": "from RTS_game import run_game\n\nrun_game()\n"
},
{
"alpha_fraction": 0.800000011920929,
"alphanum_fraction": 0.800000011920929,
"avg_line_length": 34,
"blob_id": "caa4e9a7f2a4a1943e6cf5cff8cb93b9ae35cda4",
"content_id": "85552e78bb68fe14dbf398ceaa686d9516308f46",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 35,
"license_type": "no_license",
"max_line_length": 34,
"num_lines": 1,
"path": "/RTS_game/__init__.py",
"repo_name": "romasku/RTS_pygame",
"src_encoding": "UTF-8",
"text": "from RTS_game.main import run_game\n"
},
{
"alpha_fraction": 0.7272727489471436,
"alphanum_fraction": 0.7272727489471436,
"avg_line_length": 13.666666984558105,
"blob_id": "f9f3f00e87e0476455c93d7f362800ac05c34075",
"content_id": "e68b515328222c19910f66ce8a9970cbdf69f650",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 44,
"license_type": "no_license",
"max_line_length": 29,
"num_lines": 3,
"path": "/README.md",
"repo_name": "romasku/RTS_pygame",
"src_encoding": "UTF-8",
"text": "# RTS_pygame\n\nTo start just run run_game.py\n"
},
{
"alpha_fraction": 0.5960575938224792,
"alphanum_fraction": 0.6088861227035522,
"avg_line_length": 37.49397659301758,
"blob_id": "d605a31f0142176d2e8d4bfa1e1896d5806053bc",
"content_id": "dcf3aa8c2df48d69ca93209b0efa04e521127179",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3196,
"license_type": "no_license",
"max_line_length": 94,
"num_lines": 83,
"path": "/RTS_game/camera.py",
"repo_name": "romasku/RTS_pygame",
"src_encoding": "UTF-8",
"text": "from pygame.rect import Rect\n\n\nclass Camera:\n def __init__(self, world_width, world_height, screen_width, screen_height, position=None):\n if position is None:\n position = [0, 0]\n self.world_width = world_width\n self.world_height = world_height\n self.screen_width = screen_width\n self.screen_height = screen_height\n self.width = screen_width\n self.height = screen_height\n self.position = position\n self.move_speed = 0\n self.MOVE_ACCELERATION = screen_width / 4\n self.MOVE_SPEED_CAP = screen_width\n self.SIZE_CHANGE_STEP = 0.1\n\n def rect_to_camera_cords(self, rect):\n rect.x, rect.y = self.point_to_camera_cords((rect.x, rect.y))\n rect.width *= self.width / self.screen_width\n rect.height *= self.height / self.screen_height\n return rect\n\n def point_to_camera_cords(self, point):\n return (point[0] - self.position[0]) * self.width / self.screen_width,\\\n (point[1] - self.position[1]) * self.height / self.screen_height\n\n def rect_to_world_cords(self, rect):\n rect.width /= self.width / self.screen_width\n rect.height /= self.height / self.screen_height\n rect.x, rect.y = self.point_to_world_cords((rect.x, rect.y))\n return rect\n\n def point_to_world_cords(self, point):\n return point[0] / self.width * self.screen_width + self.position[0], \\\n point[1] / self.height * self.screen_height + self.position[1]\n\n def __move(self, time_delta, direction):\n # Increasing camera move speed\n self.move_speed += self.MOVE_ACCELERATION * time_delta\n if self.move_speed > self.MOVE_SPEED_CAP:\n self.move_speed = self.MOVE_SPEED_CAP\n\n self.position[0] += self.move_speed * direction[0] * time_delta # Move x cord\n self.position[1] += self.move_speed * direction[1] * time_delta # Move y cord\n\n # Check if camera still inside world\n if self.position[0] < 0:\n self.position[0] = 0\n if self.position[0] + self.width > self.world_width:\n self.position[0] = self.world_width - self.width\n if self.position[1] < 0:\n self.position[1] = 0\n if self.position[1] + self.height > self.world_height:\n self.position[1] = self.world_height - self.height\n\n def move_left(self, time_delta):\n self.__move(time_delta, (-1, 0))\n\n def move_right(self, time_delta):\n self.__move(time_delta, (1, 0))\n\n def move_up(self, time_delta):\n self.__move(time_delta, (0, -1))\n\n def move_down(self, time_delta):\n self.__move(time_delta, (0, 1))\n\n def stop_move(self):\n self.move_speed = 0\n\n def increase_camera_size(self):\n self.width += self.screen_width * self.SIZE_CHANGE_STEP\n self.height += self.screen_height * self.SIZE_CHANGE_STEP\n\n def decrease_camera_size(self):\n self.width -= self.screen_width * self.SIZE_CHANGE_STEP\n self.height -= self.screen_height * self.SIZE_CHANGE_STEP\n\n def backgound_rect_to_camera(self):\n return self.rect_to_camera_cords(Rect((0, 0), (self.world_width, self.world_width)))\n\n"
},
{
"alpha_fraction": 0.6265389919281006,
"alphanum_fraction": 0.6456908583641052,
"avg_line_length": 23.366666793823242,
"blob_id": "559d2bfaadb79471c2824078f93a47f9fc459c31",
"content_id": "1f11df0f3b2cd66382f777312c62ca077c004bb8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 731,
"license_type": "no_license",
"max_line_length": 52,
"num_lines": 30,
"path": "/RTS_game/main.py",
"repo_name": "romasku/RTS_pygame",
"src_encoding": "UTF-8",
"text": "import ctypes\n\nimport pygame\n\nfrom RTS_game.game import Game\nctypes.windll.user32.SetProcessDPIAware()\n\nSCREEN_WIDTH = 1440\nSCREEN_HEIGHT = 900\nSCREEN_SIZE = (SCREEN_WIDTH, SCREEN_HEIGHT)\n\n\ndef run_game():\n pygame.init()\n screen = pygame.display.set_mode(SCREEN_SIZE)\n\n prev_time = pygame.time.get_ticks()\n game = Game(SCREEN_WIDTH, SCREEN_HEIGHT)\n while not game.quit_pressed:\n # Calculate time delta\n cur_time = pygame.time.get_ticks()\n time_delta = (cur_time - prev_time) / 1000.0\n prev_time = cur_time\n for e in pygame.event.get():\n game.process_event(e)\n game.update(time_delta)\n game.redraw(screen)\n pygame.display.update()\n\n pygame.quit()\n"
}
] | 8 |
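A tiny worked example of the coordinate mapping defined in camera.py above: world-to-screen conversion is a translation by the camera position plus a scale by width/screen_width, and the two point-conversion methods are exact inverses. The numbers are made up; the helper functions below mirror the x-coordinate half of Camera.point_to_world_cords and Camera.point_to_camera_cords.

    position = [100.0, 50.0]               # camera's top-left corner in world space
    width, screen_width = 1728.0, 1440.0   # camera zoomed out by a factor of 1.2

    def point_to_world(px):                # mirrors Camera.point_to_world_cords (x only)
        return px / width * screen_width + position[0]

    def point_to_camera(wx):               # mirrors Camera.point_to_camera_cords (x only)
        return (wx - position[0]) * width / screen_width

    wx = point_to_world(720.0)                        # screen x 720 -> world x 700.0
    assert abs(point_to_camera(wx) - 720.0) < 1e-9    # the round trip is exact
    print(wx)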
robert-eklund/ultraheat | https://github.com/robert-eklund/ultraheat | 2795bca81aca89532256ec2a6d561013797f721a | c1c6e0a19c971aca0c34da9a07feec1a5da99431 | c14574a071fce89027262a9f0041ae65e9ddeacc | refs/heads/main | 2023-06-24T14:57:45.611188 | 2021-07-23T12:30:51 | 2021-07-23T12:30:51 | 388,710,354 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.653403639793396,
"alphanum_fraction": 0.681208074092865,
"avg_line_length": 31.076923370361328,
"blob_id": "346ad47c476069d5dde88f1f8e38017d34d8e712",
"content_id": "0635cace59b68c2bba7113999a0db403277d76c0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 2087,
"license_type": "no_license",
"max_line_length": 148,
"num_lines": 65,
"path": "/README.md",
"repo_name": "robert-eklund/ultraheat",
"src_encoding": "UTF-8",
"text": "# Ultraheat\n\nScript to read information from Landis+Gyr Ultraheat (T550)\n\nWith this script you can read a telegram from a Landis&Gyr Ultraheat T550 that is used by district heating companies, such as 'Tekniska Verken' in Linköping.\n\n## Installation and usage:\nThis script uses a optical probe (IEC 62056-21) on an USB port to read the telegrams from the meter.\n\n## Configuration - Docker Compose\n\n### Docker compose\nThis Docker image will send the values to Home Assistant in kW, using MQTT.\n\nMake sure to update ULTRAHEAT_MQTT_BROKER and ULTRAHEAT_USB_DEVICE\n\n version: \"2.1\"\n services:\n ultraheat:\n image: thintux/ultraheat:v1\n container_name: ultraheat\n privileged: true\n restart: unless-stopped\n environment:\n - ULTRAHEAT_MQTT_BROKER=192.168.68.121\n - ULTRAHEAT_USB_DEVICE=/dev/ttyUSB0\n volumes:\n - /etc/localtime:/etc/localtime:ro\n - /dev/:/dev/\n\n### Home Assistant\n\n sensor:\n - platform: mqtt\n state_topic: \"ultraheat/district_heating_meter\"\n unit_of_measurement: \"kWh\"\n name: district_heating_meter_total\n value_template: '{{ value_json[\"total_kwh\"] }}'\n\n utility_meter:\n district_heating_hourly:\n source: sensor.district_heating_meter_total\n cycle: hourly\n district_heating_daily:\n source: sensor.district_heating_meter_total\n cycle: daily\n district_heating_weekly:\n source: sensor.district_heating_meter_total\n cycle: weekly\n district_heating_monthly:\n source: sensor.district_heating_meter_total\n cycle: monthly\n district_heating_yearly:\n source: sensor.district_heating_meter_total\n cycle: yearly\n\n## Note:\nMy meter is connected to 220V so I read every minute.\nIf your meter is battery powered, then you should read much more seldom to avoid\ndraining the battery.\n\n## Requirements:\n- An optical probe (IEC 62056-21 standard) to place on the meter, for example: https://www.amazon.de/dp/B01B8N0ASY/ref=pe_3044161_185740101_TE_item\n- Docker\n- Home Assistant\n\n"
},
{
"alpha_fraction": 0.5981956720352173,
"alphanum_fraction": 0.6204025149345398,
"avg_line_length": 24.73214340209961,
"blob_id": "b23eaa4cc2c3932de5a8fe55ab7980c3a5bc21ce",
"content_id": "b5a6f9f0ae16ddb9d45d6bb2418862fa2ed50fcc",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1441,
"license_type": "no_license",
"max_line_length": 137,
"num_lines": 56,
"path": "/t550.py",
"repo_name": "robert-eklund/ultraheat",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python3\nfrom __future__ import print_function\nimport serial, time\nimport os\nimport paho.mqtt.client as paho\nimport json\n\nser = serial.Serial(os.environ['ULTRAHEAT_USB_DEVICE'], baudrate=300, bytesize=7, parity=\"E\", stopbits=1, timeout=2, xonxoff=0, rtscts=0)\n\n#send init message\npacket = bytearray()\nfor i in range(0, 40):\n packet.append(0x00)\nprint(len(packet))\nser.write(packet)\n\n#send request message\nser.write(\"/?!\\x0D\\x0A\".encode())\nser.flush();\ntime.sleep(.5)\n\n#send read identification message\nprint(ser.readline())\n\n#change baudrate\nser.baudrate=2400\n\nmeter_total_kwh = 0\n\ntry:\n #read data message\n while True:\n response = ser.readline().decode()\n print(response, end=\"\")\n if \"6.8(\" in response:\n value, unit = response.split(\"(\")[1].split(\")\")[0].split(\"*\")\n if unit == \"kWh\":\n meter_total_kwh = int(value)\n elif unit == \"MWh\":\n meter_total_kwh = int(float(value) * 1000)\n else:\n print(\"Unknown unit!\")\n break\n if \"!\" in response:\n break\nfinally:\n ser.close()\n\nprint(\"meter_total_kwh: \" + str(meter_total_kwh))\nclient=paho.Client(\"ultraheat\")\nclient.connect(os.environ['ULTRAHEAT_MQTT_BROKER'])\ndata = '{\"total_kwh\": ' + str(meter_total_kwh) + '}'\nprint(data)\nif meter_total_kwh != 0:\n client.publish(\"ultraheat/district_heating_meter\", data)\nclient.disconnect()\n"
},
{
"alpha_fraction": 0.746835470199585,
"alphanum_fraction": 0.7848101258277893,
"avg_line_length": 38.66666793823242,
"blob_id": "0ad9c8887bc20db7a172336eeabde47767de8927",
"content_id": "03ae3506f6aa8f0fa93b53fcbc4732840ea41669",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Dockerfile",
"length_bytes": 237,
"license_type": "no_license",
"max_line_length": 81,
"num_lines": 6,
"path": "/Dockerfile",
"repo_name": "robert-eklund/ultraheat",
"src_encoding": "UTF-8",
"text": "FROM debian:buster\nRUN apt-get update && apt-get install -y python3 python3-paho-mqtt python3-serial\n\nCOPY docker-entrypoint.sh /ultraheat/docker-entrypoint.sh\nCOPY t550.py /ultraheat/t550.py\nCMD [\"sh\", \"/ultraheat/docker-entrypoint.sh\"]"
},
{
"alpha_fraction": 0.5692307949066162,
"alphanum_fraction": 0.6461538672447205,
"avg_line_length": 10,
"blob_id": "66fe4e3479d26b877c0e0a6b40657bdc0c5dabe7",
"content_id": "9e52e455506ec30e75a9334ed731d2d7a95722ab",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 65,
"license_type": "no_license",
"max_line_length": 22,
"num_lines": 6,
"path": "/docker-entrypoint.sh",
"repo_name": "robert-eklund/ultraheat",
"src_encoding": "UTF-8",
"text": "#/bin/sh\n\nwhile true; do\n /ultraheat/t550.py\n sleep 30\ndone"
}
] | 4 |
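A self-contained sketch of the telegram parsing that t550.py above does with string splits: an IEC 62056-21 data line such as "6.8(0012345.678*MWh)" carries the meter total as value*unit inside parentheses, where "6.8(" marks the energy register that t550.py reads the total from. The sample lines below are made up; note that going through float() also tolerates decimal kWh readings, which the split-based int(value) in t550.py would reject.

    import re

    def parse_total_kwh(line):
        # match the value and unit inside "6.8( ... )"
        m = re.search(r'6\.8\(([\d.]+)\*(kWh|MWh)\)', line)
        if not m:
            return None
        value, unit = m.groups()
        return int(float(value) * (1000 if unit == 'MWh' else 1))

    print(parse_total_kwh("6.8(0012345.678*MWh)"))  # -> 12345678
    print(parse_total_kwh("6.8(0098765*kWh)"))      # -> 98765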
TestaVuota/COCO-Style-Dataset-Generator-GUI | https://github.com/TestaVuota/COCO-Style-Dataset-Generator-GUI | a5dfe5a200b864e61e188cc5ac24708e0d0858cf | 35c095a815a6ef97d2e820d175bf46e32e59f40b | fc492177295fd295813f01f5a9a627d49100f7fb | refs/heads/master | 2023-08-14T18:27:19.785750 | 2021-10-21T22:51:22 | 2021-10-21T22:51:22 | null | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.5274766087532043,
"alphanum_fraction": 0.5449221134185791,
"avg_line_length": 43.093406677246094,
"blob_id": "099a7c0c7fa4e3e4c4779a8acd2eda69fc55ca42",
"content_id": "16d2b497c3b7699874ec9d946775c291f73037eb",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 8025,
"license_type": "permissive",
"max_line_length": 193,
"num_lines": 182,
"path": "/coco_dataset_generator/extras/cut_objects.py",
"repo_name": "TestaVuota/COCO-Style-Dataset-Generator-GUI",
"src_encoding": "UTF-8",
"text": "import numpy as np\nimport sys\nnp.set_printoptions(threshold=sys.maxsize)\n\nimport matplotlib.pyplot as plt\nfrom PIL import Image, ImageDraw\nimport json\nfrom collections import defaultdict\nimport random\nfrom matplotlib.path import Path\nimport argparse\nimport glob\nimport os\nimport time\n#from skimage.measure import find_contours\nfrom ..gui.contours import find_contours\n\nclass Occlusion_Generator_Bbox(object):\n\n def __init__(self, json_file, bg_dir, max_objs, imgs_path, curve_factor):\n \n self.dataset = json.load(open(json_file))\n self.max_objs = max_objs\n \n self.imgToAnns = defaultdict(list)\n for ann in self.dataset['annotations']:\n self.imgToAnns[ann['image_id']].append(ann)\n \n self.objToAnns = [[] for _ in range(len(self.dataset['classes'])+1)]\n for index in self.imgToAnns:\n for obj in self.imgToAnns[index]:\n self.objToAnns[obj['category_id']].append({'image': obj['image_id'], 'bbox':obj['bbox']})\n \n self.bg_dir = bg_dir\n \n self.imgs_dir = imgs_path\n self.set_random_background()\n\n self.classes = ['BG'] + self.dataset['classes']\n self.curve_factor = curve_factor\n \n def set_random_background(self):\n \n imgs = [x for x in glob.glob(os.path.join(self.bg_dir, '*')) if 'txt' not in x]\n print (imgs, self.bg_dir)\n bg_path = random.choice(imgs)\n self.img = Image.open(bg_path).convert(\"RGBA\")\n self.mask_img = Image.new('L', self.img.size, 0)\n self.text = ''\n \n def cut_bbox(self, rect): # Takes a bounding box of the form [x_min, y_min, x_max, y_max] and splits it in 2 based on a sine wave and returns 2 PIL polygons\n\n x = np.linspace(rect[0], rect[2], num=50)\n y = (rect[3]+rect[1])/2 + 15*np.sin(x/(rect[3]/np.pi/self.curve_factor))\n \n x1 = np.concatenate((x, np.array([rect[2], rect[0]])))\n y1 = np.concatenate((y, np.array([rect[3], rect[3]])))\n \n x2 = np.concatenate((x, np.array([rect[2], rect[0]])))\n y2 = np.concatenate((y, np.array([rect[1], rect[1]])))\n \n poly1 = [(x,y) for x,y in zip(x1, y1)]\n poly2 = [(x,y) for x,y in zip(x2, y2)]\n \n return random.choice([poly1, poly2])\n \n def add_objects(self): #Adds enlarged versions of n_objs (RANDOM) objects to self.img at random locations without overlap\n \n self.text += '%d'%self.image_id + '\\n' + os.path.abspath(os.path.join(self.imgs_dir, '%d.jpg'%self.image_id))+'\\n'+' '.join([str(x) for x in self.img.size])+'\\n\\n'\n \n n_objs = random.randint(5, self.max_objs)\n \n for _ in range(n_objs):\n\n c1 = random.randint(1, len(self.objToAnns)-1)\n c2 = random.randint(0, len(self.objToAnns[c1])-1)\n\n obj = Image.open(next(item for item in self.dataset['images'] if item[\"id\"] == self.objToAnns[c1][c2]['image'])['file_name'])\n obj_bbox = self.objToAnns[c1][c2]['bbox']\n obj_bbox = (obj_bbox[2], obj_bbox[3], obj_bbox[0], obj_bbox[1])\n \n obj_mask = Image.new('L', obj.size, 0) \n random_occ = self.cut_bbox(obj_bbox) \n ImageDraw.Draw(obj_mask).polygon(random_occ, outline=255, fill=255)\n \n obj = obj.crop(obj_bbox)\n obj_mask = obj_mask.crop(obj_bbox)\n\n obj = obj.resize(np.array(np.array(obj.size)*1.35, dtype=int))\n obj_mask = obj_mask.resize(np.array(np.array(obj_mask.size)*1.35, dtype=int))\n\n done_flag, timeout = False, False\n clk = time.time()\n\n while not done_flag:\n\n if time.time()-clk > 1: # One second timeout\n timeout = True\n\n randx = random.randint(0, self.img.size[0]-obj.size[0]-2)\n randy = random.randint(0, self.img.size[1]-obj.size[1]-2)\n\n temp_mask = self.mask_img.copy()\n temp_mask.paste(Image.new('L', obj_mask.size, 0), (randx, randy))\n\n if (temp_mask == 
self.mask_img):\n \n self.img.paste(obj, (randx, randy), obj_mask)\n self.mask_img.paste(obj_mask, (randx, randy))\n\n obj_ann = Image.new('L', self.mask_img.size, 0)\n obj_ann.paste(obj_mask, (randx, randy))\n\n padded_mask = np.zeros((obj_ann.size[0] + 2, obj_ann.size[1] + 2), dtype=np.uint8)\n padded_mask[1:-1, 1:-1] = np.array(obj_ann)\n contours = find_contours(padded_mask, 0.5)\n contours = [np.fliplr(verts) - 1 for verts in contours]\n\n x, y = contours[0][:,0], contours[0][:,1]\n area = (0.5*np.abs(np.dot(x,np.roll(y,1))-np.dot(y,np.roll(x,1))))/2 #shoelace algorithm\n \n self.text += self.classes[c1]+'\\n'+'%.2f'%area+'\\n'+np.array2string(contours[0].flatten(), max_line_width=np.inf, formatter={'float_kind':lambda x: \"%.2f\" % x})[1:-1]+'\\n\\n'\n\n done_flag=True\n \n if not done_flag and timeout: # Add timeout-based object-preferencing\n\n print ('Object Timeout')\n timeout = False\n c2 = random.randint(0, len(self.objToAnns[c1])-1)\n\n obj = Image.open(next(item for item in self.dataset['images'] if item[\"id\"] == self.objToAnns[c1][c2]['image'])['file_name'])\n obj_bbox = self.objToAnns[c1][c2]['bbox']\n obj_bbox = (obj_bbox[2], obj_bbox[3], obj_bbox[0], obj_bbox[1])\n \n obj_mask = Image.new('L', obj.size, 0) \n random_occ = self.cut_bbox(obj_bbox) \n ImageDraw.Draw(obj_mask).polygon(random_occ, outline=255, fill=255)\n \n obj = obj.crop(obj_bbox)\n obj_mask = obj_mask.crop(obj_bbox)\n\n obj = obj.resize(np.array(np.array(obj.size)*1.35, dtype=int))\n obj_mask = obj_mask.resize(np.array(np.array(obj_mask.size)*1.35, dtype=int))\n\n with open(os.path.join(self.imgs_dir, '%d.txt'%self.image_id), 'w') as f:\n f.write(self.text)\n self.img.convert('RGB').save(os.path.join(self.imgs_dir, '%d.jpg'%self.image_id))\n \n def generate_images(self, num_imgs):\n\n self.image_id = 0\n\n for i in range(num_imgs):\n \n self.set_random_background()\n self.add_objects()\n self.image_id += 1\n print ('Image %d/%d created successfully!!!'%(i+1, num_imgs))\n\nif __name__=='__main__':\n\n parser = argparse.ArgumentParser(\n description='Create occluded dataset.')\n parser.add_argument('--json_file', required=True,\n metavar=\"/path/to/json_file/\",\n help='Path to JSON file', default='../pascal_dataset.json')\n parser.add_argument('--bg_dir', required=True,\n metavar=\"/path/to/possible/background/images\",\n help=\"Path to Background Images\", default='background/')\n parser.add_argument('--new_dir', required=True,\n help=\"Path to the new dataset directory\", default='10')\n parser.add_argument('--max_objs', required=True,\n help=\"Maximum number of objects in an image (min=5)\", default='10')\n parser.add_argument('--curve_factor', required=False,\n help=\"Amount of curvature of the sine wave (>2 values lead to high freq cuts)\", default='1.4')\n parser.add_argument('--num_imgs', required=True,\n help=\"Total number of images in the synthetic dataset\", default='50')\n args = parser.parse_args()\n\n occ = Occlusion_Generator_Bbox(args.json_file, args.bg_dir, int(args.max_objs), args.new_dir, float(args.curve_factor))\n occ.generate_images(int(args.num_imgs))\n"
},
{
"alpha_fraction": 0.5542168617248535,
"alphanum_fraction": 0.7309237122535706,
"avg_line_length": 13.647058486938477,
"blob_id": "a4bf28fed94ffe18c62dcb6c26242d96bc041117",
"content_id": "d7931c965b29adbe68d297681ad63127c4cc0535",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 249,
"license_type": "permissive",
"max_line_length": 22,
"num_lines": 17,
"path": "/requirements.txt",
"repo_name": "TestaVuota/COCO-Style-Dataset-Generator-GUI",
"src_encoding": "UTF-8",
"text": "certifi==2019.11.28\ncycler==0.10.0\nCython==0.29.14\ndecorator==4.4.1\nimageio==2.6.1\nkiwisolver==1.1.0\nmatplotlib==3.1.3\nnetworkx==2.4\nnumpy\nopencv-python\nPillow\npyparsing==2.4.6\npython-dateutil==2.8.1\nPyWavelets==1.1.1\nscikit-image\nscipy\nsix==1.14.0\n"
},
{
"alpha_fraction": 0.7317073345184326,
"alphanum_fraction": 0.7804877758026123,
"avg_line_length": 29.75,
"blob_id": "8d056ebe0faac99da8f01baaff29e1112c013f17",
"content_id": "bb35ff3083423b73901bc4be546c58dcd382905c",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 123,
"license_type": "permissive",
"max_line_length": 73,
"num_lines": 4,
"path": "/requirements_maskrcnn.txt",
"repo_name": "TestaVuota/COCO-Style-Dataset-Generator-GUI",
"src_encoding": "UTF-8",
"text": "imutils==0.5.2\n-e git+https://www.github.com/hanskrupakar/Mask_RCNN@master#egg=mask_rcnn\npycocotools==2.0.0\nsimplification\n"
},
{
"alpha_fraction": 0.6239316463470459,
"alphanum_fraction": 0.632478654384613,
"avg_line_length": 27.5,
"blob_id": "db43889fd70c92f75014f02588d2af438bad5e1a",
"content_id": "90ee8e90a1e2d13f347b7ac0d67054e2f79233dd",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 351,
"license_type": "permissive",
"max_line_length": 76,
"num_lines": 12,
"path": "/coco_dataset_generator/gui/contours.py",
"repo_name": "TestaVuota/COCO-Style-Dataset-Generator-GUI",
"src_encoding": "UTF-8",
"text": "from skimage.measure import find_contours as FC\nimport numpy as np\nfrom simplification.cutil import simplify_coords\n\ndef find_contours(*args):\n \n contours = FC(*args)\n \n simplified_contours = [np.array(simplify_coords(x, 1), dtype=np.int32) \\\n for x in contours]\n \n return simplified_contours\n \n"
},
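For context, a minimal usage sketch of the find_contours wrapper above: trace the 0.5 level set of a binary mask with skimage, then simplify each contour down to a handful of vertices with a 1-pixel tolerance. The toy mask is invented, and the import path assumes the package is installed as coco_dataset_generator.

    import numpy as np
    from coco_dataset_generator.gui.contours import find_contours

    mask = np.zeros((10, 10), dtype=np.uint8)
    mask[2:8, 3:7] = 1                    # a filled rectangle

    polys = find_contours(mask, 0.5)      # list of (N, 2) int32 vertex arrays
    print(polys[0])                       # only a few corner-ish points survive simplification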
{
"alpha_fraction": 0.5730769038200378,
"alphanum_fraction": 0.574999988079071,
"avg_line_length": 26.36842155456543,
"blob_id": "996bb0161385f60ac405221cc1789ca0fd5c7d78",
"content_id": "2b6e28bf4bdbabfec15e3ed61d9d21cf67c4ff33",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1040,
"license_type": "permissive",
"max_line_length": 87,
"num_lines": 38,
"path": "/coco_dataset_generator/extras/move_dataset_to_single_folder.py",
"repo_name": "TestaVuota/COCO-Style-Dataset-Generator-GUI",
"src_encoding": "UTF-8",
"text": "import argparse\nimport shutil\nimport json\nimport os\n\nif __name__=='__main__':\n \n ap = argparse.ArgumentParser()\n ap.add_argument('dir', help='Path to folder to put all images in the dataset')\n ap.add_argument('json', help='Path to folder to put all images in the dataset')\n args = ap.parse_args()\n\n with open(args.json, 'r') as f:\n obj = json.load(f)\n \n try:\n os.makedirs(args.dir)\n except Exception:\n pass\n\n for idx, img in enumerate(obj['images']):\n \n path = img['file_name']\n newpath = os.path.join(args.dir, '%s.'%(str(idx).zfill(5))+path.split('.')[-1])\n \n shutil.copyfile(path, newpath)\n \n print (\"Moving %s to %s\"%(path, newpath))\n\n obj['images'][idx]['file_name'] = newpath\n \n print (\"Writing new JSON file!\")\n\n base, direc = os.path.basename(args.dir), os.path.dirname(args.dir)\n \n with open(os.path.join(direc, '%s_dataset.json'%(base)), 'w') as f:\n json.dump(obj, f)\n print (\"JSON file written!\")\n"
},
{
"alpha_fraction": 0.5427255630493164,
"alphanum_fraction": 0.5539675951004028,
"avg_line_length": 36.16222381591797,
"blob_id": "57860abe43b293a5137f71f5372a8a19b5392f79",
"content_id": "35efcc81a29d02758316d2182323a3f3d0bad9ca",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 16723,
"license_type": "permissive",
"max_line_length": 159,
"num_lines": 450,
"path": "/coco_dataset_generator/gui/segment.py",
"repo_name": "TestaVuota/COCO-Style-Dataset-Generator-GUI",
"src_encoding": "UTF-8",
"text": "from matplotlib import pyplot as plt\nfrom matplotlib.collections import PatchCollection\nfrom matplotlib.patches import Polygon\nfrom matplotlib.widgets import RadioButtons\nfrom matplotlib.path import Path\n\nfrom PIL import Image\nimport matplotlib\n\nimport argparse\nimport numpy as np\nimport glob\nimport os\n\nfrom matplotlib.widgets import Button\nfrom matplotlib.lines import Line2D\nfrom matplotlib.artist import Artist\n\nfrom .poly_editor import PolygonInteractor, dist_point_to_segment\n\nimport sys\nfrom ..utils.visualize_dataset import return_info\n\nclass COCO_dataset_generator(object):\n\n def __init__(self, fig, ax, args):\n\n self.ax = ax\n self.ax.set_yticklabels([])\n self.ax.set_xticklabels([])\n\n self.img_dir = args['image_dir']\n self.index = 0\n self.fig = fig\n self.polys = []\n self.zoom_scale, self.points, self.prev, self.submit_p, self.lines, self.circles = 1.2, [], None, None, [], []\n\n self.zoom_id = fig.canvas.mpl_connect('scroll_event', self.zoom)\n self.click_id = fig.canvas.mpl_connect('button_press_event', self.onclick)\n self.clickrel_id = fig.canvas.mpl_connect('button_release_event', self.onclick_release)\n self.keyboard_id = fig.canvas.mpl_connect('key_press_event', self.onkeyboard)\n\n self.axradio = plt.axes([0.0, 0.0, 0.2, 1])\n self.axbringprev = plt.axes([0.3, 0.05, 0.17, 0.05])\n self.axreset = plt.axes([0.48, 0.05, 0.1, 0.05])\n self.axsubmit = plt.axes([0.59, 0.05, 0.1, 0.05])\n self.axprev = plt.axes([0.7, 0.05, 0.1, 0.05])\n self.axnext = plt.axes([0.81, 0.05, 0.1, 0.05])\n self.b_bringprev = Button(self.axbringprev, 'Bring Previous Annotations')\n self.b_bringprev.on_clicked(self.bring_prev)\n self.b_reset = Button(self.axreset, 'Reset')\n self.b_reset.on_clicked(self.reset)\n self.b_submit = Button(self.axsubmit, 'Submit')\n self.b_submit.on_clicked(self.submit)\n self.b_next = Button(self.axnext, 'Next')\n self.b_next.on_clicked(self.next)\n self.b_prev = Button(self.axprev, 'Prev')\n self.b_prev.on_clicked(self.previous)\n\n self.button_axes = [self.axbringprev, self.axreset, self.axsubmit, self.axprev, self.axnext, self.axradio]\n\n self.existing_polys = []\n self.existing_patches = []\n self.selected_poly = False\n self.objects = []\n self.feedback = args['feedback']\n\n self.right_click = False\n\n self.text = ''\n\n with open(args['class_file'], 'r') as f:\n self.class_names = [x.strip() for x in f.readlines() if x.strip() != \"\"]\n\n self.radio = RadioButtons(self.axradio, self.class_names)\n self.class_names = ('BG',) + tuple(self.class_names)\n\n self.img_paths = sorted(glob.glob(os.path.join(self.img_dir, '*.jpg')))\n\n if len(self.img_paths)==0:\n self.img_paths = sorted(glob.glob(os.path.join(self.img_dir, '*.png')))\n if os.path.exists(self.img_paths[self.index][:-3]+'txt'):\n self.index = len(glob.glob(os.path.join(self.img_dir, '*.txt')))\n self.checkpoint = self.index\n \n try:\n im = Image.open(self.img_paths[self.index])\n except IndexError:\n print (\"Reached end of dataset! 
Delete some TXT files if you want to relabel some images in the folder\")\n exit()\n\n width, height = im.size\n im.close()\n\n image = plt.imread(self.img_paths[self.index])\n\n if args['feedback']:\n\n from mask_rcnn import model as modellib\n from mask_rcnn.get_json_config import get_demo_config\n \n #from skimage.measure import find_contours\n from .contours import find_contours\n\n from mask_rcnn.visualize_cv2 import random_colors\n \n config = get_demo_config(len(self.class_names)-2, True)\n \n if args['config_path'] is not None:\n config.from_json(args['config_path'])\n\n # Create model object in inference mode.\n model = modellib.MaskRCNN(mode=\"inference\", model_dir='/'.join(args['weights_path'].split('/')[:-2]), config=config)\n\n # Load weights trained on MS-COCO\n model.load_weights(args['weights_path'], by_name=True)\n\n r = model.detect([image], verbose=0)[0]\n\n # Number of instances\n N = r['rois'].shape[0]\n\n masks = r['masks']\n\n # Generate random colors\n colors = random_colors(N)\n\n # Show area outside image boundaries.\n height, width = image.shape[:2]\n\n class_ids, scores = r['class_ids'], r['scores']\n\n for i in range(N):\n color = colors[i]\n\n # Label\n class_id = class_ids[i]\n score = scores[i] if scores is not None else None\n label = self.class_names[class_id]\n\n # Mask\n mask = masks[:, :, i]\n\n # Mask Polygon\n # Pad to ensure proper polygons for masks that touch image edges.\n padded_mask = np.zeros(\n (mask.shape[0] + 2, mask.shape[1] + 2), dtype=np.uint8)\n padded_mask[1:-1, 1:-1] = mask\n contours = find_contours(padded_mask, 0.5)\n \n for verts in contours:\n # Subtract the padding and flip (y, x) to (x, y)\n\n verts = np.fliplr(verts) - 1\n pat = PatchCollection([Polygon(verts, closed=True)], facecolor='green', linewidths=0, alpha=0.6)\n self.ax.add_collection(pat)\n self.objects.append(label)\n self.existing_patches.append(pat)\n self.existing_polys.append(Polygon(verts, closed=True, alpha=0.25, facecolor='red'))\n\n self.ax.imshow(image, aspect='auto')\n\n self.text+=str(self.index)+'\\n'+os.path.abspath(self.img_paths[self.index])+'\\n'+str(width)+' '+str(height)+'\\n\\n'\n\n def bring_prev(self, event):\n\n if not self.feedback:\n\n poly_verts, self.objects = return_info(self.img_paths[self.index-1][:-3]+'txt')\n\n for num in poly_verts:\n self.existing_polys.append(Polygon(num, closed=True, alpha=0.5, facecolor='red'))\n\n pat = PatchCollection([Polygon(num, closed=True)], facecolor='green', linewidths=0, alpha=0.6)\n self.ax.add_collection(pat)\n self.existing_patches.append(pat)\n\n def points_to_polygon(self):\n return np.reshape(np.array(self.points), (int(len(self.points)/2), 2))\n\n def deactivate_all(self):\n self.fig.canvas.mpl_disconnect(self.zoom_id)\n self.fig.canvas.mpl_disconnect(self.click_id)\n self.fig.canvas.mpl_disconnect(self.clickrel_id)\n self.fig.canvas.mpl_disconnect(self.keyboard_id)\n\n def onkeyboard(self, event):\n\n if not event.inaxes:\n return\n elif event.key == 'a':\n\n if self.selected_poly:\n self.points = self.interactor.get_polygon().xy.flatten()\n self.interactor.deactivate()\n self.right_click = True\n self.selected_poly = False\n self.fig.canvas.mpl_connect(self.click_id, self.onclick)\n self.polygon.color = (0,255,0)\n self.fig.canvas.draw()\n else:\n for i, poly in enumerate(self.existing_polys):\n\n if poly.get_path().contains_point((event.xdata, event.ydata)):\n\n self.radio.set_active(self.class_names.index(self.objects[i])-1)\n self.polygon = self.existing_polys[i]\n 
self.existing_patches[i].set_visible(False)\n self.fig.canvas.mpl_disconnect(self.click_id)\n self.ax.add_patch(self.polygon)\n self.fig.canvas.draw()\n self.interactor = PolygonInteractor(self.ax, self.polygon)\n self.selected_poly = True\n self.existing_polys.pop(i)\n break\n\n elif event.key == 'r':\n\n for i, poly in enumerate(self.existing_polys):\n if poly.get_path().contains_point((event.xdata, event.ydata)):\n self.existing_patches[i].set_visible(False)\n self.existing_patches[i].remove()\n self.existing_patches.pop(i)\n self.existing_polys.pop(i)\n break\n self.fig.canvas.draw()\n\n def next(self, event):\n\n if len(self.text.split('\\n'))>5:\n\n print (self.img_paths[self.index][:-3]+'txt')\n\n with open(self.img_paths[self.index][:-3]+'txt', \"w\") as text_file:\n text_file.write(self.text)\n\n self.ax.clear()\n\n self.ax.set_yticklabels([])\n self.ax.set_xticklabels([])\n\n if (self.index<len(self.img_paths)-1):\n self.index += 1\n else:\n exit()\n\n image = plt.imread(self.img_paths[self.index])\n self.ax.imshow(image, aspect='auto')\n\n im = Image.open(self.img_paths[self.index])\n width, height = im.size\n im.close()\n\n self.reset_all()\n\n self.text+=str(self.index)+'\\n'+os.path.abspath(self.img_paths[self.index])+'\\n'+str(width)+' '+str(height)+'\\n\\n'\n\n def reset_all(self):\n\n self.polys = []\n self.text = ''\n self.points, self.prev, self.submit_p, self.lines, self.circles = [], None, None, [], []\n\n def previous(self, event):\n\n if (self.index>self.checkpoint):\n self.index-=1\n #print (self.img_paths[self.index][:-3]+'txt')\n os.remove(self.img_paths[self.index][:-3]+'txt')\n\n self.ax.clear()\n\n self.ax.set_yticklabels([])\n self.ax.set_xticklabels([])\n\n image = plt.imread(self.img_paths[self.index])\n self.ax.imshow(image, aspect='auto')\n\n im = Image.open(self.img_paths[self.index])\n width, height = im.size\n im.close()\n\n self.reset_all()\n\n self.text+=str(self.index)+'\\n'+os.path.abspath(self.img_paths[self.index])+'\\n'+str(width)+' '+str(height)+'\\n\\n'\n\n def onclick(self, event):\n\n if not event.inaxes:\n return\n if not any([x.in_axes(event) for x in self.button_axes]):\n if event.button==1:\n self.points.extend([event.xdata, event.ydata])\n #print (event.xdata, event.ydata)\n\n circle = plt.Circle((event.xdata,event.ydata),2.5,color='black')\n self.ax.add_artist(circle)\n self.circles.append(circle)\n\n if (len(self.points)<4):\n self.r_x = event.xdata\n self.r_y = event.ydata\n else:\n if len(self.points)>5:\n self.right_click=True\n self.fig.canvas.mpl_disconnect(self.click_id)\n self.click_id = None\n self.points.extend([self.points[0], self.points[1]])\n #self.prev.remove()\n\n if (len(self.points)>2):\n line = self.ax.plot([self.points[-4], self.points[-2]], [self.points[-3], self.points[-1]], 'b--')\n self.lines.append(line)\n\n self.fig.canvas.draw()\n\n if len(self.points)>4:\n if self.prev:\n self.prev.remove()\n self.p = PatchCollection([Polygon(self.points_to_polygon(), closed=True)], facecolor='red', linewidths=0, alpha=0.4)\n self.ax.add_collection(self.p)\n self.prev = self.p\n\n self.fig.canvas.draw()\n\n #if len(self.points)>4:\n # print 'AREA OF POLYGON: ', self.find_poly_area(self.points)\n #print event.x, event.y\n\n def find_poly_area(self):\n coords = self.points_to_polygon()\n x, y = coords[:,0], coords[:,1]\n return (0.5*np.abs(np.dot(x,np.roll(y,1))-np.dot(y,np.roll(x,1))))/2 #shoelace algorithm\n\n def onclick_release(self, event):\n\n if any([x.in_axes(event) for x in self.button_axes]) or self.selected_poly:\n 
return\n\n if hasattr(self, 'r_x') and hasattr(self, 'r_y') and None not in [self.r_x, self.r_y, event.xdata, event.ydata]:\n if np.abs(event.xdata - self.r_x)>10 and np.abs(event.ydata - self.r_y)>10: # 10 pixels limit for rectangle creation\n if len(self.points)<4:\n\n self.right_click=True\n self.fig.canvas.mpl_disconnect(self.click_id)\n self.click_id = None\n bbox = [np.min([event.xdata, self.r_x]), np.min([event.ydata, self.r_y]), np.max([event.xdata, self.r_x]), np.max([event.ydata, self.r_y])]\n self.r_x = self.r_y = None\n\n self.points = [bbox[0], bbox[1], bbox[0], bbox[3], bbox[2], bbox[3], bbox[2], bbox[1], bbox[0], bbox[1]]\n self.p = PatchCollection([Polygon(self.points_to_polygon(), closed=True)], facecolor='red', linewidths=0, alpha=0.4)\n self.ax.add_collection(self.p)\n self.fig.canvas.draw()\n\n def zoom(self, event):\n\n if not event.inaxes:\n return\n cur_xlim = self.ax.get_xlim()\n cur_ylim = self.ax.get_ylim()\n\n xdata = event.xdata # get event x location\n ydata = event.ydata # get event y location\n\n if event.button == 'down':\n # deal with zoom in\n scale_factor = 1 / self.zoom_scale\n elif event.button == 'up':\n # deal with zoom out\n scale_factor = self.zoom_scale\n else:\n # deal with something that should never happen\n scale_factor = 1\n print (event.button)\n\n new_width = (cur_xlim[1] - cur_xlim[0]) * scale_factor\n new_height = (cur_ylim[1] - cur_ylim[0]) * scale_factor\n\n relx = (cur_xlim[1] - xdata)/(cur_xlim[1] - cur_xlim[0])\n rely = (cur_ylim[1] - ydata)/(cur_ylim[1] - cur_ylim[0])\n\n self.ax.set_xlim([xdata - new_width * (1-relx), xdata + new_width * (relx)])\n self.ax.set_ylim([ydata - new_height * (1-rely), ydata + new_height * (rely)])\n self.ax.figure.canvas.draw()\n\n def reset(self, event):\n\n if not self.click_id:\n self.click_id = fig.canvas.mpl_connect('button_press_event', self.onclick)\n #print (len(self.lines))\n #print (len(self.circles))\n if len(self.points)>5:\n for line in self.lines:\n line.pop(0).remove()\n for circle in self.circles:\n circle.remove()\n self.lines, self.circles = [], []\n self.p.remove()\n self.prev = self.p = None\n self.points = []\n #print (len(self.lines))\n #print (len(self.circles))\n\n def print_points(self):\n\n ret = ''\n for x in self.points:\n ret+='%.2f'%x+' '\n return ret\n\n def submit(self, event):\n\n if not self.right_click:\n print ('Right click before submit is a must!!')\n else:\n\n self.text+=self.radio.value_selected+'\\n'+'%.2f'%self.find_poly_area()+'\\n'+self.print_points()+'\\n\\n'\n self.right_click = False\n #print (self.points)\n\n self.lines, self.circles = [], []\n self.click_id = fig.canvas.mpl_connect('button_press_event', self.onclick)\n\n self.polys.append(Polygon(self.points_to_polygon(), closed=True, color=np.random.rand(3), alpha=0.4, fill=True))\n if self.submit_p:\n self.submit_p.remove()\n self.submit_p = PatchCollection(self.polys, cmap=matplotlib.cm.jet, alpha=0.4)\n self.ax.add_collection(self.submit_p)\n self.points = []\n\nif __name__=='__main__':\n\n ap = argparse.ArgumentParser()\n ap.add_argument(\"-i\", \"--image_dir\", required=True, help=\"Path to the image dir\")\n ap.add_argument(\"-c\", \"--class_file\", required=True, help=\"Path to the classes file of the dataset\")\n ap.add_argument('-w', \"--weights_path\", default=None, help=\"Path to Mask RCNN checkpoint save file\")\n ap.add_argument('-x', \"--config_path\", default=None, help=\"Path to Mask RCNN training config JSON file to load model based on specific parameters\")\n args = 
vars(ap.parse_args())\n \n args['feedback'] = args['weights_path'] is not None\n\n fig = plt.figure(figsize=(14, 14))\n ax = plt.gca()\n\n gen = COCO_dataset_generator(fig, ax, args)\n\n plt.subplots_adjust(bottom=0.2)\n plt.show()\n\n gen.deactivate_all()\n"
},
{
"alpha_fraction": 0.47491371631622314,
"alphanum_fraction": 0.48155030608177185,
"avg_line_length": 33.78703689575195,
"blob_id": "07a4b33179ac34d8cb1eb4e638541c21bb1970be",
"content_id": "6c9ee5ff4619857bf7a4b4039f8d11f48607c5ab",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3767,
"license_type": "permissive",
"max_line_length": 107,
"num_lines": 108,
"path": "/coco_dataset_generator/extras/combine_json_files.py",
"repo_name": "TestaVuota/COCO-Style-Dataset-Generator-GUI",
"src_encoding": "UTF-8",
"text": "'''\nUSAGE:\npython combine_json_files.py <LIST OF FILES>\n'''\nimport os\nimport json\nimport glob\nimport sys\nimport numpy as np\nimport argparse\nimport os\n\ndef cleanup_utf8(array):\n arr = [x.encode('ascii', errors='ignore').decode('utf-8') for x in array]\n return list(map(lambda x: x.strip().strip('\\\"').strip('\\''), arr))\n\ndef merge_json(files, outfile='merged_dataset.json', abspath=False):\n\n img_counter = 0\n ann_counter = 0\n \n images, annotations, classes = [], [], []\n \n for file_path in files:\n \n with open(file_path, 'r') as f:\n obj = json.load(f)\n \n for img in obj[\"images\"]:\n img[\"id\"] += img_counter\n\n if not abspath:\n img['file_name'] = os.path.join(\n os.path.abspath(os.path.dirname(file_path)), \n img['file_name'])\n \n for ann in obj[\"annotations\"]:\n ann[\"id\"] += ann_counter\n ann[\"image_id\"] += img_counter\n \n ann_counter += len(obj[\"annotations\"])\n img_counter += len(obj[\"images\"])\n \n if len(images)==0:\n images = obj['images']\n annotations = obj['annotations']\n classes = cleanup_utf8(obj['classes'])\n else:\n obj['classes'] = cleanup_utf8(obj['classes'])\n \n if classes != obj[\"classes\"]:\n print (\"CLASSES MISMATCH BETWEEN:\")\n print (classes)\n print (obj['classes'])\n if len(obj['classes']) < len(classes):\n c1, c2 = obj['classes'], classes\n new = True\n else:\n c1, c2 = classes, obj['classes']\n new = False\n\n mapping = {}\n for idx, c in enumerate(c1):\n try:\n mapping[idx] = c2.index(c)\n except Exception:\n c2.append(c)\n mapping[idx] = len(c2) - 1\n \n print ('MAPPING: ', mapping)\n if not new:\n for idx, ann in enumerate(annotations):\n annotations[idx]['category_id'] = mapping[ann['category_id']-1] + 1\n classes = obj['classes']\n else:\n for idx, ann in enumerate(obj['annotations']):\n obj['annotations'][idx]['category_id'] = mapping[ann['category_id']-1] + 1 \n obj['classes'] = classes\n \n print (\"CHANGE IN NUMBER OF CLASSES HAS BEEN DETECTED BETWEEN JSON FILES\")\n print (\"NOW MAPPING OLD CLASSES TO NEW LIST BASED ON TEXTUAL MATCHING\")\n\n for k, v in mapping.items():\n print (c1[k], \"==>\", c2[v])\n \n remaining = set(c2) - set(c1)\n for r in remaining:\n print (\"NEW CLASS: \", r)\n\n images.extend(obj[\"images\"])\n annotations.extend(obj[\"annotations\"])\n \n with open(outfile, \"w\") as f:\n data = {'images': images, 'annotations':annotations, 'classes': classes, 'categories':[]}\n json.dump(data, f)\n\nif __name__=='__main__':\n \n if len(sys.argv) < 3:\n print (\"Not enough input files to combine into a single dataset file\")\n exit()\n \n ap = argparse.ArgumentParser()\n ap.add_argument('files', nargs='+', help='List of JSON files to combine into single JSON dataset file')\n ap.add_argument('--absolute', nargs='+', help='Flag to use absolute paths in JSON file')\n args = ap.parse_args()\n \n merge_json(args.files, 'merged_json.json', args.absolute)\n \n \n"
},
{
"alpha_fraction": 0.4308374226093292,
"alphanum_fraction": 0.4454163610935211,
"avg_line_length": 37.621620178222656,
"blob_id": "99a3aa426593ccb512e9789a97e366a690392194",
"content_id": "b0c4ea9485a5c13ef164aa25a52e087bf749d98e",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 8574,
"license_type": "permissive",
"max_line_length": 120,
"num_lines": 222,
"path": "/coco_dataset_generator/extras/occlusion_transforms.py",
"repo_name": "TestaVuota/COCO-Style-Dataset-Generator-GUI",
"src_encoding": "UTF-8",
"text": "import numpy as np\nimport glob\nimport os\nimport argparse\nimport scipy.interpolate\nimport time\nfrom shapely.geometry import Polygon\n\n#from skimage.measure import find_contours\nfrom ..gui.contours import find_contours\n\nfrom PIL import Image, ImageDraw\n\nclass Occlusion_Generator(object):\n\n def __init__(self, strip_width):\n \n self.random_factor = 8\n self.distance = strip_width\n class Annotation(object):\n \n def __init__(self):\n \n self.objects = []\n self.classes = []\n \n self.all_images = []\n \n self.images = []\n \n self.polys = []\n \n self.im_shape = np.asarray(Image.open(glob.glob(os.path.join(args[\"image_dir\"], \"*.jpg\"))[0])).shape\n \n for ptr, f in enumerate(glob.glob(os.path.join(args[\"image_dir\"], \"*.jpg\"))):\n\n print (\"Processing Image %d/%d\"%(ptr+1, len(glob.glob(os.path.join(args[\"image_dir\"], \"*.jpg\")))))\n\n im = Image.open(f).convert('RGBA')\n im.load()\n \n self.images.append(np.asarray(Image.open(f)))\n \n # convert to numpy (for convenience)\n imArray = np.asarray(im)\n \n lines = [x for x in range(50, imArray.shape[0], 100)]\n \n image_contents = Annotation()\n \n with open(f[:-3]+'txt', 'r') as f:\n txt = f.read().split('\\n')\n \n for index in range(6, len(txt), 4):\n \n num = [float(x) for x in txt[index].split(' ')[:-1]]\n num = [(num[i], num[i+1]) for i in range(0, len(num), 2)]\n image_contents.objects.append([num])\n image_contents.classes.append(txt[index-2])\n \n strips = [Annotation() for _ in range(len(lines[2:]))]\n \n poly = [(imArray.shape[1], 0), (0, 0)]\n \n for pos, l in enumerate(lines[2:]):\n \n if ptr == 0:\n\n x, y = [0, imArray.shape[1]], [l, l+self.distance]\n y_interp = scipy.interpolate.interp1d(x, y)\n x_pts, y_pts = [x[0]], [y[0]] \n \n for p in range(0, imArray.shape[1], 5):\n yt = y_interp(p) + (2*np.random.random_sample()-1)*self.random_factor\n x_pts.append(p + (2*np.random.random_sample()-1)*self.random_factor)\n y_pts.append(yt)\n x_pts.append(x[1])\n y_pts.append(y[1])\n \n pts = [(x, y) for x, y in zip(x_pts, y_pts)]\n poly.extend(pts)\n \n self.polys.append(poly)\n \n else:\n \n poly = self.polys[pos]\n \n #ImageDraw.Draw(im).polygon(poly, fill=\"white\", outline=None)\n \n #ImageDraw.Draw(im).line(pts, fill=128)\n \n #im.show()\n #time.sleep(.1)\n \n # create mask\n \n maskimg = Image.new('L', (imArray.shape[1], imArray.shape[0]), 0)\n ImageDraw.Draw(maskimg).polygon(poly, outline=1, fill=1)\n mask = np.array(maskimg)\n #maskimg.show()\n \n for i in range(len(image_contents.classes)):\n \n obj_img = Image.new('L', (imArray.shape[1], imArray.shape[0]), 0)\n ImageDraw.Draw(obj_img).polygon(image_contents.objects[i][0], outline=1, fill=1)\n obj = np.array(obj_img)\n logical_and = mask * obj\n \n if (np.sum(logical_and)>150):\n \n # Mask Polygon\n # Pad to ensure proper polygons for masks that touch image edges.\n padded_mask = np.zeros(\n (logical_and.shape[0] + 2, logical_and.shape[1] + 2), dtype=np.uint8)\n padded_mask[1:-1, 1:-1] = logical_and\n contours = find_contours(padded_mask, 0.5)\n \n strips[pos].objects.append([np.fliplr(verts) - 1 for verts in contours])\n strips[pos].classes.append(image_contents.classes[i])\n \n if ptr == 0:\n poly = list(map(tuple, np.flip(np.array(pts), 0)))\n self.all_images.append(strips)\n \n def polys_to_string(self, polys):\n \n ret = ''\n \n for poly in polys:\n for (x, y) in poly:\n ret+='%.2f %.2f '%(x, y)\n ret+='\\n'\n return ret\n \n def find_poly_area(self, poly):\n \n x, y = np.zeros(len(poly)), np.zeros(len(poly))\n for i, (xp, yp) in 
enumerate(poly):\n x[i] = xp\n y[i] = yp\n return 0.5*np.abs(np.dot(x,np.roll(y,1))-np.dot(y,np.roll(x,1))) #shoelace algorithm\n \n def generate_samples(self, num, path):\n \n cumulative_mask = None\n text = ''\n \n if not os.path.exists(path):\n os.mkdir(path)\n \n for i in range(num):\n \n newImage = Image.new('RGBA', (self.im_shape[1], self.im_shape[0]), 0)\n\n text+=\"occ%d\\n%s\\n%d %d\\n\\n\"%(i, os.path.join(path, 'occ_%d.jpg'%(i+1)), self.im_shape[0], self.im_shape[1])\n \n for j in range(len(self.all_images[0])):\n \n rand = np.random.randint(len(self.all_images))\n \n # create mask\n maskimg = Image.new('L', (self.im_shape[1], self.im_shape[0]), 0)\n \n ImageDraw.Draw(maskimg).polygon(self.polys[j], outline=1, fill=1)\n mask = np.array(maskimg)\n \n #Image.fromarray(mask*255, 'L').show()\n \n if cumulative_mask is None:\n cumulative_mask = mask\n else:\n cumulative_mask += mask \n \n #Image.fromarray(cumulative_mask*255, 'L').show()\n \n #time.sleep(.5) \n # assemble new image (uint8: 0-255)\n newImArray = np.empty(self.im_shape[:2]+(4,), dtype='uint8')\n\n # colors (three first columns, RGB)\n newImArray[:,:,:3] = self.images[rand][:,:,:3]\n \n # transparency (4th column)\n newImArray[:,:,3] = mask*255\n\n # back to Image from numpy\n \n newIm = Image.fromarray(newImArray, \"RGBA\")\n \n newImage.paste(newIm, (0, 0), newIm)\n \n for anns, cls in zip(self.all_images[rand][j].objects, self.all_images[rand][j].classes):\n text += cls+'\\n'\n area = 0\n for poly in anns:\n area += self.find_poly_area(poly)\n text+='%.2f\\n'%area\n text += self.polys_to_string(anns)\n text +='\\n'\n \n background = Image.new(\"RGB\", (newImArray.shape[1], newImArray.shape[0]), (0, 0, 0))\n background.paste(newImage, mask=newImage.split()[3]) # 3 is the alpha channel\n \n background.save(os.path.join(path, 'occ_%d.jpg'%(i+1)))\n with open(os.path.join(path, 'occ_%d.txt'%(i+1)), 'w') as f:\n f.write(text)\n text = ''\n print ('Generated %d/%d Images: %s'%(i+1, num, os.path.join(path, 'occ_%d.jpg'%(i+1))))\n \nif __name__==\"__main__\":\n \n ap = argparse.ArgumentParser()\n ap.add_argument(\"-i\", \"--image_dir\", required=True, help=\"Path to the image dir\") \n ap.add_argument(\"-o\", \"--output_dir\", required=True, help=\"Path to the output dir\") \n ap.add_argument(\"-s\", \"--strip_width\", required=True, help=\"width of strip\") \n ap.add_argument(\"-n\", \"--num_images\", required=True, help=\"number of new images to generate\") \n args = vars(ap.parse_args())\n\n occlusion_gen = Occlusion_Generator(int(args['strip_width'])) \n \n occlusion_gen.generate_samples(int(args['num_images']), args['output_dir'])\n"
},
{
"alpha_fraction": 0.49067243933677673,
"alphanum_fraction": 0.5236442685127258,
"avg_line_length": 38.06779479980469,
"blob_id": "d4d2b14e40b1a8b3c477cc017631ec27f715ddf6",
"content_id": "fd08c4b26dda87d7df6233e9cea6c6e0df5d8026",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2305,
"license_type": "permissive",
"max_line_length": 136,
"num_lines": 59,
"path": "/coco_dataset_generator/utils/visualize_json_file.py",
"repo_name": "TestaVuota/COCO-Style-Dataset-Generator-GUI",
"src_encoding": "UTF-8",
"text": "import json\nimport cv2\nimport argparse\nimport os\nimport numpy as np\n\nif __name__=='__main__':\n \n ap = argparse.ArgumentParser()\n ap.add_argument('json_file', help=\"Path to the JSON dataset file to visualize\")\n ap.add_argument('--save', help='Save a few results to disk to accommodate non-display environments', action='store_true')\n ap.add_argument('--relpath', action='store_true', help='Absolute vs relative paths in dataset JSON')\n args = ap.parse_args()\n \n '''\n cv2.namedWindow('frame', cv2.WND_PROP_FULLSCREEN)\n '''\n\n with open(args.json_file, 'r') as f: \n obj = json.loads(f.read())\n \n images, annotations = obj[\"images\"], obj[\"annotations\"]\n classes = obj[\"classes\"]\n print (classes) \n print (\"Dataset contains %d images, %d objects!\"%(len(images), len(annotations)))\n \n for idx, img in enumerate(images):\n if args.relpath:\n imgpath = os.path.join(os.path.dirname(args.json_file), img['file_name'])\n else:\n imgpath = img['file_name']\n print (imgpath)\n if os.path.exists(imgpath):\n anns = [ann for ann in annotations if ann[\"image_id\"]==img[\"id\"]]\n image_cv2 = cv2.imread(imgpath)\n ann_img = image_cv2.copy()\n for ann in anns:\n s = [int(x) for x in ann['bbox']]\n seg = np.array(ann['segmentation'][0])\n x, y = seg[range(0, len(seg)-1, 2)], seg[range(1, len(seg), 2)]\n seg2d = [[xi, yi] for xi, yi in zip(x,y)]\n cv2.fillPoly(ann_img, np.array([seg2d], dtype = 'int32'), (0, 255, 0))\n cv2.rectangle(image_cv2, (s[0], s[1]), (s[2], s[3]), (0,0,0), 2)\n cv2.putText(image_cv2, classes[ann['category_id']-1], (s[0]-10, s[1]+10), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 0), 2)\n image_cv2 = cv2.addWeighted(ann_img,0.25,image_cv2,0.75,0)\n\n if not args.save:\n cv2.imshow('frame', image_cv2)\n q = cv2.waitKey()\n \n else:\n print ('saving sample!')\n cv2.imwrite('sample%d.jpg'%(idx), image_cv2)\n q = 10 \n if idx > 25:\n q = 113\n\n if q == 113: # if q == 'q'\n exit()\n"
},
{
"alpha_fraction": 0.6441805958747864,
"alphanum_fraction": 0.6463737487792969,
"avg_line_length": 55.508548736572266,
"blob_id": "3aabc4c5ff457af32a3e179f01cc4498fd175727",
"content_id": "e25088cad6a1702eb71153ec54c5c3183bf744b5",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 13223,
"license_type": "permissive",
"max_line_length": 688,
"num_lines": 234,
"path": "/README.md",
"repo_name": "TestaVuota/COCO-Style-Dataset-Generator-GUI",
"src_encoding": "UTF-8",
"text": "# COCO-Style-Dataset-Generator-GUI\n\nThis is a simple GUI-based Widget based on matplotlib in Python to facilitate quick and efficient crowd-sourced generation of annotation masks and bounding boxes using a simple interactive User Interface. Annotation can be in terms of polygon points covering all parts of an object (see instructions in README) or it can simply be a bounding box, for which you click and drag the mouse button. Optionally, one could choose to use a pretrained Mask RCNN model to come up with initial segmentations. This shifts the work load from painstakingly annotating all the objects in every image to altering wrong predictions made by the system which maybe simpler once an efficient model is learnt.\n\n#### Note: This repo only contains code to annotate every object using a single polygon figure. Support for multi-polygon objects and `iscrowd=True` annotations isn't available yet. Feel free to extend the repo as you wish. Also, the code uses xyxy bounding boxes while coco uses xywh; something to keep in mind if you intend to create a custom COCO dataset to plug into other models as COCO datasets.\n\n### REQUIREMENTS:\n\n`Python 3.5+` is required to run the Mask RCNN code. If only the GUI tool is used, `Python2.7` or `Python3.5+` can be used.\n\n###### NOTE: For python2.7, OpenCV needs to be installed from source and configured to be in the environment running the code.\n###### Before installing, please upgrade setuptools using: pip install --upgrade setuptools\n###### For Windows users, please install Visual Studio C++ 14 or higher if necessary using this link: http://go.microsoft.com/fwlink/?LinkId=691126&fixForIE=.exe. \n\n### RUN THE SEGMENTOR GUI:\n\nClone the repo.\n\n```\ngit clone https://github.com/hanskrupakar/COCO-Style-Dataset-Generator-GUI.git\n```\n\n#### Installing Dependencies:\n\nBefore running the code, install required pre-requisite python packages using pip.\n\nIf you wish to use Mask RCNN to prelabel based on a trained model, please use the environment variable `MASK_RCNN=\"y\"`, otherwise there's no need to include it and you could just perform the install.\n\n###### Without Mask RCNN\n\n```\ncd COCO-Style-Dataset-Generator-GUI/\npython setup.py install\n```\n\n###### With Mask RCNN\n\n```\ncd COCO-Style-Dataset-Generator-GUI/\nMASK_RCNN=\"y\" python3 setup.py install\n```\n\n#### Running the instance segmentation GUI without Mask RCNN pretrained predictions:\n\nIn a separate text file, list the target labels/classes line-by-line to be displayed along with the dataset for class labels. For example, look at [classes/products.txt](https://github.com/hanskrupakar/COCO-Style-Dataset-Generator-GUI/blob/master/classes/products.txt)\n\n```\npython3 -m coco_dataset_generator.gui.segment -i background/ -c classes/products.txt\n\npython3 -m coco_dataset_generator.gui.segment_bbox_only -i background/ -c classes/products.txt\n```\n\n#### Running the instance segmentation GUI augmented by initial Mask RCNN pretrained model predictions:\n\nTo run the particular model for the demo, download the pretrained weights from [HERE!!!](https://drive.google.com/file/d/1GaKVP3BvTfMwPbhEm4nF7fLATV-eDkFQ/view?usp=sharing). 
Download and extract pretrained weights into the repository.\n\n```\npython3 -m coco_dataset_generator.gui.segment -i background/ -c classes/products.txt \\\n -w <MODEL_FILE> [--config <CONFIG_FILE>]\n\npython3 -m coco_dataset_generator.gui.segment_bbox_only -i background/ -c classes/products.txt \\\n -w <MODEL_FILE> [--config <CONFIG_FILE>]\n```\n\nThe configuration file for Mask RCNN becomes relevant when you play around with the configuration parameters that make up the network. In order to seamlessly use the repository with multiple such Mask RCNN models for different types of datasets, you could create a single config file for every project and use them as you please. The base repository has been configured to work well with the demo model provided and so any change to the parameters should be followed by generation of its corresponding config file.\n\nHINT: Use `get_json_config.py` inside `Mask RCNN` to get config file wrt specific parameters of Mask RCNN. You could either clone [Mask_RCNN](https://www.github.com/hanskrupakar/Mask_RCNN), use `pip install -e Mask_RCNN/` to replace the mask_rcnn installed from this repo and then get access to `get_json_config.py` easily or you could find where pip installs `mask_rcnn` and find it directly from the source.\n\n`USAGE: segment.py [-h] -i IMAGE_DIR -c CLASS_FILE [-w WEIGHTS_PATH] [-x CONFIG_PATH]`\n\n`USAGE: segment_bbox_only.py [-h] -i IMAGE_FILE -c CLASSES_FILE [-j JSON_FILE] [--save_csv] [-w WEIGHTS_PATH] [-x CONFIG_PATH]`\n\n##### Optional Arguments\n\n| Shorthand | Flag Name | Description |\n| --------------- | --------------------------- | ---------------------------------------------------------------------------------- |\n| -h | --help | Show this help message and exit |\n| -i IMAGE_DIR | --image_dir IMAGE_DIR | Path to the image dir |\n| -c CLASS_FILE | --class_file CLASS_FILE | Path to object labels |\n| -w WEIGHTS_PATH | --weights_path WEIGHTS_PATH | Path to Mask RCNN checkpoint save file |\n| -j JSON_FILE | --json_file JSON_FILE | Path of JSON file to append dataset to |\n| | --save_csv | Choose option to save dataset as CSV file |\n| -x CONFIG_FILE | --config_file CONFIG_FILE | Path of JSON file for training config; Use `get_json_config` script from Mask RCNN |\n\n### POLYGON SEGMENTATION GUI CONTROLS:\n\n\n\nIn this demo, all the green patches over the objects are the rough masks generated by a pretrained Mask RCNN network.\n\nKey-bindings/\nButtons\n\nEDIT MODE (when `a` is pressed and polygon is being edited)\n\n 'a' toggle vertex markers on and off.\n When vertex markers are on, you can move them, delete them\n\n 'd' delete the vertex under point\n\n 'i' insert a vertex at point near the boundary of the polygon.\n\n Left click Use on any point on the polygon boundary and move around\n by dragging to alter shape of polygon\n\nREGULAR MODE\n\n Scroll Up Zoom into image\n\n Scroll Down Zoom out of image\n\n Left Click Create a point for a polygon mask around an object\n\n Right Click Complete the polygon currently formed by connecting all selected points\n\n Left Click Drag Create a bounding box rectangle from point 1 to point 2 (works only\n when there are no polygon points on screen for particular object)\n\n 'a' Press key on top of overlayed polygon (from Mask RCNN or\n previous annotations) to select it for editing\n\n 'r' Press key on top of overlayed polygon (from Mask RCNN or\n previous annotations) to completely remove it\n\n BRING PREVIOUS ANNOTATIONS Bring back the annotations from the previous image to preserve\n 
similar annotations.\n\n SUBMIT To be clicked after Right click completes polygon! Finalizes current\n segmentation mask and class label picked.\n After this, the polygon cannot be edited.\n\n NEXT Save all annotations created for current file and move on to next image.\n\n PREV Goto previous image to re-annotate it. This deletes the annotations\n created for the file before the current one in order to\n rewrite the fresh annotations.\n\n RESET If when drawing the polygon using points, the polygon doesn't cover the\n object properly, reset will let you start fresh with the current polygon.\n This deletes all the points on the image.\n\nThe green annotation boxes from the network can be edited by pressing on the Keyboard key `a` when the mouse pointer is on top of a particular such mask. Once you press `a`, the points making up that polygon will show up and you can then edit it using the key bindings specified. Once you're done editing the polygon, press `a` again to finalize the edits. At this point, it will become possible to submit that particular annotation and move on to the next one.\n\nOnce the GUI tool has been used successfully and relevant txt files have been created for all annotated images, one can use `create_json_file.py` to create the COCO-Style JSON file.\n\n```\npython -m coco_dataset_generator.utils.create_json_file -i background/ -c classes/products.txt\n -o output.json -t jpg\n```\n\n```\nUSAGE: create_json_file.py [-h] -i IMAGE_DIR -o FILE_PATH -c CLASS_FILE -t TYPE\n```\n\n##### Optional Arguments\n\n| Shorthand | Flag Name | Description |\n| ------------- | ----------------------- | --------------------------------------- |\n| -i IMAGE_DIR | --image_dir IMAGE_DIR | Path to the image dir |\n| -o FILE_PATH | --file_path FILE_PATH | Path of output file |\n| -c CLASS_FILE | --class_file CLASS_FILE | Path of file with output classes |\n| -t TYPE | --type TYPE | Type of the image files (jpg, png etc.) |\n\n### RECTANGULAR BOUNDING BOX GUI CONTROLS:\n\nThe same GUI is designed slightly differently in case of rectangular bounding box annotations with speed of annotation in mind. Thus, most keys are keyboard bindings. Most ideally, this interface is very suited to serve to track objects across video by dragging around a box of similar size. Since the save button saves multiple frame results together, the JSON file is directly created instead of txt files for each image, which means there wouldn't be a need to use `create_json_file.py`.\n\nKey-bindings/\nButtons\n\nEDIT MODE (when `a` is pressed and rectangle is being edited)\n\n 'a' toggle vertex markers on and off. 
When vertex markers are on,\n you can move them, delete them\n\n 'i' insert rectangle in the list of final objects to save.\n\n Left click Use on any point on the rectangle boundary and move around by\n dragging to alter shape of rectangle\n\nREGULAR MODE\n\n Scroll Up Zoom into image\n\n Scroll Down Zoom out of image\n\n Left Click Drag Create a bounding box rectangle from point 1 to point 2.\n\n 'a' Press key on top of overlayed polygon (from Mask RCNN or\n previous annotations) to select it for editing\n\n 'r' Press key on top of overlayed polygon (from Mask RCNN or\n previous annotations) to completely remove it\n\n 'n' Press key to move on to next image after completing all\n rectangles in current image\n\n SAVE Save all annotated objects so far\n\n### LIST OF FUNCTIONALITIES:\n\n FILE FUNCTIONALITY\n\n cut_objects.py Cuts objects based on bounding box annotations using dataset.json\n file and creates occlusion-based augmented images dataset.\n\n create_json_file.py Takes a directory of annotated images (use segment.py to annotate\n into text files) and returns a COCO-style JSON file.\n\n extract_frames.py Takes a directory of videos and extracts all the frames of all\n videos into a folder labeled adequately by the video name.\n\n pascal_to_coco.py Takes a PASCAL-style dataset directory with JPEGImages/ and\n Annotations/ folders and uses the bounding box as masks to\n create a COCO-style JSON file.\n\n segment.py Read the instructions above.\n\n segment_bbox_only.py Same functionality but optimized for easier annotation of\n bbox-only datasets.\n\n test_*.py Unit tests.\n\n visualize_dataset.py Visualize the annotations created using the tool.\n\n visualize_json_file.py Visualize the dataset JSON file annotations on the entire dataset.\n\n compute_dataset_statistics.py Find distribution of objects in the dataset by counts.\n\n combine_json_files.py Combine different JSON files together into a single dataset file.\n\n delete_images.py Delete necessary images from the JSON dataset.\n\nNOTE: Please use `python <FILENAME>.py -h` for details on how to use each of the above files.\n"
},
{
"alpha_fraction": 0.5039622187614441,
"alphanum_fraction": 0.5173296928405762,
"avg_line_length": 39.643333435058594,
"blob_id": "a5c814181affb8918b5eac2975afca5f0c4f92bc",
"content_id": "374ec1310c4f01b065be1040b8f7985baab76b3a",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 12493,
"license_type": "permissive",
"max_line_length": 193,
"num_lines": 300,
"path": "/coco_dataset_generator/gui/segment_bbox_only.py",
"repo_name": "TestaVuota/COCO-Style-Dataset-Generator-GUI",
"src_encoding": "UTF-8",
"text": "from matplotlib import pyplot as plt\r\nfrom matplotlib.collections import PatchCollection\r\nfrom matplotlib.patches import Polygon\r\nfrom matplotlib.widgets import RadioButtons\r\nfrom matplotlib.path import Path\r\nimport matplotlib.patches as patches\r\n\r\nfrom PIL import Image\r\nimport matplotlib\r\n\r\nimport argparse\r\nimport numpy as np\r\nimport glob\r\nimport os\r\nfrom matplotlib.widgets import RectangleSelector, Button, RadioButtons\r\nfrom matplotlib.lines import Line2D\r\nfrom matplotlib.artist import Artist\r\n\r\nfrom .poly_editor import PolygonInteractor\r\n\r\nfrom matplotlib.mlab import dist_point_to_segment\r\nimport sys\r\nfrom ..utils.visualize_dataset import return_info\r\n\r\nimport json\r\n\r\nfrom collections import defaultdict\r\n\r\n\r\ndef read_JSON_file(f):\r\n \r\n with open(f, 'r') as g:\r\n d = json.loads(g.read())\r\n \r\n img_paths = [x['file_name'] for x in d['images']]\r\n \r\n rects = [{'bbox': x['segmentation'][0], 'class': x['category_id'], 'image_id': x['image_id']} for x in d['annotations']]\r\n \r\n annotations = defaultdict(list)\r\n \r\n for rect in rects:\r\n r = rect['bbox']\r\n x0, y0 = min(r[0], r[2], r[4], r[6]), min(r[1], r[3], r[5], r[7])\r\n x1, y1 = max(r[0], r[2], r[4], r[6]), max(r[1], r[3], r[5], r[7])\r\n \r\n r = patches.Rectangle((x0,y0),x1-x0,y1-y0,linewidth=1,edgecolor='g',facecolor='g', alpha=0.4) \r\n \r\n annotations[img_paths[rect['image_id']]].append({'bbox': r, 'cls': d['classes'][rect['class']-1]})\r\n \r\n return d['classes'], img_paths, annotations\r\n\r\n\r\nclass COCO_dataset_generator(object): \r\n \r\n def __init__(self, fig, ax, img_dir, classes, model_path, json_file):\r\n \r\n self.RS = RectangleSelector(ax, self.line_select_callback,\r\n drawtype='box', useblit=True,\r\n button=[1, 3], # don't use middle button\r\n minspanx=5, minspany=5,\r\n spancoords='pixels',\r\n interactive=True) \r\n \r\n ax.set_yticklabels([])\r\n ax.set_xticklabels([])\r\n \r\n #self.classes, self.img_paths, _ = read_JSON_file(json_file)\r\n with open(classes, 'r') as f:\r\n self.classes, img_paths = sorted([x.strip().split(',')[0] for x in f.readlines()]), glob.glob(os.path.abspath(os.path.join(img_dir, '*.jpg')))\r\n plt.tight_layout()\r\n\r\n self.ax = ax\r\n self.fig = fig\r\n self.axradio = plt.axes([0.0, 0.0, 0.1, 1])\r\n self.radio = RadioButtons(self.axradio, self.classes)\r\n self.zoom_scale = 1.2\r\n self.zoom_id = self.fig.canvas.mpl_connect('scroll_event', self.zoom) \r\n self.keyboard_id = self.fig.canvas.mpl_connect('key_press_event', self.onkeyboard)\r\n self.selected_poly = False\r\n self.axsave = plt.axes([0.81, 0.05, 0.1, 0.05])\r\n self.b_save = Button(self.axsave, 'Save')\r\n self.b_save.on_clicked(self.save) \r\n self.objects, self.existing_patches, self.existing_rects = [], [], []\r\n self.num_pred = 0\r\n if json_file is None:\r\n self.images, self.annotations = [], [] \r\n self.index = 0\r\n self.ann_id = 0\r\n else:\r\n with open(json_file, 'r') as g:\r\n d = json.loads(g.read())\r\n self.images, self.annotations = d['images'], d['annotations']\r\n self.index = len(self.images)\r\n self.ann_id = len(self.annotations)\r\n prev_files = [x['file_name'] for x in self.images]\r\n for i, f in enumerate(img_paths):\r\n im = Image.open(f)\r\n width, height = im.size\r\n dic = {'file_name': f, 'id': self.index+i, 'height': height, 'width': width} \r\n if f not in prev_files:\r\n self.images.append(dic)\r\n else:\r\n self.index+=1\r\n image = plt.imread(self.images[self.index]['file_name'])\r\n 
self.ax.imshow(image, aspect='auto')\r\n\r\n if not args['no_feedback']:\r\n from mask_rcnn.get_json_config import get_demo_config \r\n from mask_rcnn import model as modellib\r\n from mask_rcnn.visualize_cv2 import random_colors\r\n \r\n self.config = get_demo_config(len(self.classes)-1, True)\r\n\r\n if 'config_path' in args:\r\n self.config.from_json(args['config_path'])\r\n \r\n plt.connect('draw_event', self.persist)\r\n \r\n # Create model object in inference mode.\r\n self.model = modellib.MaskRCNN(mode=\"inference\", model_dir='/'.join(args['weights_path'].split('/')[:-2]), config=self.config)\r\n\r\n # Load weights trained on MS-COCO\r\n self.model.load_weights(args['weights_path'], by_name=True)\r\n \r\n r = self.model.detect([image], verbose=0)[0]\r\n \r\n # Number of instances\r\n N = r['rois'].shape[0]\r\n \r\n masks = r['masks']\r\n \r\n # Show area outside image boundaries.\r\n height, width = image.shape[:2]\r\n \r\n class_ids, scores, rois = r['class_ids'], r['scores'], r['rois'],\r\n \r\n for i in range(N):\r\n \r\n # Label\r\n class_id = class_ids[i]\r\n score = scores[i] if scores is not None else None\r\n label = self.classes[class_id-1]\r\n pat = patches.Rectangle((rois[i][1], rois[i][0]), rois[i][3]-rois[i][1], rois[i][2]-rois[i][0], linewidth=1, edgecolor='r',facecolor='r', alpha=0.4)\r\n rect = self.ax.add_patch(pat)\r\n \r\n self.objects.append(label)\r\n self.existing_patches.append(pat.get_bbox().get_points())\r\n self.existing_rects.append(pat)\r\n self.num_pred = len(self.objects)\r\n \r\n def line_select_callback(self, eclick, erelease):\r\n 'eclick and erelease are the press and release events'\r\n x1, y1 = eclick.xdata, eclick.ydata\r\n x2, y2 = erelease.xdata, erelease.ydata\r\n \r\n def zoom(self, event):\r\n \r\n if not event.inaxes:\r\n return\r\n cur_xlim = self.ax.get_xlim()\r\n cur_ylim = self.ax.get_ylim()\r\n\r\n xdata = event.xdata # get event x location\r\n ydata = event.ydata # get event y location\r\n\r\n if event.button == 'down':\r\n # deal with zoom in\r\n scale_factor = 1 / self.zoom_scale\r\n elif event.button == 'up':\r\n # deal with zoom out\r\n scale_factor = self.zoom_scale\r\n else:\r\n # deal with something that should never happen\r\n scale_factor = 1\r\n print (event.button)\r\n\r\n new_width = (cur_xlim[1] - cur_xlim[0]) * scale_factor\r\n new_height = (cur_ylim[1] - cur_ylim[0]) * scale_factor\r\n\r\n relx = (cur_xlim[1] - xdata)/(cur_xlim[1] - cur_xlim[0])\r\n rely = (cur_ylim[1] - ydata)/(cur_ylim[1] - cur_ylim[0])\r\n\r\n self.ax.set_xlim([xdata - new_width * (1-relx), xdata + new_width * (relx)])\r\n self.ax.set_ylim([ydata - new_height * (1-rely), ydata + new_height * (rely)])\r\n self.ax.figure.canvas.draw()\r\n\r\n def save(self, event):\r\n \r\n data = {'images':self.images[:self.index+1], 'annotations':self.annotations, 'categories':[], 'classes': self.classes}\r\n\r\n with open('output.json', 'w') as outfile:\r\n json.dump(data, outfile)\r\n \r\n def persist(self, event):\r\n if self.RS.active:\r\n self.RS.update()\r\n \r\n def onkeyboard(self, event):\r\n \r\n if not event.inaxes:\r\n return\r\n elif event.key == 'a':\r\n for i, ((xmin, ymin), (xmax, ymax)) in enumerate(self.existing_patches):\r\n if xmin<=event.xdata<=xmax and ymin<=event.ydata<=ymax:\r\n self.radio.set_active(self.classes.index(self.objects[i]))\r\n self.RS.set_active(True)\r\n self.rectangle = self.existing_rects[i]\r\n self.rectangle.set_visible(False)\r\n coords = self.rectangle.get_bbox().get_points()\r\n self.RS.extents = [coords[0][0], 
coords[1][0], coords[0][1], coords[1][1]]\r\n self.RS.to_draw.set_visible(True)\r\n self.fig.canvas.draw()\r\n self.existing_rects.pop(i)\r\n self.existing_patches.pop(i)\r\n self.objects.pop(i)\r\n fig.canvas.draw()\r\n break\r\n \r\n elif event.key == 'i':\r\n b = self.RS.extents # xmin, xmax, ymin, ymax\r\n b = [int(x) for x in b]\r\n if b[1]-b[0]>0 and b[3]-b[2]>0:\r\n poly = [b[0], b[2], b[0], b[3], b[1], b[3], b[1], b[2], b[0], b[2]]\r\n area = (b[1]-b[0])*(b[3]-b[2])\r\n bbox = [b[0], b[2], b[1], b[3]]\r\n dic2 = {'segmentation': [poly], 'area': area, 'iscrowd':0, 'image_id':self.index, 'bbox':bbox, 'category_id': self.classes.index(self.radio.value_selected)+1, 'id': self.ann_id}\r\n if dic2 not in self.annotations:\r\n self.annotations.append(dic2)\r\n self.ann_id+=1\r\n rect = patches.Rectangle((b[0],b[2]),b[1]-b[0],b[3]-b[2],linewidth=1,edgecolor='g',facecolor='g', alpha=0.4)\r\n self.ax.add_patch(rect)\r\n \r\n self.RS.set_active(False)\r\n \r\n self.fig.canvas.draw()\r\n self.RS.set_active(True)\r\n elif event.key in ['N', 'n']:\r\n self.ax.clear()\r\n self.index+=1\r\n if (len(self.objects)==self.num_pred):\r\n self.images.pop(self.index-1)\r\n self.index-=1\r\n if self.index==len(self.images):\r\n exit()\r\n image = plt.imread(self.images[self.index]['file_name'])\r\n self.ax.imshow(image)\r\n self.ax.set_yticklabels([])\r\n self.ax.set_xticklabels([])\r\n r = self.model.detect([image], verbose=0)[0]\r\n \r\n # Number of instances\r\n N = r['rois'].shape[0]\r\n \r\n masks = r['masks']\r\n \r\n # Show area outside image boundaries.\r\n height, width = image.shape[:2]\r\n \r\n class_ids, scores, rois = r['class_ids'], r['scores'], r['rois'],\r\n self.existing_rects, self.existing_patches, self.objects = [], [], []\r\n for i in range(N):\r\n \r\n # Label\r\n class_id = class_ids[i]\r\n score = scores[i] if scores is not None else None\r\n label = self.classes[class_id-1]\r\n pat = patches.Rectangle((rois[i][1], rois[i][0]), rois[i][3]-rois[i][1], rois[i][2]-rois[i][0], linewidth=1, edgecolor='r',facecolor='r', alpha=0.4)\r\n rect = self.ax.add_patch(pat)\r\n \r\n self.objects.append(label)\r\n\r\n self.existing_patches.append(pat.get_bbox().get_points())\r\n self.existing_rects.append(pat)\r\n \r\n self.num_pred = len(self.objects)\r\n self.fig.canvas.draw()\r\n \r\n elif event.key in ['q','Q']:\r\n exit()\r\n \r\nif __name__=='__main__':\r\n \r\n ap = argparse.ArgumentParser()\r\n ap.add_argument(\"-i\", \"--image_file\", required=True, help=\"Path to the images dir\")\r\n ap.add_argument(\"-c\", \"--classes_file\", required=True, help=\"Path to classes file\") \r\n ap.add_argument(\"-j\", \"--json_file\", required=False, help=\"Path of JSON file to append dataset to\", default=None)\r\n ap.add_argument(\"--save_csv\", required=False, action=\"store_true\", help=\"Choose option to save dataset as CSV file annotations.csv\")\r\n ap.add_argument('-w', \"--weights_path\", default=None, help=\"Path to Mask RCNN checkpoint save file\")\r\n ap.add_argument('-x', \"--config_path\", default=None, help=\"Path to Mask RCNN JSON config file\")\r\n args = vars(ap.parse_args())\r\n \r\n args[\"no_feedback\"] = 'weights_path' not in args\r\n\r\n fig = plt.figure(figsize=(14, 14))\r\n ax = plt.gca()\r\n \r\n gen = COCO_dataset_generator(fig, ax, args['image_file'], args['classes_file'], args['weights_path'], args['json_file'])\r\n \r\n plt.subplots_adjust(bottom=0.2)\r\n plt.show()\r\n"
},
{
"alpha_fraction": 0.5190947651863098,
"alphanum_fraction": 0.5247524976730347,
"avg_line_length": 33.17073059082031,
"blob_id": "a9a62c3900d1e39c974d9486854590e8722c4e97",
"content_id": "a28e498c66d0fbd56148c3bc9ebea23d1081f91b",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1414,
"license_type": "permissive",
"max_line_length": 104,
"num_lines": 41,
"path": "/coco_dataset_generator/extras/split_json_file.py",
"repo_name": "TestaVuota/COCO-Style-Dataset-Generator-GUI",
"src_encoding": "UTF-8",
"text": "import json\nimport argparse\n\ndef contains(splits):\n# Returns 1D binary map of images to take such that access is O(1)\n MAX, MIN = max([int(x.split('-')[-1]) for x in splits]), min([int(x.split('-')[0]) for x in splits])\n A = [0 for _ in range(MAX-MIN+1)]\n for sp in splits:\n if '-' in sp:\n beg, end = [int(x) for x in sp.split('-')] \n else:\n beg = end = int(sp)\n for idx in range(beg-MIN, end+1-MIN):\n print (idx)\n A[idx] = 1\n\n return A, MIN, MAX\n\nif __name__=='__main__':\n \n ap = argparse.ArgumentParser()\n ap.add_argument('json', help='Path to JSON dataset file')\n ap.add_argument('split', nargs='+', help='Dataset split for splitting')\n ap.add_argument('--out', help='Path to output JSON file', default='cut_dataset.json')\n args = ap.parse_args()\n \n with open(args.json, 'r') as f:\n obj = json.load(f)\n \n A, MIN, MAX = contains(args.split)\n imgs, anns = [], []\n for img in obj['images']:\n if img['id'] >= MIN and img['id'] <= MAX:\n if A[img['id']-MIN]:\n ANN = [ann for ann in obj['annotations'] if ann['image_id']==img['id']]\n anns.extend(ANN)\n imgs.append(img)\n\n with open(args.out, 'w') as f:\n \n json.dump({'images': imgs, 'annotations': anns, 'classes': obj['classes'], 'categories': []}, f)\n \n"
},
{
"alpha_fraction": 0.5529412031173706,
"alphanum_fraction": 0.5568627715110779,
"avg_line_length": 25.789474487304688,
"blob_id": "6260b2be99857b954e1b2628225a55468043fd6c",
"content_id": "d75a41beeae0aa6cfba5fbb3da65abce0822c803",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 510,
"license_type": "permissive",
"max_line_length": 76,
"num_lines": 19,
"path": "/coco_dataset_generator/extras/create_binary_dataset.py",
"repo_name": "TestaVuota/COCO-Style-Dataset-Generator-GUI",
"src_encoding": "UTF-8",
"text": "import argparse\nimport json\n\nif __name__=='__main__':\n \n ap = argparse.ArgumentParser()\n ap.add_argument('json', help='Path to original multi-class JSON file')\n args = ap.parse_args()\n\n with open(args.json, 'r') as f: \n obj = json.load(f)\n\n obj['classes'] = ['object']\n\n for idx in range(len(obj['annotations'])): \n obj['annotations'][idx]['category_id'] = 1\n\n with open('.'.join(args.json.split('.')[:-1])+'_binary.json', 'w') as f:\n json.dump(obj, f)\n\n"
},
{
"alpha_fraction": 0.6625698208808899,
"alphanum_fraction": 0.6648044586181641,
"avg_line_length": 23.86111068725586,
"blob_id": "9e81d6243236b41c0dbab301e80d9f4026da6df0",
"content_id": "292966aa4fc3437b35239bf0abf5c4613973237a",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 895,
"license_type": "permissive",
"max_line_length": 90,
"num_lines": 36,
"path": "/setup.py",
"repo_name": "TestaVuota/COCO-Style-Dataset-Generator-GUI",
"src_encoding": "UTF-8",
"text": "from setuptools import setup, find_packages\nimport os\nimport sys\nimport subprocess\n\ndef install(package):\n subprocess.call([sys.executable, \"-m\", \"pip\", \"install\", package])\n\nif os.getenv('MASK_RCNN'):\n fl = 'requirements_maskrcnn.txt'\nelse:\n fl = 'requirements.txt'\n\nwith open(fl, 'r') as f:\n packs = [x.strip() for x in f.readlines()]\n \nfor p in packs:\n install(p)\t\n\ndependencies = []\n\t\npackages = [\n package for package in find_packages() if package.startswith('coco_dataset_generator')\n]\n\nsetup(name='coco_dataset_generator',\n version='1.0',\n description='COCO Style Dataset Generator GUI',\n author='hanskrupakar',\n author_email='[email protected]',\n license='Open-Source',\n url='https://www.github.com/hanskrupakar/COCO-Style-Dataset-Generator-GUI',\n packages=packages,\n install_requires=dependencies,\n test_suite='unit_tests',\n)\n"
},
{
"alpha_fraction": 0.525444507598877,
"alphanum_fraction": 0.529736340045929,
"avg_line_length": 34.456520080566406,
"blob_id": "60729effb4f7e1323aaed0a61bd3ff70924c7d1d",
"content_id": "82f9df3629809f33b4ea454b452b7ebeb1f6c09d",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3262,
"license_type": "permissive",
"max_line_length": 112,
"num_lines": 92,
"path": "/coco_dataset_generator/utils/delete_images.py",
"repo_name": "TestaVuota/COCO-Style-Dataset-Generator-GUI",
"src_encoding": "UTF-8",
"text": "import argparse\nimport json\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport matplotlib.patches as patches\n\ndef press(event):\n \n if event.key.lower() == 'q':\n exit()\n \n if event.key.lower() == 'd':\n for ann in anns:\n new_anns.remove(ann)\n print ('Deleted image:', new_imgs[ptr][\"file_name\"], \"from the dataset!\")\n del new_imgs[ptr]\n \n if event.key.lower() == 'j':\n print (\"Saving dataset to file! Please wait!\")\n \n # Account for deletions by changing label space\n \n id_list = [int(img['id']) for img in new_imgs]\n ann_list = [int(ann['id']) for ann in new_anns]\n \n full_img, full_ann = [x for x in range(len(id_list))], [x for x in range(len(ann_list))]\n \n free_img, free_ann = list(set(full_img)-set(id_list)), list(set(full_ann)-set(ann_list))\n change_img, change_ann = list(set(id_list)-set(full_img)), list(set(ann_list)-set(full_ann))\n \n for f, c in zip(free_img, change_img):\n for img in new_imgs:\n if img['id']==c:\n img['id']=f\n for ann in new_anns:\n if ann['image_id']==c:\n ann['image_id']=f\n \n for f, c in zip(free_ann, change_ann):\n for ann in new_anns:\n if ann['id']==c:\n ann['id']=f\n \n data = {'images': new_imgs, 'annotations': new_anns, 'categories':[], 'classes':classes}\n with open('deleted_dataset.json', 'w') as f:\n json.dump(data, f)\n print (\"Dataset saved!\")\n else: \n plt.close()\n \n \nif __name__=='__main__':\n \n ap = argparse.ArgumentParser()\n ap.add_argument('--json_file', required=True, help='Path to JSON file')\n args = ap.parse_args()\n \n with open(args.json_file, 'r') as f:\n obj = json.load(f)\n \n images, annotations = obj[\"images\"], obj[\"annotations\"]\n classes = obj[\"classes\"]\n \n print (\"Total number of images in dataset: \", len(images))\n \n new_imgs, new_anns = images, annotations\n \n for ptr, img in enumerate(images):\n \n fig, ax = plt.subplots()\n plt.tick_params(axis='both', which='both', bottom='off', top='off', \n labelbottom='off', right='off', left='off', labelleft='off')\n \n fig.canvas.mpl_connect('key_press_event', press)\n ax.set_title('d - Delete image; j - Save dataset; q - Exit; others - Next image')\n \n anns = [ann for ann in annotations if ann[\"image_id\"]==img[\"id\"]]\n image = plt.imread(img[\"file_name\"])\n plt.imshow(image)\n for ann in anns:\n s = [int(x) for x in ann['bbox']]\n rect = patches.Rectangle((s[0],s[1]),s[2]-s[0],s[3]-s[1],linewidth=1,edgecolor='r',facecolor='none')\n ax = plt.gca()\n ax.add_patch(rect)\n plt.text(s[0]-10, s[1]+10, classes[ann['category_id']-1])\n plt.show()\n \n print (\"Saving dataset to file! Please wait!\")\n data = {'images': new_imgs, 'annotations': new_anns, 'categories':[], 'classes':classes}\n with open('deleted_dataset.json', 'w') as f:\n json.dump(data, f)\n print (\"Dataset saved!\")\n"
},
{
"alpha_fraction": 0.46292275190353394,
"alphanum_fraction": 0.4840210974216461,
"avg_line_length": 35.21348190307617,
"blob_id": "ea4795b099dc8b2964c5662f891901109eb0fa61",
"content_id": "52bbe4691d1075315dc9d98ca9d809d8855da34b",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3223,
"license_type": "permissive",
"max_line_length": 135,
"num_lines": 89,
"path": "/coco_dataset_generator/utils/create_json_file.py",
"repo_name": "TestaVuota/COCO-Style-Dataset-Generator-GUI",
"src_encoding": "UTF-8",
"text": "#coding: utf8\nimport xml.etree.cElementTree as ET\nimport glob\nimport argparse\nimport os\nimport numpy as np\nimport json\nimport unicodedata\n\nfrom PIL import Image\nfrom ..gui.segment import COCO_dataset_generator as cocogen\n\nif __name__=='__main__':\n\n ap = argparse.ArgumentParser()\n ap.add_argument(\"-i\", \"--image_dir\", required=True, help=\"Path to the image dir\")\n ap.add_argument(\"-o\", \"--file_path\", required=True, help=\"Path of output file\")\n ap.add_argument(\"-c\", \"--class_file\", required=True, help=\"Path of file with output classes\")\n ap.add_argument(\"-t\", \"--type\", required=True, help=\"Type of the image files\")\n args = vars(ap.parse_args())\n\n with open(args['class_file'], 'r') as f:\n classes = sorted([unicodedata.normalize('NFKD', x).strip() for x in f.readlines()])\n \n images, anns = [], []\n\t\n img_paths = [x for x in glob.glob(os.path.join(args['image_dir'], '*.'+args['type'])) if os.path.exists(x[:-3]+'txt')]\n\t\n num_imgs = len(img_paths)\n i=0\n for f in sorted(img_paths):\n \n img = Image.open(f)\n width, height = img.size\n dic = {'file_name': f, 'id': i, 'height': height, 'width': width}\n images.append(dic)\n\n ann_index = 0\n \n for i, f in enumerate(sorted(glob.glob(os.path.join(os.path.abspath(args['image_dir']), '*.txt')))):\n ptr = 0\n with open(f, 'r', encoding='utf-8-sig') as g:\n s = g.read()\n s = s.split('\\n')[2:-1]\n \n width, height = [int(x) for x in s[0].split(' ')]\n s = s[2:]\n print (s)\n while(ptr<len(s)):\n \n cat_id = classes.index(s[ptr].encode('utf-8').decode('utf-8-sig'))+1\n area = float(s[ptr+1])\n poly = [[float(x) for x in s[ptr+2].split(' ')[:-1]]]\n \n print (cat_id, area, poly)\n\n if len(s)>ptr+3 and s[ptr+3] != '':\n ind = ptr + 3\n while (ind<len(s) and s[ind]!=''):\n poly.append([float(x) for x in s[ind].split(' ')[:-1]])\n ind+=1\n ptr = ind-3\n \n x1, x2, y1, y2 = None, None, None, None\n for p in poly:\n points = np.reshape(np.array(p), (int(len(p)/2), 2))\n if x1 is None: \n x1, y1 = points.min(0)\n x2, y2 = points.max(0)\n else:\n if points.min(0)[0]<x1:\n x1 = points.min(0)[0]\n if points.min(0)[1]<y1:\n y1 = points.min(0)[1]\n if points.max(0)[0]>x2:\n x2 = points.max(0)[0]\n if points.max(0)[1]>y2:\n y2 = points.max(0)[1]\n \n bbox = [x2, y2, x1, y1]\n dic2 = {'segmentation': poly, 'area': area, 'iscrowd':0, 'image_id':i, 'bbox':bbox, 'category_id': cat_id, 'id': ann_index}\n ann_index+=1\n ptr+=4\n anns.append(dic2)\n \n data = {'images':images, 'annotations':anns, 'categories':[], 'classes': classes}\n\n with open(args['file_path']+'.json', 'w') as outfile:\n json.dump(data, outfile)\n"
}
] | 16 |
WeBuildHub/PyWAS | https://github.com/WeBuildHub/PyWAS | eb9b3d78a7e5fd2c1e38b7f4f4786916b939e91b | 02c00fb8c3ae864deee2e5af94e1b941c30fa251 | 2bd6bc4844691b5341e47b3cbf7e62b94fffc216 | refs/heads/master | 2022-07-16T06:15:01.627326 | 2020-05-10T23:38:10 | 2020-05-10T23:38:10 | 262,653,108 | 2 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.681382954120636,
"alphanum_fraction": 0.6882978677749634,
"avg_line_length": 48.47368240356445,
"blob_id": "f70b53b604df8c7c4d7feb4d6cf7a3a3d872179c",
"content_id": "4e7e0f17ce518613d78887866923234e078c19a4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1923,
"license_type": "no_license",
"max_line_length": 140,
"num_lines": 38,
"path": "/SECURITY.md",
"repo_name": "WeBuildHub/PyWAS",
"src_encoding": "UTF-8",
"text": "# Security Policy\n\n## Versions Supportées\n\nPour le moment, seule la dernière version en ligne sera patchée pour faille de sécurité, car la version 1.0 de Pya n'est pas encore sortie.\nLe tableau ci-dessous sera mis à jour pour chaque faille de sécurité corrigée à partir de la 1.0, en attendant les failles de sécurité\nnon corrigées seront affichée dans le wiki, pour chaque version à partir de la 0.1, pour le moment, fiez vous au\ntitre ci-dessous qui affichera la dernière version sur laquelle les patchs de sécurités sont suivis et appliqués.\n#0.0.007#\nLes versions non patchées plus récentes que la dernière patchée seront marquées ici: \n\n| Version | Nom du patch le plus récent supporté |\n| ------ | ------------------ |\n| | :: |\n| | :: |\n| | :: |\n| | :: |\n| | :: |\n| | :: |\n| | :: |\n| | :: |\n| | :: |\n| | :: |\n| | :: |\n| | :: |\n\nPour plus d'informations sur les patchs, veuillez consulter le wiki les déclinant.\n\n## Reporter une vulnerabilité\n\nSachant que le projet que WeBuild développe est open-source, gratuit et libre d'accès au code source, je (directeur de WeBuild)\nme doute bien que des failles de sécurité sont présentes dans les projets que je vous partage avec ce compte.\nAvant de me rapporter quoique ce soit comme faille de sécurité, veuillez vérifier dans le wiki si elle existe.\nJe suppose que vous comprenez: mon équipe et moi-même n'allons pas corriger quelque chose qui est déjà en cours, ce serait ridicule de \ntout recommencer.\nJe vous prie de bien vouloir me parler de ces failles de sécurité par e-mail, ou sur Twitter car je ne garantis pas la diffusion de messages\npar Instagram. pour me contacter: [Contacter le directeur par E-Mail](mailto:[email protected]?subject=[GitHub]%20Security) \n[ou par Twitter en message privé](https://twitter.com/webuildhub)\n"
},
{
"alpha_fraction": 0.6555555462837219,
"alphanum_fraction": 0.656410276889801,
"avg_line_length": 23.893617630004883,
"blob_id": "80c7e8813c93dee2624caf8cf0a258f26570b1bf",
"content_id": "444f33ebb76f666670ad274d387c3abbeb30351f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1172,
"license_type": "no_license",
"max_line_length": 93,
"num_lines": 47,
"path": "/PyApp Sys/pya.py",
"repo_name": "WeBuildHub/PyWAS",
"src_encoding": "UTF-8",
"text": "\"\"\"PyApps Systems for modders - recreez votre avenir\"\"\"\n# -*- coding: UTF-8 -*-\nimport os, sys\n\ndef out(caractere_a_afficher):\n\t\"\"\"Imprime le resultat de ce que vous entrez en parametre\"\"\"\n\tprint(caractere_a_afficher)\n\tpass\ndef log(stri):\n\t\"\"\"Enregistre un Log dans le fichier Log\"\"\"\n\tlg = open(\"Logs.log\", \"a\")\n\tlg.write(\"...___...\\n\")\n\tlg.write(stri)\n\tlg.close\n\tpass\ndef start(log):\n\t\"\"\"Inscrit le depart du programme dans le fichier Log personnalisé\"\"\"\n\tlg = open(\"Logs.log\", \"w\")\n\tlg.write(\"Log of\")\n\tpass\ndef ins(nom, repertoire):\n\t\"\"\"Instore un nouveau repertoire dans le répertoire courant grace au module os de Windows\"\"\"\n\tos.system('mkdir ', nom,\" \", repertoire)\n\tpass\n\ndef fun():\n\t\"\"\"Pour le fun\"\"\"\n\tprint('C\\'EST FUN NON ?')\n\tpass\ndef ChangeStats(f, t, c):\n\t\"\"\"Ouvre un fichier avec la methode choisie et le ferme automatiquement.\"\"\"\n\tif t == \"lire\":\n\t\tfile = open(f, \"r\")\n\t\tpass\n\tif t == \"re-ecrit\":\n\t\tfile = open(f, \"w\")\n\t\tfile.write(c)\n\tif t == \"rajouter\":\n\t\tfile = open(f, \"a\")\n\t\tfile.write(c)\n\tfile.close()\n\tpass\ndef userIn(message):\n\t\"\"\"Permet a l'utilisateur de saisir des donnees renvoyees par la fonction\"\"\"\n\tin = raw_input(message)\n\treturn in\n\tpass\n"
},
{
"alpha_fraction": 0.7741407752037048,
"alphanum_fraction": 0.7872340679168701,
"avg_line_length": 121.19999694824219,
"blob_id": "fa2b276b2f88c68464a3d4260d6ac12a4a444266",
"content_id": "33b557fa2fa301484aae05d2392d38c1e3f20108",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 627,
"license_type": "no_license",
"max_line_length": 352,
"num_lines": 5,
"path": "/README.md",
"repo_name": "WeBuildHub/PyWAS",
"src_encoding": "UTF-8",
"text": "# PyWAS\nWebApps Systems Python: Pya <br>\n*Développé par des français* <br>\n**Pya est une API créée avec Python permettant de créer une application de signature WebApps Systems Python en ligne de commande facilement, rapidement et sans aucun problème.** <br>\n**ATTENTION:**Les prochaines versions de Pya sont désormais stockées dans la branche \"1.0-\", et plus \"master\". La première version officielle sera caractérisée par le dépassement de 1000 lignes de codes pour l'API, et une nouvelle branche sera alors créée pour accueillir les versions de Pya jusqu'à la 2.0. Un guide d'utilisation sera fourni avec Pya.\n"
}
] | 3 |
sarath2004/Logging | https://github.com/sarath2004/Logging | 1b01993f1288835e272f78c6adf582bac5c040f1 | dc17f1576c66541de85381d587dcae828c9f28c5 | 39633bd53205401ea68bb029d7bcf95fa521b85c | refs/heads/master | 2023-01-06T06:12:03.944213 | 2020-10-31T06:25:28 | 2020-10-31T06:25:28 | 308,718,661 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.5759717226028442,
"alphanum_fraction": 0.5865724086761475,
"avg_line_length": 21.639999389648438,
"blob_id": "7842f35fc3152e29b632976207f8696011fd2efd",
"content_id": "06e10bf7135e54712765ef2c254d824da7a95c98",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 566,
"license_type": "no_license",
"max_line_length": 89,
"num_lines": 25,
"path": "/logger 2.py",
"repo_name": "sarath2004/Logging",
"src_encoding": "UTF-8",
"text": "import logging\n\nlogging.basicConfig(filename=\"Car Info.log\", level=logging.INFO)\n\n\nclass Car:\n def __init__(self, speed, condition):\n self.speed = speed\n self.condition = condition\n\n def carInfo(self):\n logging.info(\n \"Your current speed is {}km / hr and the condition of your car is {}\".format(\n self.speed, self.condition\n )\n )\n\n def argsReturn(self):\n for key in car1.__dict__.keys():\n print(key)\n\n\ncar1 = Car(speed=60, condition=\"Good\")\ncar1.carInfo()\ncar1.argsReturn()\n"
},
{
"alpha_fraction": 0.5082873106002808,
"alphanum_fraction": 0.5230202674865723,
"avg_line_length": 26.846153259277344,
"blob_id": "9a38f13f780c4161481c2d60f3b66bacbe485011",
"content_id": "601388b304bd8f79bca4ce5a92b94ae4a62c8021",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1086,
"license_type": "no_license",
"max_line_length": 76,
"num_lines": 39,
"path": "/logger 3.py",
"repo_name": "sarath2004/Logging",
"src_encoding": "UTF-8",
"text": "import inspect\nimport logging\n\nlogging.basicConfig(filename=\"Parameters Specified.log\", level=logging.INFO)\n\n\nclass Car:\n def __init__(self, **kwargs):\n for key, value in kwargs.items():\n setattr(Car, key, value)\n\n def getKeys(self):\n keys = []\n for i in inspect.getmembers(car1):\n if i[0].startswith(\"_\") is False:\n if inspect.ismethod(i[1]) is False:\n key, _ = i\n keys.append(key)\n return keys\n\n def getValues(self):\n values = []\n for i in inspect.getmembers(car1):\n if i[0].startswith(\"_\") is False:\n if inspect.ismethod(i[1]) is False:\n _, value = i\n values.append(value)\n return values\n\n def carInfo(self):\n logging.info(\n \"The Parameters Specified For Your {} Are {} And {}\".format(\n car1.getValues()[0], car1.getKeys()[2], car1.getKeys()[1]\n )\n )\n\n\ncar1 = Car(speed=60, condition=\"Good\", company=\"Nissan Altima\")\ncar1.carInfo()\n"
}
] | 2 |
mhoegger/BingImages | https://github.com/mhoegger/BingImages | 9ddf045370c65c9c60bc6da1266122c8ce4a9f84 | 2be51ddf41e256df66a52b3cf576a0929a3e023f | f15d52bd9cf4c67cd2b7a219df0e253816ac71c5 | refs/heads/master | 2020-12-28T10:57:24.743174 | 2020-02-04T22:07:23 | 2020-02-04T22:07:23 | 238,300,797 | 0 | 0 | MIT | 2020-02-04T20:35:22 | 2019-10-16T05:03:39 | 2018-05-06T20:01:09 | null | [
{
"alpha_fraction": 0.8979591727256775,
"alphanum_fraction": 0.8979591727256775,
"avg_line_length": 49,
"blob_id": "a3340f8b8491a7577be91789ed185cbfa667b552",
"content_id": "240101c19d98a919350046c25ebd57262d2a6665",
"detected_licenses": [
"MIT",
"Python-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 49,
"license_type": "permissive",
"max_line_length": 49,
"num_lines": 1,
"path": "/BingImages/__init__.py",
"repo_name": "mhoegger/BingImages",
"src_encoding": "UTF-8",
"text": "from BingImages.bing_downloader import BingImages"
}
] | 1 |
KyraZzz/Yelp_Api_business_search | https://github.com/KyraZzz/Yelp_Api_business_search | 80386af442ccb917e48feccfaa9a0b253941c9f6 | e190e867434e46897e5233d236804b34aa4ab98d | f430da8fc1df381aacb76451b012a1219290558f | refs/heads/master | 2023-02-21T15:37:34.203651 | 2021-01-23T20:37:45 | 2021-01-23T20:37:45 | 309,657,224 | 1 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.7316498756408691,
"alphanum_fraction": 0.7419216632843018,
"avg_line_length": 53.96470642089844,
"blob_id": "35494cd646de9c58b6ecfbbf43917a1d3e388ea7",
"content_id": "f56ea0249a085669700eca26e35e26874e9a4433",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 4673,
"license_type": "no_license",
"max_line_length": 496,
"num_lines": 85,
"path": "/README.md",
"repo_name": "KyraZzz/Yelp_Api_business_search",
"src_encoding": "UTF-8",
"text": "# Yelp_api_business\nFind the information of business using the Yelp API according to a location.\n\n### Idea:\nIt is great if we can hang out with friends for a Sunday brunch or dinner. However, I do not want to spend a lot of time searching for the restaurants on Tripadvisor. So I would like to create a search program to filter the list of restaurants in Cambridge, Cambridgeshire, the UK which has a high review rate.\n\n# What is an API?\n**API** stands for application programming interface, it enables different applications to communicate and interact with each other. Think about the google map you get from the Airbnb app whenever you are booking a stay, google map has provided an API to be used in the Airbnb app.\n\n# Yelp Fusion\n[Link for Yelp Fusion Website](https://www.yelp.com/developers)\n**Yelp** provides various API to connect customers and businesses. It holds a database which stores business around the world and customers can utilise the APIs to interact with **Yelp's database** and extract information about businesses such as `name`, `review_count`, `categories`, `rating`, `price`, `location` etc.\n\n# Aim:\n* Utilise Yelp API to do a business search\n\n# Steps:\n1. Create an App on Yelp's Developers site to obtain a private API key. **Note that the private API key has to be hidden if you want to publish on an opensource platform like Github**(I will mention the steps to hide the key in the later sections)\n2. Authenticate API calls with the API Key. There are several rules in **Authentication** which we need to take care of(read the Authentication documentation for more details).\n3. Start interacting with the API to do a business search.\n\n## Step 1: Create an App on Yelp's Developers site to obtain a private API key\nAfter creating an APP, you will get a **client ID** and an **API key**.\n\n``` python\nimport requests\nimport config # hide the API key to ensure the safety\n```\nCreate a python file called `config.py` to store your **API Key**, then import the file into the main python file (in my case, `app.py`)\n\nGo to the `business endpoints` section on **Yelp fusion**, copy down the URL(https://api.yelp.com/v3/businesses/search) for a business search request. For later usage, I created a new variable to store the URL.\n``` python\nurl = \"https://api.yelp.com/v3/businesses/search\"\n```\n\n## Step 2: Authenticate API calls with the API Key\nWhen we are requesting a visit to the API, it will authenticate our identity and privilege. According to the [Authentication documentation](https://www.yelp.com/developers/documentation/v3/business_search), The `requests.get()` function requires at least 3 parameters: a URL, a header and a list of filtering arguments. For the filtering argument list, a location or the equivalent latitude and longitude are required. This can help **Yelp API** to locate the list of business in the target area.\n\n``` python\nheaders = {\n \"Authorization\": \"Bearer \" + config.api_key\n}\nparams = {\n \"term\": \"food\",\n \"location\": \"Cambridge\"\n}\nresponse = requests.get(url, headers=headers, params=params)\n```\n\n## 3. Start interacting with the API to do a business search.\nLet us have a look at the response:\n```python\ntext = response.text # gives us a string object\nbus_dict = response.json() # gives us a dictionary object\n```\nSince we want to do a business search, the `response.json()` is much more helpful than the previous one. 
So what does the `bus_dict` store?\n``` python\nprint(bus_dict.keys())\ndict_keys(['businesses', 'total', 'region'])\n\n# print(bus_dict[\"businesses\"]) this piece of codes gives us a list of dictionaries which stores the detailed information for each business.\n\nprint(bus_dict[\"total\"])\n494\n\nprint(bus_dict[\"region\"])\n{'center': {'longitude': 0.1146697998046875, 'latitude': 52.20419549090858}}\n```\nHence, we only care about the `bus_dict[\"businesses\"]` section of the dictionary.\n\n``` python\nbusinesses = response.json()[\"businesses\"]\nhigh_rate_bus = [item[\"name\"]\n for item in businesses if item[\"rating\"] > 4]\nprint(high_rate_bus)\n```\nWe use a `list comprehension` technique to compress the length of the code: `[expression for an item in list if ...]`. Here we want to search for the name of the restaurants which has a `rating > 4` (indicates a fair restaurant).\n\nHere is our result:\n```\n['Pint Shop', 'Noodles Plus', 'The Ivy Cambridge Brasserie', 'MillWorks', 'The Trailer of Life', 'Bread and Meat', 'Honest Burgers Cambridge', 'Bibimbap House', 'The Senate', 'Butch Annies', 'Golden House']\n```\n\n# Extension ideas:\nThere are more things we can do for interaction with APIs, what about creating a react app to set up our self-owned review system! More to come...\n\n"
},
{
"alpha_fraction": 0.6452241539955139,
"alphanum_fraction": 0.6471734642982483,
"avg_line_length": 31.0625,
"blob_id": "a9519f3ec34cb1492f923cf4d54ce0b45cf82fbf",
"content_id": "4857b0072d52bc5093f4e7d6edbef3dc38f1a78a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1026,
"license_type": "no_license",
"max_line_length": 97,
"num_lines": 32,
"path": "/app.py",
"repo_name": "KyraZzz/Yelp_Api_business_search",
"src_encoding": "UTF-8",
"text": "import requests\nimport config # hide the api key to ensure the safety\n\nurl = \"https://api.yelp.com/v3/businesses/search\"\nheaders = {\n \"Authorization\": \"Bearer \" + config.api_key\n}\nparams = {\n \"term\": \"food\",\n \"location\": \"Cambridge\"\n}\n\nresponse = requests.get(url, headers=headers, params=params)\n'''\ntext = response.text\nbus_dict = response.json()\nprint(bus_dict.keys())\nprint(bus_dict[\"businesses\"])\nprint(bus_dict[\"total\"])\nprint(bus_dict[\"region\"])\n'''\nbusinesses = response.json()[\"businesses\"]\nhigh_rate_bus = [item[\"name\"]\n for item in businesses if item[\"rating\"] > 4]\n# print(high_rate_bus)\nbus_dict = response.json()\nprint(bus_dict[\"businesses\"])\n# dict_keys(['id', 'alias', 'name', 'image_url', 'is_closed', 'url', 'review_count',\n# 'categories', 'rating', 'coordinates', 'transactions', 'price', 'location',\n# 'phone', 'display_phone', 'distance'])\n# all_names = [[item[\"name\"]for item in businesses], [item[\"categories\"] for item in businesses]]\n# print(all_names)\n"
}
] | 2 |
RedgeCastelino/Master_thesis_shared | https://github.com/RedgeCastelino/Master_thesis_shared | ce30be3906f6968859c93e508cbe4ace56de0237 | de2f4b229f3df4f219a08f3d4d7e8d3d40750c55 | 5425b9dbb15da20faaca1cfd98cebef8a5423216 | refs/heads/main | 2023-03-12T12:32:36.555096 | 2021-03-01T14:34:57 | 2021-03-01T14:34:57 | 343,441,415 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.8093023300170898,
"alphanum_fraction": 0.8093023300170898,
"avg_line_length": 52.75,
"blob_id": "15767edaa2c2f9dd14fd56aa401c1f83b3df028b",
"content_id": "ad66050506d541fddc47aa3b0fc8f7b3be82f2ee",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 215,
"license_type": "no_license",
"max_line_length": 102,
"num_lines": 4,
"path": "/devel/share/vehicle_control/cmake/vehicle_control-msg-paths.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "# generated from genmsg/cmake/pkg-msg-paths.cmake.develspace.in\n\nset(vehicle_control_MSG_INCLUDE_DIRS \"/home/student/Desktop/Redge_Thesis/vil/src/vehicle_control/msg\")\nset(vehicle_control_MSG_DEPENDENCIES std_msgs)\n"
},
{
"alpha_fraction": 0.8101266026496887,
"alphanum_fraction": 0.8164557218551636,
"avg_line_length": 38.5,
"blob_id": "803e6decf0d2f5093bfa060ec13a61a5f858fbe1",
"content_id": "ee491d34024f1d3d942aa39a825e5f6642a28149",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 316,
"license_type": "no_license",
"max_line_length": 128,
"num_lines": 8,
"path": "/build/osi3_bridge/CMakeFiles/_osi3_bridge_generate_messages_check_deps_TrafficUpdateMovingObject.dir/cmake_clean.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "file(REMOVE_RECURSE\n \"CMakeFiles/_osi3_bridge_generate_messages_check_deps_TrafficUpdateMovingObject\"\n)\n\n# Per-language clean rules from dependency scanning.\nforeach(lang )\n include(CMakeFiles/_osi3_bridge_generate_messages_check_deps_TrafficUpdateMovingObject.dir/cmake_clean_${lang}.cmake OPTIONAL)\nendforeach()\n"
},
{
"alpha_fraction": 0.5418449640274048,
"alphanum_fraction": 0.5596440434455872,
"avg_line_length": 44.373016357421875,
"blob_id": "b300e33cde8d5efa79a83b9b14348382ee69ab64",
"content_id": "605c5f52e2322d20a744f3f0078fd407cb944b0f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 11686,
"license_type": "no_license",
"max_line_length": 228,
"num_lines": 252,
"path": "/src/fusion/src/fusion_function.py",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Sun Dec 13 09:30:05 2020\r\n\r\n@author: Redge\r\n\"\"\"\r\n\r\nimport numpy as np\r\nimport rospy\r\nimport math\r\nimport message_filters\r\nimport tf\r\nfrom object_list.msg import ObjectList, ObjectsList\r\nfrom osi3_bridge.msg import GroundTruthMovingObjects, TrafficUpdateMovingObject\r\n\r\nfrom scipy.spatial import distance as di\r\nfrom scipy.stats import chi2\r\nfrom scipy.linalg import sqrtm\r\nfrom object_list.msg import ObjectList, ObjectsList\r\nfrom osi3_bridge.msg import GroundTruthMovingObjects, TrafficUpdateMovingObject\r\n\r\n# import function\r\n#import ClassAssociate\r\n#from ObjectAssociation import feature_select\r\n\r\n\r\ndef temp_alignment(track,egoveh):\r\n \"function to perform temporal alignment/prediction of objects_list\"\r\n \"Returns objects list with state vector predicted to current time\"\r\n #global now\r\n now = rospy.Time.now()\r\n\r\n for i, obj in enumerate(track.obj_list):\r\n\r\n t = float(now.to_sec()) - float(obj.time)\r\n \r\n if obj.geometric.ax <= 0.5:\r\n obj.geometric.ax = 0\r\n if obj.geometric.ay <= 10.5:\r\n obj.geometric.ay = 0\r\n yaw = egoveh.yawrate * t\r\n \r\n \r\n state = np.array([[float(obj.geometric.x)], [float(obj.geometric.vx)], [float(obj.geometric.ax)], [float(obj.geometric.y)], [float(obj.geometric.vy)],[float(obj.geometric.ay)]])\r\n\r\n a = np.array([[np.cos(yaw), t * np.cos(yaw), 0 * t * t * np.cos(yaw) / 2, np.sin(yaw), t * np.sin(yaw),\r\n 0 * t * t * np.sin(yaw) / 2],\r\n [0, np.cos(yaw), 0 * t * np.cos(yaw), 0, np.sin(yaw), 0 * t * np.sin(yaw)],\r\n [0, 0, np.cos(yaw), 0, 0, np.sin(yaw)],\r\n [-np.sin(yaw), -t * np.sin(yaw), 0 * -t * t * np.sin(yaw) / 2, np.cos(yaw), t * np.cos(yaw),\r\n 0 * t * t * np.cos(yaw) / 2],\r\n [0, -np.sin(yaw), -t * np.sin(yaw), 0, np.cos(yaw), t * np.cos(yaw)],\r\n [0, 0, -np.sin(yaw), 0, 0, np.cos(yaw)]])\r\n u = np.array([[egoveh.vel.x], [egoveh.acc.x], [egoveh.vel.y], [egoveh.acc.y]])\r\n b = np.array(\r\n [[-t * np.cos(yaw), 0 * -t * t * np.cos(yaw) / 2, -t * np.sin(yaw), 0 * -t * t * np.sin(yaw) / 2],\r\n [0, 0, 0, 0],\r\n [0, 0, 0, 0],\r\n [t * np.sin(yaw), 0 * t * t * np.sin(yaw), -t * np.cos(yaw), 0 * -t * t * np.cos(yaw)],\r\n [0, 0, 0, 0],\r\n [0, 0, 0, 0]])\r\n g = np.array([[t * t * t * np.cos(yaw) / 6, -t * t * t * np.cos(yaw) / 6, t * t * t * np.sin(yaw) / 6,\r\n -t * t * t * np.sin(yaw) / 6], [t * t * np.cos(yaw) / 2, 0, t * t * np.sin(yaw) / 2, 0],\r\n [t * np.cos(yaw), 0, t * np.sin(yaw), 0],\r\n [-t * t * t * np.sin(yaw) / 6, t * t * t * np.sin(yaw) / 6, -t * t * t * np.cos(yaw) / 6,\r\n -t * t * t * np.cos(yaw) / 6], [-t * t * np.sin(yaw) / 2, 0, t * t * np.cos(yaw) / 2, 0],\r\n [-t * np.sin(yaw), 0, t * np.cos(yaw), 0]])\r\n\r\n eta_s = np.array([[10],[0],[10] ,[0]])\r\n id = np.zeros((6, 6))\r\n np.fill_diagonal(id, rospy.get_param(\"fusion_process_noise\")) # rospy.get_param(\"Fusion_process_noise\")\r\n covariance = np.reshape(obj.covariance,(6,6))\r\n predicted_state = a.dot(state) + b.dot(u)\r\n predicted_covariance = (a.dot(covariance)).dot(a.transpose())+ id #+g.dot(eta_s)#+(g.dot(c_s)).dot(g.transpose()) #\r\n obj.covariance = predicted_covariance.flatten()\r\n obj.geometric.x = float(predicted_state[0])\r\n obj.geometric.vx = float(predicted_state[1])\r\n obj.geometric.ax = float(predicted_state[2])\r\n\r\n obj.geometric.y = float(predicted_state[3])\r\n obj.geometric.vy = float(predicted_state[4])\r\n obj.geometric.ay = float(predicted_state[5])\r\n\r\n\r\n return track\r\n\r\ndef 
temp_alignment_obj(ob,egoveh,sensor_property,objs_list):\r\n \"function to perform temporal alignment/prediction of single object from objects_list\"\r\n \"Returns object with state vector predicted to current time\"\r\n \r\n #global now\r\n now = rospy.Time.now()\r\n obj = ob\r\n t = float(now.to_sec()) - float(objs_list.header.stamp.to_sec())\r\n \r\n if obj.geometric.ax <= 0.5:\r\n obj.geometric.ax = 0\r\n if obj.geometric.ay <= 0.5:\r\n obj.geometric.ay = 0\r\n\r\n\r\n yaw = egoveh.newyaw\r\n\r\n state = np.array([[float(obj.geometric.x)], [float(obj.geometric.vx)], [float(obj.geometric.ax)], [float(obj.geometric.y)], [float(obj.geometric.vy)],[float(obj.geometric.ay)]])\r\n\r\n\r\n a = np.array([[np.cos(yaw), t * np.cos(yaw), t * t * np.cos(yaw) / 2, np.sin(yaw), t * np.sin(yaw),\r\n t * t * np.sin(yaw) / 2],\r\n [0, np.cos(yaw), t * np.cos(yaw), 0, np.sin(yaw), t * np.sin(yaw)],\r\n [0, 0, np.cos(yaw), 0, 0, np.sin(yaw)],\r\n [-np.sin(yaw), -t * np.sin(yaw), -t * t * np.sin(yaw) / 2, np.cos(yaw), t * np.cos(yaw),\r\n t * t * np.cos(yaw) / 2],\r\n [0, -np.sin(yaw), -t * np.sin(yaw), 0, np.cos(yaw), t * np.cos(yaw)],\r\n [0, 0, -np.sin(yaw), 0, 0, np.cos(yaw)]])\r\n u = np.array([[egoveh.vel.x], [egoveh.acc.x], [egoveh.vel.y], [egoveh.acc.y]])\r\n\r\n b = np.array(\r\n [[-t * np.cos(yaw), -t * t * np.cos(yaw) / 2, -t * np.sin(yaw), -t * t * np.sin(yaw) / 2],\r\n [0, 0, 0, 0],\r\n [0, 0, 0, 0],\r\n [t * np.sin(yaw), t * t * np.sin(yaw), -t * np.cos(yaw), -t * t * np.cos(yaw)],\r\n [0, 0, 0, 0],\r\n [0, 0, 0, 0]])\r\n\r\n g = np.array([[t*t*t*np.cos(yaw)/6,-t*t*t*np.cos(yaw)/6,t*t*t*np.sin(yaw)/6,-t*t*t*np.sin(yaw)/6],\r\n [t*t*np.cos(yaw)/2,0,t*t*np.sin(yaw)/2,0],\r\n [t*np.cos(yaw),0,t*np.sin(yaw),0],\r\n [-t*t*t*np.sin(yaw)/6,t*t*t*np.sin(yaw)/6,-t*t*t*np.cos(yaw)/6,-t*t*t*np.cos(yaw)/6],\r\n [-t*t*np.sin(yaw)/2,0,t*t*np.cos(yaw)/2,0],\r\n [-t*np.sin(yaw),0,t*np.cos(yaw),0]])\r\n\r\n c_s =np.array([[0.1, 0, 0, 0],[0, 0, 0, 0],[0, 0, 0.1, 0] ,[0, 0, 0,0]])\r\n\r\n covariance = np.reshape(obj.covariance,(6,6))\r\n predicted_state = a.dot(state) + b.dot(u)\r\n\r\n id = np.zeros((6, 6))\r\n np.fill_diagonal(id, rospy.get_param(\"fusion_process_noise\"))\r\n predicted_covariance = (a.dot(covariance)).dot(a.transpose()) + id \r\n obj.covariance = predicted_covariance.flatten()\r\n obj.geometric.x = float(predicted_state[0])\r\n obj.geometric.vx = float(predicted_state[1])\r\n obj.geometric.ax = float(predicted_state[2])\r\n obj.geometric.y = float(predicted_state[3])\r\n obj.geometric.vy = float(predicted_state[4])\r\n\r\n obj.geometric.ay = float(predicted_state[5])\r\n\r\n return(obj)\r\n\r\ndef information_matrix_fusion(glob_pred_obj,prev_obj_aligned,predict_obj,sensor_id):\r\n \"\"\"\r\n Function to perfrom the Information matrix fusion.\r\n\r\n \"\"\"\r\n \r\n\r\n global_state_matrix = np.array([[float(glob_pred_obj.geometric.x)], [float(glob_pred_obj.geometric.vx)],\r\n [float(glob_pred_obj.geometric.ax)], [float(glob_pred_obj.geometric.y)],\r\n [float(glob_pred_obj.geometric.vy)], [float(glob_pred_obj.geometric.ay)]])\r\n\r\n global_cvarience_matrix = np.reshape(glob_pred_obj.covariance, (6, 6)) #+ id\r\n\r\n sensor_state_matrix = np.array([[float(predict_obj.geometric.x)], [float(predict_obj.geometric.vx)],\r\n [float(predict_obj.geometric.ax)],\r\n [float(predict_obj.geometric.y)], [float(predict_obj.geometric.vy)],\r\n [float(predict_obj.geometric.ay)]])\r\n sensor_covarience_matrix = np.reshape(predict_obj.covariance, (6, 6))\r\n\r\n previous_sensor_state_matrix = np.array(\r\n 
[[float(prev_obj_aligned.geometric.x)], [float(prev_obj_aligned.geometric.vx)],\r\n [float(prev_obj_aligned.geometric.ax)], [float(prev_obj_aligned.geometric.y)],\r\n [float(prev_obj_aligned.geometric.vy)], [float(prev_obj_aligned.geometric.ay)]])\r\n previous_sensor_covarience_matrix = np.reshape(prev_obj_aligned.covariance, (6, 6))\r\n\r\n glob_pred_obj_inv = np.linalg.inv(global_cvarience_matrix)\r\n\r\n sensor_covarience_matrix_inv = np.linalg.inv(sensor_covarience_matrix)\r\n\r\n\r\n previous_sensor_covarience_matrix_inv = np.linalg.pinv(previous_sensor_covarience_matrix)\r\n\r\n inverse_fused_covarience_matrix = (np.linalg.pinv(global_cvarience_matrix)) + ((np.linalg.pinv(sensor_covarience_matrix)) )#- (previous_sensor_covarience_matrix_inv))\r\n fused_covarience_matrix = np.linalg.pinv(inverse_fused_covarience_matrix)\r\n\r\n fused_state_matrix = fused_covarience_matrix.dot((glob_pred_obj_inv.dot(global_state_matrix)) + (sensor_covarience_matrix_inv.dot(sensor_state_matrix))-previous_sensor_covarience_matrix_inv.dot(previous_sensor_state_matrix))\r\n\r\n return [fused_state_matrix, fused_covarience_matrix]\r\n\r\n\r\n\r\ndef cross_covarience_recurssion_fusion(glob_pred_obj,predict_obj):\r\n \"Function to perform cross covariance recursion fusion on object list \"\r\n \r\n \r\n \r\n global_state_matrix = np.array([[float(glob_pred_obj.geometric.x)], [float(glob_pred_obj.geometric.vx)],\r\n [float(glob_pred_obj.geometric.ax)], [float(glob_pred_obj.geometric.y)],\r\n [float(glob_pred_obj.geometric.vy)], [float(glob_pred_obj.geometric.ay)]])\r\n global_cvarience_matrix = np.reshape(glob_pred_obj.covariance, (6, 6))\r\n\r\n sensor_state_matrix = np.array([[float(predict_obj.geometric.x)], [float(predict_obj.geometric.vx)],\r\n [float(predict_obj.geometric.ax)],\r\n [float(predict_obj.geometric.y)], [float(predict_obj.geometric.vy)],\r\n [float(predict_obj.geometric.ay)]])\r\n sensor_covarience_matrix = np.reshape(predict_obj.covariance, (6, 6))\r\n\r\n global_covariance_inv = np.linalg.pinv(global_cvarience_matrix)\r\n sensor_covariance_inv = np.linalg.pinv(sensor_covarience_matrix)\r\n\r\n\r\n inverse_fused_covarience_matrix = global_covariance_inv + sensor_covariance_inv\r\n fused_covarience_matrix = np.linalg.pinv(inverse_fused_covarience_matrix)\r\n\r\n fused_state_matrix = fused_covarience_matrix.dot(\r\n (global_covariance_inv.dot(global_state_matrix)) + (sensor_covariance_inv.dot(sensor_state_matrix)))\r\n\r\n \r\n return [fused_state_matrix, fused_covarience_matrix]\r\n\r\ndef evaluate_time(globaltrack):\r\n \"Function to perform Object management.\"\r\n \"Existance is penalized if object not updated for more than 0.5secs \" \r\n \r\n \"Global object is deleted if object Existance falls below threshold existance\" \r\n time_stamp = rospy.Time.now()\r\n time_elapsed = float(time_stamp.to_sec())\r\n\r\n\r\n for i,obj in enumerate(globaltrack.obj_list):\r\n\r\n time = time_elapsed - float(obj.time)\r\n #print('time',time)\r\n\r\n if time > 0.5:\r\n obj.prop_existence -= 0.05\r\n print('lengt',len(globaltrack.obj_list))\r\n print(obj.prop_existence,time)\r\n if obj.prop_existence < rospy.get_param(\"threshold_exist\"):\r\n\r\n print('deleting',obj.prop_existence)\r\n globaltrack.obj_list.remove(obj)\r\n\r\n\r\n return(globaltrack)\r\n \r\ndef rotate (x,y,angle):\r\n \"function to perform roatation of coordinate frame\"\r\n rotx = x * math.cos(angle) - y * math.sin(angle)\r\n roty = x * math.sin(angle) + y * math.cos(angle)\r\n\r\n return [rotx,roty]\r\n"
},
{
"alpha_fraction": 0.7823529243469238,
"alphanum_fraction": 0.7941176295280457,
"avg_line_length": 33,
"blob_id": "c67b835aea416c307e77a530b445090d78002247",
"content_id": "3bab8e2eec6596d5e158371220f4b15a07b8bb16",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 170,
"license_type": "no_license",
"max_line_length": 41,
"num_lines": 5,
"path": "/devel/lib/python2.7/dist-packages/osi3_bridge/msg/__init__.py",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "from ._Dimension3d import *\nfrom ._GroundTruthMovingObjects import *\nfrom ._MovingObject import *\nfrom ._Orientation3d import *\nfrom ._TrafficUpdateMovingObject import *\n"
},
{
"alpha_fraction": 0.7671440243721008,
"alphanum_fraction": 0.7687157988548279,
"avg_line_length": 43.385013580322266,
"blob_id": "9ccad12bd21485b433452856342a83bbbed00156",
"content_id": "6cc84566d881b8436418365557edb1887e9425d6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Makefile",
"length_bytes": 17178,
"license_type": "no_license",
"max_line_length": 255,
"num_lines": 387,
"path": "/build/vehicle_control/Makefile",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "# CMAKE generated file: DO NOT EDIT!\n# Generated by \"Unix Makefiles\" Generator, CMake Version 3.10\n\n# Default target executed when no arguments are given to make.\ndefault_target: all\n\n.PHONY : default_target\n\n# Allow only one \"make -f Makefile2\" at a time, but pass parallelism.\n.NOTPARALLEL:\n\n\n#=============================================================================\n# Special targets provided by cmake.\n\n# Disable implicit rules so canonical targets will work.\n.SUFFIXES:\n\n\n# Remove some rules from gmake that .SUFFIXES does not remove.\nSUFFIXES =\n\n.SUFFIXES: .hpux_make_needs_suffix_list\n\n\n# Suppress display of executed commands.\n$(VERBOSE).SILENT:\n\n\n# A target that is always out of date.\ncmake_force:\n\n.PHONY : cmake_force\n\n#=============================================================================\n# Set environment variables for the build.\n\n# The shell in which to execute make rules.\nSHELL = /bin/sh\n\n# The CMake executable.\nCMAKE_COMMAND = /usr/bin/cmake\n\n# The command to remove a file.\nRM = /usr/bin/cmake -E remove -f\n\n# Escaping for special characters.\nEQUALS = =\n\n# The top-level source directory on which CMake was run.\nCMAKE_SOURCE_DIR = /home/student/Desktop/Redge_Thesis/vil/src\n\n# The top-level build directory on which CMake was run.\nCMAKE_BINARY_DIR = /home/student/Desktop/Redge_Thesis/vil/build\n\n#=============================================================================\n# Targets provided globally by CMake.\n\n# Special rule for the target install/strip\ninstall/strip: preinstall\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"Installing the project stripped...\"\n\t/usr/bin/cmake -DCMAKE_INSTALL_DO_STRIP=1 -P cmake_install.cmake\n.PHONY : install/strip\n\n# Special rule for the target install/strip\ninstall/strip/fast: preinstall/fast\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"Installing the project stripped...\"\n\t/usr/bin/cmake -DCMAKE_INSTALL_DO_STRIP=1 -P cmake_install.cmake\n.PHONY : install/strip/fast\n\n# Special rule for the target install/local\ninstall/local: preinstall\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"Installing only the local directory...\"\n\t/usr/bin/cmake -DCMAKE_INSTALL_LOCAL_ONLY=1 -P cmake_install.cmake\n.PHONY : install/local\n\n# Special rule for the target install/local\ninstall/local/fast: preinstall/fast\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"Installing only the local directory...\"\n\t/usr/bin/cmake -DCMAKE_INSTALL_LOCAL_ONLY=1 -P cmake_install.cmake\n.PHONY : install/local/fast\n\n# Special rule for the target install\ninstall: preinstall\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"Install the project...\"\n\t/usr/bin/cmake -P cmake_install.cmake\n.PHONY : install\n\n# Special rule for the target install\ninstall/fast: preinstall/fast\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"Install the project...\"\n\t/usr/bin/cmake -P cmake_install.cmake\n.PHONY : install/fast\n\n# Special rule for the target rebuild_cache\nrebuild_cache:\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"Running CMake to regenerate build system...\"\n\t/usr/bin/cmake -H$(CMAKE_SOURCE_DIR) -B$(CMAKE_BINARY_DIR)\n.PHONY : rebuild_cache\n\n# Special rule for the target rebuild_cache\nrebuild_cache/fast: rebuild_cache\n\n.PHONY : rebuild_cache/fast\n\n# Special rule for the target edit_cache\nedit_cache:\n\t@$(CMAKE_COMMAND) -E 
cmake_echo_color --switch=$(COLOR) --cyan \"No interactive CMake dialog available...\"\n\t/usr/bin/cmake -E echo No\\ interactive\\ CMake\\ dialog\\ available.\n.PHONY : edit_cache\n\n# Special rule for the target edit_cache\nedit_cache/fast: edit_cache\n\n.PHONY : edit_cache/fast\n\n# Special rule for the target test\ntest:\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"Running tests...\"\n\t/usr/bin/ctest --force-new-ctest-process $(ARGS)\n.PHONY : test\n\n# Special rule for the target test\ntest/fast: test\n\n.PHONY : test/fast\n\n# Special rule for the target list_install_components\nlist_install_components:\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"Available install components are: \\\"Unspecified\\\" \\\"dev\\\" \\\"lib\\\"\"\n.PHONY : list_install_components\n\n# Special rule for the target list_install_components\nlist_install_components/fast: list_install_components\n\n.PHONY : list_install_components/fast\n\n# The main all target\nall: cmake_check_build_system\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(CMAKE_COMMAND) -E cmake_progress_start /home/student/Desktop/Redge_Thesis/vil/build/CMakeFiles /home/student/Desktop/Redge_Thesis/vil/build/vehicle_control/CMakeFiles/progress.marks\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 vehicle_control/all\n\t$(CMAKE_COMMAND) -E cmake_progress_start /home/student/Desktop/Redge_Thesis/vil/build/CMakeFiles 0\n.PHONY : all\n\n# The main clean target\nclean:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 vehicle_control/clean\n.PHONY : clean\n\n# The main clean target\nclean/fast: clean\n\n.PHONY : clean/fast\n\n# Prepare targets for installation.\npreinstall: all\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 vehicle_control/preinstall\n.PHONY : preinstall\n\n# Prepare targets for installation.\npreinstall/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 vehicle_control/preinstall\n.PHONY : preinstall/fast\n\n# clear depends\ndepend:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(CMAKE_COMMAND) -H$(CMAKE_SOURCE_DIR) -B$(CMAKE_BINARY_DIR) --check-build-system CMakeFiles/Makefile.cmake 1\n.PHONY : depend\n\n# Convenience name for target.\nvehicle_control/CMakeFiles/vehicle_control_geneus.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 vehicle_control/CMakeFiles/vehicle_control_geneus.dir/rule\n.PHONY : vehicle_control/CMakeFiles/vehicle_control_geneus.dir/rule\n\n# Convenience name for target.\nvehicle_control_geneus: vehicle_control/CMakeFiles/vehicle_control_geneus.dir/rule\n\n.PHONY : vehicle_control_geneus\n\n# fast build rule for target.\nvehicle_control_geneus/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f vehicle_control/CMakeFiles/vehicle_control_geneus.dir/build.make vehicle_control/CMakeFiles/vehicle_control_geneus.dir/build\n.PHONY : vehicle_control_geneus/fast\n\n# Convenience name for target.\nvehicle_control/CMakeFiles/vehicle_control_gencpp.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 vehicle_control/CMakeFiles/vehicle_control_gencpp.dir/rule\n.PHONY : vehicle_control/CMakeFiles/vehicle_control_gencpp.dir/rule\n\n# Convenience name for target.\nvehicle_control_gencpp: vehicle_control/CMakeFiles/vehicle_control_gencpp.dir/rule\n\n.PHONY : vehicle_control_gencpp\n\n# fast build rule for 
target.\nvehicle_control_gencpp/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f vehicle_control/CMakeFiles/vehicle_control_gencpp.dir/build.make vehicle_control/CMakeFiles/vehicle_control_gencpp.dir/build\n.PHONY : vehicle_control_gencpp/fast\n\n# Convenience name for target.\nvehicle_control/CMakeFiles/vehicle_control_generate_messages_eus.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 vehicle_control/CMakeFiles/vehicle_control_generate_messages_eus.dir/rule\n.PHONY : vehicle_control/CMakeFiles/vehicle_control_generate_messages_eus.dir/rule\n\n# Convenience name for target.\nvehicle_control_generate_messages_eus: vehicle_control/CMakeFiles/vehicle_control_generate_messages_eus.dir/rule\n\n.PHONY : vehicle_control_generate_messages_eus\n\n# fast build rule for target.\nvehicle_control_generate_messages_eus/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f vehicle_control/CMakeFiles/vehicle_control_generate_messages_eus.dir/build.make vehicle_control/CMakeFiles/vehicle_control_generate_messages_eus.dir/build\n.PHONY : vehicle_control_generate_messages_eus/fast\n\n# Convenience name for target.\nvehicle_control/CMakeFiles/vehicle_control_generate_messages_cpp.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 vehicle_control/CMakeFiles/vehicle_control_generate_messages_cpp.dir/rule\n.PHONY : vehicle_control/CMakeFiles/vehicle_control_generate_messages_cpp.dir/rule\n\n# Convenience name for target.\nvehicle_control_generate_messages_cpp: vehicle_control/CMakeFiles/vehicle_control_generate_messages_cpp.dir/rule\n\n.PHONY : vehicle_control_generate_messages_cpp\n\n# fast build rule for target.\nvehicle_control_generate_messages_cpp/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f vehicle_control/CMakeFiles/vehicle_control_generate_messages_cpp.dir/build.make vehicle_control/CMakeFiles/vehicle_control_generate_messages_cpp.dir/build\n.PHONY : vehicle_control_generate_messages_cpp/fast\n\n# Convenience name for target.\nvehicle_control/CMakeFiles/vehicle_control_generate_messages.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 vehicle_control/CMakeFiles/vehicle_control_generate_messages.dir/rule\n.PHONY : vehicle_control/CMakeFiles/vehicle_control_generate_messages.dir/rule\n\n# Convenience name for target.\nvehicle_control_generate_messages: vehicle_control/CMakeFiles/vehicle_control_generate_messages.dir/rule\n\n.PHONY : vehicle_control_generate_messages\n\n# fast build rule for target.\nvehicle_control_generate_messages/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f vehicle_control/CMakeFiles/vehicle_control_generate_messages.dir/build.make vehicle_control/CMakeFiles/vehicle_control_generate_messages.dir/build\n.PHONY : vehicle_control_generate_messages/fast\n\n# Convenience name for target.\nvehicle_control/CMakeFiles/vehicle_control_generate_messages_nodejs.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 vehicle_control/CMakeFiles/vehicle_control_generate_messages_nodejs.dir/rule\n.PHONY : vehicle_control/CMakeFiles/vehicle_control_generate_messages_nodejs.dir/rule\n\n# Convenience name for target.\nvehicle_control_generate_messages_nodejs: vehicle_control/CMakeFiles/vehicle_control_generate_messages_nodejs.dir/rule\n\n.PHONY : vehicle_control_generate_messages_nodejs\n\n# fast build rule for 
target.\nvehicle_control_generate_messages_nodejs/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f vehicle_control/CMakeFiles/vehicle_control_generate_messages_nodejs.dir/build.make vehicle_control/CMakeFiles/vehicle_control_generate_messages_nodejs.dir/build\n.PHONY : vehicle_control_generate_messages_nodejs/fast\n\n# Convenience name for target.\nvehicle_control/CMakeFiles/_vehicle_control_generate_messages_check_deps_Trajectory.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 vehicle_control/CMakeFiles/_vehicle_control_generate_messages_check_deps_Trajectory.dir/rule\n.PHONY : vehicle_control/CMakeFiles/_vehicle_control_generate_messages_check_deps_Trajectory.dir/rule\n\n# Convenience name for target.\n_vehicle_control_generate_messages_check_deps_Trajectory: vehicle_control/CMakeFiles/_vehicle_control_generate_messages_check_deps_Trajectory.dir/rule\n\n.PHONY : _vehicle_control_generate_messages_check_deps_Trajectory\n\n# fast build rule for target.\n_vehicle_control_generate_messages_check_deps_Trajectory/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f vehicle_control/CMakeFiles/_vehicle_control_generate_messages_check_deps_Trajectory.dir/build.make vehicle_control/CMakeFiles/_vehicle_control_generate_messages_check_deps_Trajectory.dir/build\n.PHONY : _vehicle_control_generate_messages_check_deps_Trajectory/fast\n\n# Convenience name for target.\nvehicle_control/CMakeFiles/vehicle_control_genlisp.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 vehicle_control/CMakeFiles/vehicle_control_genlisp.dir/rule\n.PHONY : vehicle_control/CMakeFiles/vehicle_control_genlisp.dir/rule\n\n# Convenience name for target.\nvehicle_control_genlisp: vehicle_control/CMakeFiles/vehicle_control_genlisp.dir/rule\n\n.PHONY : vehicle_control_genlisp\n\n# fast build rule for target.\nvehicle_control_genlisp/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f vehicle_control/CMakeFiles/vehicle_control_genlisp.dir/build.make vehicle_control/CMakeFiles/vehicle_control_genlisp.dir/build\n.PHONY : vehicle_control_genlisp/fast\n\n# Convenience name for target.\nvehicle_control/CMakeFiles/vehicle_control_generate_messages_lisp.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 vehicle_control/CMakeFiles/vehicle_control_generate_messages_lisp.dir/rule\n.PHONY : vehicle_control/CMakeFiles/vehicle_control_generate_messages_lisp.dir/rule\n\n# Convenience name for target.\nvehicle_control_generate_messages_lisp: vehicle_control/CMakeFiles/vehicle_control_generate_messages_lisp.dir/rule\n\n.PHONY : vehicle_control_generate_messages_lisp\n\n# fast build rule for target.\nvehicle_control_generate_messages_lisp/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f vehicle_control/CMakeFiles/vehicle_control_generate_messages_lisp.dir/build.make vehicle_control/CMakeFiles/vehicle_control_generate_messages_lisp.dir/build\n.PHONY : vehicle_control_generate_messages_lisp/fast\n\n# Convenience name for target.\nvehicle_control/CMakeFiles/vehicle_control_gennodejs.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 vehicle_control/CMakeFiles/vehicle_control_gennodejs.dir/rule\n.PHONY : vehicle_control/CMakeFiles/vehicle_control_gennodejs.dir/rule\n\n# Convenience name for target.\nvehicle_control_gennodejs: 
vehicle_control/CMakeFiles/vehicle_control_gennodejs.dir/rule\n\n.PHONY : vehicle_control_gennodejs\n\n# fast build rule for target.\nvehicle_control_gennodejs/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f vehicle_control/CMakeFiles/vehicle_control_gennodejs.dir/build.make vehicle_control/CMakeFiles/vehicle_control_gennodejs.dir/build\n.PHONY : vehicle_control_gennodejs/fast\n\n# Convenience name for target.\nvehicle_control/CMakeFiles/vehicle_control_generate_messages_py.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 vehicle_control/CMakeFiles/vehicle_control_generate_messages_py.dir/rule\n.PHONY : vehicle_control/CMakeFiles/vehicle_control_generate_messages_py.dir/rule\n\n# Convenience name for target.\nvehicle_control_generate_messages_py: vehicle_control/CMakeFiles/vehicle_control_generate_messages_py.dir/rule\n\n.PHONY : vehicle_control_generate_messages_py\n\n# fast build rule for target.\nvehicle_control_generate_messages_py/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f vehicle_control/CMakeFiles/vehicle_control_generate_messages_py.dir/build.make vehicle_control/CMakeFiles/vehicle_control_generate_messages_py.dir/build\n.PHONY : vehicle_control_generate_messages_py/fast\n\n# Convenience name for target.\nvehicle_control/CMakeFiles/vehicle_control_genpy.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 vehicle_control/CMakeFiles/vehicle_control_genpy.dir/rule\n.PHONY : vehicle_control/CMakeFiles/vehicle_control_genpy.dir/rule\n\n# Convenience name for target.\nvehicle_control_genpy: vehicle_control/CMakeFiles/vehicle_control_genpy.dir/rule\n\n.PHONY : vehicle_control_genpy\n\n# fast build rule for target.\nvehicle_control_genpy/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f vehicle_control/CMakeFiles/vehicle_control_genpy.dir/build.make vehicle_control/CMakeFiles/vehicle_control_genpy.dir/build\n.PHONY : vehicle_control_genpy/fast\n\n# Help Target\nhelp:\n\t@echo \"The following are some of the valid targets for this Makefile:\"\n\t@echo \"... all (the default if no target is provided)\"\n\t@echo \"... clean\"\n\t@echo \"... depend\"\n\t@echo \"... install/strip\"\n\t@echo \"... install/local\"\n\t@echo \"... vehicle_control_geneus\"\n\t@echo \"... install\"\n\t@echo \"... vehicle_control_gencpp\"\n\t@echo \"... vehicle_control_generate_messages_eus\"\n\t@echo \"... rebuild_cache\"\n\t@echo \"... vehicle_control_generate_messages_cpp\"\n\t@echo \"... vehicle_control_generate_messages\"\n\t@echo \"... vehicle_control_generate_messages_nodejs\"\n\t@echo \"... _vehicle_control_generate_messages_check_deps_Trajectory\"\n\t@echo \"... vehicle_control_genlisp\"\n\t@echo \"... vehicle_control_generate_messages_lisp\"\n\t@echo \"... vehicle_control_gennodejs\"\n\t@echo \"... vehicle_control_generate_messages_py\"\n\t@echo \"... edit_cache\"\n\t@echo \"... test\"\n\t@echo \"... vehicle_control_genpy\"\n\t@echo \"... 
list_install_components\"\n.PHONY : help\n\n\n\n#=============================================================================\n# Special targets to cleanup operation of make.\n\n# Special rule to run CMake to check the build system integrity.\n# No rule that depends on this can have commands that come from listfiles\n# because they might be regenerated.\ncmake_check_build_system:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(CMAKE_COMMAND) -H$(CMAKE_SOURCE_DIR) -B$(CMAKE_BINARY_DIR) --check-build-system CMakeFiles/Makefile.cmake 0\n.PHONY : cmake_check_build_system\n\n"
},
{
"alpha_fraction": 0.6639506816864014,
"alphanum_fraction": 0.6804668307304382,
"avg_line_length": 27.923566818237305,
"blob_id": "c7349e37dd737fd15da9dc92440e735eef683d58",
"content_id": "46457e18282273200afbc3ab8a56f04361d7b164",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4541,
"license_type": "no_license",
"max_line_length": 213,
"num_lines": 157,
"path": "/src/object_list/scripts/Debug_markers_obj_list.py",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\nimport roslib; roslib.load_manifest('visualization_marker_tutorials')\nimport rospy\nfrom std_msgs.msg import String\nfrom object_list.msg import ObjectsList\nfrom object_list.msg import ObjectList\n\nfrom geometry_msgs.msg import Quaternion\nfrom visualization_msgs.msg import Marker\nfrom visualization_msgs.msg import MarkerArray\nimport rospy\nimport math\nimport tf\n\nOFFSET_CAR_X = -2.3 # distance to front\ncar_ego_x = 0\ncar_ego_y = 0\ndata_alt = 0\ntopic = 'visualization_marker_array'\npublisher = rospy.Publisher(topic, MarkerArray,queue_size=10)\nrospy.init_node('Objekt_Visualization')\nbr = tf.TransformBroadcaster()\n\n#define each color to the specific class, input value ist the name(string) from the classifciation\ndef evaluateColor(Class): \n class_List = {\n\t\"car\": [1,0,0,1],\n\t\"truck\":[0,1,0,1],\n\t\"motorcycle\": [0,0,1,1],\n\t\"bicycle\": [1,1,0,1],\n\t\"pedestrian\": [1,0,1,3],\n\t\"stacionary\": [0,1,1,3],\n\t\"other\":[1,1,1,2] \n }\n return class_List.get(Class)\n \n \ndef evaluateClassification(objectClass):\n \n temp_prop = 0\n result = \"\"\n #tmp includes all Attributes of the message Classification\n tmp = [a for a in dir(objectClass) if not a.startswith('__') and not a.startswith('_') and not callable(getattr(objectClass,a))]\n \n\n for i in range(len(tmp)):\n if(getattr(objectClass, tmp[i]) > temp_prop ):\n temp_prop = getattr(objectClass, tmp[i])\n result = tmp[i]\n return (result) # return value is the name of the class whith the highest probability\n \n \n\n\ndef evaluateObject(objectData):\n marker = Marker()\n #r, g, b, typ = evaluateColor(evaluateClassification(objectData.classification))\n marker.header.frame_id = \"/ego\"\n \n marker.type = 1\n \n marker.action = marker.ADD\n marker.scale.x = objectData.dimension.length\n marker.scale.y = objectData.dimension.width\n \n marker.scale.z = 2.0\n marker.color.a = 1.0\n \n marker.color.r = 1.0\n marker.color.g = 0.0\n marker.color.b = 0.0\n\n marker.pose.orientation = Quaternion(*tf.transformations.quaternion_from_euler(0,0,objectData.geometric.yaw))\n #marker.pose.orientation.w = 1\n #print(marker.pose.orientation)\n marker.pose.position.x = objectData.geometric.x\n marker.pose.position.y = objectData.geometric.y\n marker.pose.position.z = 1.0\n marker.lifetime = rospy.Duration(0.1)\n return marker\n\ndef evaluateObjectID(objectData):\n marker = Marker()\n\n marker.header.frame_id = \"/world\"\n marker.id = i\n marker.type = typ\n\n marker.action = marker.ADD\n marker.scale.x = objectData.dimension.lenght\n marker.scale.y = objectData.dimension.width\n\n marker.scale.z = 2.0\n marker.color.a = 1.0\n\n marker.color.r = 1.0\n marker.color.g = 0.0\n marker.color.b = 0.0\n\n marker.pose.orientation.w = 1.0\n marker.pose.position.x = data.position.x\n marker.pose.position.y = data.position.y\n marker.pose.position.z = 1.0\n marker.lifetime = rospy.Duration(0.1)\n marker.text = \"ID:\" + str(objectData.obj_id)\n return marker\n\n\ndef callback_simulation(data):\n\n global car_ego_x\n global car_ego_y \n \n \n\n markerArray = MarkerArray()\n\n\n for i in range(len(data.obj_list)):\n markerObj = evaluateObject(data.obj_list[i])\n #markerID = evaluateObjectID(data.obj_list[i])\n #markerID.id = i*2+1\n markerArray.markers.append(markerObj)\n #markerArray.markers.append(markerID)\n\n \n #rospy.loginfo(markerArray)\n publisher.publish(markerArray)\n \n \ndef callback_egovehicle(data):\n global car_ego_x\n global car_ego_y\n\n car_ego_x = data.object.position.x\n car_ego_y = 
data.object.position.y\n\n br.sendTransform((car_ego_x,car_ego_y,0),tf.transformations.quaternion_from_euler(data.object.orientation.roll,data.object.orientation.pitch,data.object.orientation.yaw),rospy.Time.now(),\"chassis\",\"base_link\")\n\ndef listener():\n\n # In ROS, nodes are uniquely named. If two nodes with the same\n # name are launched, the previous one is kicked off. The\n # anonymous=True flag means that rospy will choose a unique\n # name for our 'listener' node so that multiple listeners can\n # run simultaneously.\n \n\n #rospy.Subscriber(\"chatter\", String, callback)\n #rospy.Subscriber('/sensor0/afterKF', ObjectsList, callback_simulation)\n rospy.Subscriber('/sensor0/afterKF', ObjectsList, callback_simulation)\n\n # spin() simply keeps python from exiting until this node is stopped\n rospy.spin()\n\nif __name__ == '__main__':\n listener()\n"
},
{
"alpha_fraction": 0.796875,
"alphanum_fraction": 0.796875,
"avg_line_length": 43.79999923706055,
"blob_id": "7e44e0b0c69b4d8b8f09baa4f78a3b214f322d79",
"content_id": "03309ce13d2c0d7a0ac57aad05765f48826bd807",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 448,
"license_type": "no_license",
"max_line_length": 98,
"num_lines": 10,
"path": "/build/vehicle_control/CMakeFiles/vehicle_control_generate_messages_eus.dir/cmake_clean.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "file(REMOVE_RECURSE\n \"CMakeFiles/vehicle_control_generate_messages_eus\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/share/roseus/ros/vehicle_control/msg/Trajectory.l\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/share/roseus/ros/vehicle_control/manifest.l\"\n)\n\n# Per-language clean rules from dependency scanning.\nforeach(lang )\n include(CMakeFiles/vehicle_control_generate_messages_eus.dir/cmake_clean_${lang}.cmake OPTIONAL)\nendforeach()\n"
},
{
"alpha_fraction": 0.6121924519538879,
"alphanum_fraction": 0.6225560903549194,
"avg_line_length": 38.86805725097656,
"blob_id": "863253375ec33af2021c2465ca7f152a4f7ca7b0",
"content_id": "78a5daa88f45597cfc69e15a6b6e948723557787",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 22965,
"license_type": "no_license",
"max_line_length": 176,
"num_lines": 576,
"path": "/src/sensor_model/scripts/sensor_model.py",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\nimport numpy as np\nimport rospy\nimport math\nimport message_filters\nimport matplotlib.pyplot as plt\nimport tf\n# import all necessary ROS messages\nfrom object_list.msg import ObjectList, ObjectsList\nfrom osi3_bridge.msg import GroundTruthMovingObjects, TrafficUpdateMovingObject\n\n# import function\nfrom rotate import rotate\n\n# import class\nfrom ClassSens import Sens\nfrom ClassSens import Features\n\n\n\n\n\n\n\nc=0\ncount =0\nyawcount=0\nclassification = {\n # id: Obj_list type OSI type\n 0 : \"other\", #unknown\n 1 : \"other\", #other\n 2 : \"car\", #car\n 3 : \"pedestrian\", #pedestrian\n 4 : \"other\", #animal\n 5 : \"truck\", #truck\n 6 : \"other\", #trailer\n 7 : \"motorcycle\", #motorbike\n 8 : \"bicycle\", #bicycle\n 9 : \"truck\", #bus\n 10 : \"other\", #tram\n 11 : \"other\", #train\n 12 : \"other\", #wheelchair\n}\ncounter = 0\ndef sensor_model():\n\n rospy.init_node('sensor_model_ideal', anonymous=False) # Start node\n\n # Subscriber the data in callback function\n ego_data = message_filters.Subscriber(\"/ego_data\", TrafficUpdateMovingObject)\n osi_objs = message_filters.Subscriber(\"/osi3_moving_obj\", GroundTruthMovingObjects)\n #ts = message_filters.ApproximateTimeSynchronizer([ego_data, osi_objs], 10, 0.1)\n ts = message_filters.TimeSynchronizer([ego_data, osi_objs], 3)\n ts.registerCallback(callback)\n\n rospy.spin() # spin() simply keeps python from exiting until this node is stopped\n\n\ndef callback(ego,osi_objs):\n tic = rospy.Time.now()\n global ntime\n global otime\n\n fixed = tf.TransformBroadcaster()\n fixed.sendTransform((0,0,0),tf.transformations.quaternion_from_euler(0, 0, 1),rospy.Time.now(),\"ego\",\"map\")\n\n\n\n\n\n #\n [osi_objs_noego] = find_ego(osi_objs)\n\n #else :\n # ego = ego_dataCOPY\n # copy the time from received osi GT message\n header = osi_objs.header\n ntime = float(osi_objs.header.stamp.secs) + float(osi_objs.header.stamp.nsecs)/1000000000\n\n # Return object list with objects inside the field of view with position origin on the sensor origin and rotation\n data_process(ego.object,osi_objs_noego,header)\n toc = rospy.Time.now()\n time = toc.to_sec() - tic.to_sec()\n print('sensormodel time', time)\n # Update the time\n otime=ntime\n\ndef find_ego (osi_objs):\n\n # find the smaller id number inside the list\n ID=osi_objs.objects[0].id\n IDpos=0\n for i in range(len(osi_objs.objects)):\n if osi_objs.objects[i].id < ID: # take into account that the EGO is the first spawn Object\n ID = osi_objs.objects[i].id\n IDpos = i\n\n\n # Assign all other ID's to the obj_list\n\n osi_objs_noego = [x for x in osi_objs.objects if not x.id == ID]\n\n\n return [osi_objs_noego]\n\n\ndef data_process (ego,osi_objs_noego,header):\n\n # Import all sensor parameter from Sensor class <-- Parameter are changeable in Ros launch File\n sens = Sens()\n\n global count # Counter for iniciating yawrate calculation = 0\n global counter # Counter for intitiating old osi data\n global osi_old #\n global ntime\n global otime\n\n global yawrate\n\n if counter == 0:\n osi_old = osi_objs_noego\n counter += 1\n\n if count == 0:\n yawrate = 0\n\n objs_list = ObjectsList()\n objs_list.header.stamp = header.stamp\n objs_list.header.frame_id = \"Sensor\"\n #Assign Sensor properties to topic\n objs_list.sensor_property.sensor_id = rospy.get_param(\"sensorID\")\n objs_list.sensor_property.sensortype = rospy.get_param(\"sensortype\")\n if rospy.get_param(\"sensortype\") == 0:\n objs_list.sensor_property.posx_variance = 
float(rospy.get_param(\"rangerr\"))/3\n objs_list.sensor_property.posy_variance = float(rospy.get_param(\"rangerr\"))/3\n objs_list.sensor_property.trust_existance = float(rospy.get_param(\"trust_existance\"))\n objs_list.sensor_property.trust_car = float(rospy.get_param(\"trust_car\"))\n objs_list.sensor_property.trust_truck = float(rospy.get_param(\"trust_truck\"))\n objs_list.sensor_property.trust_motorcycle = float(rospy.get_param(\"trust_motorcycle\"))\n objs_list.sensor_property.trust_bicycle = float(rospy.get_param(\"trust_bicycle\"))\n objs_list.sensor_property.trust_pedestrian = float(rospy.get_param(\"trust_pedestrian\"))\n objs_list.sensor_property.trust_stationary = float(rospy.get_param(\"trust_stationary\"))\n objs_list.sensor_property.trust_other = float(rospy.get_param(\"trust_other\"))\n elif rospy.get_param(\"sensortype\") != 5 :\n objs_list.sensor_property.posx_variance= float(rospy.get_param(\"posxerr\"))/3\n #print(objs_list.sensor_property.posx_variance)\n objs_list.sensor_property.posy_variance = float(rospy.get_param(\"posyerr\"))/3\n objs_list.sensor_property.trust_existance = float(rospy.get_param(\"trust_existance\"))\n objs_list.sensor_property.trust_car = float(rospy.get_param(\"trust_car\"))\n objs_list.sensor_property.trust_truck = float(rospy.get_param(\"trust_truck\"))\n objs_list.sensor_property.trust_motorcycle = float(rospy.get_param(\"trust_motorcycle\"))\n objs_list.sensor_property.trust_bicycle = float(rospy.get_param(\"trust_bicycle\"))\n objs_list.sensor_property.trust_pedestrian = float(rospy.get_param(\"trust_pedestrian\"))\n objs_list.sensor_property.trust_stationary = float(rospy.get_param(\"trust_stationary\"))\n objs_list.sensor_property.trust_other = float(rospy.get_param(\"trust_other\"))\n\n objs_list.sensor_property.velx_variance = float(rospy.get_param(\"velerr\"))/3\n objs_list.sensor_property.vely_variance = float(rospy.get_param(\"velerr\"))/3\n\n for i in range (len(osi_objs_noego)):\n\n # Rotate and Translate the object position from map to ego\n osi_objs_noego[i].position.x = osi_objs_noego[i].position.x - ego.position.x\n osi_objs_noego[i].position.y = osi_objs_noego[i].position.y - ego.position.y\n [osi_objs_noego[i].position.x, osi_objs_noego[i].position.y] = rotate(osi_objs_noego[i].position.x, osi_objs_noego[i].position.y, -ego.orientation.yaw)\n\n # Rotate and Translate the object position from ego to sensor\n osi_objs_noego[i].position.x = osi_objs_noego[i].position.x - sens.pos.x\n osi_objs_noego[i].position.y = osi_objs_noego[i].position.y - sens.pos.y\n [osi_objs_noego[i].position.x, osi_objs_noego[i].position.y] = rotate(osi_objs_noego[i].position.x,osi_objs_noego[i].position.y, -sens.rot.yaw)\n\n # Calculate the object orientation from map to sensor\n osi_objs_noego[i].orientation.yaw -= (ego.orientation.yaw + sens.rot.yaw)\n\n # Keep the angles are between -pi and pi\n if osi_objs_noego[i].orientation.yaw < -math.pi:\n osi_objs_noego[i].orientation.yaw += 2*math.pi\n elif osi_objs_noego[i].orientation.yaw > math.pi:\n osi_objs_noego[i].orientation.yaw -= 2*math.pi\n\n # changed to absolute , real sensor should add ego velocity and give overground velocity/objs of objects\n # Rotate and Translate the object velocity from map to sensor (Relative Velocity)\n [osi_objs_noego[i].velocity.x, osi_objs_noego[i].velocity.y] = rotate(osi_objs_noego[i].velocity.x,\n osi_objs_noego[i].velocity.y,\n -(ego.orientation.yaw + sens.rot.yaw))\n\n osi_objs_noego[i].velocity.x = osi_objs_noego[i].velocity.x - ego.velocity.x\n 
osi_objs_noego[i].velocity.y = osi_objs_noego[i].velocity.y - ego.velocity.y\n\n        # Transpose the object acceleration from map to sensor.\n        [osi_objs_noego[i].acceleration.x, osi_objs_noego[i].acceleration.y] = rotate(osi_objs_noego[i].acceleration.x,\n                                                                            osi_objs_noego[i].acceleration.y,\n                                                                            -(ego.orientation.yaw + sens.rot.yaw))\n        osi_objs_noego[i].acceleration.x = osi_objs_noego[i].acceleration.x - ego.acceleration.x\n        osi_objs_noego[i].acceleration.y = osi_objs_noego[i].acceleration.y - ego.acceleration.y\n\n\n        # Calculate features\n        [features,features_check] = calculate_features(osi_objs_noego[i],sens)\n\n\n        # If at least one feature is inside the field of view, add the sensor errors and fill the Aeberhard object list\n        if features_check == 1: # Just entities inside FOV\n\n            ## Include statistical sensor errors\n            osi_objs_noego[i] = include_sens_error (osi_objs_noego[i])\n            ## Initialize the Object list\n            obj_list= ObjectList()\n\n            ## fill object list\n            obj_list.geometric.x = osi_objs_noego[i].position.x #relative\n            obj_list.geometric.y = osi_objs_noego[i].position.y #relative\n            obj_list.geometric.vx = osi_objs_noego[i].velocity.x #relative\n            obj_list.geometric.vy = osi_objs_noego[i].velocity.y #relative\n            obj_list.geometric.ax = osi_objs_noego[i].acceleration.x #relative\n            obj_list.geometric.ay = osi_objs_noego[i].acceleration.y #relative\n            obj_list.geometric.yaw = osi_objs_noego[i].orientation.yaw #- (ego.orientation.yaw +sens.rot.yaw)\n\n            obj_list.obj_id=osi_objs_noego[i].id\n\n\n\n\n            ## Necessary to include errors\n            obj_list.dimension.length = osi_objs_noego[i].dimension.length\n            obj_list.dimension.width = osi_objs_noego[i].dimension.width\n            obj_list.dimension.length_variance = 0.2\n            obj_list.dimension.width_variance = 0.2\n\n            obj_list.features = features\n\n            ## Necessary to include errors\n            if classification[osi_objs_noego[i].type] == \"car\":\n                obj_list.classification.car = 1\n                obj_list.prop_mov = 1\n            elif classification[osi_objs_noego[i].type] == \"truck\":\n                obj_list.classification.truck = 1\n                obj_list.prop_mov = 1\n            elif classification[osi_objs_noego[i].type] == \"motorcycle\":\n                obj_list.classification.motorcycle = 1\n                obj_list.prop_mov = 1\n            elif classification[osi_objs_noego[i].type] == \"bicycle\":\n                obj_list.classification.bicycle = 1\n            elif classification[osi_objs_noego[i].type] == \"pedestrian\":\n                obj_list.classification.pedestrian = 1\n            elif classification[osi_objs_noego[i].type] == \"stacionary\":\n                obj_list.classification.stacionary = 1\n            elif classification[osi_objs_noego[i].type] == \"other\":\n                obj_list.classification.other = 1\n\n            if count != 0:\n                t = float(str(ntime)) - float(str(otime))\n                if t == 0 :\n                    t = 1/(rospy.get_param('freq'))\n                #t = t / 1000000000.0\n                obj_list.geometric.yawrate = (osi_objs_noego[i].orientation.yaw - osi_old[i].orientation.yaw) / t\n            else:\n                obj_list.geometric.yawrate = 0\n            #obj_list.geometric.yaw -= egoyaw\n            objs_list.obj_list.append(obj_list)\n\n    count=1\n\n# Publish the object list\n    pub = rospy.Publisher(\"objs_list\", ObjectsList, queue_size=10,latch=True)\n    pub.publish(objs_list)\n    osi_old = osi_objs_noego\n\n\ndef include_sens_error (new):\n    global osi_old\n\n    # FOR CAMERA OR LIDAR\n    if rospy.get_param(\"sensortype\") == 1:\n        # import standard deviation of errors from parameters\n        std_rx = rospy.get_param(\"posxerr\")\n        std_ry = rospy.get_param(\"posyerr\")\n\n        # generate errors in posx and posy with a normal distribution\n        posx_error = np.random.normal(loc=0.0, scale = std_rx )\n        posy_error = np.random.normal(loc=0.0, scale = std_ry )\n\n        # add errors to position\n        new.position.x += posx_error\n        new.position.y += posy_error\n        # set velocity and acceleration to 0 since camera/lidar does not measure velocity or acceleration\n        new.velocity.x = 0\n        new.velocity.y = 0\n        new.acceleration.x = 0\n        new.acceleration.y = 0\n\n\n    elif rospy.get_param(\"sensortype\") == 0:\n        # FOR RADAR\n\n        # determine azimuth angle and range\n        azi=np.arctan2(new.position.y,new.position.x)\n        obj_range=np.sqrt(np.square(new.position.x)+np.square(new.position.y))\n        # import range and azimuth error from parameters\n        std_range = rospy.get_param(\"rangerr\")\n\n        std_azi = rospy.get_param(\"azierr\")\n\n        # add errors with normal distribution\n        range_error = np.random.normal(loc=0.0, scale = std_range ) # generates error with normal distribution\n        azi_error = np.random.normal(loc=0.0, scale = std_azi ) # generates error with normal distribution\n        obj_range += range_error\n        azi += azi_error\n\n        # import velocity error from parameters\n        std_v=rospy.get_param(\"velerr\")\n\n        vel_err = np.random.normal(loc=0.0, scale = std_v ) # generates error with normal distribution\n        # determine the radial velocity from the velocities in x and y direction\n        v = np.sqrt(np.square(new.velocity.x)+np.square(new.velocity.y))\n        # add error to the radial velocity\n        v += vel_err\n\n        # resolve range and velocity to positions and velocities in x and y direction\n        new.position.x = obj_range * np.cos(azi)\n        new.position.y = obj_range * np.sin(azi)\n        #new.velocity.x = v * np.cos(azi)\n        if new.velocity.y < 0:\n            new.velocity.y = -abs(v * np.sin(azi))\n        else:\n            new.velocity.y = abs(v * np.sin(azi))\n        if new.velocity.x <0 :\n            new.velocity.x = -abs(v * np.cos(azi))\n        else :\n            new.velocity.x = abs(v * np.cos(azi))\n        new.acceleration.x = 0\n        new.acceleration.y = 0\n    return new\n\ndef calculate_features(obj,sens):\n\n    obj_list = ObjectList()\n    features = obj_list.features  # import all features as bool flags\n    x = Features()  # Import a class with all features as float 0.0\n    y = Features()\n\n    # Calculate the feature positions: first flag the features inside the FOV,\n    # then mask out the features hidden (self-occluded) by the object itself\n    features_check = 0\n\n    tg_wl = math.atan(obj.dimension.width / obj.dimension.length)\n    hip_wl = 0.5 * math.sqrt(obj.dimension.width ** 2 + obj.dimension.length ** 2)\n    beta = obj.orientation.yaw - tg_wl\n    psi = obj.orientation.yaw + tg_wl\n\n    x.FL = obj.position.x + hip_wl * math.cos(psi)\n    y.FL = obj.position.y + hip_wl * math.sin(psi)\n\n    [features.FL, features_check] = evaluate_feature(x.FL, y.FL, sens, features_check)\n\n    x.FR = obj.position.x + hip_wl * math.cos(beta)\n    y.FR = obj.position.y + hip_wl * math.sin(beta)\n\n    [features.FR, features_check] = evaluate_feature(x.FR, y.FR, sens, features_check)\n\n    x.RR = obj.position.x - hip_wl * math.cos(psi)\n    y.RR = obj.position.y - hip_wl * math.sin(psi)\n    [features.RR, features_check] = evaluate_feature(x.RR, y.RR, sens, features_check)\n\n    x.RL = obj.position.x - hip_wl * math.cos(beta)\n    y.RL = obj.position.y - hip_wl * math.sin(beta)\n    [features.RL, features_check] = evaluate_feature(x.RL, y.RL, sens, features_check)\n\n    x.FM = (x.FR + x.FL) / 2\n    y.FM = (y.FR + y.FL) / 2\n    [features.FM, features_check] = evaluate_feature(x.FM, y.FM, sens, features_check)\n\n    x.ML = (x.RL + x.FL) / 2\n    y.ML = (y.RL + y.FL) / 2\n    [features.ML, features_check] = evaluate_feature(x.ML, y.ML, sens, features_check)\n\n    x.MR = (x.RR + x.FR) / 2\n    y.MR = (y.RR + y.FR) / 2\n    [features.MR, features_check] = evaluate_feature(x.MR, y.MR, sens, features_check)\n\n    x.RM = (x.RR + x.RL) / 2\n    y.RM = (y.RR + y.RL) / 2\n    [features.RM, features_check] = evaluate_feature(x.RM, y.RM, sens, features_check)\n\n    # combine FOV visibility with self-occlusion of the features\n    X = np.asarray( [x.FL, x.FM, x.FR, x.MR, x.RR, x.RM, x.RL, x.ML])  # Vector of x positions of the features\n    Y = np.asarray( [y.FL, y.FM, y.FR, y.MR, y.RR, y.RM, y.RL, y.ML])  # Vector of y positions of the features\n    FOV_features = np.asarray([features.FL, features.FM, features.FR, features.MR, features.RR, features.RM, features.RL, features.ML])  # Vector of FOV-visibility flags of the features\n    hidden_features = evaluate_hidden_features(X,Y)\n\n    features_list = FOV_features*hidden_features\n\n\n    [features.FL, features.FM, features.FR, features.MR, features.RR, features.RM, features.RL, features.ML] = features_list\n\n    #plt.plot(y.RR, x.RR, 'g^', y.RL, x.RL, 'go', y.FR, x.FR, 'r^', y.FL, x.FL, 'ro', y.FM, x.FM, 'rs', y.ML, x.ML, 'bo', y.MR, x.MR, 'b^', y.RM, x.RM, 'gs' )\n    #plt.show()\n\n    return [features, features_check]\n\ndef evaluate_hidden_features(X,Y):\n    dist = (X ** 2 + Y ** 2) ** 0.5\n    angle = np.arctan(-Y/X)\n\n    ## Eliminate the furthest feature and its two neighbours\n    furthest_feature = np.argmax(dist)\n    hidden_furthest = np.ones(8)\n    hidden_furthest[furthest_feature] = 0\n    hidden_furthest[last_feature(furthest_feature)] = 0\n    hidden_furthest[next_feature(furthest_feature)] = 0\n\n    # Find nearest feature\n    nearest_feature1 = np.argmin(dist)\n    dist[nearest_feature1]= 999\n    nearest_feature2 = np.argmin(dist)\n\n    if nearest_feature1 <= 2 and nearest_feature2 <= 2: ## Front is the main side\n        pos_angle1 = 0\n        pos_angle2 = 2\n    elif 2 <= nearest_feature1 <= 4 and 2 <= nearest_feature2 <= 4: ## Right is the main side\n        pos_angle1 = 2\n        pos_angle2 = 4\n    elif 4 <= nearest_feature1 <= 6 and 4 <= nearest_feature2 <= 6: ## Back is the main side\n        pos_angle1 = 4\n        pos_angle2 = 6\n    elif (6 <= nearest_feature1 or nearest_feature1 ==0) and (6 <= nearest_feature2 or nearest_feature2 ==0): ## Left is the main side\n        pos_angle1 = 6\n        pos_angle2 = 0\n\n    if angle[pos_angle1]>angle[pos_angle2]:\n        angle_big = angle[pos_angle1]\n        angle_small = angle[pos_angle2]\n    else:\n        angle_big = angle[pos_angle2]\n        angle_small = angle[pos_angle1]\n\n    hidden_angle = np.ones(8)\n\n    for i in range(len(angle)):\n        if angle_small <= angle[i] <= angle_big:\n            hidden_angle[i] = 0\n    hidden_angle[pos_angle1] = 1\n    hidden_angle[pos_angle1+1] = 1\n    hidden_angle[pos_angle2] = 1\n\n    hidden = hidden_angle * hidden_furthest\n\n    return hidden\n\ndef last_feature(x):\n    y = x - 1\n    if y == -1:\n        y = 7\n    return y\n\ndef next_feature(x):\n    y = x + 1\n    if y == 8:\n        y = 0\n    return y\n\ndef evaluate_feature (x,y,sens,check):\n\n    # calculates the feature azimuth\n    if x < 0:\n        azimuth = math.pi - abs(math.atan(foo(y,x)))\n    else:\n        azimuth = abs(math.atan(foo(y,x)))\n\n    # check if the feature is inside the field of view of the sensor\n    if (math.sqrt(x**2 + y**2) <= sens.fov.r and azimuth <= sens.fov.angle/2):\n        feature = 1\n        check=1\n    else:\n        feature = 0\n\n    return [feature,check]\n\ndef foo(x,y):\n    try:\n        return x/y\n    except ZeroDivisionError:\n        return 0\n\nif __name__ == '__main__':\n    sensor_model()\n\n"
},
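For reference, the radar branch of `include_sens_error` above reduces to a few lines of numpy: convert to polar coordinates, add zero-mean Gaussian errors to range and azimuth, convert back. A minimal standalone sketch (the helper name `add_radar_noise` and the default standard deviations are illustrative assumptions; the node reads the real values from the `rangerr`/`azierr` ROS parameters):

```python
import numpy as np

def add_radar_noise(x, y, std_range=0.5, std_azi=0.01, rng=None):
    """Perturb a Cartesian position with polar (range/azimuth) Gaussian noise."""
    rng = rng or np.random.default_rng()
    azi = np.arctan2(y, x)                     # azimuth of the target
    r = np.hypot(x, y)                         # range to the target
    r += rng.normal(0.0, std_range)            # zero-mean range error
    azi += rng.normal(0.0, std_azi)            # zero-mean azimuth error
    return r * np.cos(azi), r * np.sin(azi)    # back to Cartesian

print(add_radar_noise(20.0, 5.0))
```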
{
"alpha_fraction": 0.7955390214920044,
"alphanum_fraction": 0.7955390214920044,
"avg_line_length": 66.25,
"blob_id": "b7f67aae3ea3d234eb917eef92aa351f3bbbf46e",
"content_id": "8492d0d62b1a34f04dcc445914453e67fe4fd3ee",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 1076,
"license_type": "no_license",
"max_line_length": 106,
"num_lines": 16,
"path": "/build/object_list/CMakeFiles/object_list_generate_messages_lisp.dir/cmake_clean.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "file(REMOVE_RECURSE\n \"CMakeFiles/object_list_generate_messages_lisp\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/share/common-lisp/ros/object_list/msg/Features.lisp\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/share/common-lisp/ros/object_list/msg/Geometric.lisp\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/share/common-lisp/ros/object_list/msg/SensorProperty.lisp\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/share/common-lisp/ros/object_list/msg/ObjectList.lisp\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/share/common-lisp/ros/object_list/msg/Classification.lisp\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/share/common-lisp/ros/object_list/msg/ObjectsList.lisp\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/share/common-lisp/ros/object_list/msg/Dimension.lisp\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/share/common-lisp/ros/object_list/msg/EgoData.lisp\"\n)\n\n# Per-language clean rules from dependency scanning.\nforeach(lang )\n include(CMakeFiles/object_list_generate_messages_lisp.dir/cmake_clean_${lang}.cmake OPTIONAL)\nendforeach()\n"
},
{
"alpha_fraction": 0.817258894443512,
"alphanum_fraction": 0.8375634551048279,
"avg_line_length": 97.5,
"blob_id": "780c9fceb87160bbc6d13c4307b20f62db625034",
"content_id": "101d41670dd6b7ce0a08b13f0cc7d9a48419a5c4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 197,
"license_type": "no_license",
"max_line_length": 162,
"num_lines": 2,
"path": "/build/osi3_bridge/catkin_generated/osi3_bridge-msg-extras.cmake.installspace.in",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "set(osi3_bridge_MESSAGE_FILES \"msg/Dimension3d.msg;msg/Orientation3d.msg;msg/MovingObject.msg;msg/GroundTruthMovingObjects.msg;msg/TrafficUpdateMovingObject.msg\")\nset(osi3_bridge_SERVICE_FILES \"\")\n"
},
{
"alpha_fraction": 0.7560975551605225,
"alphanum_fraction": 0.7707316875457764,
"avg_line_length": 50.25,
"blob_id": "da3db53142fa8e89460fb4ed0e244d3626041211",
"content_id": "1e15480e73a6c08519f81f7671ac6116879cd1b3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 205,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 4,
"path": "/build/osi3_bridge/catkin_generated/installspace/osi3_bridge-msg-paths.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "# generated from genmsg/cmake/pkg-msg-paths.cmake.installspace.in\n\n_prepend_path(\"${osi3_bridge_DIR}/..\" \"msg\" osi3_bridge_MSG_INCLUDE_DIRS UNIQUE)\nset(osi3_bridge_MSG_DEPENDENCIES geometry_msgs;std_msgs)\n"
},
{
"alpha_fraction": 0.7982456088066101,
"alphanum_fraction": 0.7982456088066101,
"avg_line_length": 37,
"blob_id": "ce249496014b8b5fd55099c367ab75e0f4dda004",
"content_id": "231205a760b777cadd3f50c7dfb22f25cc8ae545",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 342,
"license_type": "no_license",
"max_line_length": 98,
"num_lines": 9,
"path": "/build/vehicle_control/CMakeFiles/vehicle_control_generate_messages_cpp.dir/cmake_clean.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "file(REMOVE_RECURSE\n \"CMakeFiles/vehicle_control_generate_messages_cpp\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/include/vehicle_control/Trajectory.h\"\n)\n\n# Per-language clean rules from dependency scanning.\nforeach(lang )\n include(CMakeFiles/vehicle_control_generate_messages_cpp.dir/cmake_clean_${lang}.cmake OPTIONAL)\nendforeach()\n"
},
{
"alpha_fraction": 0.6740068197250366,
"alphanum_fraction": 0.6860586404800415,
"avg_line_length": 25.151750564575195,
"blob_id": "25bb09c98e3eb3391f70e14f27d480e6506efedf",
"content_id": "d64681d89aa6dfce27e6e8ffe17dbe0fed779064",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 6721,
"license_type": "no_license",
"max_line_length": 138,
"num_lines": 257,
"path": "/devel/include/object_list/EgoData.h",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "// Generated by gencpp from file object_list/EgoData.msg\n// DO NOT EDIT!\n\n\n#ifndef OBJECT_LIST_MESSAGE_EGODATA_H\n#define OBJECT_LIST_MESSAGE_EGODATA_H\n\n\n#include <string>\n#include <vector>\n#include <map>\n\n#include <ros/types.h>\n#include <ros/serialization.h>\n#include <ros/builtin_message_traits.h>\n#include <ros/message_operations.h>\n\n#include <std_msgs/Header.h>\n#include <object_list/Geometric.h>\n#include <object_list/Dimension.h>\n\nnamespace object_list\n{\ntemplate <class ContainerAllocator>\nstruct EgoData_\n{\n typedef EgoData_<ContainerAllocator> Type;\n\n EgoData_()\n : header()\n , geometric()\n , dimension() {\n }\n EgoData_(const ContainerAllocator& _alloc)\n : header(_alloc)\n , geometric(_alloc)\n , dimension(_alloc) {\n (void)_alloc;\n }\n\n\n\n typedef ::std_msgs::Header_<ContainerAllocator> _header_type;\n _header_type header;\n\n typedef ::object_list::Geometric_<ContainerAllocator> _geometric_type;\n _geometric_type geometric;\n\n typedef ::object_list::Dimension_<ContainerAllocator> _dimension_type;\n _dimension_type dimension;\n\n\n\n\n\n typedef boost::shared_ptr< ::object_list::EgoData_<ContainerAllocator> > Ptr;\n typedef boost::shared_ptr< ::object_list::EgoData_<ContainerAllocator> const> ConstPtr;\n\n}; // struct EgoData_\n\ntypedef ::object_list::EgoData_<std::allocator<void> > EgoData;\n\ntypedef boost::shared_ptr< ::object_list::EgoData > EgoDataPtr;\ntypedef boost::shared_ptr< ::object_list::EgoData const> EgoDataConstPtr;\n\n// constants requiring out of line definition\n\n\n\ntemplate<typename ContainerAllocator>\nstd::ostream& operator<<(std::ostream& s, const ::object_list::EgoData_<ContainerAllocator> & v)\n{\nros::message_operations::Printer< ::object_list::EgoData_<ContainerAllocator> >::stream(s, \"\", v);\nreturn s;\n}\n\n\ntemplate<typename ContainerAllocator1, typename ContainerAllocator2>\nbool operator==(const ::object_list::EgoData_<ContainerAllocator1> & lhs, const ::object_list::EgoData_<ContainerAllocator2> & rhs)\n{\n return lhs.header == rhs.header &&\n lhs.geometric == rhs.geometric &&\n lhs.dimension == rhs.dimension;\n}\n\ntemplate<typename ContainerAllocator1, typename ContainerAllocator2>\nbool operator!=(const ::object_list::EgoData_<ContainerAllocator1> & lhs, const ::object_list::EgoData_<ContainerAllocator2> & rhs)\n{\n return !(lhs == rhs);\n}\n\n\n} // namespace object_list\n\nnamespace ros\n{\nnamespace message_traits\n{\n\n\n\n\n\ntemplate <class ContainerAllocator>\nstruct IsFixedSize< ::object_list::EgoData_<ContainerAllocator> >\n : FalseType\n { };\n\ntemplate <class ContainerAllocator>\nstruct IsFixedSize< ::object_list::EgoData_<ContainerAllocator> const>\n : FalseType\n { };\n\ntemplate <class ContainerAllocator>\nstruct IsMessage< ::object_list::EgoData_<ContainerAllocator> >\n : TrueType\n { };\n\ntemplate <class ContainerAllocator>\nstruct IsMessage< ::object_list::EgoData_<ContainerAllocator> const>\n : TrueType\n { };\n\ntemplate <class ContainerAllocator>\nstruct HasHeader< ::object_list::EgoData_<ContainerAllocator> >\n : TrueType\n { };\n\ntemplate <class ContainerAllocator>\nstruct HasHeader< ::object_list::EgoData_<ContainerAllocator> const>\n : TrueType\n { };\n\n\ntemplate<class ContainerAllocator>\nstruct MD5Sum< ::object_list::EgoData_<ContainerAllocator> >\n{\n static const char* value()\n {\n return \"e7e294d9eaab8d77f6809dd7e07899e8\";\n }\n\n static const char* value(const ::object_list::EgoData_<ContainerAllocator>&) { return value(); }\n static const uint64_t static_value1 = 
0xe7e294d9eaab8d77ULL;\n static const uint64_t static_value2 = 0xf6809dd7e07899e8ULL;\n};\n\ntemplate<class ContainerAllocator>\nstruct DataType< ::object_list::EgoData_<ContainerAllocator> >\n{\n static const char* value()\n {\n return \"object_list/EgoData\";\n }\n\n static const char* value(const ::object_list::EgoData_<ContainerAllocator>&) { return value(); }\n};\n\ntemplate<class ContainerAllocator>\nstruct Definition< ::object_list::EgoData_<ContainerAllocator> >\n{\n static const char* value()\n {\n return \"Header header\\n\"\n\"object_list/Geometric geometric\\n\"\n\"object_list/Dimension dimension\\n\"\n\"\\n\"\n\"\\n\"\n\"================================================================================\\n\"\n\"MSG: std_msgs/Header\\n\"\n\"# Standard metadata for higher-level stamped data types.\\n\"\n\"# This is generally used to communicate timestamped data \\n\"\n\"# in a particular coordinate frame.\\n\"\n\"# \\n\"\n\"# sequence ID: consecutively increasing ID \\n\"\n\"uint32 seq\\n\"\n\"#Two-integer timestamp that is expressed as:\\n\"\n\"# * stamp.sec: seconds (stamp_secs) since epoch (in Python the variable is called 'secs')\\n\"\n\"# * stamp.nsec: nanoseconds since stamp_secs (in Python the variable is called 'nsecs')\\n\"\n\"# time-handling sugar is provided by the client library\\n\"\n\"time stamp\\n\"\n\"#Frame this data is associated with\\n\"\n\"string frame_id\\n\"\n\"\\n\"\n\"================================================================================\\n\"\n\"MSG: object_list/Geometric\\n\"\n\"float64 x\\n\"\n\"float64 y\\n\"\n\"float64 vx\\n\"\n\"float64 vy\\n\"\n\"float64 ax\\n\"\n\"float64 ay\\n\"\n\"float64 yaw\\n\"\n\"float64 yawrate\\n\"\n\"\\n\"\n\"================================================================================\\n\"\n\"MSG: object_list/Dimension\\n\"\n\"float64 length\\n\"\n\"float64 width\\n\"\n\"float64 length_variance\\n\"\n\"float64 width_variance\\n\"\n\"\\n\"\n;\n }\n\n static const char* value(const ::object_list::EgoData_<ContainerAllocator>&) { return value(); }\n};\n\n} // namespace message_traits\n} // namespace ros\n\nnamespace ros\n{\nnamespace serialization\n{\n\n template<class ContainerAllocator> struct Serializer< ::object_list::EgoData_<ContainerAllocator> >\n {\n template<typename Stream, typename T> inline static void allInOne(Stream& stream, T m)\n {\n stream.next(m.header);\n stream.next(m.geometric);\n stream.next(m.dimension);\n }\n\n ROS_DECLARE_ALLINONE_SERIALIZER\n }; // struct EgoData_\n\n} // namespace serialization\n} // namespace ros\n\nnamespace ros\n{\nnamespace message_operations\n{\n\ntemplate<class ContainerAllocator>\nstruct Printer< ::object_list::EgoData_<ContainerAllocator> >\n{\n template<typename Stream> static void stream(Stream& s, const std::string& indent, const ::object_list::EgoData_<ContainerAllocator>& v)\n {\n s << indent << \"header: \";\n s << std::endl;\n Printer< ::std_msgs::Header_<ContainerAllocator> >::stream(s, indent + \" \", v.header);\n s << indent << \"geometric: \";\n s << std::endl;\n Printer< ::object_list::Geometric_<ContainerAllocator> >::stream(s, indent + \" \", v.geometric);\n s << indent << \"dimension: \";\n s << std::endl;\n Printer< ::object_list::Dimension_<ContainerAllocator> >::stream(s, indent + \" \", v.dimension);\n }\n};\n\n} // namespace message_operations\n} // namespace ros\n\n#endif // OBJECT_LIST_MESSAGE_EGODATA_H\n"
},
{
"alpha_fraction": 0.642903745174408,
"alphanum_fraction": 0.658932089805603,
"avg_line_length": 27.165353775024414,
"blob_id": "071c7db7aebb4340a93685ef2c493a72854fc59c",
"content_id": "ba47939fe8000fe6bb2eeeb427a1713cc97d8d99",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 10731,
"license_type": "no_license",
"max_line_length": 141,
"num_lines": 381,
"path": "/devel/include/object_list/ObjectList.h",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "// Generated by gencpp from file object_list/ObjectList.msg\n// DO NOT EDIT!\n\n\n#ifndef OBJECT_LIST_MESSAGE_OBJECTLIST_H\n#define OBJECT_LIST_MESSAGE_OBJECTLIST_H\n\n\n#include <string>\n#include <vector>\n#include <map>\n\n#include <ros/types.h>\n#include <ros/serialization.h>\n#include <ros/builtin_message_traits.h>\n#include <ros/message_operations.h>\n\n#include <object_list/Geometric.h>\n#include <object_list/Dimension.h>\n#include <object_list/Classification.h>\n#include <object_list/Features.h>\n\nnamespace object_list\n{\ntemplate <class ContainerAllocator>\nstruct ObjectList_\n{\n typedef ObjectList_<ContainerAllocator> Type;\n\n ObjectList_()\n : obj_id(0)\n , time(0.0)\n , geometric()\n , covariance()\n , dimension()\n , prop_existence(0.0)\n , prop_nonexistence(0.0)\n , prop_persistance(0.0)\n , prop_mov(0.0)\n , classification()\n , classification_mass()\n , features()\n , sensors_fused() {\n covariance.assign(0.0);\n\n classification_mass.assign(0.0);\n }\n ObjectList_(const ContainerAllocator& _alloc)\n : obj_id(0)\n , time(0.0)\n , geometric(_alloc)\n , covariance()\n , dimension(_alloc)\n , prop_existence(0.0)\n , prop_nonexistence(0.0)\n , prop_persistance(0.0)\n , prop_mov(0.0)\n , classification(_alloc)\n , classification_mass()\n , features(_alloc)\n , sensors_fused(_alloc) {\n (void)_alloc;\n covariance.assign(0.0);\n\n classification_mass.assign(0.0);\n }\n\n\n\n typedef int32_t _obj_id_type;\n _obj_id_type obj_id;\n\n typedef double _time_type;\n _time_type time;\n\n typedef ::object_list::Geometric_<ContainerAllocator> _geometric_type;\n _geometric_type geometric;\n\n typedef boost::array<double, 36> _covariance_type;\n _covariance_type covariance;\n\n typedef ::object_list::Dimension_<ContainerAllocator> _dimension_type;\n _dimension_type dimension;\n\n typedef double _prop_existence_type;\n _prop_existence_type prop_existence;\n\n typedef double _prop_nonexistence_type;\n _prop_nonexistence_type prop_nonexistence;\n\n typedef double _prop_persistance_type;\n _prop_persistance_type prop_persistance;\n\n typedef double _prop_mov_type;\n _prop_mov_type prop_mov;\n\n typedef ::object_list::Classification_<ContainerAllocator> _classification_type;\n _classification_type classification;\n\n typedef boost::array<double, 12> _classification_mass_type;\n _classification_mass_type classification_mass;\n\n typedef ::object_list::Features_<ContainerAllocator> _features_type;\n _features_type features;\n\n typedef std::vector<int64_t, typename ContainerAllocator::template rebind<int64_t>::other > _sensors_fused_type;\n _sensors_fused_type sensors_fused;\n\n\n\n\n\n typedef boost::shared_ptr< ::object_list::ObjectList_<ContainerAllocator> > Ptr;\n typedef boost::shared_ptr< ::object_list::ObjectList_<ContainerAllocator> const> ConstPtr;\n\n}; // struct ObjectList_\n\ntypedef ::object_list::ObjectList_<std::allocator<void> > ObjectList;\n\ntypedef boost::shared_ptr< ::object_list::ObjectList > ObjectListPtr;\ntypedef boost::shared_ptr< ::object_list::ObjectList const> ObjectListConstPtr;\n\n// constants requiring out of line definition\n\n\n\ntemplate<typename ContainerAllocator>\nstd::ostream& operator<<(std::ostream& s, const ::object_list::ObjectList_<ContainerAllocator> & v)\n{\nros::message_operations::Printer< ::object_list::ObjectList_<ContainerAllocator> >::stream(s, \"\", v);\nreturn s;\n}\n\n\ntemplate<typename ContainerAllocator1, typename ContainerAllocator2>\nbool operator==(const ::object_list::ObjectList_<ContainerAllocator1> & lhs, const 
::object_list::ObjectList_<ContainerAllocator2> & rhs)\n{\n return lhs.obj_id == rhs.obj_id &&\n lhs.time == rhs.time &&\n lhs.geometric == rhs.geometric &&\n lhs.covariance == rhs.covariance &&\n lhs.dimension == rhs.dimension &&\n lhs.prop_existence == rhs.prop_existence &&\n lhs.prop_nonexistence == rhs.prop_nonexistence &&\n lhs.prop_persistance == rhs.prop_persistance &&\n lhs.prop_mov == rhs.prop_mov &&\n lhs.classification == rhs.classification &&\n lhs.classification_mass == rhs.classification_mass &&\n lhs.features == rhs.features &&\n lhs.sensors_fused == rhs.sensors_fused;\n}\n\ntemplate<typename ContainerAllocator1, typename ContainerAllocator2>\nbool operator!=(const ::object_list::ObjectList_<ContainerAllocator1> & lhs, const ::object_list::ObjectList_<ContainerAllocator2> & rhs)\n{\n return !(lhs == rhs);\n}\n\n\n} // namespace object_list\n\nnamespace ros\n{\nnamespace message_traits\n{\n\n\n\n\n\ntemplate <class ContainerAllocator>\nstruct IsFixedSize< ::object_list::ObjectList_<ContainerAllocator> >\n : FalseType\n { };\n\ntemplate <class ContainerAllocator>\nstruct IsFixedSize< ::object_list::ObjectList_<ContainerAllocator> const>\n : FalseType\n { };\n\ntemplate <class ContainerAllocator>\nstruct IsMessage< ::object_list::ObjectList_<ContainerAllocator> >\n : TrueType\n { };\n\ntemplate <class ContainerAllocator>\nstruct IsMessage< ::object_list::ObjectList_<ContainerAllocator> const>\n : TrueType\n { };\n\ntemplate <class ContainerAllocator>\nstruct HasHeader< ::object_list::ObjectList_<ContainerAllocator> >\n : FalseType\n { };\n\ntemplate <class ContainerAllocator>\nstruct HasHeader< ::object_list::ObjectList_<ContainerAllocator> const>\n : FalseType\n { };\n\n\ntemplate<class ContainerAllocator>\nstruct MD5Sum< ::object_list::ObjectList_<ContainerAllocator> >\n{\n static const char* value()\n {\n return \"d5793b04b71b063f6fee4d02602a19de\";\n }\n\n static const char* value(const ::object_list::ObjectList_<ContainerAllocator>&) { return value(); }\n static const uint64_t static_value1 = 0xd5793b04b71b063fULL;\n static const uint64_t static_value2 = 0x6fee4d02602a19deULL;\n};\n\ntemplate<class ContainerAllocator>\nstruct DataType< ::object_list::ObjectList_<ContainerAllocator> >\n{\n static const char* value()\n {\n return \"object_list/ObjectList\";\n }\n\n static const char* value(const ::object_list::ObjectList_<ContainerAllocator>&) { return value(); }\n};\n\ntemplate<class ContainerAllocator>\nstruct Definition< ::object_list::ObjectList_<ContainerAllocator> >\n{\n static const char* value()\n {\n return \"int32 obj_id\\n\"\n\"float64 time\\n\"\n\"Geometric geometric\\n\"\n\"float64[36] covariance\\n\"\n\"Dimension dimension\\n\"\n\"float64 prop_existence\\n\"\n\"float64 prop_nonexistence\\n\"\n\"float64 prop_persistance\\n\"\n\"float64 prop_mov \\n\"\n\"Classification classification\\n\"\n\"float64[12] classification_mass\\n\"\n\"Features features\\n\"\n\"int64[] sensors_fused\\n\"\n\"\\n\"\n\"================================================================================\\n\"\n\"MSG: object_list/Geometric\\n\"\n\"float64 x\\n\"\n\"float64 y\\n\"\n\"float64 vx\\n\"\n\"float64 vy\\n\"\n\"float64 ax\\n\"\n\"float64 ay\\n\"\n\"float64 yaw\\n\"\n\"float64 yawrate\\n\"\n\"\\n\"\n\"================================================================================\\n\"\n\"MSG: object_list/Dimension\\n\"\n\"float64 length\\n\"\n\"float64 width\\n\"\n\"float64 length_variance\\n\"\n\"float64 
width_variance\\n\"\n\"\\n\"\n\"\\n\"\n\"================================================================================\\n\"\n\"MSG: object_list/Classification\\n\"\n\"float32 car\\n\"\n\"float32 truck\\n\"\n\"float32 motorcycle\\n\"\n\"float32 bicycle\\n\"\n\"float32 pedestrian\\n\"\n\"float32 stacionary\\n\"\n\"float32 other\\n\"\n\"\\n\"\n\"================================================================================\\n\"\n\"MSG: object_list/Features\\n\"\n\"uint8 FL\\n\"\n\"uint8 FM\\n\"\n\"uint8 FR\\n\"\n\"uint8 MR\\n\"\n\"uint8 RR\\n\"\n\"uint8 RM\\n\"\n\"uint8 RL\\n\"\n\"uint8 ML\\n\"\n;\n }\n\n static const char* value(const ::object_list::ObjectList_<ContainerAllocator>&) { return value(); }\n};\n\n} // namespace message_traits\n} // namespace ros\n\nnamespace ros\n{\nnamespace serialization\n{\n\n template<class ContainerAllocator> struct Serializer< ::object_list::ObjectList_<ContainerAllocator> >\n {\n template<typename Stream, typename T> inline static void allInOne(Stream& stream, T m)\n {\n stream.next(m.obj_id);\n stream.next(m.time);\n stream.next(m.geometric);\n stream.next(m.covariance);\n stream.next(m.dimension);\n stream.next(m.prop_existence);\n stream.next(m.prop_nonexistence);\n stream.next(m.prop_persistance);\n stream.next(m.prop_mov);\n stream.next(m.classification);\n stream.next(m.classification_mass);\n stream.next(m.features);\n stream.next(m.sensors_fused);\n }\n\n ROS_DECLARE_ALLINONE_SERIALIZER\n }; // struct ObjectList_\n\n} // namespace serialization\n} // namespace ros\n\nnamespace ros\n{\nnamespace message_operations\n{\n\ntemplate<class ContainerAllocator>\nstruct Printer< ::object_list::ObjectList_<ContainerAllocator> >\n{\n template<typename Stream> static void stream(Stream& s, const std::string& indent, const ::object_list::ObjectList_<ContainerAllocator>& v)\n {\n s << indent << \"obj_id: \";\n Printer<int32_t>::stream(s, indent + \" \", v.obj_id);\n s << indent << \"time: \";\n Printer<double>::stream(s, indent + \" \", v.time);\n s << indent << \"geometric: \";\n s << std::endl;\n Printer< ::object_list::Geometric_<ContainerAllocator> >::stream(s, indent + \" \", v.geometric);\n s << indent << \"covariance[]\" << std::endl;\n for (size_t i = 0; i < v.covariance.size(); ++i)\n {\n s << indent << \" covariance[\" << i << \"]: \";\n Printer<double>::stream(s, indent + \" \", v.covariance[i]);\n }\n s << indent << \"dimension: \";\n s << std::endl;\n Printer< ::object_list::Dimension_<ContainerAllocator> >::stream(s, indent + \" \", v.dimension);\n s << indent << \"prop_existence: \";\n Printer<double>::stream(s, indent + \" \", v.prop_existence);\n s << indent << \"prop_nonexistence: \";\n Printer<double>::stream(s, indent + \" \", v.prop_nonexistence);\n s << indent << \"prop_persistance: \";\n Printer<double>::stream(s, indent + \" \", v.prop_persistance);\n s << indent << \"prop_mov: \";\n Printer<double>::stream(s, indent + \" \", v.prop_mov);\n s << indent << \"classification: \";\n s << std::endl;\n Printer< ::object_list::Classification_<ContainerAllocator> >::stream(s, indent + \" \", v.classification);\n s << indent << \"classification_mass[]\" << std::endl;\n for (size_t i = 0; i < v.classification_mass.size(); ++i)\n {\n s << indent << \" classification_mass[\" << i << \"]: \";\n Printer<double>::stream(s, indent + \" \", v.classification_mass[i]);\n }\n s << indent << \"features: \";\n s << std::endl;\n Printer< ::object_list::Features_<ContainerAllocator> >::stream(s, indent + \" \", v.features);\n s << indent << 
\"sensors_fused[]\" << std::endl;\n for (size_t i = 0; i < v.sensors_fused.size(); ++i)\n {\n s << indent << \" sensors_fused[\" << i << \"]: \";\n Printer<int64_t>::stream(s, indent + \" \", v.sensors_fused[i]);\n }\n }\n};\n\n} // namespace message_operations\n} // namespace ros\n\n#endif // OBJECT_LIST_MESSAGE_OBJECTLIST_H\n"
},
{
"alpha_fraction": 0.5392042398452759,
"alphanum_fraction": 0.5506144165992737,
"avg_line_length": 22.768115997314453,
"blob_id": "8ff0420610de6901749551d263d9b888f91e59f1",
"content_id": "40cfbd87947066c38043e96c4133a85350702631",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 3419,
"license_type": "no_license",
"max_line_length": 101,
"num_lines": 138,
"path": "/src/osi3_bridge/src/udp.c",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "#include \"udp.h\"\r\n\r\nbool udp_init(SOCKET * s)\r\n{\r\n#ifdef _WIN32\r\n WSADATA wsa;\r\n int rc = WSAStartup(MAKEWORD(2,0),&wsa);\r\n if(rc != 0)\r\n {\r\n fprintf(stderr, \"Fehler: startWinsock, fehler code: %d\\n\", rc);\r\n return false;\r\n }\r\n#endif\r\n *s = socket(AF_INET, SOCK_DGRAM, 0);\r\n#ifdef __linux__\r\n if(*s < 0)\r\n#elif _WIN32\r\n if(*s == INVALID_SOCKET)\r\n#endif\r\n {\r\n int err = ERROR_CODE;\r\n fprintf(stderr, \"Fehler: Der Socket konnte nicht erstellt werden, fehler code: %d\\n\", err);\r\n return false;\r\n }\r\n\r\n return true;\r\n}\r\n\r\nbool udp_bind(SOCKET * s, uint16_t port)\r\n{\r\n struct sockaddr_in addr;\r\n int rc;\r\n\taddr.sin_family = AF_INET;\r\n\taddr.sin_addr.s_addr = htonl(INADDR_ANY);\r\n addr.sin_port = htons(port);\r\n rc = bind(*s, (struct sockaddr *)&addr, sizeof(addr));\r\n#ifdef __linux__\r\n if(rc < 0)\r\n#elif _WIN32\r\n if(rc == SOCKET_ERROR)\r\n#endif\r\n {\r\n int err = ERROR_CODE;\r\n fprintf(stderr, \"Fehler: Binden an Port war nicht möglich, fehler code: %d\\n\", err);\r\n return false;\r\n }\r\n return true;\r\n}\r\n\r\nbool udp_send(SOCKET * s, const char * addr, uint16_t port, const uint8_t * data, size_t data_size)\r\n{\r\n SOCKADDR_IN addr_in;\r\n addr_in.sin_family = AF_INET;\r\n addr_in.sin_port = htons(port);\r\n addr_in.sin_addr.s_addr = inet_addr(addr);\r\n int rc = sendto (*s, (const char *)data, data_size, 0, (SOCKADDR*)&addr_in, sizeof(SOCKADDR_IN));\r\n#ifdef __linux__\r\n if(rc < 0)\r\n#elif _WIN32\r\n if(rc == SOCKET_ERROR)\r\n#endif\r\n {\r\n fprintf(stderr, \"Fehler: sendto, fehler code: %d\\n\", ERROR_CODE);\r\n return false;\r\n }\r\n return true;\r\n}\r\n\r\nbool udp_recv(SOCKET * s, uint8_t * data, size_t * data_size,\r\n char * remote_addr, uint16_t * remote_port)\r\n{\r\n SOCKADDR_IN remoteAddr;\r\n socklen_t remoteAddrLen = sizeof(SOCKADDR_IN);\r\n int rc = recvfrom(*s, (char *)data, *data_size, 0, (SOCKADDR*)&remoteAddr, &remoteAddrLen);\r\n\r\n#ifdef __linux__\r\n if(rc < 0)\r\n#elif _WIN32\r\n if(rc == SOCKET_ERROR)\r\n#endif\r\n { \r\n fprintf(stderr, \"Fehler: recvfrom, fehler code: %d\\n\", ERROR_CODE);\r\n return false;\r\n }\r\n \r\n if(remote_addr != NULL)\r\n {\r\n strcpy(remote_addr, inet_ntoa(remoteAddr.sin_addr));\r\n *remote_port = ntohs(remoteAddr.sin_port);\r\n }\r\n *data_size = rc;\r\n return true;\r\n}\r\n\r\nbool udp_recv_timeout(SOCKET * s, uint8_t * data, size_t * data_size,\r\n char * remote_addr, uint16_t * remote_port)\r\n{\r\n\r\n#ifdef __linux__\r\n return false;\r\n#elif _WIN32\r\n struct timeval tv_timeout = { 0, 0 };\r\n fd_set fdset;\r\n \r\n FD_ZERO( &fdset );\r\n FD_SET( *s, &fdset );\r\n int rc_select = select( *s+1, &fdset, NULL, NULL, (PTIMEVAL)&tv_timeout );\r\n if ( rc_select == SOCKET_ERROR )\r\n {\r\n fprintf(stderr, \"Fehler: select, fehler code: %d\\n\", ERROR_CODE);\r\n return false;\r\n }\r\n \r\n \r\n if ( FD_ISSET( *s, &fdset ) )\r\n {\r\n return udp_recv(s, data, data_size, remote_addr, remote_port);\r\n }\r\n \r\n return false;\r\n#endif \r\n}\r\n\r\nbool udp_close(SOCKET * s)\r\n{\r\n#ifdef __linux__\r\n int rc = close(*s);\r\n if(rc < 0)\r\n#elif _WIN32\r\n int rc = closesocket(*s);\r\n if(rc == SOCKET_ERROR)\r\n#endif\r\n {\r\n fprintf(stderr, \"Fehler: socketclose, fehler code: %d\\n\", ERROR_CODE);\r\n return false;\r\n }\r\n return true;\r\n}\r\n"
},
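udp.c wraps the BSD/Winsock socket API behind `udp_init`/`udp_bind`/`udp_recv`/`udp_close`. A rough Python sketch of the same receive path, using only the standard library (the function name is hypothetical; 22222 mirrors the bridge's DEFAULT_UDP_PORT):

```python
import socket

def udp_receive_loop(port=22222, bufsize=65535):
    """Bind a UDP socket on all interfaces and print each incoming datagram."""
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)   # udp_init
    sock.bind(("", port))                                     # udp_bind on INADDR_ANY
    while True:
        data, (addr, rport) = sock.recvfrom(bufsize)          # udp_recv
        print(f"{len(data)} bytes from {addr}:{rport}")
```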
{
"alpha_fraction": 0.4993394911289215,
"alphanum_fraction": 0.5066490769386292,
"avg_line_length": 48.79824447631836,
"blob_id": "028d4b877386aa1581657a3958f4ff2256629429",
"content_id": "571cfd167b3d451f2608e81dfe04d06870f87eaa",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 11355,
"license_type": "no_license",
"max_line_length": 162,
"num_lines": 228,
"path": "/src/osi3_bridge/src/osi3_publisher.cpp",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "#include <thread>\n#include <mutex>\n\n#include <ros/ros.h>\n#include <osi3_bridge/GroundTruthMovingObjects.h>\n#include <osi_version.pb.h>\n#include <osi_groundtruth.pb.h>\n#include <osi_protocol_header.h>\nextern \"C\" {\n #include <udp.h>\n}\n\n#define MAX_BUFFER_SIZE 65535 // UDP Buffer\n#define DEFAULT_UDP_PORT 22222\n#define DEFAULT_LOOP_RATE 22222\n\nvoid collector(osi3_bridge::GroundTruthMovingObjects &msg, std::mutex &msg_mutex, int port)\n{\n SOCKET udp_sock;\n static osi3::InterfaceVersion currentInterfaceVersion = osi3::InterfaceVersion::descriptor()->file()->options().GetExtension(osi3::current_interface_version);\n void *network_buffer = new uint8_t[MAX_BUFFER_SIZE];\n \n if(!udp_init(&udp_sock))\n {\n ROS_FATAL(\"UDP Init\");\n google::protobuf::ShutdownProtobufLibrary();\n delete [] (uint8_t *)network_buffer;\n return;\n }\n if(!udp_bind(&udp_sock, port))\n {\n ROS_FATAL(\"UDP Bind\");\n udp_close(&udp_sock);\n google::protobuf::ShutdownProtobufLibrary();\n delete [] (uint8_t *)network_buffer;\n return;\n }\n \n while(ros::ok())\n {\n size_t cur_size = MAX_BUFFER_SIZE;\n if(!udp_recv(&udp_sock, (uint8_t *)network_buffer, &cur_size, NULL, NULL))\n {\n ROS_FATAL(\"UDP Recv\");\n break;\n }\n \n if(osiph_check_magic_id(network_buffer) && osiph_check_protocol_version(network_buffer) &&\n osiph_get_osi_version_major(network_buffer) == currentInterfaceVersion.version_major() &&\n osiph_get_osi_version_minor(network_buffer) == currentInterfaceVersion.version_minor() &&\n osiph_get_osi_version_patch(network_buffer) == currentInterfaceVersion.version_patch() &&\n osiph_get_payload_size(network_buffer) == cur_size - osiph_get_header_size())\n {\n if(osiph_get_payload_type(network_buffer) == osi_GroundTruth)\n {\n osi3::GroundTruth osi_in;\n osi3_bridge::GroundTruthMovingObjects ros_in;\n std::string payload((char *)osiph_get_payload(network_buffer),\n osiph_get_payload_size(network_buffer));\n osi_in.ParseFromString(payload);\n \n if(osi_in.has_timestamp() && \n osi_in.timestamp().has_seconds() &&\n osi_in.timestamp().has_nanos())\n {\n ros_in.header.stamp = ros::Time(osi_in.timestamp().seconds(),\n osi_in.timestamp().nanos());\n }\n else\n {\n ros_in.header.stamp = ros::Time();\n }\n \n if(osi_in.moving_object_size() > 0)\n {\n ros_in.objects.resize(osi_in.moving_object_size());\n }\n \n for(int obj_cnt = 0; obj_cnt < osi_in.moving_object_size(); ++obj_cnt)\n {\n ros_in.objects[obj_cnt].id = osi_in.moving_object(obj_cnt).id().value();\n \n ros_in.objects[obj_cnt].dimension.length = osi_in.moving_object(obj_cnt).base().dimension().length();\n ros_in.objects[obj_cnt].dimension.width = osi_in.moving_object(obj_cnt).base().dimension().width();\n ros_in.objects[obj_cnt].dimension.height = osi_in.moving_object(obj_cnt).base().dimension().height();\n \n ros_in.objects[obj_cnt].position.x = osi_in.moving_object(obj_cnt).base().position().x();\n ros_in.objects[obj_cnt].position.y = osi_in.moving_object(obj_cnt).base().position().y();\n ros_in.objects[obj_cnt].position.z = osi_in.moving_object(obj_cnt).base().position().z();\n \n ros_in.objects[obj_cnt].orientation.roll = osi_in.moving_object(obj_cnt).base().orientation().roll();\n ros_in.objects[obj_cnt].orientation.roll = osi_in.moving_object(obj_cnt).base().orientation().pitch();\n ros_in.objects[obj_cnt].orientation.roll = osi_in.moving_object(obj_cnt).base().orientation().yaw();\n \n ros_in.objects[obj_cnt].velocity.x = osi_in.moving_object(obj_cnt).base().velocity().x();\n ros_in.objects[obj_cnt].velocity.y = 
osi_in.moving_object(obj_cnt).base().velocity().y();\n ros_in.objects[obj_cnt].velocity.z = osi_in.moving_object(obj_cnt).base().velocity().z();\n \n ros_in.objects[obj_cnt].acceleration.x = osi_in.moving_object(obj_cnt).base().acceleration().x();\n ros_in.objects[obj_cnt].acceleration.y = osi_in.moving_object(obj_cnt).base().acceleration().y();\n ros_in.objects[obj_cnt].acceleration.z = osi_in.moving_object(obj_cnt).base().acceleration().z();\n \n switch(osi_in.moving_object(obj_cnt).type())\n {\n case osi3::MovingObject_Type_TYPE_UNKNOWN:\n ros_in.objects[obj_cnt].type = osi3_bridge::MovingObject::TYPE_UNKNOWN;\n break;\n case osi3::MovingObject_Type_TYPE_OTHER:\n ros_in.objects[obj_cnt].type = osi3_bridge::MovingObject::TYPE_OTHER;\n break;\n case osi3::MovingObject_Type_TYPE_PEDESTRIAN:\n ros_in.objects[obj_cnt].type = osi3_bridge::MovingObject::TYPE_PEDESTRIAN;\n break;\n case osi3::MovingObject_Type_TYPE_ANIMAL:\n ros_in.objects[obj_cnt].type = osi3_bridge::MovingObject::TYPE_ANIMAL;\n break;\n case osi3::MovingObject_Type_TYPE_VEHICLE:\n switch(osi_in.moving_object(obj_cnt).vehicle_classification().type())\n {\n case osi3::MovingObject_VehicleClassification_Type_TYPE_UNKNOWN:\n ros_in.objects[obj_cnt].type = osi3_bridge::MovingObject::TYPE_UNKNOWN;\n break;\n case osi3::MovingObject_VehicleClassification_Type_TYPE_OTHER:\n ros_in.objects[obj_cnt].type = osi3_bridge::MovingObject::TYPE_OTHER;\n break;\n case osi3::MovingObject_VehicleClassification_Type_TYPE_SMALL_CAR:\n case osi3::MovingObject_VehicleClassification_Type_TYPE_COMPACT_CAR:\n case osi3::MovingObject_VehicleClassification_Type_TYPE_MEDIUM_CAR:\n case osi3::MovingObject_VehicleClassification_Type_TYPE_LUXURY_CAR:\n ros_in.objects[obj_cnt].type = osi3_bridge::MovingObject::TYPE_CAR;\n break;\n case osi3::MovingObject_VehicleClassification_Type_TYPE_DELIVERY_VAN:\n case osi3::MovingObject_VehicleClassification_Type_TYPE_HEAVY_TRUCK:\n case osi3::MovingObject_VehicleClassification_Type_TYPE_SEMITRAILER:\n ros_in.objects[obj_cnt].type = osi3_bridge::MovingObject::TYPE_TRUCK;\n break;\n case osi3::MovingObject_VehicleClassification_Type_TYPE_TRAILER:\n ros_in.objects[obj_cnt].type = osi3_bridge::MovingObject::TYPE_TRAILER;\n break;\n case osi3::MovingObject_VehicleClassification_Type_TYPE_MOTORBIKE:\n ros_in.objects[obj_cnt].type = osi3_bridge::MovingObject::TYPE_MOTORBIKE;\n break;\n case osi3::MovingObject_VehicleClassification_Type_TYPE_BICYCLE:\n ros_in.objects[obj_cnt].type = osi3_bridge::MovingObject::TYPE_BICYCLE;\n break;\n case osi3::MovingObject_VehicleClassification_Type_TYPE_BUS:\n ros_in.objects[obj_cnt].type = osi3_bridge::MovingObject::TYPE_BUS;\n break;\n case osi3::MovingObject_VehicleClassification_Type_TYPE_TRAM:\n ros_in.objects[obj_cnt].type = osi3_bridge::MovingObject::TYPE_TRAM;\n break;\n case osi3::MovingObject_VehicleClassification_Type_TYPE_TRAIN:\n ros_in.objects[obj_cnt].type = osi3_bridge::MovingObject::TYPE_TRAIN;\n break;\n case osi3::MovingObject_VehicleClassification_Type_TYPE_WHEELCHAIR:\n ros_in.objects[obj_cnt].type = osi3_bridge::MovingObject::TYPE_WHEELCHAIR;\n break;\n default:\n ros_in.objects[obj_cnt].type = osi3_bridge::MovingObject::TYPE_UNKNOWN;\n break;\n }\n break;\n default:\n ros_in.objects[obj_cnt].type = osi3_bridge::MovingObject::TYPE_UNKNOWN;\n break;\n }\n msg_mutex.lock();\n msg = ros_in;\n msg_mutex.unlock();\n ROS_INFO(\"Neue UDP Nachricht\");\n }\n }\n else\n {\n // TODO: Was macht man mit anderen Payload Types\n ROS_WARN(\"Falscher Type\");\n }\n }\n else\n {\n // 
TODO: Was machet man mit anderen Paketen?\n ROS_WARN(\"Falsches Paket\");\n }\n }\n \n udp_close(&udp_sock);\n google::protobuf::ShutdownProtobufLibrary();\n delete [] (uint8_t *)network_buffer;\n}\n\nint main(int argc, char **argv)\n{\n GOOGLE_PROTOBUF_VERIFY_VERSION;\n uint32_t i = 0;\n int param_port;\n int param_loop_rate;\n \n ros::init(argc, argv, \"osi3_bridge_publisher\");\n ros::NodeHandle n;\n ros::Publisher osi3_pub = n.advertise<osi3_bridge::GroundTruthMovingObjects>(\"from_osi3\", 10);\n n.param(\"loop_rate\", param_loop_rate, DEFAULT_LOOP_RATE);\n n.param(\"listen_port\", param_port, DEFAULT_UDP_PORT);\n ros::Rate loop_rate(param_loop_rate);\n \n osi3_bridge::GroundTruthMovingObjects msg;\n std::mutex msg_mutex;\n std::thread t1(collector, std::ref(msg), std::ref(msg_mutex), param_port);\n \n \n while(ros::ok())\n {\n if(msg.objects.size() > 0)\n {\n msg_mutex.lock();\n msg.header.seq = i++;\n osi3_pub.publish(msg);\n msg_mutex.unlock();\n \n ROS_INFO(\"ROS Publish: %lf\", msg.objects[0].position.x);\n }\n \n loop_rate.sleep();\n }\n \n t1.join();\n \n return 0;\n}\n\n"
},
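The nested switch in osi3_publisher.cpp flattens the OSI vehicle classification onto the bridge's MovingObject type constants. The same mapping expressed as a lookup table, shown as an illustrative Python sketch (the string keys stand in for the enum values and are assumptions for illustration only):

```python
# Lookup-table restatement of the vehicle-classification switch above.
VEHICLE_TYPE_MAP = {
    "SMALL_CAR": "CAR", "COMPACT_CAR": "CAR", "MEDIUM_CAR": "CAR", "LUXURY_CAR": "CAR",
    "DELIVERY_VAN": "TRUCK", "HEAVY_TRUCK": "TRUCK", "SEMITRAILER": "TRUCK",
    "TRAILER": "TRAILER", "MOTORBIKE": "MOTORBIKE", "BICYCLE": "BICYCLE",
    "BUS": "BUS", "TRAM": "TRAM", "TRAIN": "TRAIN", "WHEELCHAIR": "WHEELCHAIR",
    "OTHER": "OTHER",
}

def map_vehicle_type(osi_subtype: str) -> str:
    # anything not listed (including UNKNOWN) falls back to UNKNOWN,
    # matching the default branches of the C++ switch
    return VEHICLE_TYPE_MAP.get(osi_subtype, "UNKNOWN")

print(map_vehicle_type("HEAVY_TRUCK"))  # -> TRUCK
```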
{
"alpha_fraction": 0.6785249710083008,
"alphanum_fraction": 0.6938177943229675,
"avg_line_length": 27.369230270385742,
"blob_id": "267c85462c8a68e0bd53bb1e98e08fbb83644236",
"content_id": "1fd885ca276c6664aeec5abb03a070d96f3b2e45",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 9220,
"license_type": "no_license",
"max_line_length": 145,
"num_lines": 325,
"path": "/devel/include/object_list/SensorProperty.h",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "// Generated by gencpp from file object_list/SensorProperty.msg\n// DO NOT EDIT!\n\n\n#ifndef OBJECT_LIST_MESSAGE_SENSORPROPERTY_H\n#define OBJECT_LIST_MESSAGE_SENSORPROPERTY_H\n\n\n#include <string>\n#include <vector>\n#include <map>\n\n#include <ros/types.h>\n#include <ros/serialization.h>\n#include <ros/builtin_message_traits.h>\n#include <ros/message_operations.h>\n\n\nnamespace object_list\n{\ntemplate <class ContainerAllocator>\nstruct SensorProperty_\n{\n typedef SensorProperty_<ContainerAllocator> Type;\n\n SensorProperty_()\n : sensor_id(0)\n , sensortype(0.0)\n , posx_variance(0.0)\n , posy_variance(0.0)\n , velx_variance(0.0)\n , vely_variance(0.0)\n , trust_existance(0.0)\n , trust_car(0.0)\n , trust_truck(0.0)\n , trust_motorcycle(0.0)\n , trust_bicycle(0.0)\n , trust_pedestrian(0.0)\n , trust_stationary(0.0)\n , trust_other(0.0) {\n }\n SensorProperty_(const ContainerAllocator& _alloc)\n : sensor_id(0)\n , sensortype(0.0)\n , posx_variance(0.0)\n , posy_variance(0.0)\n , velx_variance(0.0)\n , vely_variance(0.0)\n , trust_existance(0.0)\n , trust_car(0.0)\n , trust_truck(0.0)\n , trust_motorcycle(0.0)\n , trust_bicycle(0.0)\n , trust_pedestrian(0.0)\n , trust_stationary(0.0)\n , trust_other(0.0) {\n (void)_alloc;\n }\n\n\n\n typedef int32_t _sensor_id_type;\n _sensor_id_type sensor_id;\n\n typedef double _sensortype_type;\n _sensortype_type sensortype;\n\n typedef double _posx_variance_type;\n _posx_variance_type posx_variance;\n\n typedef double _posy_variance_type;\n _posy_variance_type posy_variance;\n\n typedef double _velx_variance_type;\n _velx_variance_type velx_variance;\n\n typedef double _vely_variance_type;\n _vely_variance_type vely_variance;\n\n typedef double _trust_existance_type;\n _trust_existance_type trust_existance;\n\n typedef double _trust_car_type;\n _trust_car_type trust_car;\n\n typedef double _trust_truck_type;\n _trust_truck_type trust_truck;\n\n typedef double _trust_motorcycle_type;\n _trust_motorcycle_type trust_motorcycle;\n\n typedef double _trust_bicycle_type;\n _trust_bicycle_type trust_bicycle;\n\n typedef double _trust_pedestrian_type;\n _trust_pedestrian_type trust_pedestrian;\n\n typedef double _trust_stationary_type;\n _trust_stationary_type trust_stationary;\n\n typedef double _trust_other_type;\n _trust_other_type trust_other;\n\n\n\n\n\n typedef boost::shared_ptr< ::object_list::SensorProperty_<ContainerAllocator> > Ptr;\n typedef boost::shared_ptr< ::object_list::SensorProperty_<ContainerAllocator> const> ConstPtr;\n\n}; // struct SensorProperty_\n\ntypedef ::object_list::SensorProperty_<std::allocator<void> > SensorProperty;\n\ntypedef boost::shared_ptr< ::object_list::SensorProperty > SensorPropertyPtr;\ntypedef boost::shared_ptr< ::object_list::SensorProperty const> SensorPropertyConstPtr;\n\n// constants requiring out of line definition\n\n\n\ntemplate<typename ContainerAllocator>\nstd::ostream& operator<<(std::ostream& s, const ::object_list::SensorProperty_<ContainerAllocator> & v)\n{\nros::message_operations::Printer< ::object_list::SensorProperty_<ContainerAllocator> >::stream(s, \"\", v);\nreturn s;\n}\n\n\ntemplate<typename ContainerAllocator1, typename ContainerAllocator2>\nbool operator==(const ::object_list::SensorProperty_<ContainerAllocator1> & lhs, const ::object_list::SensorProperty_<ContainerAllocator2> & rhs)\n{\n return lhs.sensor_id == rhs.sensor_id &&\n lhs.sensortype == rhs.sensortype &&\n lhs.posx_variance == rhs.posx_variance &&\n lhs.posy_variance == rhs.posy_variance &&\n lhs.velx_variance == 
rhs.velx_variance &&\n lhs.vely_variance == rhs.vely_variance &&\n lhs.trust_existance == rhs.trust_existance &&\n lhs.trust_car == rhs.trust_car &&\n lhs.trust_truck == rhs.trust_truck &&\n lhs.trust_motorcycle == rhs.trust_motorcycle &&\n lhs.trust_bicycle == rhs.trust_bicycle &&\n lhs.trust_pedestrian == rhs.trust_pedestrian &&\n lhs.trust_stationary == rhs.trust_stationary &&\n lhs.trust_other == rhs.trust_other;\n}\n\ntemplate<typename ContainerAllocator1, typename ContainerAllocator2>\nbool operator!=(const ::object_list::SensorProperty_<ContainerAllocator1> & lhs, const ::object_list::SensorProperty_<ContainerAllocator2> & rhs)\n{\n return !(lhs == rhs);\n}\n\n\n} // namespace object_list\n\nnamespace ros\n{\nnamespace message_traits\n{\n\n\n\n\n\ntemplate <class ContainerAllocator>\nstruct IsFixedSize< ::object_list::SensorProperty_<ContainerAllocator> >\n : TrueType\n { };\n\ntemplate <class ContainerAllocator>\nstruct IsFixedSize< ::object_list::SensorProperty_<ContainerAllocator> const>\n : TrueType\n { };\n\ntemplate <class ContainerAllocator>\nstruct IsMessage< ::object_list::SensorProperty_<ContainerAllocator> >\n : TrueType\n { };\n\ntemplate <class ContainerAllocator>\nstruct IsMessage< ::object_list::SensorProperty_<ContainerAllocator> const>\n : TrueType\n { };\n\ntemplate <class ContainerAllocator>\nstruct HasHeader< ::object_list::SensorProperty_<ContainerAllocator> >\n : FalseType\n { };\n\ntemplate <class ContainerAllocator>\nstruct HasHeader< ::object_list::SensorProperty_<ContainerAllocator> const>\n : FalseType\n { };\n\n\ntemplate<class ContainerAllocator>\nstruct MD5Sum< ::object_list::SensorProperty_<ContainerAllocator> >\n{\n static const char* value()\n {\n return \"b92131fc47bc49b0227fc3ddb6760ee8\";\n }\n\n static const char* value(const ::object_list::SensorProperty_<ContainerAllocator>&) { return value(); }\n static const uint64_t static_value1 = 0xb92131fc47bc49b0ULL;\n static const uint64_t static_value2 = 0x227fc3ddb6760ee8ULL;\n};\n\ntemplate<class ContainerAllocator>\nstruct DataType< ::object_list::SensorProperty_<ContainerAllocator> >\n{\n static const char* value()\n {\n return \"object_list/SensorProperty\";\n }\n\n static const char* value(const ::object_list::SensorProperty_<ContainerAllocator>&) { return value(); }\n};\n\ntemplate<class ContainerAllocator>\nstruct Definition< ::object_list::SensorProperty_<ContainerAllocator> >\n{\n static const char* value()\n {\n return \"int32 sensor_id\\n\"\n\"float64 sensortype\\n\"\n\"float64 posx_variance\\n\"\n\"float64 posy_variance\\n\"\n\"float64 velx_variance\\n\"\n\"float64 vely_variance\\n\"\n\"float64 trust_existance\\n\"\n\"float64 trust_car\\n\"\n\"float64 trust_truck\\n\"\n\"float64 trust_motorcycle\\n\"\n\"float64 trust_bicycle\\n\"\n\"float64 trust_pedestrian\\n\"\n\"float64 trust_stationary\\n\"\n\"float64 trust_other \\n\"\n;\n }\n\n static const char* value(const ::object_list::SensorProperty_<ContainerAllocator>&) { return value(); }\n};\n\n} // namespace message_traits\n} // namespace ros\n\nnamespace ros\n{\nnamespace serialization\n{\n\n template<class ContainerAllocator> struct Serializer< ::object_list::SensorProperty_<ContainerAllocator> >\n {\n template<typename Stream, typename T> inline static void allInOne(Stream& stream, T m)\n {\n stream.next(m.sensor_id);\n stream.next(m.sensortype);\n stream.next(m.posx_variance);\n stream.next(m.posy_variance);\n stream.next(m.velx_variance);\n stream.next(m.vely_variance);\n stream.next(m.trust_existance);\n stream.next(m.trust_car);\n 
stream.next(m.trust_truck);\n stream.next(m.trust_motorcycle);\n stream.next(m.trust_bicycle);\n stream.next(m.trust_pedestrian);\n stream.next(m.trust_stationary);\n stream.next(m.trust_other);\n }\n\n ROS_DECLARE_ALLINONE_SERIALIZER\n }; // struct SensorProperty_\n\n} // namespace serialization\n} // namespace ros\n\nnamespace ros\n{\nnamespace message_operations\n{\n\ntemplate<class ContainerAllocator>\nstruct Printer< ::object_list::SensorProperty_<ContainerAllocator> >\n{\n template<typename Stream> static void stream(Stream& s, const std::string& indent, const ::object_list::SensorProperty_<ContainerAllocator>& v)\n {\n s << indent << \"sensor_id: \";\n Printer<int32_t>::stream(s, indent + \" \", v.sensor_id);\n s << indent << \"sensortype: \";\n Printer<double>::stream(s, indent + \" \", v.sensortype);\n s << indent << \"posx_variance: \";\n Printer<double>::stream(s, indent + \" \", v.posx_variance);\n s << indent << \"posy_variance: \";\n Printer<double>::stream(s, indent + \" \", v.posy_variance);\n s << indent << \"velx_variance: \";\n Printer<double>::stream(s, indent + \" \", v.velx_variance);\n s << indent << \"vely_variance: \";\n Printer<double>::stream(s, indent + \" \", v.vely_variance);\n s << indent << \"trust_existance: \";\n Printer<double>::stream(s, indent + \" \", v.trust_existance);\n s << indent << \"trust_car: \";\n Printer<double>::stream(s, indent + \" \", v.trust_car);\n s << indent << \"trust_truck: \";\n Printer<double>::stream(s, indent + \" \", v.trust_truck);\n s << indent << \"trust_motorcycle: \";\n Printer<double>::stream(s, indent + \" \", v.trust_motorcycle);\n s << indent << \"trust_bicycle: \";\n Printer<double>::stream(s, indent + \" \", v.trust_bicycle);\n s << indent << \"trust_pedestrian: \";\n Printer<double>::stream(s, indent + \" \", v.trust_pedestrian);\n s << indent << \"trust_stationary: \";\n Printer<double>::stream(s, indent + \" \", v.trust_stationary);\n s << indent << \"trust_other: \";\n Printer<double>::stream(s, indent + \" \", v.trust_other);\n }\n};\n\n} // namespace message_operations\n} // namespace ros\n\n#endif // OBJECT_LIST_MESSAGE_SENSORPROPERTY_H\n"
},
{
"alpha_fraction": 0.6834915280342102,
"alphanum_fraction": 0.693845272064209,
"avg_line_length": 36.16042709350586,
"blob_id": "30d4b3fe2069ab310fdf99a397549c6efaffb6a8",
"content_id": "aba0d6757bbf48860464f06e5c1cf782bc3aa741",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 6954,
"license_type": "no_license",
"max_line_length": 153,
"num_lines": 187,
"path": "/src/fusion/src/fusion.py",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n\nimport numpy as np\nimport rospy\nimport math\nimport message_filters\nimport tf\nfrom object_list.msg import ObjectList, ObjectsList\nfrom osi3_bridge.msg import GroundTruthMovingObjects, TrafficUpdateMovingObject\n\n\n# import function\n#import ClassAssociate\n#from ObjectAssociation import feature_select\n#import Objects\nfrom scipy.spatial import distance\nfrom Association import *\nfrom fusion_function import *\nfrom ClassExistance_Objects import * \nfrom ClassFusion import *\nmain_data = fusion()\n\n\nglobaltrack = ObjectsList()\ncount=0\negoveh = Ego()\nprev_time = 0 # time stamp of prev message\ncurrent_time = 0 # time stamp of current message\n\n\n\ndef sensor_fusion():\n\n rospy.init_node('sensor_fusion', anonymous=False) # Start node\n\n #rate = rospy.Rate(rospy.get_param(\"freq\")) # Define the node frequency 100hz\n #subscribe to sensor data (add additional sensors here\n sensor1 = message_filters.Subscriber(\"/sensor0/obj_list_egoframe\", ObjectsList)\n sensor2 = message_filters.Subscriber(\"/sensor1/obj_list_egoframe\", ObjectsList)\n\n\n\n ego_data = message_filters.Subscriber('/ego_data', TrafficUpdateMovingObject)\n\n #add all sensors to time synchronizer\n #ts = message_filters.ApproximateTimeSynchronizer([sensor1,sensor2,ego_data], 10,0.1)\n ts = message_filters.TimeSynchronizer([sensor1, sensor2, ego_data], 30)\n #ts = message_filters.TimeSynchronizer([sensor1, ego_data], 10)\n ts.registerCallback(callback)\n\n rospy.spin() # spin() simply keeps python from exiting until this node is stopped\n\n#def callback(sensor1,sensor2,ego_data):\ndef callback(sensor1,sensor2,ego_data):\n global count\n global main_data\n global prev_time\n global current_time\n tic = rospy.Time.now()\n #print('time',rospy.Time.now())\n main_data.sensorslist = []\n print('callback started')\n #print('sensor12',sensor1.header.stamp.nsecs)\n count= count +1\n\n main_data.sensorslist.append(sensor1)\n main_data.sensorslist.append(sensor2)\n\n main_data.egoveh.vel.x = ego_data.object.velocity.x\n main_data.egoveh.vel.y = ego_data.object.velocity.y\n main_data.egoveh.acc.x = ego_data.object.acceleration.x\n main_data.egoveh.acc.y = ego_data.object.acceleration.y\n\n if count == 0:\n main_data.egoveh.neworientation = ego_data.object.orientation.yaw\n count += 1\n main_data.egoveh.testyaw = main_data.egoveh.neworientation\n main_data.egoveh.newyaw = 0\n prev_time = ego_data.header.stamp.to_sec()\n current_time = ego_data.header.stamp.to_sec()\n #ego_veh.\n else:\n main_data.egoveh.oldorientation = main_data.egoveh.neworientation\n main_data.egoveh.neworientation = ego_data.object.orientation.yaw\n main_data.egoveh.newyaw = main_data.egoveh.oldorientation - main_data.egoveh.neworientation\n prev_time = current_time\n current_time = ego_data.header.stamp.to_sec()\n\n main_data.egoveh.t = current_time-prev_time\n main_data.egoveh.yawrate = main_data.egoveh.newyaw/ main_data.egoveh.t\n\n\n #print('time interval',(nt-ot))\n #print('process time',((rospy.Time.now()).to_sec()-nt))\n\n if len(main_data.globaltrack.obj_list) == 0:\n #print('RAN')\n main_data.globaltrack = main_data.sensorslist[0]\n a = 0\n for i, obj in enumerate(main_data.globaltrack.obj_list):\n obj.obj_id = int(a)\n obj.sensors_fused = [main_data.sensorslist[0].sensor_property.sensor_id]\n a += 1\n Sensor_obj = SensorObject(obj,main_data.sensorslist[0].sensor_property)\n Sensor_obj.set_existance_probability_mass_factors()\n Sensor_obj.set_classification_mass_factors()\n obj.classification_mass = 
Sensor_obj.list_classification_mass_factor\n\n #print('fusion runs')\n main_data.fuse()\n\n #print(main_data.AssignmentList)\n #print(len(main_data.globaltrack.obj_list))\n #main_data.evaluate_time\n main_data.sensorlist_previous = main_data.sensorslist\n #print(len(main_data.globaltrack.obj_list))\n #main_data.fuse()\n pub = rospy.Publisher('fused_data', ObjectsList, queue_size=10,latch=True)\n toc = rospy.Time.now()\n time = toc.to_sec() - tic.to_sec()\n print('fusion time', time)\n print(\"test\")\n pub.publish(main_data.globaltrack)\n\n\n\n\n\n'''\ndef sensor_fusion():\n\n rospy.init_node('sensor_fusion', anonymous=False) # Start node\n #rate = rospy.Rate(rospy.get_param(\"freq\")) # Define the node frequency 100hz\n #subscribe to sensor data (add additional sensors here\n ego_data = rospy.Subscriber('/ego_data', TrafficUpdateMovingObject, ego_callback)\n sensor1 = rospy.Subscriber(\"/sensor0/obj_list_egoframe\", ObjectsList,sensor_callback)\n sensor2 = rospy.Subscriber(\"/sensor1/obj_list_egoframe\", ObjectsList,sensor_callback)\n\n\n\n #ego_data = rospy.Subscriber('/ego_data', TrafficUpdateMovingObject,ego_callback)\n\n #add all sensors to time synchronizer\n #ts = message_filters.ApproximateTimeSynchronizer([sensor1,sensor2,ego_data], 10,0.1)\n #ts = message_filters.TimeSynchronizer([sensor1, sensor2, ego_data], 10)\n #ts.registerCallback(callback)\n rospy.spin() # spin() simply keeps python from exiting until this node is stopped\ndef ego_callback(ego_data):\n global count\n global main_data\n main_data.egoveh.vel.x = ego_data.object.velocity.x\n main_data.egoveh.vel.y = ego_data.object.velocity.y\n main_data.egoveh.acc.x = ego_data.object.acceleration.x\n main_data.egoveh.acc.y = ego_data.object.acceleration.y # (obj.geometric.y + obj.geometric.vy * t * obj.geometric.ay *t*t/2) *np.sin(egoveh.newyaw)\n\n if count == 0:\n main_data.egoveh.neworientation = ego_data.object.orientation.yaw\n count += 1\n main_data.egoveh.testyaw = main_data.egoveh.neworientation\n main_data.egoveh.newyaw = 0\n else:\n main_data.egoveh.oldorientation = main_data.egoveh.neworientation\n main_data.egoveh.neworientation = ego_data.object.orientation.yaw\n main_data.egoveh.newyaw = 0 # main_data.egoveh.oldorientation - main_data.egoveh.neworientation\n\ndef sensor_callback(sensor):\n global main_data\n main_data.sensorslist = []\n main_data.sensorslist.append(sensor)\n main_data.associate()\n\n # print(main_data.AssignmentList)\n # print(len(main_data.globaltrack.obj_list))\n # main_data.evaluate_time\n main_data.sensorlist_previous = main_data.sensorslist\n # print(len(main_data.globaltrack.obj_list))\n # main_data.fuse()\n pub = rospy.Publisher('fused_data', ObjectsList, queue_size=10, latch=True)\n pub.publish(main_data.globaltrack)\n #ego = tf.TransformBroadcaster()\n #ego.sendTransform((ego_data.object.position.x, ego_data.object.position.y, 0),\n # tf.transformations.quaternion_from_euler(0, 0, ego_data.object.orientation.yaw), rospy.Time.now(),\n # \"ego\", \"map\")\n'''\n\nif __name__ == '__main__':\n sensor_fusion()\n\n\n\n\n\n"
},
{
"alpha_fraction": 0.800000011920929,
"alphanum_fraction": 0.800000011920929,
"avg_line_length": 39.55555725097656,
"blob_id": "a403db9cd48cdd2c0c8c02b409f126b93942a481",
"content_id": "e1cfff93a5a5dd20ff366a2240ea6d0cfa413521",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 365,
"license_type": "no_license",
"max_line_length": 106,
"num_lines": 9,
"path": "/build/vehicle_control/CMakeFiles/vehicle_control_generate_messages_lisp.dir/cmake_clean.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "file(REMOVE_RECURSE\n \"CMakeFiles/vehicle_control_generate_messages_lisp\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/share/common-lisp/ros/vehicle_control/msg/Trajectory.lisp\"\n)\n\n# Per-language clean rules from dependency scanning.\nforeach(lang )\n include(CMakeFiles/vehicle_control_generate_messages_lisp.dir/cmake_clean_${lang}.cmake OPTIONAL)\nendforeach()\n"
},
{
"alpha_fraction": 0.48625850677490234,
"alphanum_fraction": 0.5287075042724609,
"avg_line_length": 30.144067764282227,
"blob_id": "def17d251b16e2252b43b4dd14542512e964f4a1",
"content_id": "eb73b4ed2c797137e2055d08cf6f3ac9e105c9dc",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3675,
"license_type": "no_license",
"max_line_length": 135,
"num_lines": 118,
"path": "/src/sensor_model/scripts/KF.py",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\nimport numpy as np\nimport rospy\nimport math\nfrom ClassKF import KF\n# import all necessary ROS messages\nfrom object_list.msg import ObjectList, ObjectsList\nfrom osi3_bridge.msg import GroundTruthMovingObjects, TrafficUpdateMovingObject\n\nKFlist=[]\ncountlist=[]\ncount = 0\ncounter =0\n\nfor i in range(1000):\n a = KF()\n KFlist.append(a)\n #countlist.append(0)\n\n\n\ndef sensor_model():\n # Node initialization\n rospy.init_node('KF', anonymous=False) # Start node\n rate = rospy.Rate(100) # Define the node frequency 1hz\n\n # Subscriber the data in callback function\n rospy.Subscriber(\"obj_list_egoframe\", ObjectsList, callback)\n #rospy.Subscriber(\"obj_list_egoframe\", ObjectsList, callback)\n\n rospy.spin() # spin() simply keeps python from exiting until this node is stopped\n\n\ndef callback(objs_list):\n global KF\n global count\n global counter\n global oldtime\n global newtime\n\n #print(objs_list)\n '''for i in objs_list.object_list:\n a=KF\n if counter = 0\n newtime = float(str(objs_list.header.stamp))\n oldtime = newtime\n t= 1\n count+=1\n else:\n newtime = float(str(objs_list.header.stamp))\n t= newtime-oldtime\n oldtime = newtime'''\n for i,a in enumerate(objs_list.obj_list):\n x = KFlist[a.obj_id]\n #print(x)\n if x.track == 0:\n\n x.track = 1\n x.newtime= float(str(objs_list.header.stamp))\n x.oldtime= x.newtime\n t = 1\n\n\n x.a = np.array([[1,t,t*t/2,0,0,0],[0,1,t,0,0,0],[0,0,1,0,0,0],[0,0,0,1,t,t*t/2],[0,0,0,0,1,t],[0,0,0,0,0,1]])\n #x.xnn = np.array([[a.geometric.x], [a.geometric.vx], [a.geometric.ax], [a.geometric.y],[a.geometric.vy],[a.geometric.ay]])\n x.g = np.array([[t*t*t/6,0],[t*t/2,0],[t,0],[0,t*t*t/6],[0,t*t/2],[0,t]])\n #KF[a.obj_id]=x\n else:\n x.newtime = float(str(objs_list.header.stamp))\n t = x.newtime-x.oldtime\n if t >= 1.5:\n x=KF()\n x.track = 1\n x.newtime= float(str(objs_list.header.stamp))\n\n t = 1\n x.a = np.array([[1,t,t*t/2,0,0,0],[0,1,t,0,0,0],[0,0,1,0,0,0],[0,0,0,1,t,t*t/2],[0,0,0,0,1,t],[0,0,0,0,0,1]])\n\n x.g = np.array([[t*t*t/6,0],[t*t/2,0],[t,0],[0,t*t*t/6],[0,t*t/2],[0,t]])\n #KF[a.obj_id]=x\n\n x.oldtime= x.newtime\n\n x.yn = np.array([[a.geometric.x], [a.geometric.y]]) # column vector\n\n #Kalman equations\n x.xn_nm1 =x.a.dot(x.xnn) #column vector\n #print(type(x.g))\n\n x.pn_nm1 = (x.a.dot(x.pnn)).dot(x.a.transpose()) +(x.g.dot(x.c_s)).dot(x.g.transpose())\n x.gamma_n = x.yn -x.c.dot(x.xn_nm1)\n x.s_n = (x.c.dot(x.pn_nm1)).dot(x.c.transpose()) + x.c_m\n x.k_n = (x.pn_nm1.dot(x.c.transpose())).dot(np.linalg.inv(x.s_n))\n x.xnn = x.xn_nm1 + x.k_n.dot(x.gamma_n)\n I = np.zeros((6,6),int)\n np.fill_diagonal(I,1)\n x.pnn = (I - x.k_n.dot(x.c)).dot(x.pn_nm1)\n KFlist[a.obj_id]=x\n a.geometric.x = x.xnn[0]\n a.geometric.vx = x.xnn[1]\n a.geometric.ax = x.xnn[2]\n a.geometric.y = x.xnn[3]\n a.geometric.vy = x.xnn[4]\n a.geometric.ay = x.xnn[5]\n pub = rospy.Publisher('afterKF', ObjectsList, queue_size=10)\n pub.publish(objs_list)\n print(objs_list)\n\n #x.a = [[1,t,t*t/2,0,0,0],[0,1,t,0,0,0],[0,0,1,0,0,0],[0,0,0,1,t,t*t/2],[0,0,0,0,1,t],[0,0,0,0,0,1]]\n #x.xnn\n #KF.append(x)\n\n print (objs_list)\n\n\n\nif __name__ == '__main__':\n sensor_model()\n"
},
{
"alpha_fraction": 0.7679240107536316,
"alphanum_fraction": 0.7827784419059753,
"avg_line_length": 54.822471618652344,
"blob_id": "ccf9bf90e541f9ea1dbafe61b0ecaa1dc4bac66c",
"content_id": "0fcba234444cab1b0ce66cfa91c1a6bb583d0e2a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 24841,
"license_type": "no_license",
"max_line_length": 349,
"num_lines": 445,
"path": "/build/osi3_bridge/cmake/osi3_bridge-genmsg.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "# generated from genmsg/cmake/pkg-genmsg.cmake.em\n\nmessage(STATUS \"osi3_bridge: 5 messages, 0 services\")\n\nset(MSG_I_FLAGS \"-Iosi3_bridge:/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg;-Igeometry_msgs:/opt/ros/melodic/share/geometry_msgs/cmake/../msg;-Istd_msgs:/opt/ros/melodic/share/std_msgs/cmake/../msg\")\n\n# Find all generators\nfind_package(gencpp REQUIRED)\nfind_package(geneus REQUIRED)\nfind_package(genlisp REQUIRED)\nfind_package(gennodejs REQUIRED)\nfind_package(genpy REQUIRED)\n\nadd_custom_target(osi3_bridge_generate_messages ALL)\n\n# verify that message/service dependencies have not changed since configure\n\n\n\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Dimension3d.msg\" NAME_WE)\nadd_custom_target(_osi3_bridge_generate_messages_check_deps_${_filename}\n COMMAND ${CATKIN_ENV} ${PYTHON_EXECUTABLE} ${GENMSG_CHECK_DEPS_SCRIPT} \"osi3_bridge\" \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Dimension3d.msg\" \"\"\n)\n\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/MovingObject.msg\" NAME_WE)\nadd_custom_target(_osi3_bridge_generate_messages_check_deps_${_filename}\n COMMAND ${CATKIN_ENV} ${PYTHON_EXECUTABLE} ${GENMSG_CHECK_DEPS_SCRIPT} \"osi3_bridge\" \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/MovingObject.msg\" \"osi3_bridge/Orientation3d:geometry_msgs/Vector3:osi3_bridge/Dimension3d\"\n)\n\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/GroundTruthMovingObjects.msg\" NAME_WE)\nadd_custom_target(_osi3_bridge_generate_messages_check_deps_${_filename}\n COMMAND ${CATKIN_ENV} ${PYTHON_EXECUTABLE} ${GENMSG_CHECK_DEPS_SCRIPT} \"osi3_bridge\" \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/GroundTruthMovingObjects.msg\" \"geometry_msgs/Vector3:osi3_bridge/Orientation3d:osi3_bridge/MovingObject:osi3_bridge/Dimension3d:std_msgs/Header\"\n)\n\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/TrafficUpdateMovingObject.msg\" NAME_WE)\nadd_custom_target(_osi3_bridge_generate_messages_check_deps_${_filename}\n COMMAND ${CATKIN_ENV} ${PYTHON_EXECUTABLE} ${GENMSG_CHECK_DEPS_SCRIPT} \"osi3_bridge\" \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/TrafficUpdateMovingObject.msg\" \"geometry_msgs/Vector3:osi3_bridge/Orientation3d:osi3_bridge/MovingObject:osi3_bridge/Dimension3d:std_msgs/Header\"\n)\n\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Orientation3d.msg\" NAME_WE)\nadd_custom_target(_osi3_bridge_generate_messages_check_deps_${_filename}\n COMMAND ${CATKIN_ENV} ${PYTHON_EXECUTABLE} ${GENMSG_CHECK_DEPS_SCRIPT} \"osi3_bridge\" \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Orientation3d.msg\" \"\"\n)\n\n#\n# langs = gencpp;geneus;genlisp;gennodejs;genpy\n#\n\n### Section generating for lang: gencpp\n### Generating Messages\n_generate_msg_cpp(osi3_bridge\n \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/GroundTruthMovingObjects.msg\"\n \"${MSG_I_FLAGS}\"\n \"/opt/ros/melodic/share/geometry_msgs/cmake/../msg/Vector3.msg;/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Orientation3d.msg;/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/MovingObject.msg;/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Dimension3d.msg;/opt/ros/melodic/share/std_msgs/cmake/../msg/Header.msg\"\n 
${CATKIN_DEVEL_PREFIX}/${gencpp_INSTALL_DIR}/osi3_bridge\n)\n_generate_msg_cpp(osi3_bridge\n \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/MovingObject.msg\"\n \"${MSG_I_FLAGS}\"\n \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Orientation3d.msg;/opt/ros/melodic/share/geometry_msgs/cmake/../msg/Vector3.msg;/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Dimension3d.msg\"\n ${CATKIN_DEVEL_PREFIX}/${gencpp_INSTALL_DIR}/osi3_bridge\n)\n_generate_msg_cpp(osi3_bridge\n \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Dimension3d.msg\"\n \"${MSG_I_FLAGS}\"\n \"\"\n ${CATKIN_DEVEL_PREFIX}/${gencpp_INSTALL_DIR}/osi3_bridge\n)\n_generate_msg_cpp(osi3_bridge\n \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/TrafficUpdateMovingObject.msg\"\n \"${MSG_I_FLAGS}\"\n \"/opt/ros/melodic/share/geometry_msgs/cmake/../msg/Vector3.msg;/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Orientation3d.msg;/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/MovingObject.msg;/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Dimension3d.msg;/opt/ros/melodic/share/std_msgs/cmake/../msg/Header.msg\"\n ${CATKIN_DEVEL_PREFIX}/${gencpp_INSTALL_DIR}/osi3_bridge\n)\n_generate_msg_cpp(osi3_bridge\n \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Orientation3d.msg\"\n \"${MSG_I_FLAGS}\"\n \"\"\n ${CATKIN_DEVEL_PREFIX}/${gencpp_INSTALL_DIR}/osi3_bridge\n)\n\n### Generating Services\n\n### Generating Module File\n_generate_module_cpp(osi3_bridge\n ${CATKIN_DEVEL_PREFIX}/${gencpp_INSTALL_DIR}/osi3_bridge\n \"${ALL_GEN_OUTPUT_FILES_cpp}\"\n)\n\nadd_custom_target(osi3_bridge_generate_messages_cpp\n DEPENDS ${ALL_GEN_OUTPUT_FILES_cpp}\n)\nadd_dependencies(osi3_bridge_generate_messages osi3_bridge_generate_messages_cpp)\n\n# add dependencies to all check dependencies targets\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Dimension3d.msg\" NAME_WE)\nadd_dependencies(osi3_bridge_generate_messages_cpp _osi3_bridge_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/MovingObject.msg\" NAME_WE)\nadd_dependencies(osi3_bridge_generate_messages_cpp _osi3_bridge_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/GroundTruthMovingObjects.msg\" NAME_WE)\nadd_dependencies(osi3_bridge_generate_messages_cpp _osi3_bridge_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/TrafficUpdateMovingObject.msg\" NAME_WE)\nadd_dependencies(osi3_bridge_generate_messages_cpp _osi3_bridge_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Orientation3d.msg\" NAME_WE)\nadd_dependencies(osi3_bridge_generate_messages_cpp _osi3_bridge_generate_messages_check_deps_${_filename})\n\n# target for backward compatibility\nadd_custom_target(osi3_bridge_gencpp)\nadd_dependencies(osi3_bridge_gencpp osi3_bridge_generate_messages_cpp)\n\n# register target for catkin_package(EXPORTED_TARGETS)\nlist(APPEND ${PROJECT_NAME}_EXPORTED_TARGETS osi3_bridge_generate_messages_cpp)\n\n### Section generating for lang: geneus\n### Generating Messages\n_generate_msg_eus(osi3_bridge\n \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/GroundTruthMovingObjects.msg\"\n \"${MSG_I_FLAGS}\"\n 
\"/opt/ros/melodic/share/geometry_msgs/cmake/../msg/Vector3.msg;/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Orientation3d.msg;/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/MovingObject.msg;/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Dimension3d.msg;/opt/ros/melodic/share/std_msgs/cmake/../msg/Header.msg\"\n ${CATKIN_DEVEL_PREFIX}/${geneus_INSTALL_DIR}/osi3_bridge\n)\n_generate_msg_eus(osi3_bridge\n \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/MovingObject.msg\"\n \"${MSG_I_FLAGS}\"\n \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Orientation3d.msg;/opt/ros/melodic/share/geometry_msgs/cmake/../msg/Vector3.msg;/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Dimension3d.msg\"\n ${CATKIN_DEVEL_PREFIX}/${geneus_INSTALL_DIR}/osi3_bridge\n)\n_generate_msg_eus(osi3_bridge\n \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Dimension3d.msg\"\n \"${MSG_I_FLAGS}\"\n \"\"\n ${CATKIN_DEVEL_PREFIX}/${geneus_INSTALL_DIR}/osi3_bridge\n)\n_generate_msg_eus(osi3_bridge\n \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/TrafficUpdateMovingObject.msg\"\n \"${MSG_I_FLAGS}\"\n \"/opt/ros/melodic/share/geometry_msgs/cmake/../msg/Vector3.msg;/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Orientation3d.msg;/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/MovingObject.msg;/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Dimension3d.msg;/opt/ros/melodic/share/std_msgs/cmake/../msg/Header.msg\"\n ${CATKIN_DEVEL_PREFIX}/${geneus_INSTALL_DIR}/osi3_bridge\n)\n_generate_msg_eus(osi3_bridge\n \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Orientation3d.msg\"\n \"${MSG_I_FLAGS}\"\n \"\"\n ${CATKIN_DEVEL_PREFIX}/${geneus_INSTALL_DIR}/osi3_bridge\n)\n\n### Generating Services\n\n### Generating Module File\n_generate_module_eus(osi3_bridge\n ${CATKIN_DEVEL_PREFIX}/${geneus_INSTALL_DIR}/osi3_bridge\n \"${ALL_GEN_OUTPUT_FILES_eus}\"\n)\n\nadd_custom_target(osi3_bridge_generate_messages_eus\n DEPENDS ${ALL_GEN_OUTPUT_FILES_eus}\n)\nadd_dependencies(osi3_bridge_generate_messages osi3_bridge_generate_messages_eus)\n\n# add dependencies to all check dependencies targets\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Dimension3d.msg\" NAME_WE)\nadd_dependencies(osi3_bridge_generate_messages_eus _osi3_bridge_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/MovingObject.msg\" NAME_WE)\nadd_dependencies(osi3_bridge_generate_messages_eus _osi3_bridge_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/GroundTruthMovingObjects.msg\" NAME_WE)\nadd_dependencies(osi3_bridge_generate_messages_eus _osi3_bridge_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/TrafficUpdateMovingObject.msg\" NAME_WE)\nadd_dependencies(osi3_bridge_generate_messages_eus _osi3_bridge_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Orientation3d.msg\" NAME_WE)\nadd_dependencies(osi3_bridge_generate_messages_eus _osi3_bridge_generate_messages_check_deps_${_filename})\n\n# target for backward compatibility\nadd_custom_target(osi3_bridge_geneus)\nadd_dependencies(osi3_bridge_geneus osi3_bridge_generate_messages_eus)\n\n# 
register target for catkin_package(EXPORTED_TARGETS)\nlist(APPEND ${PROJECT_NAME}_EXPORTED_TARGETS osi3_bridge_generate_messages_eus)\n\n### Section generating for lang: genlisp\n### Generating Messages\n_generate_msg_lisp(osi3_bridge\n \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/GroundTruthMovingObjects.msg\"\n \"${MSG_I_FLAGS}\"\n \"/opt/ros/melodic/share/geometry_msgs/cmake/../msg/Vector3.msg;/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Orientation3d.msg;/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/MovingObject.msg;/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Dimension3d.msg;/opt/ros/melodic/share/std_msgs/cmake/../msg/Header.msg\"\n ${CATKIN_DEVEL_PREFIX}/${genlisp_INSTALL_DIR}/osi3_bridge\n)\n_generate_msg_lisp(osi3_bridge\n \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/MovingObject.msg\"\n \"${MSG_I_FLAGS}\"\n \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Orientation3d.msg;/opt/ros/melodic/share/geometry_msgs/cmake/../msg/Vector3.msg;/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Dimension3d.msg\"\n ${CATKIN_DEVEL_PREFIX}/${genlisp_INSTALL_DIR}/osi3_bridge\n)\n_generate_msg_lisp(osi3_bridge\n \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Dimension3d.msg\"\n \"${MSG_I_FLAGS}\"\n \"\"\n ${CATKIN_DEVEL_PREFIX}/${genlisp_INSTALL_DIR}/osi3_bridge\n)\n_generate_msg_lisp(osi3_bridge\n \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/TrafficUpdateMovingObject.msg\"\n \"${MSG_I_FLAGS}\"\n \"/opt/ros/melodic/share/geometry_msgs/cmake/../msg/Vector3.msg;/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Orientation3d.msg;/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/MovingObject.msg;/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Dimension3d.msg;/opt/ros/melodic/share/std_msgs/cmake/../msg/Header.msg\"\n ${CATKIN_DEVEL_PREFIX}/${genlisp_INSTALL_DIR}/osi3_bridge\n)\n_generate_msg_lisp(osi3_bridge\n \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Orientation3d.msg\"\n \"${MSG_I_FLAGS}\"\n \"\"\n ${CATKIN_DEVEL_PREFIX}/${genlisp_INSTALL_DIR}/osi3_bridge\n)\n\n### Generating Services\n\n### Generating Module File\n_generate_module_lisp(osi3_bridge\n ${CATKIN_DEVEL_PREFIX}/${genlisp_INSTALL_DIR}/osi3_bridge\n \"${ALL_GEN_OUTPUT_FILES_lisp}\"\n)\n\nadd_custom_target(osi3_bridge_generate_messages_lisp\n DEPENDS ${ALL_GEN_OUTPUT_FILES_lisp}\n)\nadd_dependencies(osi3_bridge_generate_messages osi3_bridge_generate_messages_lisp)\n\n# add dependencies to all check dependencies targets\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Dimension3d.msg\" NAME_WE)\nadd_dependencies(osi3_bridge_generate_messages_lisp _osi3_bridge_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/MovingObject.msg\" NAME_WE)\nadd_dependencies(osi3_bridge_generate_messages_lisp _osi3_bridge_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/GroundTruthMovingObjects.msg\" NAME_WE)\nadd_dependencies(osi3_bridge_generate_messages_lisp _osi3_bridge_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/TrafficUpdateMovingObject.msg\" NAME_WE)\nadd_dependencies(osi3_bridge_generate_messages_lisp 
_osi3_bridge_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Orientation3d.msg\" NAME_WE)\nadd_dependencies(osi3_bridge_generate_messages_lisp _osi3_bridge_generate_messages_check_deps_${_filename})\n\n# target for backward compatibility\nadd_custom_target(osi3_bridge_genlisp)\nadd_dependencies(osi3_bridge_genlisp osi3_bridge_generate_messages_lisp)\n\n# register target for catkin_package(EXPORTED_TARGETS)\nlist(APPEND ${PROJECT_NAME}_EXPORTED_TARGETS osi3_bridge_generate_messages_lisp)\n\n### Section generating for lang: gennodejs\n### Generating Messages\n_generate_msg_nodejs(osi3_bridge\n \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/GroundTruthMovingObjects.msg\"\n \"${MSG_I_FLAGS}\"\n \"/opt/ros/melodic/share/geometry_msgs/cmake/../msg/Vector3.msg;/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Orientation3d.msg;/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/MovingObject.msg;/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Dimension3d.msg;/opt/ros/melodic/share/std_msgs/cmake/../msg/Header.msg\"\n ${CATKIN_DEVEL_PREFIX}/${gennodejs_INSTALL_DIR}/osi3_bridge\n)\n_generate_msg_nodejs(osi3_bridge\n \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/MovingObject.msg\"\n \"${MSG_I_FLAGS}\"\n \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Orientation3d.msg;/opt/ros/melodic/share/geometry_msgs/cmake/../msg/Vector3.msg;/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Dimension3d.msg\"\n ${CATKIN_DEVEL_PREFIX}/${gennodejs_INSTALL_DIR}/osi3_bridge\n)\n_generate_msg_nodejs(osi3_bridge\n \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Dimension3d.msg\"\n \"${MSG_I_FLAGS}\"\n \"\"\n ${CATKIN_DEVEL_PREFIX}/${gennodejs_INSTALL_DIR}/osi3_bridge\n)\n_generate_msg_nodejs(osi3_bridge\n \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/TrafficUpdateMovingObject.msg\"\n \"${MSG_I_FLAGS}\"\n \"/opt/ros/melodic/share/geometry_msgs/cmake/../msg/Vector3.msg;/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Orientation3d.msg;/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/MovingObject.msg;/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Dimension3d.msg;/opt/ros/melodic/share/std_msgs/cmake/../msg/Header.msg\"\n ${CATKIN_DEVEL_PREFIX}/${gennodejs_INSTALL_DIR}/osi3_bridge\n)\n_generate_msg_nodejs(osi3_bridge\n \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Orientation3d.msg\"\n \"${MSG_I_FLAGS}\"\n \"\"\n ${CATKIN_DEVEL_PREFIX}/${gennodejs_INSTALL_DIR}/osi3_bridge\n)\n\n### Generating Services\n\n### Generating Module File\n_generate_module_nodejs(osi3_bridge\n ${CATKIN_DEVEL_PREFIX}/${gennodejs_INSTALL_DIR}/osi3_bridge\n \"${ALL_GEN_OUTPUT_FILES_nodejs}\"\n)\n\nadd_custom_target(osi3_bridge_generate_messages_nodejs\n DEPENDS ${ALL_GEN_OUTPUT_FILES_nodejs}\n)\nadd_dependencies(osi3_bridge_generate_messages osi3_bridge_generate_messages_nodejs)\n\n# add dependencies to all check dependencies targets\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Dimension3d.msg\" NAME_WE)\nadd_dependencies(osi3_bridge_generate_messages_nodejs _osi3_bridge_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/MovingObject.msg\" NAME_WE)\nadd_dependencies(osi3_bridge_generate_messages_nodejs 
_osi3_bridge_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/GroundTruthMovingObjects.msg\" NAME_WE)\nadd_dependencies(osi3_bridge_generate_messages_nodejs _osi3_bridge_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/TrafficUpdateMovingObject.msg\" NAME_WE)\nadd_dependencies(osi3_bridge_generate_messages_nodejs _osi3_bridge_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Orientation3d.msg\" NAME_WE)\nadd_dependencies(osi3_bridge_generate_messages_nodejs _osi3_bridge_generate_messages_check_deps_${_filename})\n\n# target for backward compatibility\nadd_custom_target(osi3_bridge_gennodejs)\nadd_dependencies(osi3_bridge_gennodejs osi3_bridge_generate_messages_nodejs)\n\n# register target for catkin_package(EXPORTED_TARGETS)\nlist(APPEND ${PROJECT_NAME}_EXPORTED_TARGETS osi3_bridge_generate_messages_nodejs)\n\n### Section generating for lang: genpy\n### Generating Messages\n_generate_msg_py(osi3_bridge\n \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/GroundTruthMovingObjects.msg\"\n \"${MSG_I_FLAGS}\"\n \"/opt/ros/melodic/share/geometry_msgs/cmake/../msg/Vector3.msg;/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Orientation3d.msg;/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/MovingObject.msg;/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Dimension3d.msg;/opt/ros/melodic/share/std_msgs/cmake/../msg/Header.msg\"\n ${CATKIN_DEVEL_PREFIX}/${genpy_INSTALL_DIR}/osi3_bridge\n)\n_generate_msg_py(osi3_bridge\n \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/MovingObject.msg\"\n \"${MSG_I_FLAGS}\"\n \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Orientation3d.msg;/opt/ros/melodic/share/geometry_msgs/cmake/../msg/Vector3.msg;/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Dimension3d.msg\"\n ${CATKIN_DEVEL_PREFIX}/${genpy_INSTALL_DIR}/osi3_bridge\n)\n_generate_msg_py(osi3_bridge\n \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Dimension3d.msg\"\n \"${MSG_I_FLAGS}\"\n \"\"\n ${CATKIN_DEVEL_PREFIX}/${genpy_INSTALL_DIR}/osi3_bridge\n)\n_generate_msg_py(osi3_bridge\n \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/TrafficUpdateMovingObject.msg\"\n \"${MSG_I_FLAGS}\"\n \"/opt/ros/melodic/share/geometry_msgs/cmake/../msg/Vector3.msg;/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Orientation3d.msg;/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/MovingObject.msg;/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Dimension3d.msg;/opt/ros/melodic/share/std_msgs/cmake/../msg/Header.msg\"\n ${CATKIN_DEVEL_PREFIX}/${genpy_INSTALL_DIR}/osi3_bridge\n)\n_generate_msg_py(osi3_bridge\n \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Orientation3d.msg\"\n \"${MSG_I_FLAGS}\"\n \"\"\n ${CATKIN_DEVEL_PREFIX}/${genpy_INSTALL_DIR}/osi3_bridge\n)\n\n### Generating Services\n\n### Generating Module File\n_generate_module_py(osi3_bridge\n ${CATKIN_DEVEL_PREFIX}/${genpy_INSTALL_DIR}/osi3_bridge\n \"${ALL_GEN_OUTPUT_FILES_py}\"\n)\n\nadd_custom_target(osi3_bridge_generate_messages_py\n DEPENDS ${ALL_GEN_OUTPUT_FILES_py}\n)\nadd_dependencies(osi3_bridge_generate_messages osi3_bridge_generate_messages_py)\n\n# add dependencies to all check dependencies targets\nget_filename_component(_filename 
\"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Dimension3d.msg\" NAME_WE)\nadd_dependencies(osi3_bridge_generate_messages_py _osi3_bridge_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/MovingObject.msg\" NAME_WE)\nadd_dependencies(osi3_bridge_generate_messages_py _osi3_bridge_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/GroundTruthMovingObjects.msg\" NAME_WE)\nadd_dependencies(osi3_bridge_generate_messages_py _osi3_bridge_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/TrafficUpdateMovingObject.msg\" NAME_WE)\nadd_dependencies(osi3_bridge_generate_messages_py _osi3_bridge_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Orientation3d.msg\" NAME_WE)\nadd_dependencies(osi3_bridge_generate_messages_py _osi3_bridge_generate_messages_check_deps_${_filename})\n\n# target for backward compatibility\nadd_custom_target(osi3_bridge_genpy)\nadd_dependencies(osi3_bridge_genpy osi3_bridge_generate_messages_py)\n\n# register target for catkin_package(EXPORTED_TARGETS)\nlist(APPEND ${PROJECT_NAME}_EXPORTED_TARGETS osi3_bridge_generate_messages_py)\n\n\n\nif(gencpp_INSTALL_DIR AND EXISTS ${CATKIN_DEVEL_PREFIX}/${gencpp_INSTALL_DIR}/osi3_bridge)\n # install generated code\n install(\n DIRECTORY ${CATKIN_DEVEL_PREFIX}/${gencpp_INSTALL_DIR}/osi3_bridge\n DESTINATION ${gencpp_INSTALL_DIR}\n )\nendif()\nif(TARGET geometry_msgs_generate_messages_cpp)\n add_dependencies(osi3_bridge_generate_messages_cpp geometry_msgs_generate_messages_cpp)\nendif()\nif(TARGET std_msgs_generate_messages_cpp)\n add_dependencies(osi3_bridge_generate_messages_cpp std_msgs_generate_messages_cpp)\nendif()\n\nif(geneus_INSTALL_DIR AND EXISTS ${CATKIN_DEVEL_PREFIX}/${geneus_INSTALL_DIR}/osi3_bridge)\n # install generated code\n install(\n DIRECTORY ${CATKIN_DEVEL_PREFIX}/${geneus_INSTALL_DIR}/osi3_bridge\n DESTINATION ${geneus_INSTALL_DIR}\n )\nendif()\nif(TARGET geometry_msgs_generate_messages_eus)\n add_dependencies(osi3_bridge_generate_messages_eus geometry_msgs_generate_messages_eus)\nendif()\nif(TARGET std_msgs_generate_messages_eus)\n add_dependencies(osi3_bridge_generate_messages_eus std_msgs_generate_messages_eus)\nendif()\n\nif(genlisp_INSTALL_DIR AND EXISTS ${CATKIN_DEVEL_PREFIX}/${genlisp_INSTALL_DIR}/osi3_bridge)\n # install generated code\n install(\n DIRECTORY ${CATKIN_DEVEL_PREFIX}/${genlisp_INSTALL_DIR}/osi3_bridge\n DESTINATION ${genlisp_INSTALL_DIR}\n )\nendif()\nif(TARGET geometry_msgs_generate_messages_lisp)\n add_dependencies(osi3_bridge_generate_messages_lisp geometry_msgs_generate_messages_lisp)\nendif()\nif(TARGET std_msgs_generate_messages_lisp)\n add_dependencies(osi3_bridge_generate_messages_lisp std_msgs_generate_messages_lisp)\nendif()\n\nif(gennodejs_INSTALL_DIR AND EXISTS ${CATKIN_DEVEL_PREFIX}/${gennodejs_INSTALL_DIR}/osi3_bridge)\n # install generated code\n install(\n DIRECTORY ${CATKIN_DEVEL_PREFIX}/${gennodejs_INSTALL_DIR}/osi3_bridge\n DESTINATION ${gennodejs_INSTALL_DIR}\n )\nendif()\nif(TARGET geometry_msgs_generate_messages_nodejs)\n add_dependencies(osi3_bridge_generate_messages_nodejs geometry_msgs_generate_messages_nodejs)\nendif()\nif(TARGET std_msgs_generate_messages_nodejs)\n 
add_dependencies(osi3_bridge_generate_messages_nodejs std_msgs_generate_messages_nodejs)\nendif()\n\nif(genpy_INSTALL_DIR AND EXISTS ${CATKIN_DEVEL_PREFIX}/${genpy_INSTALL_DIR}/osi3_bridge)\n install(CODE \"execute_process(COMMAND \\\"/usr/bin/python2\\\" -m compileall \\\"${CATKIN_DEVEL_PREFIX}/${genpy_INSTALL_DIR}/osi3_bridge\\\")\")\n # install generated code\n install(\n DIRECTORY ${CATKIN_DEVEL_PREFIX}/${genpy_INSTALL_DIR}/osi3_bridge\n DESTINATION ${genpy_INSTALL_DIR}\n )\nendif()\nif(TARGET geometry_msgs_generate_messages_py)\n add_dependencies(osi3_bridge_generate_messages_py geometry_msgs_generate_messages_py)\nendif()\nif(TARGET std_msgs_generate_messages_py)\n add_dependencies(osi3_bridge_generate_messages_py std_msgs_generate_messages_py)\nendif()\n"
},
{
"alpha_fraction": 0.7670532464981079,
"alphanum_fraction": 0.7808272838592529,
"avg_line_length": 48.25581359863281,
"blob_id": "7a40de318e95e4fe781f691e03480ef85fa247fe",
"content_id": "ff0b50f4f14e3ee57fcf765622058b3a89d6e4e2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Makefile",
"length_bytes": 48715,
"license_type": "no_license",
"max_line_length": 269,
"num_lines": 989,
"path": "/build/osi3_bridge/Makefile",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "# CMAKE generated file: DO NOT EDIT!\n# Generated by \"Unix Makefiles\" Generator, CMake Version 3.10\n\n# Default target executed when no arguments are given to make.\ndefault_target: all\n\n.PHONY : default_target\n\n# Allow only one \"make -f Makefile2\" at a time, but pass parallelism.\n.NOTPARALLEL:\n\n\n#=============================================================================\n# Special targets provided by cmake.\n\n# Disable implicit rules so canonical targets will work.\n.SUFFIXES:\n\n\n# Remove some rules from gmake that .SUFFIXES does not remove.\nSUFFIXES =\n\n.SUFFIXES: .hpux_make_needs_suffix_list\n\n\n# Suppress display of executed commands.\n$(VERBOSE).SILENT:\n\n\n# A target that is always out of date.\ncmake_force:\n\n.PHONY : cmake_force\n\n#=============================================================================\n# Set environment variables for the build.\n\n# The shell in which to execute make rules.\nSHELL = /bin/sh\n\n# The CMake executable.\nCMAKE_COMMAND = /usr/bin/cmake\n\n# The command to remove a file.\nRM = /usr/bin/cmake -E remove -f\n\n# Escaping for special characters.\nEQUALS = =\n\n# The top-level source directory on which CMake was run.\nCMAKE_SOURCE_DIR = /home/student/Desktop/Redge_Thesis/vil/src\n\n# The top-level build directory on which CMake was run.\nCMAKE_BINARY_DIR = /home/student/Desktop/Redge_Thesis/vil/build\n\n#=============================================================================\n# Targets provided globally by CMake.\n\n# Special rule for the target install/local\ninstall/local: preinstall\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"Installing only the local directory...\"\n\t/usr/bin/cmake -DCMAKE_INSTALL_LOCAL_ONLY=1 -P cmake_install.cmake\n.PHONY : install/local\n\n# Special rule for the target install/local\ninstall/local/fast: preinstall/fast\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"Installing only the local directory...\"\n\t/usr/bin/cmake -DCMAKE_INSTALL_LOCAL_ONLY=1 -P cmake_install.cmake\n.PHONY : install/local/fast\n\n# Special rule for the target install\ninstall: preinstall\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"Install the project...\"\n\t/usr/bin/cmake -P cmake_install.cmake\n.PHONY : install\n\n# Special rule for the target install\ninstall/fast: preinstall/fast\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"Install the project...\"\n\t/usr/bin/cmake -P cmake_install.cmake\n.PHONY : install/fast\n\n# Special rule for the target edit_cache\nedit_cache:\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"No interactive CMake dialog available...\"\n\t/usr/bin/cmake -E echo No\\ interactive\\ CMake\\ dialog\\ available.\n.PHONY : edit_cache\n\n# Special rule for the target edit_cache\nedit_cache/fast: edit_cache\n\n.PHONY : edit_cache/fast\n\n# Special rule for the target test\ntest:\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"Running tests...\"\n\t/usr/bin/ctest --force-new-ctest-process $(ARGS)\n.PHONY : test\n\n# Special rule for the target test\ntest/fast: test\n\n.PHONY : test/fast\n\n# Special rule for the target list_install_components\nlist_install_components:\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"Available install components are: \\\"Unspecified\\\" \\\"dev\\\" \\\"lib\\\"\"\n.PHONY : list_install_components\n\n# Special rule for the target list_install_components\nlist_install_components/fast: list_install_components\n\n.PHONY 
: list_install_components/fast\n\n# Special rule for the target rebuild_cache\nrebuild_cache:\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"Running CMake to regenerate build system...\"\n\t/usr/bin/cmake -H$(CMAKE_SOURCE_DIR) -B$(CMAKE_BINARY_DIR)\n.PHONY : rebuild_cache\n\n# Special rule for the target rebuild_cache\nrebuild_cache/fast: rebuild_cache\n\n.PHONY : rebuild_cache/fast\n\n# Special rule for the target install/strip\ninstall/strip: preinstall\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"Installing the project stripped...\"\n\t/usr/bin/cmake -DCMAKE_INSTALL_DO_STRIP=1 -P cmake_install.cmake\n.PHONY : install/strip\n\n# Special rule for the target install/strip\ninstall/strip/fast: preinstall/fast\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"Installing the project stripped...\"\n\t/usr/bin/cmake -DCMAKE_INSTALL_DO_STRIP=1 -P cmake_install.cmake\n.PHONY : install/strip/fast\n\n# The main all target\nall: cmake_check_build_system\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(CMAKE_COMMAND) -E cmake_progress_start /home/student/Desktop/Redge_Thesis/vil/build/CMakeFiles /home/student/Desktop/Redge_Thesis/vil/build/osi3_bridge/CMakeFiles/progress.marks\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/all\n\t$(CMAKE_COMMAND) -E cmake_progress_start /home/student/Desktop/Redge_Thesis/vil/build/CMakeFiles 0\n.PHONY : all\n\n# The main clean target\nclean:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/clean\n.PHONY : clean\n\n# The main clean target\nclean/fast: clean\n\n.PHONY : clean/fast\n\n# Prepare targets for installation.\npreinstall: all\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/preinstall\n.PHONY : preinstall\n\n# Prepare targets for installation.\npreinstall/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/preinstall\n.PHONY : preinstall/fast\n\n# clear depends\ndepend:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(CMAKE_COMMAND) -H$(CMAKE_SOURCE_DIR) -B$(CMAKE_BINARY_DIR) --check-build-system CMakeFiles/Makefile.cmake 1\n.PHONY : depend\n\n# Convenience name for target.\nosi3_bridge/CMakeFiles/run_tests_osi3_bridge_roslaunch-check_launch.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/CMakeFiles/run_tests_osi3_bridge_roslaunch-check_launch.dir/rule\n.PHONY : osi3_bridge/CMakeFiles/run_tests_osi3_bridge_roslaunch-check_launch.dir/rule\n\n# Convenience name for target.\nrun_tests_osi3_bridge_roslaunch-check_launch: osi3_bridge/CMakeFiles/run_tests_osi3_bridge_roslaunch-check_launch.dir/rule\n\n.PHONY : run_tests_osi3_bridge_roslaunch-check_launch\n\n# fast build rule for target.\nrun_tests_osi3_bridge_roslaunch-check_launch/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/run_tests_osi3_bridge_roslaunch-check_launch.dir/build.make osi3_bridge/CMakeFiles/run_tests_osi3_bridge_roslaunch-check_launch.dir/build\n.PHONY : run_tests_osi3_bridge_roslaunch-check_launch/fast\n\n# Convenience name for target.\nosi3_bridge/CMakeFiles/run_tests_osi3_bridge_roslaunch-check.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/CMakeFiles/run_tests_osi3_bridge_roslaunch-check.dir/rule\n.PHONY : 
osi3_bridge/CMakeFiles/run_tests_osi3_bridge_roslaunch-check.dir/rule\n\n# Convenience name for target.\nrun_tests_osi3_bridge_roslaunch-check: osi3_bridge/CMakeFiles/run_tests_osi3_bridge_roslaunch-check.dir/rule\n\n.PHONY : run_tests_osi3_bridge_roslaunch-check\n\n# fast build rule for target.\nrun_tests_osi3_bridge_roslaunch-check/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/run_tests_osi3_bridge_roslaunch-check.dir/build.make osi3_bridge/CMakeFiles/run_tests_osi3_bridge_roslaunch-check.dir/build\n.PHONY : run_tests_osi3_bridge_roslaunch-check/fast\n\n# Convenience name for target.\nosi3_bridge/CMakeFiles/_run_tests_osi3_bridge.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/CMakeFiles/_run_tests_osi3_bridge.dir/rule\n.PHONY : osi3_bridge/CMakeFiles/_run_tests_osi3_bridge.dir/rule\n\n# Convenience name for target.\n_run_tests_osi3_bridge: osi3_bridge/CMakeFiles/_run_tests_osi3_bridge.dir/rule\n\n.PHONY : _run_tests_osi3_bridge\n\n# fast build rule for target.\n_run_tests_osi3_bridge/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/_run_tests_osi3_bridge.dir/build.make osi3_bridge/CMakeFiles/_run_tests_osi3_bridge.dir/build\n.PHONY : _run_tests_osi3_bridge/fast\n\n# Convenience name for target.\nosi3_bridge/CMakeFiles/run_tests_osi3_bridge.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/CMakeFiles/run_tests_osi3_bridge.dir/rule\n.PHONY : osi3_bridge/CMakeFiles/run_tests_osi3_bridge.dir/rule\n\n# Convenience name for target.\nrun_tests_osi3_bridge: osi3_bridge/CMakeFiles/run_tests_osi3_bridge.dir/rule\n\n.PHONY : run_tests_osi3_bridge\n\n# fast build rule for target.\nrun_tests_osi3_bridge/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/run_tests_osi3_bridge.dir/build.make osi3_bridge/CMakeFiles/run_tests_osi3_bridge.dir/build\n.PHONY : run_tests_osi3_bridge/fast\n\n# Convenience name for target.\nosi3_bridge/CMakeFiles/osi3_bridge_publisher.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/CMakeFiles/osi3_bridge_publisher.dir/rule\n.PHONY : osi3_bridge/CMakeFiles/osi3_bridge_publisher.dir/rule\n\n# Convenience name for target.\nosi3_bridge_publisher: osi3_bridge/CMakeFiles/osi3_bridge_publisher.dir/rule\n\n.PHONY : osi3_bridge_publisher\n\n# fast build rule for target.\nosi3_bridge_publisher/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/osi3_bridge_publisher.dir/build.make osi3_bridge/CMakeFiles/osi3_bridge_publisher.dir/build\n.PHONY : osi3_bridge_publisher/fast\n\n# Convenience name for target.\nosi3_bridge/CMakeFiles/roscpp_generate_messages_py.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/CMakeFiles/roscpp_generate_messages_py.dir/rule\n.PHONY : osi3_bridge/CMakeFiles/roscpp_generate_messages_py.dir/rule\n\n# Convenience name for target.\nroscpp_generate_messages_py: osi3_bridge/CMakeFiles/roscpp_generate_messages_py.dir/rule\n\n.PHONY : roscpp_generate_messages_py\n\n# fast build rule for target.\nroscpp_generate_messages_py/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/roscpp_generate_messages_py.dir/build.make osi3_bridge/CMakeFiles/roscpp_generate_messages_py.dir/build\n.PHONY : 
roscpp_generate_messages_py/fast\n\n# Convenience name for target.\nosi3_bridge/CMakeFiles/std_msgs_generate_messages_py.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/CMakeFiles/std_msgs_generate_messages_py.dir/rule\n.PHONY : osi3_bridge/CMakeFiles/std_msgs_generate_messages_py.dir/rule\n\n# Convenience name for target.\nstd_msgs_generate_messages_py: osi3_bridge/CMakeFiles/std_msgs_generate_messages_py.dir/rule\n\n.PHONY : std_msgs_generate_messages_py\n\n# fast build rule for target.\nstd_msgs_generate_messages_py/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/std_msgs_generate_messages_py.dir/build.make osi3_bridge/CMakeFiles/std_msgs_generate_messages_py.dir/build\n.PHONY : std_msgs_generate_messages_py/fast\n\n# Convenience name for target.\nosi3_bridge/CMakeFiles/geometry_msgs_generate_messages_eus.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/CMakeFiles/geometry_msgs_generate_messages_eus.dir/rule\n.PHONY : osi3_bridge/CMakeFiles/geometry_msgs_generate_messages_eus.dir/rule\n\n# Convenience name for target.\ngeometry_msgs_generate_messages_eus: osi3_bridge/CMakeFiles/geometry_msgs_generate_messages_eus.dir/rule\n\n.PHONY : geometry_msgs_generate_messages_eus\n\n# fast build rule for target.\ngeometry_msgs_generate_messages_eus/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/geometry_msgs_generate_messages_eus.dir/build.make osi3_bridge/CMakeFiles/geometry_msgs_generate_messages_eus.dir/build\n.PHONY : geometry_msgs_generate_messages_eus/fast\n\n# Convenience name for target.\nosi3_bridge/CMakeFiles/roscpp_generate_messages_nodejs.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/CMakeFiles/roscpp_generate_messages_nodejs.dir/rule\n.PHONY : osi3_bridge/CMakeFiles/roscpp_generate_messages_nodejs.dir/rule\n\n# Convenience name for target.\nroscpp_generate_messages_nodejs: osi3_bridge/CMakeFiles/roscpp_generate_messages_nodejs.dir/rule\n\n.PHONY : roscpp_generate_messages_nodejs\n\n# fast build rule for target.\nroscpp_generate_messages_nodejs/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/roscpp_generate_messages_nodejs.dir/build.make osi3_bridge/CMakeFiles/roscpp_generate_messages_nodejs.dir/build\n.PHONY : roscpp_generate_messages_nodejs/fast\n\n# Convenience name for target.\nosi3_bridge/CMakeFiles/roscpp_generate_messages_eus.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/CMakeFiles/roscpp_generate_messages_eus.dir/rule\n.PHONY : osi3_bridge/CMakeFiles/roscpp_generate_messages_eus.dir/rule\n\n# Convenience name for target.\nroscpp_generate_messages_eus: osi3_bridge/CMakeFiles/roscpp_generate_messages_eus.dir/rule\n\n.PHONY : roscpp_generate_messages_eus\n\n# fast build rule for target.\nroscpp_generate_messages_eus/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/roscpp_generate_messages_eus.dir/build.make osi3_bridge/CMakeFiles/roscpp_generate_messages_eus.dir/build\n.PHONY : roscpp_generate_messages_eus/fast\n\n# Convenience name for target.\nosi3_bridge/CMakeFiles/osi3_bridge_generate_messages_eus.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 
osi3_bridge/CMakeFiles/osi3_bridge_generate_messages_eus.dir/rule\n.PHONY : osi3_bridge/CMakeFiles/osi3_bridge_generate_messages_eus.dir/rule\n\n# Convenience name for target.\nosi3_bridge_generate_messages_eus: osi3_bridge/CMakeFiles/osi3_bridge_generate_messages_eus.dir/rule\n\n.PHONY : osi3_bridge_generate_messages_eus\n\n# fast build rule for target.\nosi3_bridge_generate_messages_eus/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/osi3_bridge_generate_messages_eus.dir/build.make osi3_bridge/CMakeFiles/osi3_bridge_generate_messages_eus.dir/build\n.PHONY : osi3_bridge_generate_messages_eus/fast\n\n# Convenience name for target.\nosi3_bridge/CMakeFiles/std_msgs_generate_messages_eus.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/CMakeFiles/std_msgs_generate_messages_eus.dir/rule\n.PHONY : osi3_bridge/CMakeFiles/std_msgs_generate_messages_eus.dir/rule\n\n# Convenience name for target.\nstd_msgs_generate_messages_eus: osi3_bridge/CMakeFiles/std_msgs_generate_messages_eus.dir/rule\n\n.PHONY : std_msgs_generate_messages_eus\n\n# fast build rule for target.\nstd_msgs_generate_messages_eus/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/std_msgs_generate_messages_eus.dir/build.make osi3_bridge/CMakeFiles/std_msgs_generate_messages_eus.dir/build\n.PHONY : std_msgs_generate_messages_eus/fast\n\n# Convenience name for target.\nosi3_bridge/CMakeFiles/rosgraph_msgs_generate_messages_nodejs.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/CMakeFiles/rosgraph_msgs_generate_messages_nodejs.dir/rule\n.PHONY : osi3_bridge/CMakeFiles/rosgraph_msgs_generate_messages_nodejs.dir/rule\n\n# Convenience name for target.\nrosgraph_msgs_generate_messages_nodejs: osi3_bridge/CMakeFiles/rosgraph_msgs_generate_messages_nodejs.dir/rule\n\n.PHONY : rosgraph_msgs_generate_messages_nodejs\n\n# fast build rule for target.\nrosgraph_msgs_generate_messages_nodejs/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/rosgraph_msgs_generate_messages_nodejs.dir/build.make osi3_bridge/CMakeFiles/rosgraph_msgs_generate_messages_nodejs.dir/build\n.PHONY : rosgraph_msgs_generate_messages_nodejs/fast\n\n# Convenience name for target.\nosi3_bridge/CMakeFiles/rosgraph_msgs_generate_messages_py.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/CMakeFiles/rosgraph_msgs_generate_messages_py.dir/rule\n.PHONY : osi3_bridge/CMakeFiles/rosgraph_msgs_generate_messages_py.dir/rule\n\n# Convenience name for target.\nrosgraph_msgs_generate_messages_py: osi3_bridge/CMakeFiles/rosgraph_msgs_generate_messages_py.dir/rule\n\n.PHONY : rosgraph_msgs_generate_messages_py\n\n# fast build rule for target.\nrosgraph_msgs_generate_messages_py/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/rosgraph_msgs_generate_messages_py.dir/build.make osi3_bridge/CMakeFiles/rosgraph_msgs_generate_messages_py.dir/build\n.PHONY : rosgraph_msgs_generate_messages_py/fast\n\n# Convenience name for target.\nosi3_bridge/CMakeFiles/roscpp_generate_messages_cpp.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/CMakeFiles/roscpp_generate_messages_cpp.dir/rule\n.PHONY : osi3_bridge/CMakeFiles/roscpp_generate_messages_cpp.dir/rule\n\n# Convenience name for 
target.\nroscpp_generate_messages_cpp: osi3_bridge/CMakeFiles/roscpp_generate_messages_cpp.dir/rule\n\n.PHONY : roscpp_generate_messages_cpp\n\n# fast build rule for target.\nroscpp_generate_messages_cpp/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/roscpp_generate_messages_cpp.dir/build.make osi3_bridge/CMakeFiles/roscpp_generate_messages_cpp.dir/build\n.PHONY : roscpp_generate_messages_cpp/fast\n\n# Convenience name for target.\nosi3_bridge/CMakeFiles/roscpp_generate_messages_lisp.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/CMakeFiles/roscpp_generate_messages_lisp.dir/rule\n.PHONY : osi3_bridge/CMakeFiles/roscpp_generate_messages_lisp.dir/rule\n\n# Convenience name for target.\nroscpp_generate_messages_lisp: osi3_bridge/CMakeFiles/roscpp_generate_messages_lisp.dir/rule\n\n.PHONY : roscpp_generate_messages_lisp\n\n# fast build rule for target.\nroscpp_generate_messages_lisp/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/roscpp_generate_messages_lisp.dir/build.make osi3_bridge/CMakeFiles/roscpp_generate_messages_lisp.dir/build\n.PHONY : roscpp_generate_messages_lisp/fast\n\n# Convenience name for target.\nosi3_bridge/CMakeFiles/geometry_msgs_generate_messages_nodejs.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/CMakeFiles/geometry_msgs_generate_messages_nodejs.dir/rule\n.PHONY : osi3_bridge/CMakeFiles/geometry_msgs_generate_messages_nodejs.dir/rule\n\n# Convenience name for target.\ngeometry_msgs_generate_messages_nodejs: osi3_bridge/CMakeFiles/geometry_msgs_generate_messages_nodejs.dir/rule\n\n.PHONY : geometry_msgs_generate_messages_nodejs\n\n# fast build rule for target.\ngeometry_msgs_generate_messages_nodejs/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/geometry_msgs_generate_messages_nodejs.dir/build.make osi3_bridge/CMakeFiles/geometry_msgs_generate_messages_nodejs.dir/build\n.PHONY : geometry_msgs_generate_messages_nodejs/fast\n\n# Convenience name for target.\nosi3_bridge/CMakeFiles/geometry_msgs_generate_messages_cpp.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/CMakeFiles/geometry_msgs_generate_messages_cpp.dir/rule\n.PHONY : osi3_bridge/CMakeFiles/geometry_msgs_generate_messages_cpp.dir/rule\n\n# Convenience name for target.\ngeometry_msgs_generate_messages_cpp: osi3_bridge/CMakeFiles/geometry_msgs_generate_messages_cpp.dir/rule\n\n.PHONY : geometry_msgs_generate_messages_cpp\n\n# fast build rule for target.\ngeometry_msgs_generate_messages_cpp/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/geometry_msgs_generate_messages_cpp.dir/build.make osi3_bridge/CMakeFiles/geometry_msgs_generate_messages_cpp.dir/build\n.PHONY : geometry_msgs_generate_messages_cpp/fast\n\n# Convenience name for target.\nosi3_bridge/CMakeFiles/geometry_msgs_generate_messages_lisp.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/CMakeFiles/geometry_msgs_generate_messages_lisp.dir/rule\n.PHONY : osi3_bridge/CMakeFiles/geometry_msgs_generate_messages_lisp.dir/rule\n\n# Convenience name for target.\ngeometry_msgs_generate_messages_lisp: osi3_bridge/CMakeFiles/geometry_msgs_generate_messages_lisp.dir/rule\n\n.PHONY : geometry_msgs_generate_messages_lisp\n\n# fast build rule for 
target.\ngeometry_msgs_generate_messages_lisp/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/geometry_msgs_generate_messages_lisp.dir/build.make osi3_bridge/CMakeFiles/geometry_msgs_generate_messages_lisp.dir/build\n.PHONY : geometry_msgs_generate_messages_lisp/fast\n\n# Convenience name for target.\nosi3_bridge/CMakeFiles/geometry_msgs_generate_messages_py.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/CMakeFiles/geometry_msgs_generate_messages_py.dir/rule\n.PHONY : osi3_bridge/CMakeFiles/geometry_msgs_generate_messages_py.dir/rule\n\n# Convenience name for target.\ngeometry_msgs_generate_messages_py: osi3_bridge/CMakeFiles/geometry_msgs_generate_messages_py.dir/rule\n\n.PHONY : geometry_msgs_generate_messages_py\n\n# fast build rule for target.\ngeometry_msgs_generate_messages_py/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/geometry_msgs_generate_messages_py.dir/build.make osi3_bridge/CMakeFiles/geometry_msgs_generate_messages_py.dir/build\n.PHONY : geometry_msgs_generate_messages_py/fast\n\n# Convenience name for target.\nosi3_bridge/CMakeFiles/clean_test_results_osi3_bridge.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/CMakeFiles/clean_test_results_osi3_bridge.dir/rule\n.PHONY : osi3_bridge/CMakeFiles/clean_test_results_osi3_bridge.dir/rule\n\n# Convenience name for target.\nclean_test_results_osi3_bridge: osi3_bridge/CMakeFiles/clean_test_results_osi3_bridge.dir/rule\n\n.PHONY : clean_test_results_osi3_bridge\n\n# fast build rule for target.\nclean_test_results_osi3_bridge/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/clean_test_results_osi3_bridge.dir/build.make osi3_bridge/CMakeFiles/clean_test_results_osi3_bridge.dir/build\n.PHONY : clean_test_results_osi3_bridge/fast\n\n# Convenience name for target.\nosi3_bridge/CMakeFiles/osi3_bridge_generate_messages_py.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/CMakeFiles/osi3_bridge_generate_messages_py.dir/rule\n.PHONY : osi3_bridge/CMakeFiles/osi3_bridge_generate_messages_py.dir/rule\n\n# Convenience name for target.\nosi3_bridge_generate_messages_py: osi3_bridge/CMakeFiles/osi3_bridge_generate_messages_py.dir/rule\n\n.PHONY : osi3_bridge_generate_messages_py\n\n# fast build rule for target.\nosi3_bridge_generate_messages_py/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/osi3_bridge_generate_messages_py.dir/build.make osi3_bridge/CMakeFiles/osi3_bridge_generate_messages_py.dir/build\n.PHONY : osi3_bridge_generate_messages_py/fast\n\n# Convenience name for target.\nosi3_bridge/CMakeFiles/_run_tests_osi3_bridge_roslaunch-check_launch.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/CMakeFiles/_run_tests_osi3_bridge_roslaunch-check_launch.dir/rule\n.PHONY : osi3_bridge/CMakeFiles/_run_tests_osi3_bridge_roslaunch-check_launch.dir/rule\n\n# Convenience name for target.\n_run_tests_osi3_bridge_roslaunch-check_launch: osi3_bridge/CMakeFiles/_run_tests_osi3_bridge_roslaunch-check_launch.dir/rule\n\n.PHONY : _run_tests_osi3_bridge_roslaunch-check_launch\n\n# fast build rule for target.\n_run_tests_osi3_bridge_roslaunch-check_launch/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f 
osi3_bridge/CMakeFiles/_run_tests_osi3_bridge_roslaunch-check_launch.dir/build.make osi3_bridge/CMakeFiles/_run_tests_osi3_bridge_roslaunch-check_launch.dir/build\n.PHONY : _run_tests_osi3_bridge_roslaunch-check_launch/fast\n\n# Convenience name for target.\nosi3_bridge/CMakeFiles/std_msgs_generate_messages_cpp.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/CMakeFiles/std_msgs_generate_messages_cpp.dir/rule\n.PHONY : osi3_bridge/CMakeFiles/std_msgs_generate_messages_cpp.dir/rule\n\n# Convenience name for target.\nstd_msgs_generate_messages_cpp: osi3_bridge/CMakeFiles/std_msgs_generate_messages_cpp.dir/rule\n\n.PHONY : std_msgs_generate_messages_cpp\n\n# fast build rule for target.\nstd_msgs_generate_messages_cpp/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/std_msgs_generate_messages_cpp.dir/build.make osi3_bridge/CMakeFiles/std_msgs_generate_messages_cpp.dir/build\n.PHONY : std_msgs_generate_messages_cpp/fast\n\n# Convenience name for target.\nosi3_bridge/CMakeFiles/rosgraph_msgs_generate_messages_eus.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/CMakeFiles/rosgraph_msgs_generate_messages_eus.dir/rule\n.PHONY : osi3_bridge/CMakeFiles/rosgraph_msgs_generate_messages_eus.dir/rule\n\n# Convenience name for target.\nrosgraph_msgs_generate_messages_eus: osi3_bridge/CMakeFiles/rosgraph_msgs_generate_messages_eus.dir/rule\n\n.PHONY : rosgraph_msgs_generate_messages_eus\n\n# fast build rule for target.\nrosgraph_msgs_generate_messages_eus/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/rosgraph_msgs_generate_messages_eus.dir/build.make osi3_bridge/CMakeFiles/rosgraph_msgs_generate_messages_eus.dir/build\n.PHONY : rosgraph_msgs_generate_messages_eus/fast\n\n# Convenience name for target.\nosi3_bridge/CMakeFiles/rosgraph_msgs_generate_messages_lisp.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/CMakeFiles/rosgraph_msgs_generate_messages_lisp.dir/rule\n.PHONY : osi3_bridge/CMakeFiles/rosgraph_msgs_generate_messages_lisp.dir/rule\n\n# Convenience name for target.\nrosgraph_msgs_generate_messages_lisp: osi3_bridge/CMakeFiles/rosgraph_msgs_generate_messages_lisp.dir/rule\n\n.PHONY : rosgraph_msgs_generate_messages_lisp\n\n# fast build rule for target.\nrosgraph_msgs_generate_messages_lisp/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/rosgraph_msgs_generate_messages_lisp.dir/build.make osi3_bridge/CMakeFiles/rosgraph_msgs_generate_messages_lisp.dir/build\n.PHONY : rosgraph_msgs_generate_messages_lisp/fast\n\n# Convenience name for target.\nosi3_bridge/CMakeFiles/_osi3_bridge_generate_messages_check_deps_Orientation3d.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/CMakeFiles/_osi3_bridge_generate_messages_check_deps_Orientation3d.dir/rule\n.PHONY : osi3_bridge/CMakeFiles/_osi3_bridge_generate_messages_check_deps_Orientation3d.dir/rule\n\n# Convenience name for target.\n_osi3_bridge_generate_messages_check_deps_Orientation3d: osi3_bridge/CMakeFiles/_osi3_bridge_generate_messages_check_deps_Orientation3d.dir/rule\n\n.PHONY : _osi3_bridge_generate_messages_check_deps_Orientation3d\n\n# fast build rule for target.\n_osi3_bridge_generate_messages_check_deps_Orientation3d/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && 
$(MAKE) -f osi3_bridge/CMakeFiles/_osi3_bridge_generate_messages_check_deps_Orientation3d.dir/build.make osi3_bridge/CMakeFiles/_osi3_bridge_generate_messages_check_deps_Orientation3d.dir/build\n.PHONY : _osi3_bridge_generate_messages_check_deps_Orientation3d/fast\n\n# Convenience name for target.\nosi3_bridge/CMakeFiles/osi3_bridge_genlisp.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/CMakeFiles/osi3_bridge_genlisp.dir/rule\n.PHONY : osi3_bridge/CMakeFiles/osi3_bridge_genlisp.dir/rule\n\n# Convenience name for target.\nosi3_bridge_genlisp: osi3_bridge/CMakeFiles/osi3_bridge_genlisp.dir/rule\n\n.PHONY : osi3_bridge_genlisp\n\n# fast build rule for target.\nosi3_bridge_genlisp/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/osi3_bridge_genlisp.dir/build.make osi3_bridge/CMakeFiles/osi3_bridge_genlisp.dir/build\n.PHONY : osi3_bridge_genlisp/fast\n\n# Convenience name for target.\nosi3_bridge/CMakeFiles/_osi3_bridge_generate_messages_check_deps_Dimension3d.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/CMakeFiles/_osi3_bridge_generate_messages_check_deps_Dimension3d.dir/rule\n.PHONY : osi3_bridge/CMakeFiles/_osi3_bridge_generate_messages_check_deps_Dimension3d.dir/rule\n\n# Convenience name for target.\n_osi3_bridge_generate_messages_check_deps_Dimension3d: osi3_bridge/CMakeFiles/_osi3_bridge_generate_messages_check_deps_Dimension3d.dir/rule\n\n.PHONY : _osi3_bridge_generate_messages_check_deps_Dimension3d\n\n# fast build rule for target.\n_osi3_bridge_generate_messages_check_deps_Dimension3d/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/_osi3_bridge_generate_messages_check_deps_Dimension3d.dir/build.make osi3_bridge/CMakeFiles/_osi3_bridge_generate_messages_check_deps_Dimension3d.dir/build\n.PHONY : _osi3_bridge_generate_messages_check_deps_Dimension3d/fast\n\n# Convenience name for target.\nosi3_bridge/CMakeFiles/_osi3_bridge_generate_messages_check_deps_MovingObject.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/CMakeFiles/_osi3_bridge_generate_messages_check_deps_MovingObject.dir/rule\n.PHONY : osi3_bridge/CMakeFiles/_osi3_bridge_generate_messages_check_deps_MovingObject.dir/rule\n\n# Convenience name for target.\n_osi3_bridge_generate_messages_check_deps_MovingObject: osi3_bridge/CMakeFiles/_osi3_bridge_generate_messages_check_deps_MovingObject.dir/rule\n\n.PHONY : _osi3_bridge_generate_messages_check_deps_MovingObject\n\n# fast build rule for target.\n_osi3_bridge_generate_messages_check_deps_MovingObject/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/_osi3_bridge_generate_messages_check_deps_MovingObject.dir/build.make osi3_bridge/CMakeFiles/_osi3_bridge_generate_messages_check_deps_MovingObject.dir/build\n.PHONY : _osi3_bridge_generate_messages_check_deps_MovingObject/fast\n\n# Convenience name for target.\nosi3_bridge/CMakeFiles/_osi3_bridge_generate_messages_check_deps_GroundTruthMovingObjects.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/CMakeFiles/_osi3_bridge_generate_messages_check_deps_GroundTruthMovingObjects.dir/rule\n.PHONY : osi3_bridge/CMakeFiles/_osi3_bridge_generate_messages_check_deps_GroundTruthMovingObjects.dir/rule\n\n# Convenience name for 
target.\n_osi3_bridge_generate_messages_check_deps_GroundTruthMovingObjects: osi3_bridge/CMakeFiles/_osi3_bridge_generate_messages_check_deps_GroundTruthMovingObjects.dir/rule\n\n.PHONY : _osi3_bridge_generate_messages_check_deps_GroundTruthMovingObjects\n\n# fast build rule for target.\n_osi3_bridge_generate_messages_check_deps_GroundTruthMovingObjects/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/_osi3_bridge_generate_messages_check_deps_GroundTruthMovingObjects.dir/build.make osi3_bridge/CMakeFiles/_osi3_bridge_generate_messages_check_deps_GroundTruthMovingObjects.dir/build\n.PHONY : _osi3_bridge_generate_messages_check_deps_GroundTruthMovingObjects/fast\n\n# Convenience name for target.\nosi3_bridge/CMakeFiles/std_msgs_generate_messages_nodejs.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/CMakeFiles/std_msgs_generate_messages_nodejs.dir/rule\n.PHONY : osi3_bridge/CMakeFiles/std_msgs_generate_messages_nodejs.dir/rule\n\n# Convenience name for target.\nstd_msgs_generate_messages_nodejs: osi3_bridge/CMakeFiles/std_msgs_generate_messages_nodejs.dir/rule\n\n.PHONY : std_msgs_generate_messages_nodejs\n\n# fast build rule for target.\nstd_msgs_generate_messages_nodejs/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/std_msgs_generate_messages_nodejs.dir/build.make osi3_bridge/CMakeFiles/std_msgs_generate_messages_nodejs.dir/build\n.PHONY : std_msgs_generate_messages_nodejs/fast\n\n# Convenience name for target.\nosi3_bridge/CMakeFiles/osi3_bridge_gennodejs.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/CMakeFiles/osi3_bridge_gennodejs.dir/rule\n.PHONY : osi3_bridge/CMakeFiles/osi3_bridge_gennodejs.dir/rule\n\n# Convenience name for target.\nosi3_bridge_gennodejs: osi3_bridge/CMakeFiles/osi3_bridge_gennodejs.dir/rule\n\n.PHONY : osi3_bridge_gennodejs\n\n# fast build rule for target.\nosi3_bridge_gennodejs/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/osi3_bridge_gennodejs.dir/build.make osi3_bridge/CMakeFiles/osi3_bridge_gennodejs.dir/build\n.PHONY : osi3_bridge_gennodejs/fast\n\n# Convenience name for target.\nosi3_bridge/CMakeFiles/_osi3_bridge_generate_messages_check_deps_TrafficUpdateMovingObject.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/CMakeFiles/_osi3_bridge_generate_messages_check_deps_TrafficUpdateMovingObject.dir/rule\n.PHONY : osi3_bridge/CMakeFiles/_osi3_bridge_generate_messages_check_deps_TrafficUpdateMovingObject.dir/rule\n\n# Convenience name for target.\n_osi3_bridge_generate_messages_check_deps_TrafficUpdateMovingObject: osi3_bridge/CMakeFiles/_osi3_bridge_generate_messages_check_deps_TrafficUpdateMovingObject.dir/rule\n\n.PHONY : _osi3_bridge_generate_messages_check_deps_TrafficUpdateMovingObject\n\n# fast build rule for target.\n_osi3_bridge_generate_messages_check_deps_TrafficUpdateMovingObject/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/_osi3_bridge_generate_messages_check_deps_TrafficUpdateMovingObject.dir/build.make osi3_bridge/CMakeFiles/_osi3_bridge_generate_messages_check_deps_TrafficUpdateMovingObject.dir/build\n.PHONY : _osi3_bridge_generate_messages_check_deps_TrafficUpdateMovingObject/fast\n\n# Convenience name for 
target.\nosi3_bridge/CMakeFiles/osi3_bridge_generate_messages_cpp.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/CMakeFiles/osi3_bridge_generate_messages_cpp.dir/rule\n.PHONY : osi3_bridge/CMakeFiles/osi3_bridge_generate_messages_cpp.dir/rule\n\n# Convenience name for target.\nosi3_bridge_generate_messages_cpp: osi3_bridge/CMakeFiles/osi3_bridge_generate_messages_cpp.dir/rule\n\n.PHONY : osi3_bridge_generate_messages_cpp\n\n# fast build rule for target.\nosi3_bridge_generate_messages_cpp/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/osi3_bridge_generate_messages_cpp.dir/build.make osi3_bridge/CMakeFiles/osi3_bridge_generate_messages_cpp.dir/build\n.PHONY : osi3_bridge_generate_messages_cpp/fast\n\n# Convenience name for target.\nosi3_bridge/CMakeFiles/rosgraph_msgs_generate_messages_cpp.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/CMakeFiles/rosgraph_msgs_generate_messages_cpp.dir/rule\n.PHONY : osi3_bridge/CMakeFiles/rosgraph_msgs_generate_messages_cpp.dir/rule\n\n# Convenience name for target.\nrosgraph_msgs_generate_messages_cpp: osi3_bridge/CMakeFiles/rosgraph_msgs_generate_messages_cpp.dir/rule\n\n.PHONY : rosgraph_msgs_generate_messages_cpp\n\n# fast build rule for target.\nrosgraph_msgs_generate_messages_cpp/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/rosgraph_msgs_generate_messages_cpp.dir/build.make osi3_bridge/CMakeFiles/rosgraph_msgs_generate_messages_cpp.dir/build\n.PHONY : rosgraph_msgs_generate_messages_cpp/fast\n\n# Convenience name for target.\nosi3_bridge/CMakeFiles/osi3_bridge_generate_messages.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/CMakeFiles/osi3_bridge_generate_messages.dir/rule\n.PHONY : osi3_bridge/CMakeFiles/osi3_bridge_generate_messages.dir/rule\n\n# Convenience name for target.\nosi3_bridge_generate_messages: osi3_bridge/CMakeFiles/osi3_bridge_generate_messages.dir/rule\n\n.PHONY : osi3_bridge_generate_messages\n\n# fast build rule for target.\nosi3_bridge_generate_messages/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/osi3_bridge_generate_messages.dir/build.make osi3_bridge/CMakeFiles/osi3_bridge_generate_messages.dir/build\n.PHONY : osi3_bridge_generate_messages/fast\n\n# Convenience name for target.\nosi3_bridge/CMakeFiles/osi3_bridge_genpy.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/CMakeFiles/osi3_bridge_genpy.dir/rule\n.PHONY : osi3_bridge/CMakeFiles/osi3_bridge_genpy.dir/rule\n\n# Convenience name for target.\nosi3_bridge_genpy: osi3_bridge/CMakeFiles/osi3_bridge_genpy.dir/rule\n\n.PHONY : osi3_bridge_genpy\n\n# fast build rule for target.\nosi3_bridge_genpy/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/osi3_bridge_genpy.dir/build.make osi3_bridge/CMakeFiles/osi3_bridge_genpy.dir/build\n.PHONY : osi3_bridge_genpy/fast\n\n# Convenience name for target.\nosi3_bridge/CMakeFiles/osi3_bridge_gencpp.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/CMakeFiles/osi3_bridge_gencpp.dir/rule\n.PHONY : osi3_bridge/CMakeFiles/osi3_bridge_gencpp.dir/rule\n\n# Convenience name for target.\nosi3_bridge_gencpp: osi3_bridge/CMakeFiles/osi3_bridge_gencpp.dir/rule\n\n.PHONY : 
osi3_bridge_gencpp\n\n# fast build rule for target.\nosi3_bridge_gencpp/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/osi3_bridge_gencpp.dir/build.make osi3_bridge/CMakeFiles/osi3_bridge_gencpp.dir/build\n.PHONY : osi3_bridge_gencpp/fast\n\n# Convenience name for target.\nosi3_bridge/CMakeFiles/_run_tests_osi3_bridge_roslaunch-check.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/CMakeFiles/_run_tests_osi3_bridge_roslaunch-check.dir/rule\n.PHONY : osi3_bridge/CMakeFiles/_run_tests_osi3_bridge_roslaunch-check.dir/rule\n\n# Convenience name for target.\n_run_tests_osi3_bridge_roslaunch-check: osi3_bridge/CMakeFiles/_run_tests_osi3_bridge_roslaunch-check.dir/rule\n\n.PHONY : _run_tests_osi3_bridge_roslaunch-check\n\n# fast build rule for target.\n_run_tests_osi3_bridge_roslaunch-check/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/_run_tests_osi3_bridge_roslaunch-check.dir/build.make osi3_bridge/CMakeFiles/_run_tests_osi3_bridge_roslaunch-check.dir/build\n.PHONY : _run_tests_osi3_bridge_roslaunch-check/fast\n\n# Convenience name for target.\nosi3_bridge/CMakeFiles/osi3_bridge_geneus.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/CMakeFiles/osi3_bridge_geneus.dir/rule\n.PHONY : osi3_bridge/CMakeFiles/osi3_bridge_geneus.dir/rule\n\n# Convenience name for target.\nosi3_bridge_geneus: osi3_bridge/CMakeFiles/osi3_bridge_geneus.dir/rule\n\n.PHONY : osi3_bridge_geneus\n\n# fast build rule for target.\nosi3_bridge_geneus/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/osi3_bridge_geneus.dir/build.make osi3_bridge/CMakeFiles/osi3_bridge_geneus.dir/build\n.PHONY : osi3_bridge_geneus/fast\n\n# Convenience name for target.\nosi3_bridge/CMakeFiles/osi3_bridge_generate_messages_lisp.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/CMakeFiles/osi3_bridge_generate_messages_lisp.dir/rule\n.PHONY : osi3_bridge/CMakeFiles/osi3_bridge_generate_messages_lisp.dir/rule\n\n# Convenience name for target.\nosi3_bridge_generate_messages_lisp: osi3_bridge/CMakeFiles/osi3_bridge_generate_messages_lisp.dir/rule\n\n.PHONY : osi3_bridge_generate_messages_lisp\n\n# fast build rule for target.\nosi3_bridge_generate_messages_lisp/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/osi3_bridge_generate_messages_lisp.dir/build.make osi3_bridge/CMakeFiles/osi3_bridge_generate_messages_lisp.dir/build\n.PHONY : osi3_bridge_generate_messages_lisp/fast\n\n# Convenience name for target.\nosi3_bridge/CMakeFiles/std_msgs_generate_messages_lisp.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/CMakeFiles/std_msgs_generate_messages_lisp.dir/rule\n.PHONY : osi3_bridge/CMakeFiles/std_msgs_generate_messages_lisp.dir/rule\n\n# Convenience name for target.\nstd_msgs_generate_messages_lisp: osi3_bridge/CMakeFiles/std_msgs_generate_messages_lisp.dir/rule\n\n.PHONY : std_msgs_generate_messages_lisp\n\n# fast build rule for target.\nstd_msgs_generate_messages_lisp/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/std_msgs_generate_messages_lisp.dir/build.make osi3_bridge/CMakeFiles/std_msgs_generate_messages_lisp.dir/build\n.PHONY : std_msgs_generate_messages_lisp/fast\n\n# Convenience name 
for target.\nosi3_bridge/CMakeFiles/osi3_bridge_generate_messages_nodejs.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/CMakeFiles/osi3_bridge_generate_messages_nodejs.dir/rule\n.PHONY : osi3_bridge/CMakeFiles/osi3_bridge_generate_messages_nodejs.dir/rule\n\n# Convenience name for target.\nosi3_bridge_generate_messages_nodejs: osi3_bridge/CMakeFiles/osi3_bridge_generate_messages_nodejs.dir/rule\n\n.PHONY : osi3_bridge_generate_messages_nodejs\n\n# fast build rule for target.\nosi3_bridge_generate_messages_nodejs/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/osi3_bridge_generate_messages_nodejs.dir/build.make osi3_bridge/CMakeFiles/osi3_bridge_generate_messages_nodejs.dir/build\n.PHONY : osi3_bridge_generate_messages_nodejs/fast\n\nsrc/osi3_publisher.o: src/osi3_publisher.cpp.o\n\n.PHONY : src/osi3_publisher.o\n\n# target to build an object file\nsrc/osi3_publisher.cpp.o:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/osi3_bridge_publisher.dir/build.make osi3_bridge/CMakeFiles/osi3_bridge_publisher.dir/src/osi3_publisher.cpp.o\n.PHONY : src/osi3_publisher.cpp.o\n\nsrc/osi3_publisher.i: src/osi3_publisher.cpp.i\n\n.PHONY : src/osi3_publisher.i\n\n# target to preprocess a source file\nsrc/osi3_publisher.cpp.i:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/osi3_bridge_publisher.dir/build.make osi3_bridge/CMakeFiles/osi3_bridge_publisher.dir/src/osi3_publisher.cpp.i\n.PHONY : src/osi3_publisher.cpp.i\n\nsrc/osi3_publisher.s: src/osi3_publisher.cpp.s\n\n.PHONY : src/osi3_publisher.s\n\n# target to generate assembly for a file\nsrc/osi3_publisher.cpp.s:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/osi3_bridge_publisher.dir/build.make osi3_bridge/CMakeFiles/osi3_bridge_publisher.dir/src/osi3_publisher.cpp.s\n.PHONY : src/osi3_publisher.cpp.s\n\nsrc/osi_protocol_header.o: src/osi_protocol_header.c.o\n\n.PHONY : src/osi_protocol_header.o\n\n# target to build an object file\nsrc/osi_protocol_header.c.o:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/osi3_bridge_publisher.dir/build.make osi3_bridge/CMakeFiles/osi3_bridge_publisher.dir/src/osi_protocol_header.c.o\n.PHONY : src/osi_protocol_header.c.o\n\nsrc/osi_protocol_header.i: src/osi_protocol_header.c.i\n\n.PHONY : src/osi_protocol_header.i\n\n# target to preprocess a source file\nsrc/osi_protocol_header.c.i:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/osi3_bridge_publisher.dir/build.make osi3_bridge/CMakeFiles/osi3_bridge_publisher.dir/src/osi_protocol_header.c.i\n.PHONY : src/osi_protocol_header.c.i\n\nsrc/osi_protocol_header.s: src/osi_protocol_header.c.s\n\n.PHONY : src/osi_protocol_header.s\n\n# target to generate assembly for a file\nsrc/osi_protocol_header.c.s:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/osi3_bridge_publisher.dir/build.make osi3_bridge/CMakeFiles/osi3_bridge_publisher.dir/src/osi_protocol_header.c.s\n.PHONY : src/osi_protocol_header.c.s\n\nsrc/udp.o: src/udp.c.o\n\n.PHONY : src/udp.o\n\n# target to build an object file\nsrc/udp.c.o:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/osi3_bridge_publisher.dir/build.make osi3_bridge/CMakeFiles/osi3_bridge_publisher.dir/src/udp.c.o\n.PHONY : src/udp.c.o\n\nsrc/udp.i: 
src/udp.c.i\n\n.PHONY : src/udp.i\n\n# target to preprocess a source file\nsrc/udp.c.i:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/osi3_bridge_publisher.dir/build.make osi3_bridge/CMakeFiles/osi3_bridge_publisher.dir/src/udp.c.i\n.PHONY : src/udp.c.i\n\nsrc/udp.s: src/udp.c.s\n\n.PHONY : src/udp.s\n\n# target to generate assembly for a file\nsrc/udp.c.s:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/CMakeFiles/osi3_bridge_publisher.dir/build.make osi3_bridge/CMakeFiles/osi3_bridge_publisher.dir/src/udp.c.s\n.PHONY : src/udp.c.s\n\n# Help Target\nhelp:\n\t@echo \"The following are some of the valid targets for this Makefile:\"\n\t@echo \"... all (the default if no target is provided)\"\n\t@echo \"... clean\"\n\t@echo \"... depend\"\n\t@echo \"... install/local\"\n\t@echo \"... install\"\n\t@echo \"... edit_cache\"\n\t@echo \"... test\"\n\t@echo \"... list_install_components\"\n\t@echo \"... run_tests_osi3_bridge_roslaunch-check_launch\"\n\t@echo \"... run_tests_osi3_bridge_roslaunch-check\"\n\t@echo \"... _run_tests_osi3_bridge\"\n\t@echo \"... run_tests_osi3_bridge\"\n\t@echo \"... osi3_bridge_publisher\"\n\t@echo \"... roscpp_generate_messages_py\"\n\t@echo \"... std_msgs_generate_messages_py\"\n\t@echo \"... geometry_msgs_generate_messages_eus\"\n\t@echo \"... roscpp_generate_messages_nodejs\"\n\t@echo \"... roscpp_generate_messages_eus\"\n\t@echo \"... osi3_bridge_generate_messages_eus\"\n\t@echo \"... std_msgs_generate_messages_eus\"\n\t@echo \"... rosgraph_msgs_generate_messages_nodejs\"\n\t@echo \"... rosgraph_msgs_generate_messages_py\"\n\t@echo \"... rebuild_cache\"\n\t@echo \"... roscpp_generate_messages_cpp\"\n\t@echo \"... roscpp_generate_messages_lisp\"\n\t@echo \"... geometry_msgs_generate_messages_nodejs\"\n\t@echo \"... geometry_msgs_generate_messages_cpp\"\n\t@echo \"... geometry_msgs_generate_messages_lisp\"\n\t@echo \"... geometry_msgs_generate_messages_py\"\n\t@echo \"... clean_test_results_osi3_bridge\"\n\t@echo \"... osi3_bridge_generate_messages_py\"\n\t@echo \"... _run_tests_osi3_bridge_roslaunch-check_launch\"\n\t@echo \"... std_msgs_generate_messages_cpp\"\n\t@echo \"... rosgraph_msgs_generate_messages_eus\"\n\t@echo \"... rosgraph_msgs_generate_messages_lisp\"\n\t@echo \"... _osi3_bridge_generate_messages_check_deps_Orientation3d\"\n\t@echo \"... install/strip\"\n\t@echo \"... osi3_bridge_genlisp\"\n\t@echo \"... _osi3_bridge_generate_messages_check_deps_Dimension3d\"\n\t@echo \"... _osi3_bridge_generate_messages_check_deps_MovingObject\"\n\t@echo \"... _osi3_bridge_generate_messages_check_deps_GroundTruthMovingObjects\"\n\t@echo \"... std_msgs_generate_messages_nodejs\"\n\t@echo \"... osi3_bridge_gennodejs\"\n\t@echo \"... _osi3_bridge_generate_messages_check_deps_TrafficUpdateMovingObject\"\n\t@echo \"... osi3_bridge_generate_messages_cpp\"\n\t@echo \"... rosgraph_msgs_generate_messages_cpp\"\n\t@echo \"... osi3_bridge_generate_messages\"\n\t@echo \"... osi3_bridge_genpy\"\n\t@echo \"... osi3_bridge_gencpp\"\n\t@echo \"... _run_tests_osi3_bridge_roslaunch-check\"\n\t@echo \"... osi3_bridge_geneus\"\n\t@echo \"... osi3_bridge_generate_messages_lisp\"\n\t@echo \"... std_msgs_generate_messages_lisp\"\n\t@echo \"... osi3_bridge_generate_messages_nodejs\"\n\t@echo \"... src/osi3_publisher.o\"\n\t@echo \"... src/osi3_publisher.i\"\n\t@echo \"... src/osi3_publisher.s\"\n\t@echo \"... src/osi_protocol_header.o\"\n\t@echo \"... src/osi_protocol_header.i\"\n\t@echo \"... 
src/osi_protocol_header.s\"\n\t@echo \"... src/udp.o\"\n\t@echo \"... src/udp.i\"\n\t@echo \"... src/udp.s\"\n.PHONY : help\n\n\n\n#=============================================================================\n# Special targets to cleanup operation of make.\n\n# Special rule to run CMake to check the build system integrity.\n# No rule that depends on this can have commands that come from listfiles\n# because they might be regenerated.\ncmake_check_build_system:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(CMAKE_COMMAND) -H$(CMAKE_SOURCE_DIR) -B$(CMAKE_BINARY_DIR) --check-build-system CMakeFiles/Makefile.cmake 0\n.PHONY : cmake_check_build_system\n\n"
},
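The Makefile captured in the record above is entirely CMake-generated and repeats one pattern per target: a plain `<name>` rule that re-enters the aggregate `CMakeFiles/Makefile2` (so `cmake_check_build_system` and inter-target dependencies run first), and a `<name>/fast` rule that invokes the target's own `build.make` directly, skipping those checks. Below is a minimal hand-written sketch of that pattern, not part of the record itself; the target name `example_target` and the `pkg/` path are hypothetical placeholders.

# Full build rule: goes through the aggregate makefile so the build
# system is re-checked and the target's dependencies are built first.
example_target: cmake_check_build_system
	$(MAKE) -f CMakeFiles/Makefile2 example_target
.PHONY : example_target

# Fast build rule: jumps straight to this target's own build.make,
# trading dependency safety for speed.
example_target/fast:
	$(MAKE) -f pkg/CMakeFiles/example_target.dir/build.make pkg/CMakeFiles/example_target.dir/build
.PHONY : example_target/fast

Running `make example_target/fast` therefore rebuilds only that one target, which is why the generated file exposes a `/fast` variant (and matching per-file `.o`/`.i`/`.s` rules) for every target listed in its help output.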
{
"alpha_fraction": 0.6888296008110046,
"alphanum_fraction": 0.6956033110618591,
"avg_line_length": 43.46217727661133,
"blob_id": "75cc36f8c1d73d8b903eeeecafc136ace07d9af7",
"content_id": "4ac3a6e897a019f6813e9834c7e2145cc79071f5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Makefile",
"length_bytes": 64071,
"license_type": "no_license",
"max_line_length": 218,
"num_lines": 1441,
"path": "/build/Makefile",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "# CMAKE generated file: DO NOT EDIT!\n# Generated by \"Unix Makefiles\" Generator, CMake Version 3.10\n\n# Default target executed when no arguments are given to make.\ndefault_target: all\n\n.PHONY : default_target\n\n# Allow only one \"make -f Makefile2\" at a time, but pass parallelism.\n.NOTPARALLEL:\n\n\n#=============================================================================\n# Special targets provided by cmake.\n\n# Disable implicit rules so canonical targets will work.\n.SUFFIXES:\n\n\n# Remove some rules from gmake that .SUFFIXES does not remove.\nSUFFIXES =\n\n.SUFFIXES: .hpux_make_needs_suffix_list\n\n\n# Suppress display of executed commands.\n$(VERBOSE).SILENT:\n\n\n# A target that is always out of date.\ncmake_force:\n\n.PHONY : cmake_force\n\n#=============================================================================\n# Set environment variables for the build.\n\n# The shell in which to execute make rules.\nSHELL = /bin/sh\n\n# The CMake executable.\nCMAKE_COMMAND = /usr/bin/cmake\n\n# The command to remove a file.\nRM = /usr/bin/cmake -E remove -f\n\n# Escaping for special characters.\nEQUALS = =\n\n# The top-level source directory on which CMake was run.\nCMAKE_SOURCE_DIR = /home/student/Desktop/Redge_Thesis/vil/src\n\n# The top-level build directory on which CMake was run.\nCMAKE_BINARY_DIR = /home/student/Desktop/Redge_Thesis/vil/build\n\n#=============================================================================\n# Targets provided globally by CMake.\n\n# Special rule for the target install/strip\ninstall/strip: preinstall\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"Installing the project stripped...\"\n\t/usr/bin/cmake -DCMAKE_INSTALL_DO_STRIP=1 -P cmake_install.cmake\n.PHONY : install/strip\n\n# Special rule for the target install/strip\ninstall/strip/fast: preinstall/fast\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"Installing the project stripped...\"\n\t/usr/bin/cmake -DCMAKE_INSTALL_DO_STRIP=1 -P cmake_install.cmake\n.PHONY : install/strip/fast\n\n# Special rule for the target install\ninstall: preinstall\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"Install the project...\"\n\t/usr/bin/cmake -P cmake_install.cmake\n.PHONY : install\n\n# Special rule for the target install\ninstall/fast: preinstall/fast\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"Install the project...\"\n\t/usr/bin/cmake -P cmake_install.cmake\n.PHONY : install/fast\n\n# Special rule for the target list_install_components\nlist_install_components:\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"Available install components are: \\\"Unspecified\\\" \\\"dev\\\" \\\"lib\\\"\"\n.PHONY : list_install_components\n\n# Special rule for the target list_install_components\nlist_install_components/fast: list_install_components\n\n.PHONY : list_install_components/fast\n\n# Special rule for the target rebuild_cache\nrebuild_cache:\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"Running CMake to regenerate build system...\"\n\t/usr/bin/cmake -H$(CMAKE_SOURCE_DIR) -B$(CMAKE_BINARY_DIR)\n.PHONY : rebuild_cache\n\n# Special rule for the target rebuild_cache\nrebuild_cache/fast: rebuild_cache\n\n.PHONY : rebuild_cache/fast\n\n# Special rule for the target edit_cache\nedit_cache:\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"No interactive CMake dialog available...\"\n\t/usr/bin/cmake -E echo No\\ interactive\\ CMake\\ dialog\\ 
available.\n.PHONY : edit_cache\n\n# Special rule for the target edit_cache\nedit_cache/fast: edit_cache\n\n.PHONY : edit_cache/fast\n\n# Special rule for the target install/local\ninstall/local: preinstall\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"Installing only the local directory...\"\n\t/usr/bin/cmake -DCMAKE_INSTALL_LOCAL_ONLY=1 -P cmake_install.cmake\n.PHONY : install/local\n\n# Special rule for the target install/local\ninstall/local/fast: preinstall/fast\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"Installing only the local directory...\"\n\t/usr/bin/cmake -DCMAKE_INSTALL_LOCAL_ONLY=1 -P cmake_install.cmake\n.PHONY : install/local/fast\n\n# Special rule for the target test\ntest:\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"Running tests...\"\n\t/usr/bin/ctest --force-new-ctest-process $(ARGS)\n.PHONY : test\n\n# Special rule for the target test\ntest/fast: test\n\n.PHONY : test/fast\n\n# The main all target\nall: cmake_check_build_system\n\t$(CMAKE_COMMAND) -E cmake_progress_start /home/student/Desktop/Redge_Thesis/vil/build/CMakeFiles /home/student/Desktop/Redge_Thesis/vil/build/CMakeFiles/progress.marks\n\t$(MAKE) -f CMakeFiles/Makefile2 all\n\t$(CMAKE_COMMAND) -E cmake_progress_start /home/student/Desktop/Redge_Thesis/vil/build/CMakeFiles 0\n.PHONY : all\n\n# The main clean target\nclean:\n\t$(MAKE) -f CMakeFiles/Makefile2 clean\n.PHONY : clean\n\n# The main clean target\nclean/fast: clean\n\n.PHONY : clean/fast\n\n# Prepare targets for installation.\npreinstall: all\n\t$(MAKE) -f CMakeFiles/Makefile2 preinstall\n.PHONY : preinstall\n\n# Prepare targets for installation.\npreinstall/fast:\n\t$(MAKE) -f CMakeFiles/Makefile2 preinstall\n.PHONY : preinstall/fast\n\n# clear depends\ndepend:\n\t$(CMAKE_COMMAND) -H$(CMAKE_SOURCE_DIR) -B$(CMAKE_BINARY_DIR) --check-build-system CMakeFiles/Makefile.cmake 1\n.PHONY : depend\n\n#=============================================================================\n# Target rules for targets named tests\n\n# Build rule for target.\ntests: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 tests\n.PHONY : tests\n\n# fast build rule for target.\ntests/fast:\n\t$(MAKE) -f CMakeFiles/tests.dir/build.make CMakeFiles/tests.dir/build\n.PHONY : tests/fast\n\n#=============================================================================\n# Target rules for targets named download_extra_data\n\n# Build rule for target.\ndownload_extra_data: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 download_extra_data\n.PHONY : download_extra_data\n\n# fast build rule for target.\ndownload_extra_data/fast:\n\t$(MAKE) -f CMakeFiles/download_extra_data.dir/build.make CMakeFiles/download_extra_data.dir/build\n.PHONY : download_extra_data/fast\n\n#=============================================================================\n# Target rules for targets named run_tests\n\n# Build rule for target.\nrun_tests: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 run_tests\n.PHONY : run_tests\n\n# fast build rule for target.\nrun_tests/fast:\n\t$(MAKE) -f CMakeFiles/run_tests.dir/build.make CMakeFiles/run_tests.dir/build\n.PHONY : run_tests/fast\n\n#=============================================================================\n# Target rules for targets named clean_test_results\n\n# Build rule for target.\nclean_test_results: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 clean_test_results\n.PHONY : clean_test_results\n\n# fast build rule for 
target.\nclean_test_results/fast:\n\t$(MAKE) -f CMakeFiles/clean_test_results.dir/build.make CMakeFiles/clean_test_results.dir/build\n.PHONY : clean_test_results/fast\n\n#=============================================================================\n# Target rules for targets named doxygen\n\n# Build rule for target.\ndoxygen: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 doxygen\n.PHONY : doxygen\n\n# fast build rule for target.\ndoxygen/fast:\n\t$(MAKE) -f CMakeFiles/doxygen.dir/build.make CMakeFiles/doxygen.dir/build\n.PHONY : doxygen/fast\n\n#=============================================================================\n# Target rules for targets named gmock_main\n\n# Build rule for target.\ngmock_main: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 gmock_main\n.PHONY : gmock_main\n\n# fast build rule for target.\ngmock_main/fast:\n\t$(MAKE) -f gtest/googlemock/CMakeFiles/gmock_main.dir/build.make gtest/googlemock/CMakeFiles/gmock_main.dir/build\n.PHONY : gmock_main/fast\n\n#=============================================================================\n# Target rules for targets named gmock\n\n# Build rule for target.\ngmock: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 gmock\n.PHONY : gmock\n\n# fast build rule for target.\ngmock/fast:\n\t$(MAKE) -f gtest/googlemock/CMakeFiles/gmock.dir/build.make gtest/googlemock/CMakeFiles/gmock.dir/build\n.PHONY : gmock/fast\n\n#=============================================================================\n# Target rules for targets named gtest_main\n\n# Build rule for target.\ngtest_main: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 gtest_main\n.PHONY : gtest_main\n\n# fast build rule for target.\ngtest_main/fast:\n\t$(MAKE) -f gtest/googlemock/gtest/CMakeFiles/gtest_main.dir/build.make gtest/googlemock/gtest/CMakeFiles/gtest_main.dir/build\n.PHONY : gtest_main/fast\n\n#=============================================================================\n# Target rules for targets named gtest\n\n# Build rule for target.\ngtest: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 gtest\n.PHONY : gtest\n\n# fast build rule for target.\ngtest/fast:\n\t$(MAKE) -f gtest/googlemock/gtest/CMakeFiles/gtest.dir/build.make gtest/googlemock/gtest/CMakeFiles/gtest.dir/build\n.PHONY : gtest/fast\n\n#=============================================================================\n# Target rules for targets named run_tests_osi3_bridge_roslaunch-check_launch\n\n# Build rule for target.\nrun_tests_osi3_bridge_roslaunch-check_launch: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 run_tests_osi3_bridge_roslaunch-check_launch\n.PHONY : run_tests_osi3_bridge_roslaunch-check_launch\n\n# fast build rule for target.\nrun_tests_osi3_bridge_roslaunch-check_launch/fast:\n\t$(MAKE) -f osi3_bridge/CMakeFiles/run_tests_osi3_bridge_roslaunch-check_launch.dir/build.make osi3_bridge/CMakeFiles/run_tests_osi3_bridge_roslaunch-check_launch.dir/build\n.PHONY : run_tests_osi3_bridge_roslaunch-check_launch/fast\n\n#=============================================================================\n# Target rules for targets named run_tests_osi3_bridge_roslaunch-check\n\n# Build rule for target.\nrun_tests_osi3_bridge_roslaunch-check: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 run_tests_osi3_bridge_roslaunch-check\n.PHONY : run_tests_osi3_bridge_roslaunch-check\n\n# fast build rule for target.\nrun_tests_osi3_bridge_roslaunch-check/fast:\n\t$(MAKE) -f 
osi3_bridge/CMakeFiles/run_tests_osi3_bridge_roslaunch-check.dir/build.make osi3_bridge/CMakeFiles/run_tests_osi3_bridge_roslaunch-check.dir/build\n.PHONY : run_tests_osi3_bridge_roslaunch-check/fast\n\n#=============================================================================\n# Target rules for targets named _run_tests_osi3_bridge\n\n# Build rule for target.\n_run_tests_osi3_bridge: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 _run_tests_osi3_bridge\n.PHONY : _run_tests_osi3_bridge\n\n# fast build rule for target.\n_run_tests_osi3_bridge/fast:\n\t$(MAKE) -f osi3_bridge/CMakeFiles/_run_tests_osi3_bridge.dir/build.make osi3_bridge/CMakeFiles/_run_tests_osi3_bridge.dir/build\n.PHONY : _run_tests_osi3_bridge/fast\n\n#=============================================================================\n# Target rules for targets named run_tests_osi3_bridge\n\n# Build rule for target.\nrun_tests_osi3_bridge: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 run_tests_osi3_bridge\n.PHONY : run_tests_osi3_bridge\n\n# fast build rule for target.\nrun_tests_osi3_bridge/fast:\n\t$(MAKE) -f osi3_bridge/CMakeFiles/run_tests_osi3_bridge.dir/build.make osi3_bridge/CMakeFiles/run_tests_osi3_bridge.dir/build\n.PHONY : run_tests_osi3_bridge/fast\n\n#=============================================================================\n# Target rules for targets named osi3_bridge_publisher\n\n# Build rule for target.\nosi3_bridge_publisher: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 osi3_bridge_publisher\n.PHONY : osi3_bridge_publisher\n\n# fast build rule for target.\nosi3_bridge_publisher/fast:\n\t$(MAKE) -f osi3_bridge/CMakeFiles/osi3_bridge_publisher.dir/build.make osi3_bridge/CMakeFiles/osi3_bridge_publisher.dir/build\n.PHONY : osi3_bridge_publisher/fast\n\n#=============================================================================\n# Target rules for targets named roscpp_generate_messages_py\n\n# Build rule for target.\nroscpp_generate_messages_py: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 roscpp_generate_messages_py\n.PHONY : roscpp_generate_messages_py\n\n# fast build rule for target.\nroscpp_generate_messages_py/fast:\n\t$(MAKE) -f osi3_bridge/CMakeFiles/roscpp_generate_messages_py.dir/build.make osi3_bridge/CMakeFiles/roscpp_generate_messages_py.dir/build\n.PHONY : roscpp_generate_messages_py/fast\n\n#=============================================================================\n# Target rules for targets named std_msgs_generate_messages_py\n\n# Build rule for target.\nstd_msgs_generate_messages_py: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 std_msgs_generate_messages_py\n.PHONY : std_msgs_generate_messages_py\n\n# fast build rule for target.\nstd_msgs_generate_messages_py/fast:\n\t$(MAKE) -f osi3_bridge/CMakeFiles/std_msgs_generate_messages_py.dir/build.make osi3_bridge/CMakeFiles/std_msgs_generate_messages_py.dir/build\n.PHONY : std_msgs_generate_messages_py/fast\n\n#=============================================================================\n# Target rules for targets named geometry_msgs_generate_messages_eus\n\n# Build rule for target.\ngeometry_msgs_generate_messages_eus: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 geometry_msgs_generate_messages_eus\n.PHONY : geometry_msgs_generate_messages_eus\n\n# fast build rule for target.\ngeometry_msgs_generate_messages_eus/fast:\n\t$(MAKE) -f osi3_bridge/CMakeFiles/geometry_msgs_generate_messages_eus.dir/build.make 
osi3_bridge/CMakeFiles/geometry_msgs_generate_messages_eus.dir/build\n.PHONY : geometry_msgs_generate_messages_eus/fast\n\n#=============================================================================\n# Target rules for targets named roscpp_generate_messages_nodejs\n\n# Build rule for target.\nroscpp_generate_messages_nodejs: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 roscpp_generate_messages_nodejs\n.PHONY : roscpp_generate_messages_nodejs\n\n# fast build rule for target.\nroscpp_generate_messages_nodejs/fast:\n\t$(MAKE) -f osi3_bridge/CMakeFiles/roscpp_generate_messages_nodejs.dir/build.make osi3_bridge/CMakeFiles/roscpp_generate_messages_nodejs.dir/build\n.PHONY : roscpp_generate_messages_nodejs/fast\n\n#=============================================================================\n# Target rules for targets named roscpp_generate_messages_eus\n\n# Build rule for target.\nroscpp_generate_messages_eus: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 roscpp_generate_messages_eus\n.PHONY : roscpp_generate_messages_eus\n\n# fast build rule for target.\nroscpp_generate_messages_eus/fast:\n\t$(MAKE) -f osi3_bridge/CMakeFiles/roscpp_generate_messages_eus.dir/build.make osi3_bridge/CMakeFiles/roscpp_generate_messages_eus.dir/build\n.PHONY : roscpp_generate_messages_eus/fast\n\n#=============================================================================\n# Target rules for targets named osi3_bridge_generate_messages_eus\n\n# Build rule for target.\nosi3_bridge_generate_messages_eus: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 osi3_bridge_generate_messages_eus\n.PHONY : osi3_bridge_generate_messages_eus\n\n# fast build rule for target.\nosi3_bridge_generate_messages_eus/fast:\n\t$(MAKE) -f osi3_bridge/CMakeFiles/osi3_bridge_generate_messages_eus.dir/build.make osi3_bridge/CMakeFiles/osi3_bridge_generate_messages_eus.dir/build\n.PHONY : osi3_bridge_generate_messages_eus/fast\n\n#=============================================================================\n# Target rules for targets named std_msgs_generate_messages_eus\n\n# Build rule for target.\nstd_msgs_generate_messages_eus: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 std_msgs_generate_messages_eus\n.PHONY : std_msgs_generate_messages_eus\n\n# fast build rule for target.\nstd_msgs_generate_messages_eus/fast:\n\t$(MAKE) -f osi3_bridge/CMakeFiles/std_msgs_generate_messages_eus.dir/build.make osi3_bridge/CMakeFiles/std_msgs_generate_messages_eus.dir/build\n.PHONY : std_msgs_generate_messages_eus/fast\n\n#=============================================================================\n# Target rules for targets named rosgraph_msgs_generate_messages_nodejs\n\n# Build rule for target.\nrosgraph_msgs_generate_messages_nodejs: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 rosgraph_msgs_generate_messages_nodejs\n.PHONY : rosgraph_msgs_generate_messages_nodejs\n\n# fast build rule for target.\nrosgraph_msgs_generate_messages_nodejs/fast:\n\t$(MAKE) -f osi3_bridge/CMakeFiles/rosgraph_msgs_generate_messages_nodejs.dir/build.make osi3_bridge/CMakeFiles/rosgraph_msgs_generate_messages_nodejs.dir/build\n.PHONY : rosgraph_msgs_generate_messages_nodejs/fast\n\n#=============================================================================\n# Target rules for targets named rosgraph_msgs_generate_messages_py\n\n# Build rule for target.\nrosgraph_msgs_generate_messages_py: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 rosgraph_msgs_generate_messages_py\n.PHONY : 
rosgraph_msgs_generate_messages_py\n\n# fast build rule for target.\nrosgraph_msgs_generate_messages_py/fast:\n\t$(MAKE) -f osi3_bridge/CMakeFiles/rosgraph_msgs_generate_messages_py.dir/build.make osi3_bridge/CMakeFiles/rosgraph_msgs_generate_messages_py.dir/build\n.PHONY : rosgraph_msgs_generate_messages_py/fast\n\n#=============================================================================\n# Target rules for targets named roscpp_generate_messages_cpp\n\n# Build rule for target.\nroscpp_generate_messages_cpp: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 roscpp_generate_messages_cpp\n.PHONY : roscpp_generate_messages_cpp\n\n# fast build rule for target.\nroscpp_generate_messages_cpp/fast:\n\t$(MAKE) -f osi3_bridge/CMakeFiles/roscpp_generate_messages_cpp.dir/build.make osi3_bridge/CMakeFiles/roscpp_generate_messages_cpp.dir/build\n.PHONY : roscpp_generate_messages_cpp/fast\n\n#=============================================================================\n# Target rules for targets named roscpp_generate_messages_lisp\n\n# Build rule for target.\nroscpp_generate_messages_lisp: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 roscpp_generate_messages_lisp\n.PHONY : roscpp_generate_messages_lisp\n\n# fast build rule for target.\nroscpp_generate_messages_lisp/fast:\n\t$(MAKE) -f osi3_bridge/CMakeFiles/roscpp_generate_messages_lisp.dir/build.make osi3_bridge/CMakeFiles/roscpp_generate_messages_lisp.dir/build\n.PHONY : roscpp_generate_messages_lisp/fast\n\n#=============================================================================\n# Target rules for targets named geometry_msgs_generate_messages_nodejs\n\n# Build rule for target.\ngeometry_msgs_generate_messages_nodejs: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 geometry_msgs_generate_messages_nodejs\n.PHONY : geometry_msgs_generate_messages_nodejs\n\n# fast build rule for target.\ngeometry_msgs_generate_messages_nodejs/fast:\n\t$(MAKE) -f osi3_bridge/CMakeFiles/geometry_msgs_generate_messages_nodejs.dir/build.make osi3_bridge/CMakeFiles/geometry_msgs_generate_messages_nodejs.dir/build\n.PHONY : geometry_msgs_generate_messages_nodejs/fast\n\n#=============================================================================\n# Target rules for targets named geometry_msgs_generate_messages_cpp\n\n# Build rule for target.\ngeometry_msgs_generate_messages_cpp: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 geometry_msgs_generate_messages_cpp\n.PHONY : geometry_msgs_generate_messages_cpp\n\n# fast build rule for target.\ngeometry_msgs_generate_messages_cpp/fast:\n\t$(MAKE) -f osi3_bridge/CMakeFiles/geometry_msgs_generate_messages_cpp.dir/build.make osi3_bridge/CMakeFiles/geometry_msgs_generate_messages_cpp.dir/build\n.PHONY : geometry_msgs_generate_messages_cpp/fast\n\n#=============================================================================\n# Target rules for targets named geometry_msgs_generate_messages_lisp\n\n# Build rule for target.\ngeometry_msgs_generate_messages_lisp: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 geometry_msgs_generate_messages_lisp\n.PHONY : geometry_msgs_generate_messages_lisp\n\n# fast build rule for target.\ngeometry_msgs_generate_messages_lisp/fast:\n\t$(MAKE) -f osi3_bridge/CMakeFiles/geometry_msgs_generate_messages_lisp.dir/build.make osi3_bridge/CMakeFiles/geometry_msgs_generate_messages_lisp.dir/build\n.PHONY : geometry_msgs_generate_messages_lisp/fast\n\n#=============================================================================\n# Target 
rules for targets named geometry_msgs_generate_messages_py\n\n# Build rule for target.\ngeometry_msgs_generate_messages_py: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 geometry_msgs_generate_messages_py\n.PHONY : geometry_msgs_generate_messages_py\n\n# fast build rule for target.\ngeometry_msgs_generate_messages_py/fast:\n\t$(MAKE) -f osi3_bridge/CMakeFiles/geometry_msgs_generate_messages_py.dir/build.make osi3_bridge/CMakeFiles/geometry_msgs_generate_messages_py.dir/build\n.PHONY : geometry_msgs_generate_messages_py/fast\n\n#=============================================================================\n# Target rules for targets named clean_test_results_osi3_bridge\n\n# Build rule for target.\nclean_test_results_osi3_bridge: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 clean_test_results_osi3_bridge\n.PHONY : clean_test_results_osi3_bridge\n\n# fast build rule for target.\nclean_test_results_osi3_bridge/fast:\n\t$(MAKE) -f osi3_bridge/CMakeFiles/clean_test_results_osi3_bridge.dir/build.make osi3_bridge/CMakeFiles/clean_test_results_osi3_bridge.dir/build\n.PHONY : clean_test_results_osi3_bridge/fast\n\n#=============================================================================\n# Target rules for targets named osi3_bridge_generate_messages_py\n\n# Build rule for target.\nosi3_bridge_generate_messages_py: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 osi3_bridge_generate_messages_py\n.PHONY : osi3_bridge_generate_messages_py\n\n# fast build rule for target.\nosi3_bridge_generate_messages_py/fast:\n\t$(MAKE) -f osi3_bridge/CMakeFiles/osi3_bridge_generate_messages_py.dir/build.make osi3_bridge/CMakeFiles/osi3_bridge_generate_messages_py.dir/build\n.PHONY : osi3_bridge_generate_messages_py/fast\n\n#=============================================================================\n# Target rules for targets named _run_tests_osi3_bridge_roslaunch-check_launch\n\n# Build rule for target.\n_run_tests_osi3_bridge_roslaunch-check_launch: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 _run_tests_osi3_bridge_roslaunch-check_launch\n.PHONY : _run_tests_osi3_bridge_roslaunch-check_launch\n\n# fast build rule for target.\n_run_tests_osi3_bridge_roslaunch-check_launch/fast:\n\t$(MAKE) -f osi3_bridge/CMakeFiles/_run_tests_osi3_bridge_roslaunch-check_launch.dir/build.make osi3_bridge/CMakeFiles/_run_tests_osi3_bridge_roslaunch-check_launch.dir/build\n.PHONY : _run_tests_osi3_bridge_roslaunch-check_launch/fast\n\n#=============================================================================\n# Target rules for targets named std_msgs_generate_messages_cpp\n\n# Build rule for target.\nstd_msgs_generate_messages_cpp: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 std_msgs_generate_messages_cpp\n.PHONY : std_msgs_generate_messages_cpp\n\n# fast build rule for target.\nstd_msgs_generate_messages_cpp/fast:\n\t$(MAKE) -f osi3_bridge/CMakeFiles/std_msgs_generate_messages_cpp.dir/build.make osi3_bridge/CMakeFiles/std_msgs_generate_messages_cpp.dir/build\n.PHONY : std_msgs_generate_messages_cpp/fast\n\n#=============================================================================\n# Target rules for targets named rosgraph_msgs_generate_messages_eus\n\n# Build rule for target.\nrosgraph_msgs_generate_messages_eus: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 rosgraph_msgs_generate_messages_eus\n.PHONY : rosgraph_msgs_generate_messages_eus\n\n# fast build rule for target.\nrosgraph_msgs_generate_messages_eus/fast:\n\t$(MAKE) -f 
osi3_bridge/CMakeFiles/rosgraph_msgs_generate_messages_eus.dir/build.make osi3_bridge/CMakeFiles/rosgraph_msgs_generate_messages_eus.dir/build\n.PHONY : rosgraph_msgs_generate_messages_eus/fast\n\n#=============================================================================\n# Target rules for targets named rosgraph_msgs_generate_messages_lisp\n\n# Build rule for target.\nrosgraph_msgs_generate_messages_lisp: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 rosgraph_msgs_generate_messages_lisp\n.PHONY : rosgraph_msgs_generate_messages_lisp\n\n# fast build rule for target.\nrosgraph_msgs_generate_messages_lisp/fast:\n\t$(MAKE) -f osi3_bridge/CMakeFiles/rosgraph_msgs_generate_messages_lisp.dir/build.make osi3_bridge/CMakeFiles/rosgraph_msgs_generate_messages_lisp.dir/build\n.PHONY : rosgraph_msgs_generate_messages_lisp/fast\n\n#=============================================================================\n# Target rules for targets named _osi3_bridge_generate_messages_check_deps_Orientation3d\n\n# Build rule for target.\n_osi3_bridge_generate_messages_check_deps_Orientation3d: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 _osi3_bridge_generate_messages_check_deps_Orientation3d\n.PHONY : _osi3_bridge_generate_messages_check_deps_Orientation3d\n\n# fast build rule for target.\n_osi3_bridge_generate_messages_check_deps_Orientation3d/fast:\n\t$(MAKE) -f osi3_bridge/CMakeFiles/_osi3_bridge_generate_messages_check_deps_Orientation3d.dir/build.make osi3_bridge/CMakeFiles/_osi3_bridge_generate_messages_check_deps_Orientation3d.dir/build\n.PHONY : _osi3_bridge_generate_messages_check_deps_Orientation3d/fast\n\n#=============================================================================\n# Target rules for targets named osi3_bridge_genlisp\n\n# Build rule for target.\nosi3_bridge_genlisp: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 osi3_bridge_genlisp\n.PHONY : osi3_bridge_genlisp\n\n# fast build rule for target.\nosi3_bridge_genlisp/fast:\n\t$(MAKE) -f osi3_bridge/CMakeFiles/osi3_bridge_genlisp.dir/build.make osi3_bridge/CMakeFiles/osi3_bridge_genlisp.dir/build\n.PHONY : osi3_bridge_genlisp/fast\n\n#=============================================================================\n# Target rules for targets named _osi3_bridge_generate_messages_check_deps_Dimension3d\n\n# Build rule for target.\n_osi3_bridge_generate_messages_check_deps_Dimension3d: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 _osi3_bridge_generate_messages_check_deps_Dimension3d\n.PHONY : _osi3_bridge_generate_messages_check_deps_Dimension3d\n\n# fast build rule for target.\n_osi3_bridge_generate_messages_check_deps_Dimension3d/fast:\n\t$(MAKE) -f osi3_bridge/CMakeFiles/_osi3_bridge_generate_messages_check_deps_Dimension3d.dir/build.make osi3_bridge/CMakeFiles/_osi3_bridge_generate_messages_check_deps_Dimension3d.dir/build\n.PHONY : _osi3_bridge_generate_messages_check_deps_Dimension3d/fast\n\n#=============================================================================\n# Target rules for targets named _osi3_bridge_generate_messages_check_deps_MovingObject\n\n# Build rule for target.\n_osi3_bridge_generate_messages_check_deps_MovingObject: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 _osi3_bridge_generate_messages_check_deps_MovingObject\n.PHONY : _osi3_bridge_generate_messages_check_deps_MovingObject\n\n# fast build rule for target.\n_osi3_bridge_generate_messages_check_deps_MovingObject/fast:\n\t$(MAKE) -f 
osi3_bridge/CMakeFiles/_osi3_bridge_generate_messages_check_deps_MovingObject.dir/build.make osi3_bridge/CMakeFiles/_osi3_bridge_generate_messages_check_deps_MovingObject.dir/build\n.PHONY : _osi3_bridge_generate_messages_check_deps_MovingObject/fast\n\n#=============================================================================\n# Target rules for targets named _osi3_bridge_generate_messages_check_deps_GroundTruthMovingObjects\n\n# Build rule for target.\n_osi3_bridge_generate_messages_check_deps_GroundTruthMovingObjects: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 _osi3_bridge_generate_messages_check_deps_GroundTruthMovingObjects\n.PHONY : _osi3_bridge_generate_messages_check_deps_GroundTruthMovingObjects\n\n# fast build rule for target.\n_osi3_bridge_generate_messages_check_deps_GroundTruthMovingObjects/fast:\n\t$(MAKE) -f osi3_bridge/CMakeFiles/_osi3_bridge_generate_messages_check_deps_GroundTruthMovingObjects.dir/build.make osi3_bridge/CMakeFiles/_osi3_bridge_generate_messages_check_deps_GroundTruthMovingObjects.dir/build\n.PHONY : _osi3_bridge_generate_messages_check_deps_GroundTruthMovingObjects/fast\n\n#=============================================================================\n# Target rules for targets named std_msgs_generate_messages_nodejs\n\n# Build rule for target.\nstd_msgs_generate_messages_nodejs: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 std_msgs_generate_messages_nodejs\n.PHONY : std_msgs_generate_messages_nodejs\n\n# fast build rule for target.\nstd_msgs_generate_messages_nodejs/fast:\n\t$(MAKE) -f osi3_bridge/CMakeFiles/std_msgs_generate_messages_nodejs.dir/build.make osi3_bridge/CMakeFiles/std_msgs_generate_messages_nodejs.dir/build\n.PHONY : std_msgs_generate_messages_nodejs/fast\n\n#=============================================================================\n# Target rules for targets named osi3_bridge_gennodejs\n\n# Build rule for target.\nosi3_bridge_gennodejs: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 osi3_bridge_gennodejs\n.PHONY : osi3_bridge_gennodejs\n\n# fast build rule for target.\nosi3_bridge_gennodejs/fast:\n\t$(MAKE) -f osi3_bridge/CMakeFiles/osi3_bridge_gennodejs.dir/build.make osi3_bridge/CMakeFiles/osi3_bridge_gennodejs.dir/build\n.PHONY : osi3_bridge_gennodejs/fast\n\n#=============================================================================\n# Target rules for targets named _osi3_bridge_generate_messages_check_deps_TrafficUpdateMovingObject\n\n# Build rule for target.\n_osi3_bridge_generate_messages_check_deps_TrafficUpdateMovingObject: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 _osi3_bridge_generate_messages_check_deps_TrafficUpdateMovingObject\n.PHONY : _osi3_bridge_generate_messages_check_deps_TrafficUpdateMovingObject\n\n# fast build rule for target.\n_osi3_bridge_generate_messages_check_deps_TrafficUpdateMovingObject/fast:\n\t$(MAKE) -f osi3_bridge/CMakeFiles/_osi3_bridge_generate_messages_check_deps_TrafficUpdateMovingObject.dir/build.make osi3_bridge/CMakeFiles/_osi3_bridge_generate_messages_check_deps_TrafficUpdateMovingObject.dir/build\n.PHONY : _osi3_bridge_generate_messages_check_deps_TrafficUpdateMovingObject/fast\n\n#=============================================================================\n# Target rules for targets named osi3_bridge_generate_messages_cpp\n\n# Build rule for target.\nosi3_bridge_generate_messages_cpp: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 osi3_bridge_generate_messages_cpp\n.PHONY : 
osi3_bridge_generate_messages_cpp\n\n# fast build rule for target.\nosi3_bridge_generate_messages_cpp/fast:\n\t$(MAKE) -f osi3_bridge/CMakeFiles/osi3_bridge_generate_messages_cpp.dir/build.make osi3_bridge/CMakeFiles/osi3_bridge_generate_messages_cpp.dir/build\n.PHONY : osi3_bridge_generate_messages_cpp/fast\n\n#=============================================================================\n# Target rules for targets named rosgraph_msgs_generate_messages_cpp\n\n# Build rule for target.\nrosgraph_msgs_generate_messages_cpp: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 rosgraph_msgs_generate_messages_cpp\n.PHONY : rosgraph_msgs_generate_messages_cpp\n\n# fast build rule for target.\nrosgraph_msgs_generate_messages_cpp/fast:\n\t$(MAKE) -f osi3_bridge/CMakeFiles/rosgraph_msgs_generate_messages_cpp.dir/build.make osi3_bridge/CMakeFiles/rosgraph_msgs_generate_messages_cpp.dir/build\n.PHONY : rosgraph_msgs_generate_messages_cpp/fast\n\n#=============================================================================\n# Target rules for targets named osi3_bridge_generate_messages\n\n# Build rule for target.\nosi3_bridge_generate_messages: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 osi3_bridge_generate_messages\n.PHONY : osi3_bridge_generate_messages\n\n# fast build rule for target.\nosi3_bridge_generate_messages/fast:\n\t$(MAKE) -f osi3_bridge/CMakeFiles/osi3_bridge_generate_messages.dir/build.make osi3_bridge/CMakeFiles/osi3_bridge_generate_messages.dir/build\n.PHONY : osi3_bridge_generate_messages/fast\n\n#=============================================================================\n# Target rules for targets named osi3_bridge_genpy\n\n# Build rule for target.\nosi3_bridge_genpy: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 osi3_bridge_genpy\n.PHONY : osi3_bridge_genpy\n\n# fast build rule for target.\nosi3_bridge_genpy/fast:\n\t$(MAKE) -f osi3_bridge/CMakeFiles/osi3_bridge_genpy.dir/build.make osi3_bridge/CMakeFiles/osi3_bridge_genpy.dir/build\n.PHONY : osi3_bridge_genpy/fast\n\n#=============================================================================\n# Target rules for targets named osi3_bridge_gencpp\n\n# Build rule for target.\nosi3_bridge_gencpp: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 osi3_bridge_gencpp\n.PHONY : osi3_bridge_gencpp\n\n# fast build rule for target.\nosi3_bridge_gencpp/fast:\n\t$(MAKE) -f osi3_bridge/CMakeFiles/osi3_bridge_gencpp.dir/build.make osi3_bridge/CMakeFiles/osi3_bridge_gencpp.dir/build\n.PHONY : osi3_bridge_gencpp/fast\n\n#=============================================================================\n# Target rules for targets named _run_tests_osi3_bridge_roslaunch-check\n\n# Build rule for target.\n_run_tests_osi3_bridge_roslaunch-check: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 _run_tests_osi3_bridge_roslaunch-check\n.PHONY : _run_tests_osi3_bridge_roslaunch-check\n\n# fast build rule for target.\n_run_tests_osi3_bridge_roslaunch-check/fast:\n\t$(MAKE) -f osi3_bridge/CMakeFiles/_run_tests_osi3_bridge_roslaunch-check.dir/build.make osi3_bridge/CMakeFiles/_run_tests_osi3_bridge_roslaunch-check.dir/build\n.PHONY : _run_tests_osi3_bridge_roslaunch-check/fast\n\n#=============================================================================\n# Target rules for targets named osi3_bridge_geneus\n\n# Build rule for target.\nosi3_bridge_geneus: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 osi3_bridge_geneus\n.PHONY : osi3_bridge_geneus\n\n# fast build rule for 
target.\nosi3_bridge_geneus/fast:\n\t$(MAKE) -f osi3_bridge/CMakeFiles/osi3_bridge_geneus.dir/build.make osi3_bridge/CMakeFiles/osi3_bridge_geneus.dir/build\n.PHONY : osi3_bridge_geneus/fast\n\n#=============================================================================\n# Target rules for targets named osi3_bridge_generate_messages_lisp\n\n# Build rule for target.\nosi3_bridge_generate_messages_lisp: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 osi3_bridge_generate_messages_lisp\n.PHONY : osi3_bridge_generate_messages_lisp\n\n# fast build rule for target.\nosi3_bridge_generate_messages_lisp/fast:\n\t$(MAKE) -f osi3_bridge/CMakeFiles/osi3_bridge_generate_messages_lisp.dir/build.make osi3_bridge/CMakeFiles/osi3_bridge_generate_messages_lisp.dir/build\n.PHONY : osi3_bridge_generate_messages_lisp/fast\n\n#=============================================================================\n# Target rules for targets named std_msgs_generate_messages_lisp\n\n# Build rule for target.\nstd_msgs_generate_messages_lisp: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 std_msgs_generate_messages_lisp\n.PHONY : std_msgs_generate_messages_lisp\n\n# fast build rule for target.\nstd_msgs_generate_messages_lisp/fast:\n\t$(MAKE) -f osi3_bridge/CMakeFiles/std_msgs_generate_messages_lisp.dir/build.make osi3_bridge/CMakeFiles/std_msgs_generate_messages_lisp.dir/build\n.PHONY : std_msgs_generate_messages_lisp/fast\n\n#=============================================================================\n# Target rules for targets named osi3_bridge_generate_messages_nodejs\n\n# Build rule for target.\nosi3_bridge_generate_messages_nodejs: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 osi3_bridge_generate_messages_nodejs\n.PHONY : osi3_bridge_generate_messages_nodejs\n\n# fast build rule for target.\nosi3_bridge_generate_messages_nodejs/fast:\n\t$(MAKE) -f osi3_bridge/CMakeFiles/osi3_bridge_generate_messages_nodejs.dir/build.make osi3_bridge/CMakeFiles/osi3_bridge_generate_messages_nodejs.dir/build\n.PHONY : osi3_bridge_generate_messages_nodejs/fast\n\n#=============================================================================\n# Target rules for targets named open_simulation_interface_obj\n\n# Build rule for target.\nopen_simulation_interface_obj: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 open_simulation_interface_obj\n.PHONY : open_simulation_interface_obj\n\n# fast build rule for target.\nopen_simulation_interface_obj/fast:\n\t$(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build\n.PHONY : open_simulation_interface_obj/fast\n\n#=============================================================================\n# Target rules for targets named open_simulation_interface_static\n\n# Build rule for target.\nopen_simulation_interface_static: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 open_simulation_interface_static\n.PHONY : open_simulation_interface_static\n\n# fast build rule for target.\nopen_simulation_interface_static/fast:\n\t$(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build\n.PHONY : open_simulation_interface_static/fast\n\n#=============================================================================\n# Target rules for targets named 
open_simulation_interface_pic\n\n# Build rule for target.\nopen_simulation_interface_pic: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 open_simulation_interface_pic\n.PHONY : open_simulation_interface_pic\n\n# fast build rule for target.\nopen_simulation_interface_pic/fast:\n\t$(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_pic.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_pic.dir/build\n.PHONY : open_simulation_interface_pic/fast\n\n#=============================================================================\n# Target rules for targets named open_simulation_interface\n\n# Build rule for target.\nopen_simulation_interface: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 open_simulation_interface\n.PHONY : open_simulation_interface\n\n# fast build rule for target.\nopen_simulation_interface/fast:\n\t$(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface.dir/build\n.PHONY : open_simulation_interface/fast\n\n#=============================================================================\n# Target rules for targets named object_list_generate_messages_py\n\n# Build rule for target.\nobject_list_generate_messages_py: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 object_list_generate_messages_py\n.PHONY : object_list_generate_messages_py\n\n# fast build rule for target.\nobject_list_generate_messages_py/fast:\n\t$(MAKE) -f object_list/CMakeFiles/object_list_generate_messages_py.dir/build.make object_list/CMakeFiles/object_list_generate_messages_py.dir/build\n.PHONY : object_list_generate_messages_py/fast\n\n#=============================================================================\n# Target rules for targets named _object_list_generate_messages_check_deps_ObjectList\n\n# Build rule for target.\n_object_list_generate_messages_check_deps_ObjectList: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 _object_list_generate_messages_check_deps_ObjectList\n.PHONY : _object_list_generate_messages_check_deps_ObjectList\n\n# fast build rule for target.\n_object_list_generate_messages_check_deps_ObjectList/fast:\n\t$(MAKE) -f object_list/CMakeFiles/_object_list_generate_messages_check_deps_ObjectList.dir/build.make object_list/CMakeFiles/_object_list_generate_messages_check_deps_ObjectList.dir/build\n.PHONY : _object_list_generate_messages_check_deps_ObjectList/fast\n\n#=============================================================================\n# Target rules for targets named _object_list_generate_messages_check_deps_SensorProperty\n\n# Build rule for target.\n_object_list_generate_messages_check_deps_SensorProperty: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 _object_list_generate_messages_check_deps_SensorProperty\n.PHONY : _object_list_generate_messages_check_deps_SensorProperty\n\n# fast build rule for target.\n_object_list_generate_messages_check_deps_SensorProperty/fast:\n\t$(MAKE) -f object_list/CMakeFiles/_object_list_generate_messages_check_deps_SensorProperty.dir/build.make object_list/CMakeFiles/_object_list_generate_messages_check_deps_SensorProperty.dir/build\n.PHONY : _object_list_generate_messages_check_deps_SensorProperty/fast\n\n#=============================================================================\n# Target rules for targets named _object_list_generate_messages_check_deps_ObjectsList\n\n# Build rule for 
target.\n_object_list_generate_messages_check_deps_ObjectsList: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 _object_list_generate_messages_check_deps_ObjectsList\n.PHONY : _object_list_generate_messages_check_deps_ObjectsList\n\n# fast build rule for target.\n_object_list_generate_messages_check_deps_ObjectsList/fast:\n\t$(MAKE) -f object_list/CMakeFiles/_object_list_generate_messages_check_deps_ObjectsList.dir/build.make object_list/CMakeFiles/_object_list_generate_messages_check_deps_ObjectsList.dir/build\n.PHONY : _object_list_generate_messages_check_deps_ObjectsList/fast\n\n#=============================================================================\n# Target rules for targets named _object_list_generate_messages_check_deps_Features\n\n# Build rule for target.\n_object_list_generate_messages_check_deps_Features: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 _object_list_generate_messages_check_deps_Features\n.PHONY : _object_list_generate_messages_check_deps_Features\n\n# fast build rule for target.\n_object_list_generate_messages_check_deps_Features/fast:\n\t$(MAKE) -f object_list/CMakeFiles/_object_list_generate_messages_check_deps_Features.dir/build.make object_list/CMakeFiles/_object_list_generate_messages_check_deps_Features.dir/build\n.PHONY : _object_list_generate_messages_check_deps_Features/fast\n\n#=============================================================================\n# Target rules for targets named object_list_generate_messages\n\n# Build rule for target.\nobject_list_generate_messages: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 object_list_generate_messages\n.PHONY : object_list_generate_messages\n\n# fast build rule for target.\nobject_list_generate_messages/fast:\n\t$(MAKE) -f object_list/CMakeFiles/object_list_generate_messages.dir/build.make object_list/CMakeFiles/object_list_generate_messages.dir/build\n.PHONY : object_list_generate_messages/fast\n\n#=============================================================================\n# Target rules for targets named _object_list_generate_messages_check_deps_Dimension\n\n# Build rule for target.\n_object_list_generate_messages_check_deps_Dimension: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 _object_list_generate_messages_check_deps_Dimension\n.PHONY : _object_list_generate_messages_check_deps_Dimension\n\n# fast build rule for target.\n_object_list_generate_messages_check_deps_Dimension/fast:\n\t$(MAKE) -f object_list/CMakeFiles/_object_list_generate_messages_check_deps_Dimension.dir/build.make object_list/CMakeFiles/_object_list_generate_messages_check_deps_Dimension.dir/build\n.PHONY : _object_list_generate_messages_check_deps_Dimension/fast\n\n#=============================================================================\n# Target rules for targets named object_list_genpy\n\n# Build rule for target.\nobject_list_genpy: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 object_list_genpy\n.PHONY : object_list_genpy\n\n# fast build rule for target.\nobject_list_genpy/fast:\n\t$(MAKE) -f object_list/CMakeFiles/object_list_genpy.dir/build.make object_list/CMakeFiles/object_list_genpy.dir/build\n.PHONY : object_list_genpy/fast\n\n#=============================================================================\n# Target rules for targets named _object_list_generate_messages_check_deps_Geometric\n\n# Build rule for target.\n_object_list_generate_messages_check_deps_Geometric: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 
_object_list_generate_messages_check_deps_Geometric\n.PHONY : _object_list_generate_messages_check_deps_Geometric\n\n# fast build rule for target.\n_object_list_generate_messages_check_deps_Geometric/fast:\n\t$(MAKE) -f object_list/CMakeFiles/_object_list_generate_messages_check_deps_Geometric.dir/build.make object_list/CMakeFiles/_object_list_generate_messages_check_deps_Geometric.dir/build\n.PHONY : _object_list_generate_messages_check_deps_Geometric/fast\n\n#=============================================================================\n# Target rules for targets named _object_list_generate_messages_check_deps_EgoData\n\n# Build rule for target.\n_object_list_generate_messages_check_deps_EgoData: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 _object_list_generate_messages_check_deps_EgoData\n.PHONY : _object_list_generate_messages_check_deps_EgoData\n\n# fast build rule for target.\n_object_list_generate_messages_check_deps_EgoData/fast:\n\t$(MAKE) -f object_list/CMakeFiles/_object_list_generate_messages_check_deps_EgoData.dir/build.make object_list/CMakeFiles/_object_list_generate_messages_check_deps_EgoData.dir/build\n.PHONY : _object_list_generate_messages_check_deps_EgoData/fast\n\n#=============================================================================\n# Target rules for targets named object_list_generate_messages_cpp\n\n# Build rule for target.\nobject_list_generate_messages_cpp: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 object_list_generate_messages_cpp\n.PHONY : object_list_generate_messages_cpp\n\n# fast build rule for target.\nobject_list_generate_messages_cpp/fast:\n\t$(MAKE) -f object_list/CMakeFiles/object_list_generate_messages_cpp.dir/build.make object_list/CMakeFiles/object_list_generate_messages_cpp.dir/build\n.PHONY : object_list_generate_messages_cpp/fast\n\n#=============================================================================\n# Target rules for targets named _catkin_empty_exported_target\n\n# Build rule for target.\n_catkin_empty_exported_target: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 _catkin_empty_exported_target\n.PHONY : _catkin_empty_exported_target\n\n# fast build rule for target.\n_catkin_empty_exported_target/fast:\n\t$(MAKE) -f object_list/CMakeFiles/_catkin_empty_exported_target.dir/build.make object_list/CMakeFiles/_catkin_empty_exported_target.dir/build\n.PHONY : _catkin_empty_exported_target/fast\n\n#=============================================================================\n# Target rules for targets named object_list_generate_messages_lisp\n\n# Build rule for target.\nobject_list_generate_messages_lisp: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 object_list_generate_messages_lisp\n.PHONY : object_list_generate_messages_lisp\n\n# fast build rule for target.\nobject_list_generate_messages_lisp/fast:\n\t$(MAKE) -f object_list/CMakeFiles/object_list_generate_messages_lisp.dir/build.make object_list/CMakeFiles/object_list_generate_messages_lisp.dir/build\n.PHONY : object_list_generate_messages_lisp/fast\n\n#=============================================================================\n# Target rules for targets named object_list_generate_messages_nodejs\n\n# Build rule for target.\nobject_list_generate_messages_nodejs: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 object_list_generate_messages_nodejs\n.PHONY : object_list_generate_messages_nodejs\n\n# fast build rule for target.\nobject_list_generate_messages_nodejs/fast:\n\t$(MAKE) -f 
object_list/CMakeFiles/object_list_generate_messages_nodejs.dir/build.make object_list/CMakeFiles/object_list_generate_messages_nodejs.dir/build\n.PHONY : object_list_generate_messages_nodejs/fast\n\n#=============================================================================\n# Target rules for targets named object_list_gencpp\n\n# Build rule for target.\nobject_list_gencpp: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 object_list_gencpp\n.PHONY : object_list_gencpp\n\n# fast build rule for target.\nobject_list_gencpp/fast:\n\t$(MAKE) -f object_list/CMakeFiles/object_list_gencpp.dir/build.make object_list/CMakeFiles/object_list_gencpp.dir/build\n.PHONY : object_list_gencpp/fast\n\n#=============================================================================\n# Target rules for targets named _object_list_generate_messages_check_deps_Classification\n\n# Build rule for target.\n_object_list_generate_messages_check_deps_Classification: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 _object_list_generate_messages_check_deps_Classification\n.PHONY : _object_list_generate_messages_check_deps_Classification\n\n# fast build rule for target.\n_object_list_generate_messages_check_deps_Classification/fast:\n\t$(MAKE) -f object_list/CMakeFiles/_object_list_generate_messages_check_deps_Classification.dir/build.make object_list/CMakeFiles/_object_list_generate_messages_check_deps_Classification.dir/build\n.PHONY : _object_list_generate_messages_check_deps_Classification/fast\n\n#=============================================================================\n# Target rules for targets named object_list_generate_messages_eus\n\n# Build rule for target.\nobject_list_generate_messages_eus: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 object_list_generate_messages_eus\n.PHONY : object_list_generate_messages_eus\n\n# fast build rule for target.\nobject_list_generate_messages_eus/fast:\n\t$(MAKE) -f object_list/CMakeFiles/object_list_generate_messages_eus.dir/build.make object_list/CMakeFiles/object_list_generate_messages_eus.dir/build\n.PHONY : object_list_generate_messages_eus/fast\n\n#=============================================================================\n# Target rules for targets named object_list_gennodejs\n\n# Build rule for target.\nobject_list_gennodejs: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 object_list_gennodejs\n.PHONY : object_list_gennodejs\n\n# fast build rule for target.\nobject_list_gennodejs/fast:\n\t$(MAKE) -f object_list/CMakeFiles/object_list_gennodejs.dir/build.make object_list/CMakeFiles/object_list_gennodejs.dir/build\n.PHONY : object_list_gennodejs/fast\n\n#=============================================================================\n# Target rules for targets named object_list_geneus\n\n# Build rule for target.\nobject_list_geneus: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 object_list_geneus\n.PHONY : object_list_geneus\n\n# fast build rule for target.\nobject_list_geneus/fast:\n\t$(MAKE) -f object_list/CMakeFiles/object_list_geneus.dir/build.make object_list/CMakeFiles/object_list_geneus.dir/build\n.PHONY : object_list_geneus/fast\n\n#=============================================================================\n# Target rules for targets named object_list_genlisp\n\n# Build rule for target.\nobject_list_genlisp: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 object_list_genlisp\n.PHONY : object_list_genlisp\n\n# fast build rule for target.\nobject_list_genlisp/fast:\n\t$(MAKE) -f 
object_list/CMakeFiles/object_list_genlisp.dir/build.make object_list/CMakeFiles/object_list_genlisp.dir/build\n.PHONY : object_list_genlisp/fast\n\n#=============================================================================\n# Target rules for targets named vehicle_control_geneus\n\n# Build rule for target.\nvehicle_control_geneus: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 vehicle_control_geneus\n.PHONY : vehicle_control_geneus\n\n# fast build rule for target.\nvehicle_control_geneus/fast:\n\t$(MAKE) -f vehicle_control/CMakeFiles/vehicle_control_geneus.dir/build.make vehicle_control/CMakeFiles/vehicle_control_geneus.dir/build\n.PHONY : vehicle_control_geneus/fast\n\n#=============================================================================\n# Target rules for targets named vehicle_control_gencpp\n\n# Build rule for target.\nvehicle_control_gencpp: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 vehicle_control_gencpp\n.PHONY : vehicle_control_gencpp\n\n# fast build rule for target.\nvehicle_control_gencpp/fast:\n\t$(MAKE) -f vehicle_control/CMakeFiles/vehicle_control_gencpp.dir/build.make vehicle_control/CMakeFiles/vehicle_control_gencpp.dir/build\n.PHONY : vehicle_control_gencpp/fast\n\n#=============================================================================\n# Target rules for targets named vehicle_control_generate_messages_eus\n\n# Build rule for target.\nvehicle_control_generate_messages_eus: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 vehicle_control_generate_messages_eus\n.PHONY : vehicle_control_generate_messages_eus\n\n# fast build rule for target.\nvehicle_control_generate_messages_eus/fast:\n\t$(MAKE) -f vehicle_control/CMakeFiles/vehicle_control_generate_messages_eus.dir/build.make vehicle_control/CMakeFiles/vehicle_control_generate_messages_eus.dir/build\n.PHONY : vehicle_control_generate_messages_eus/fast\n\n#=============================================================================\n# Target rules for targets named vehicle_control_generate_messages_cpp\n\n# Build rule for target.\nvehicle_control_generate_messages_cpp: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 vehicle_control_generate_messages_cpp\n.PHONY : vehicle_control_generate_messages_cpp\n\n# fast build rule for target.\nvehicle_control_generate_messages_cpp/fast:\n\t$(MAKE) -f vehicle_control/CMakeFiles/vehicle_control_generate_messages_cpp.dir/build.make vehicle_control/CMakeFiles/vehicle_control_generate_messages_cpp.dir/build\n.PHONY : vehicle_control_generate_messages_cpp/fast\n\n#=============================================================================\n# Target rules for targets named vehicle_control_generate_messages\n\n# Build rule for target.\nvehicle_control_generate_messages: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 vehicle_control_generate_messages\n.PHONY : vehicle_control_generate_messages\n\n# fast build rule for target.\nvehicle_control_generate_messages/fast:\n\t$(MAKE) -f vehicle_control/CMakeFiles/vehicle_control_generate_messages.dir/build.make vehicle_control/CMakeFiles/vehicle_control_generate_messages.dir/build\n.PHONY : vehicle_control_generate_messages/fast\n\n#=============================================================================\n# Target rules for targets named vehicle_control_generate_messages_nodejs\n\n# Build rule for target.\nvehicle_control_generate_messages_nodejs: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 vehicle_control_generate_messages_nodejs\n.PHONY : 
vehicle_control_generate_messages_nodejs\n\n# fast build rule for target.\nvehicle_control_generate_messages_nodejs/fast:\n\t$(MAKE) -f vehicle_control/CMakeFiles/vehicle_control_generate_messages_nodejs.dir/build.make vehicle_control/CMakeFiles/vehicle_control_generate_messages_nodejs.dir/build\n.PHONY : vehicle_control_generate_messages_nodejs/fast\n\n#=============================================================================\n# Target rules for targets named _vehicle_control_generate_messages_check_deps_Trajectory\n\n# Build rule for target.\n_vehicle_control_generate_messages_check_deps_Trajectory: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 _vehicle_control_generate_messages_check_deps_Trajectory\n.PHONY : _vehicle_control_generate_messages_check_deps_Trajectory\n\n# fast build rule for target.\n_vehicle_control_generate_messages_check_deps_Trajectory/fast:\n\t$(MAKE) -f vehicle_control/CMakeFiles/_vehicle_control_generate_messages_check_deps_Trajectory.dir/build.make vehicle_control/CMakeFiles/_vehicle_control_generate_messages_check_deps_Trajectory.dir/build\n.PHONY : _vehicle_control_generate_messages_check_deps_Trajectory/fast\n\n#=============================================================================\n# Target rules for targets named vehicle_control_genlisp\n\n# Build rule for target.\nvehicle_control_genlisp: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 vehicle_control_genlisp\n.PHONY : vehicle_control_genlisp\n\n# fast build rule for target.\nvehicle_control_genlisp/fast:\n\t$(MAKE) -f vehicle_control/CMakeFiles/vehicle_control_genlisp.dir/build.make vehicle_control/CMakeFiles/vehicle_control_genlisp.dir/build\n.PHONY : vehicle_control_genlisp/fast\n\n#=============================================================================\n# Target rules for targets named vehicle_control_generate_messages_lisp\n\n# Build rule for target.\nvehicle_control_generate_messages_lisp: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 vehicle_control_generate_messages_lisp\n.PHONY : vehicle_control_generate_messages_lisp\n\n# fast build rule for target.\nvehicle_control_generate_messages_lisp/fast:\n\t$(MAKE) -f vehicle_control/CMakeFiles/vehicle_control_generate_messages_lisp.dir/build.make vehicle_control/CMakeFiles/vehicle_control_generate_messages_lisp.dir/build\n.PHONY : vehicle_control_generate_messages_lisp/fast\n\n#=============================================================================\n# Target rules for targets named vehicle_control_gennodejs\n\n# Build rule for target.\nvehicle_control_gennodejs: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 vehicle_control_gennodejs\n.PHONY : vehicle_control_gennodejs\n\n# fast build rule for target.\nvehicle_control_gennodejs/fast:\n\t$(MAKE) -f vehicle_control/CMakeFiles/vehicle_control_gennodejs.dir/build.make vehicle_control/CMakeFiles/vehicle_control_gennodejs.dir/build\n.PHONY : vehicle_control_gennodejs/fast\n\n#=============================================================================\n# Target rules for targets named vehicle_control_generate_messages_py\n\n# Build rule for target.\nvehicle_control_generate_messages_py: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 vehicle_control_generate_messages_py\n.PHONY : vehicle_control_generate_messages_py\n\n# fast build rule for target.\nvehicle_control_generate_messages_py/fast:\n\t$(MAKE) -f vehicle_control/CMakeFiles/vehicle_control_generate_messages_py.dir/build.make 
vehicle_control/CMakeFiles/vehicle_control_generate_messages_py.dir/build\n.PHONY : vehicle_control_generate_messages_py/fast\n\n#=============================================================================\n# Target rules for targets named vehicle_control_genpy\n\n# Build rule for target.\nvehicle_control_genpy: cmake_check_build_system\n\t$(MAKE) -f CMakeFiles/Makefile2 vehicle_control_genpy\n.PHONY : vehicle_control_genpy\n\n# fast build rule for target.\nvehicle_control_genpy/fast:\n\t$(MAKE) -f vehicle_control/CMakeFiles/vehicle_control_genpy.dir/build.make vehicle_control/CMakeFiles/vehicle_control_genpy.dir/build\n.PHONY : vehicle_control_genpy/fast\n\n# Help Target\nhelp:\n\t@echo \"The following are some of the valid targets for this Makefile:\"\n\t@echo \"... all (the default if no target is provided)\"\n\t@echo \"... clean\"\n\t@echo \"... depend\"\n\t@echo \"... install/strip\"\n\t@echo \"... install\"\n\t@echo \"... list_install_components\"\n\t@echo \"... tests\"\n\t@echo \"... rebuild_cache\"\n\t@echo \"... download_extra_data\"\n\t@echo \"... edit_cache\"\n\t@echo \"... run_tests\"\n\t@echo \"... clean_test_results\"\n\t@echo \"... doxygen\"\n\t@echo \"... install/local\"\n\t@echo \"... test\"\n\t@echo \"... gmock_main\"\n\t@echo \"... gmock\"\n\t@echo \"... gtest_main\"\n\t@echo \"... gtest\"\n\t@echo \"... run_tests_osi3_bridge_roslaunch-check_launch\"\n\t@echo \"... run_tests_osi3_bridge_roslaunch-check\"\n\t@echo \"... _run_tests_osi3_bridge\"\n\t@echo \"... run_tests_osi3_bridge\"\n\t@echo \"... osi3_bridge_publisher\"\n\t@echo \"... roscpp_generate_messages_py\"\n\t@echo \"... std_msgs_generate_messages_py\"\n\t@echo \"... geometry_msgs_generate_messages_eus\"\n\t@echo \"... roscpp_generate_messages_nodejs\"\n\t@echo \"... roscpp_generate_messages_eus\"\n\t@echo \"... osi3_bridge_generate_messages_eus\"\n\t@echo \"... std_msgs_generate_messages_eus\"\n\t@echo \"... rosgraph_msgs_generate_messages_nodejs\"\n\t@echo \"... rosgraph_msgs_generate_messages_py\"\n\t@echo \"... roscpp_generate_messages_cpp\"\n\t@echo \"... roscpp_generate_messages_lisp\"\n\t@echo \"... geometry_msgs_generate_messages_nodejs\"\n\t@echo \"... geometry_msgs_generate_messages_cpp\"\n\t@echo \"... geometry_msgs_generate_messages_lisp\"\n\t@echo \"... geometry_msgs_generate_messages_py\"\n\t@echo \"... clean_test_results_osi3_bridge\"\n\t@echo \"... osi3_bridge_generate_messages_py\"\n\t@echo \"... _run_tests_osi3_bridge_roslaunch-check_launch\"\n\t@echo \"... std_msgs_generate_messages_cpp\"\n\t@echo \"... rosgraph_msgs_generate_messages_eus\"\n\t@echo \"... rosgraph_msgs_generate_messages_lisp\"\n\t@echo \"... _osi3_bridge_generate_messages_check_deps_Orientation3d\"\n\t@echo \"... osi3_bridge_genlisp\"\n\t@echo \"... _osi3_bridge_generate_messages_check_deps_Dimension3d\"\n\t@echo \"... _osi3_bridge_generate_messages_check_deps_MovingObject\"\n\t@echo \"... _osi3_bridge_generate_messages_check_deps_GroundTruthMovingObjects\"\n\t@echo \"... std_msgs_generate_messages_nodejs\"\n\t@echo \"... osi3_bridge_gennodejs\"\n\t@echo \"... _osi3_bridge_generate_messages_check_deps_TrafficUpdateMovingObject\"\n\t@echo \"... osi3_bridge_generate_messages_cpp\"\n\t@echo \"... rosgraph_msgs_generate_messages_cpp\"\n\t@echo \"... osi3_bridge_generate_messages\"\n\t@echo \"... osi3_bridge_genpy\"\n\t@echo \"... osi3_bridge_gencpp\"\n\t@echo \"... _run_tests_osi3_bridge_roslaunch-check\"\n\t@echo \"... osi3_bridge_geneus\"\n\t@echo \"... osi3_bridge_generate_messages_lisp\"\n\t@echo \"... 
std_msgs_generate_messages_lisp\"\n\t@echo \"... osi3_bridge_generate_messages_nodejs\"\n\t@echo \"... open_simulation_interface_obj\"\n\t@echo \"... open_simulation_interface_static\"\n\t@echo \"... open_simulation_interface_pic\"\n\t@echo \"... open_simulation_interface\"\n\t@echo \"... object_list_generate_messages_py\"\n\t@echo \"... _object_list_generate_messages_check_deps_ObjectList\"\n\t@echo \"... _object_list_generate_messages_check_deps_SensorProperty\"\n\t@echo \"... _object_list_generate_messages_check_deps_ObjectsList\"\n\t@echo \"... _object_list_generate_messages_check_deps_Features\"\n\t@echo \"... object_list_generate_messages\"\n\t@echo \"... _object_list_generate_messages_check_deps_Dimension\"\n\t@echo \"... object_list_genpy\"\n\t@echo \"... _object_list_generate_messages_check_deps_Geometric\"\n\t@echo \"... _object_list_generate_messages_check_deps_EgoData\"\n\t@echo \"... object_list_generate_messages_cpp\"\n\t@echo \"... _catkin_empty_exported_target\"\n\t@echo \"... object_list_generate_messages_lisp\"\n\t@echo \"... object_list_generate_messages_nodejs\"\n\t@echo \"... object_list_gencpp\"\n\t@echo \"... _object_list_generate_messages_check_deps_Classification\"\n\t@echo \"... object_list_generate_messages_eus\"\n\t@echo \"... object_list_gennodejs\"\n\t@echo \"... object_list_geneus\"\n\t@echo \"... object_list_genlisp\"\n\t@echo \"... vehicle_control_geneus\"\n\t@echo \"... vehicle_control_gencpp\"\n\t@echo \"... vehicle_control_generate_messages_eus\"\n\t@echo \"... vehicle_control_generate_messages_cpp\"\n\t@echo \"... vehicle_control_generate_messages\"\n\t@echo \"... vehicle_control_generate_messages_nodejs\"\n\t@echo \"... _vehicle_control_generate_messages_check_deps_Trajectory\"\n\t@echo \"... vehicle_control_genlisp\"\n\t@echo \"... vehicle_control_generate_messages_lisp\"\n\t@echo \"... vehicle_control_gennodejs\"\n\t@echo \"... vehicle_control_generate_messages_py\"\n\t@echo \"... vehicle_control_genpy\"\n.PHONY : help\n\n\n\n#=============================================================================\n# Special targets to cleanup operation of make.\n\n# Special rule to run CMake to check the build system integrity.\n# No rule that depends on this can have commands that come from listfiles\n# because they might be regenerated.\ncmake_check_build_system:\n\t$(CMAKE_COMMAND) -H$(CMAKE_SOURCE_DIR) -B$(CMAKE_BINARY_DIR) --check-build-system CMakeFiles/Makefile.cmake 0\n.PHONY : cmake_check_build_system\n\n"
},
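The generated Makefile in the record above pairs every target with a `<target>/fast` rule: the plain rule re-runs `cmake_check_build_system` first, while the `/fast` variant jumps straight to the target's own `build.make` and skips the dependency re-scan. A minimal sketch of driving one of these convenience targets from Python; `BUILD_DIR` and the helper name `build_target` are our assumptions, with the path taken from other records in this repository:

```python
# Hedged sketch: invoke a generated catkin convenience target via subprocess.
import subprocess

BUILD_DIR = "/home/student/Desktop/Redge_Thesis/vil/build"  # assumed build tree

def build_target(target, fast=True, jobs=4):
    """Run `make <target>` or `make <target>/fast` inside the build tree."""
    name = target + "/fast" if fast else target
    subprocess.run(["make", "-j", str(jobs), name], cwd=BUILD_DIR, check=True)

if __name__ == "__main__":
    # Regenerate only the C++ message headers for osi3_bridge.
    build_target("osi3_bridge_generate_messages_cpp")
```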
{
"alpha_fraction": 0.789321780204773,
"alphanum_fraction": 0.8023087978363037,
"avg_line_length": 52.30769348144531,
"blob_id": "0f03be99fb95a180dbd5348edd4debf67c83a2d0",
"content_id": "30a90b0db03322a6da8b4f158b63906886e50052",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 693,
"license_type": "no_license",
"max_line_length": 96,
"num_lines": 13,
"path": "/build/osi3_bridge/CMakeFiles/osi3_bridge_generate_messages_cpp.dir/cmake_clean.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "file(REMOVE_RECURSE\n \"CMakeFiles/osi3_bridge_generate_messages_cpp\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/include/osi3_bridge/GroundTruthMovingObjects.h\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/include/osi3_bridge/MovingObject.h\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/include/osi3_bridge/Dimension3d.h\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/include/osi3_bridge/TrafficUpdateMovingObject.h\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/include/osi3_bridge/Orientation3d.h\"\n)\n\n# Per-language clean rules from dependency scanning.\nforeach(lang )\n include(CMakeFiles/osi3_bridge_generate_messages_cpp.dir/cmake_clean_${lang}.cmake OPTIONAL)\nendforeach()\n"
},
{
"alpha_fraction": 0.717391312122345,
"alphanum_fraction": 0.72826087474823,
"avg_line_length": 45.0625,
"blob_id": "8ba7bfb27305f6f666de3224b5b09b0011a6c700",
"content_id": "40ed1facc85feb31add2857a7a73d3de56207d09",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 736,
"license_type": "no_license",
"max_line_length": 94,
"num_lines": 16,
"path": "/build/fusion/catkin_generated/package.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "set(_CATKIN_CURRENT_PACKAGE \"fusion\")\nset(fusion_VERSION \"0.0.0\")\nset(fusion_MAINTAINER \"student <[email protected]>\")\nset(fusion_PACKAGE_FORMAT \"2\")\nset(fusion_BUILD_DEPENDS \"aeb\" \"object_list\" \"osi3_bridge\" \"roscpp\" \"rospy\" \"std_msgs\")\nset(fusion_BUILD_EXPORT_DEPENDS \"aeb\" \"object_list\" \"osi3_bridge\" \"roscpp\" \"rospy\" \"std_msgs\")\nset(fusion_BUILDTOOL_DEPENDS \"catkin\")\nset(fusion_BUILDTOOL_EXPORT_DEPENDS )\nset(fusion_EXEC_DEPENDS \"aeb\" \"object_list\" \"osi3_bridge\" \"roscpp\" \"rospy\" \"std_msgs\")\nset(fusion_RUN_DEPENDS \"aeb\" \"object_list\" \"osi3_bridge\" \"roscpp\" \"rospy\" \"std_msgs\")\nset(fusion_TEST_DEPENDS )\nset(fusion_DOC_DEPENDS )\nset(fusion_URL_WEBSITE \"\")\nset(fusion_URL_BUGTRACKER \"\")\nset(fusion_URL_REPOSITORY \"\")\nset(fusion_DEPRECATED \"\")"
},
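The `package.cmake` above is catkin's generated snapshot of the `fusion` package's `package.xml` (format 2, where `RUN_DEPENDS` mirrors `EXEC_DEPENDS`). The same dependency lists can be read back programmatically with `catkin_pkg`; a sketch only, assuming the source-tree path seen elsewhere in this repository:

```python
# Sketch: parse the package.xml that the variables above were derived from.
from catkin_pkg.package import parse_package

pkg = parse_package("/home/student/Desktop/Redge_Thesis/vil/src/fusion")

print(pkg.name)                             # fusion
print(pkg.package_format)                   # 2
print([d.name for d in pkg.build_depends])  # aeb, object_list, osi3_bridge, roscpp, rospy, std_msgs
print([d.name for d in pkg.exec_depends])   # same list; format-2 exec/run dependencies
```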
{
"alpha_fraction": 0.7991266250610352,
"alphanum_fraction": 0.8034934401512146,
"avg_line_length": 56.25,
"blob_id": "8cfb8aaff4568f1ea83d2f418db6bd397cd6391f",
"content_id": "5e7d83b2636519b7ccd7e694305b74d58a60419b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 229,
"license_type": "no_license",
"max_line_length": 94,
"num_lines": 4,
"path": "/devel/share/object_list/cmake/object_list-msg-paths.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "# generated from genmsg/cmake/pkg-msg-paths.cmake.develspace.in\n\nset(object_list_MSG_INCLUDE_DIRS \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg\")\nset(object_list_MSG_DEPENDENCIES std_msgs;geometry_msgs;osi3_bridge)\n"
},
{
"alpha_fraction": 0.5113767981529236,
"alphanum_fraction": 0.5480138659477234,
"avg_line_length": 39.5,
"blob_id": "b94d5782ebe06b35be7ea0abb41a6996789e18a3",
"content_id": "3a3e22370bad5650d89c6fa1c79ef7aabbe24cd2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2593,
"license_type": "no_license",
"max_line_length": 123,
"num_lines": 64,
"path": "/src/vehicle_control/scripts/_test.py",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\nimport matlab.engine\nimport os\nimport numpy as np\nimport time\n\nprint('Initializing MATLAB')\neng = matlab.engine.start_matlab()\nprint('MATLAB Initialized')\n# Define Matlab path\neng.cd(os.getcwd())\nm = 200;\n\nrX = 127.97515106201172; # current x - point of EGO((1x1) scalar)\nrY = 0.0; # current y - point of EGO((1x1) scalar)\nyaw = 0.0; # current yaw angle of EGO((1x1) scalar)\nv = 0.1; # current velocity of EGO((1x1) scalar)\nax = 0.0; # current acceleration in x of EGO((1x1) scalar)\nay = 0.0; # current acceleration in y of EGO((1x1) scalar)\ni=1\nwhile True:\n\n tic= time.time()\n if i< 400:\n vel = 2.0\n else:\n vel = 4.0\n\n\n rXtraj=matlab.double(tuple(np.linspace(0, vel*2.0, num=m))) # Trajectory x-points ((1xm) vector)\n rYtraj=matlab.double(tuple(np.linspace(0,0,num=m))) # Trajectory y-points ((1xm) vector)\n PsiTraj=matlab.double(tuple(np.linspace(0,0,num=m))) # Trajectory yaw angle ((1xm) vector)\n tTraj=matlab.double(tuple(np.linspace(0,2.0,num=m))) # Trajectory time_stamp ((1xm) vector)\n vTraj=matlab.double(tuple(np.linspace(vel,vel,num=m))) # Trajectory velocity ((1xm) vector)\n\n\n #t = 0.0; # current time((1x1) scalar)\n #n = 1.0; # current loop index((1x1) scalar)\n #print('rXtraj {}, rYtraj {}, vTraj {}, PsiTraj {}, tTraj {}, rX {}, rY {}, yaw {}, v {}'.format(rXtraj, rYtraj, vTraj,\n # PsiTraj, tTraj, rX,\n # rY, yaw, v, ax, ay))\n\n #print (rYtraj)\n res = eng.Vehicle(rXtraj, rYtraj, vTraj, PsiTraj, tTraj, rX, rY, yaw, v, ax, ay, nargout=6)\n rX = res[0] # Updated x - point of EGO m - Map Frame\n rY = res[1] # Updated y - point of EGO m - Map Frame\n yaw = res[5] # Updated yaw angle of EGO rad\n\n # Rotate from EGO to Map frame\n v = res[2] # Lateral Velocity = 0 Updated velocity of EGO m/s - Map Frame\n ax = res[3]\n ay = res[4]\n # rXnew: Updated x - point of EGO((1x1) scalar)\n # rYnew: Updated y - point of EGO((1x1) scalar)\n # vnew: Updated velocity of EGO((1x1) scalar)\n # axnew: Updated accel in x of EGO((1x1) scalar)\n # aynew: Updated accel in y of EGO((1x1) scalar)\n # Psinew: Updated yaw angle of EGO((1x1) scalar)\n #print(i)\n #print(res)\n toc=time.time()\n print(\"frequency\")\n print(1/(-tic+toc))\n i = i+1\n\n"
},
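The test script above spends much of each loop converting numpy vectors into `matlab.double` row vectors before calling the MATLAB `Vehicle` model. A small sketch of that conversion pattern, factored into a helper; the function name `make_straight_trajectory` is ours, and the return order matches the first five arguments of the `eng.Vehicle(...)` call in the script:

```python
# Hedged sketch: matlab.double() accepts plain Python sequences, so numpy
# vectors are converted with tolist() (equivalent to the tuple(...) used above).
import numpy as np
import matlab  # shipped with the MATLAB Engine API for Python

def make_straight_trajectory(vel, horizon=2.0, m=200):
    """Build the (1xm) vectors rXtraj, rYtraj, vTraj, PsiTraj, tTraj."""
    return (
        matlab.double(np.linspace(0.0, vel * horizon, m).tolist()),  # rXtraj
        matlab.double([0.0] * m),                                    # rYtraj: straight line
        matlab.double([vel] * m),                                    # vTraj: constant speed
        matlab.double([0.0] * m),                                    # PsiTraj: zero heading
        matlab.double(np.linspace(0.0, horizon, m).tolist()),        # tTraj: time stamps
    )

# Usage mirrors the script:
# res = eng.Vehicle(*make_straight_trajectory(2.0), rX, rY, yaw, v, ax, ay, nargout=6)
```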
{
"alpha_fraction": 0.7713004350662231,
"alphanum_fraction": 0.773542582988739,
"avg_line_length": 33.30769348144531,
"blob_id": "c59189a07375cbb7523e9535ca4eb5314e35fb66",
"content_id": "bf776b483e61cfcbf22e16e9c94dee102761c4fe",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 446,
"license_type": "no_license",
"max_line_length": 71,
"num_lines": 13,
"path": "/build/CTestTestfile.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "# CMake generated Testfile for \n# Source directory: /home/student/Desktop/Redge_Thesis/vil/src\n# Build directory: /home/student/Desktop/Redge_Thesis/vil/build\n# \n# This file includes the relevant testing commands required for \n# testing this directory and lists subdirectories to be tested as well.\nsubdirs(\"gtest\")\nsubdirs(\"osi3_bridge\")\nsubdirs(\"object_list\")\nsubdirs(\"sensor_model\")\nsubdirs(\"vehicle_control\")\nsubdirs(\"aeb\")\nsubdirs(\"fusion\")\n"
},
{
"alpha_fraction": 0.7752808928489685,
"alphanum_fraction": 0.7902621626853943,
"avg_line_length": 34.53333282470703,
"blob_id": "272767e1e397c33c2032114414ea3888814249f6",
"content_id": "8912e04f140c6fcfc3e3c2a30b4d449a6b3cc703",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 534,
"license_type": "no_license",
"max_line_length": 74,
"num_lines": 15,
"path": "/devel/share/gennodejs/ros/osi3_bridge/msg/_index.js",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "\n\"use strict\";\n\nlet TrafficUpdateMovingObject = require('./TrafficUpdateMovingObject.js');\nlet Orientation3d = require('./Orientation3d.js');\nlet MovingObject = require('./MovingObject.js');\nlet Dimension3d = require('./Dimension3d.js');\nlet GroundTruthMovingObjects = require('./GroundTruthMovingObjects.js');\n\nmodule.exports = {\n TrafficUpdateMovingObject: TrafficUpdateMovingObject,\n Orientation3d: Orientation3d,\n MovingObject: MovingObject,\n Dimension3d: Dimension3d,\n GroundTruthMovingObjects: GroundTruthMovingObjects,\n};\n"
},
{
"alpha_fraction": 0.6903225779533386,
"alphanum_fraction": 0.6967741847038269,
"avg_line_length": 18.375,
"blob_id": "e3124783385a80f9314fd19aa7271d858c0ac4d6",
"content_id": "a54fcd106e0d58f12858db5df3ddb89d6d116185",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 310,
"license_type": "no_license",
"max_line_length": 44,
"num_lines": 16,
"path": "/src/osi3_bridge/include/win_dll_export.h",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "#ifndef WIN_DLL_EXPORT\n#define WIN_DLL_EXPORT\n\n#ifdef _WIN32\n# ifdef MODULE_API_EXPORTS\n# define MODULE_API __declspec(dllexport)\n# elif MODULE_API_IMPORTS\n# define MODULE_API __declspec(dllimport)\n# else \n# define MODULE_API\n# endif\n#else\n# define MODULE_API\n#endif\n\n#endif /* WIN_DLL_EXPORT */\n"
},
{
"alpha_fraction": 0.677813708782196,
"alphanum_fraction": 0.6973510980606079,
"avg_line_length": 28.90322494506836,
"blob_id": "9aeb08b9a637459a4a25cb750b1aff65d88912a1",
"content_id": "f719b6b01263c483e7d138023f9e38118ba6d65b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 8343,
"license_type": "no_license",
"max_line_length": 196,
"num_lines": 279,
"path": "/devel/include/osi3_bridge/GroundTruthMovingObjects.h",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "// Generated by gencpp from file osi3_bridge/GroundTruthMovingObjects.msg\n// DO NOT EDIT!\n\n\n#ifndef OSI3_BRIDGE_MESSAGE_GROUNDTRUTHMOVINGOBJECTS_H\n#define OSI3_BRIDGE_MESSAGE_GROUNDTRUTHMOVINGOBJECTS_H\n\n\n#include <string>\n#include <vector>\n#include <map>\n\n#include <ros/types.h>\n#include <ros/serialization.h>\n#include <ros/builtin_message_traits.h>\n#include <ros/message_operations.h>\n\n#include <std_msgs/Header.h>\n#include <osi3_bridge/MovingObject.h>\n\nnamespace osi3_bridge\n{\ntemplate <class ContainerAllocator>\nstruct GroundTruthMovingObjects_\n{\n typedef GroundTruthMovingObjects_<ContainerAllocator> Type;\n\n GroundTruthMovingObjects_()\n : header()\n , objects() {\n }\n GroundTruthMovingObjects_(const ContainerAllocator& _alloc)\n : header(_alloc)\n , objects(_alloc) {\n (void)_alloc;\n }\n\n\n\n typedef ::std_msgs::Header_<ContainerAllocator> _header_type;\n _header_type header;\n\n typedef std::vector< ::osi3_bridge::MovingObject_<ContainerAllocator> , typename ContainerAllocator::template rebind< ::osi3_bridge::MovingObject_<ContainerAllocator> >::other > _objects_type;\n _objects_type objects;\n\n\n\n\n\n typedef boost::shared_ptr< ::osi3_bridge::GroundTruthMovingObjects_<ContainerAllocator> > Ptr;\n typedef boost::shared_ptr< ::osi3_bridge::GroundTruthMovingObjects_<ContainerAllocator> const> ConstPtr;\n\n}; // struct GroundTruthMovingObjects_\n\ntypedef ::osi3_bridge::GroundTruthMovingObjects_<std::allocator<void> > GroundTruthMovingObjects;\n\ntypedef boost::shared_ptr< ::osi3_bridge::GroundTruthMovingObjects > GroundTruthMovingObjectsPtr;\ntypedef boost::shared_ptr< ::osi3_bridge::GroundTruthMovingObjects const> GroundTruthMovingObjectsConstPtr;\n\n// constants requiring out of line definition\n\n\n\ntemplate<typename ContainerAllocator>\nstd::ostream& operator<<(std::ostream& s, const ::osi3_bridge::GroundTruthMovingObjects_<ContainerAllocator> & v)\n{\nros::message_operations::Printer< ::osi3_bridge::GroundTruthMovingObjects_<ContainerAllocator> >::stream(s, \"\", v);\nreturn s;\n}\n\n\ntemplate<typename ContainerAllocator1, typename ContainerAllocator2>\nbool operator==(const ::osi3_bridge::GroundTruthMovingObjects_<ContainerAllocator1> & lhs, const ::osi3_bridge::GroundTruthMovingObjects_<ContainerAllocator2> & rhs)\n{\n return lhs.header == rhs.header &&\n lhs.objects == rhs.objects;\n}\n\ntemplate<typename ContainerAllocator1, typename ContainerAllocator2>\nbool operator!=(const ::osi3_bridge::GroundTruthMovingObjects_<ContainerAllocator1> & lhs, const ::osi3_bridge::GroundTruthMovingObjects_<ContainerAllocator2> & rhs)\n{\n return !(lhs == rhs);\n}\n\n\n} // namespace osi3_bridge\n\nnamespace ros\n{\nnamespace message_traits\n{\n\n\n\n\n\ntemplate <class ContainerAllocator>\nstruct IsFixedSize< ::osi3_bridge::GroundTruthMovingObjects_<ContainerAllocator> >\n : FalseType\n { };\n\ntemplate <class ContainerAllocator>\nstruct IsFixedSize< ::osi3_bridge::GroundTruthMovingObjects_<ContainerAllocator> const>\n : FalseType\n { };\n\ntemplate <class ContainerAllocator>\nstruct IsMessage< ::osi3_bridge::GroundTruthMovingObjects_<ContainerAllocator> >\n : TrueType\n { };\n\ntemplate <class ContainerAllocator>\nstruct IsMessage< ::osi3_bridge::GroundTruthMovingObjects_<ContainerAllocator> const>\n : TrueType\n { };\n\ntemplate <class ContainerAllocator>\nstruct HasHeader< ::osi3_bridge::GroundTruthMovingObjects_<ContainerAllocator> >\n : TrueType\n { };\n\ntemplate <class ContainerAllocator>\nstruct HasHeader< 
::osi3_bridge::GroundTruthMovingObjects_<ContainerAllocator> const>\n : TrueType\n { };\n\n\ntemplate<class ContainerAllocator>\nstruct MD5Sum< ::osi3_bridge::GroundTruthMovingObjects_<ContainerAllocator> >\n{\n static const char* value()\n {\n return \"b5c10f964cf85cf58aa709fc3567f543\";\n }\n\n static const char* value(const ::osi3_bridge::GroundTruthMovingObjects_<ContainerAllocator>&) { return value(); }\n static const uint64_t static_value1 = 0xb5c10f964cf85cf5ULL;\n static const uint64_t static_value2 = 0x8aa709fc3567f543ULL;\n};\n\ntemplate<class ContainerAllocator>\nstruct DataType< ::osi3_bridge::GroundTruthMovingObjects_<ContainerAllocator> >\n{\n static const char* value()\n {\n return \"osi3_bridge/GroundTruthMovingObjects\";\n }\n\n static const char* value(const ::osi3_bridge::GroundTruthMovingObjects_<ContainerAllocator>&) { return value(); }\n};\n\ntemplate<class ContainerAllocator>\nstruct Definition< ::osi3_bridge::GroundTruthMovingObjects_<ContainerAllocator> >\n{\n static const char* value()\n {\n return \"Header header\\n\"\n\"osi3_bridge/MovingObject[] objects\\n\"\n\"\\n\"\n\"================================================================================\\n\"\n\"MSG: std_msgs/Header\\n\"\n\"# Standard metadata for higher-level stamped data types.\\n\"\n\"# This is generally used to communicate timestamped data \\n\"\n\"# in a particular coordinate frame.\\n\"\n\"# \\n\"\n\"# sequence ID: consecutively increasing ID \\n\"\n\"uint32 seq\\n\"\n\"#Two-integer timestamp that is expressed as:\\n\"\n\"# * stamp.sec: seconds (stamp_secs) since epoch (in Python the variable is called 'secs')\\n\"\n\"# * stamp.nsec: nanoseconds since stamp_secs (in Python the variable is called 'nsecs')\\n\"\n\"# time-handling sugar is provided by the client library\\n\"\n\"time stamp\\n\"\n\"#Frame this data is associated with\\n\"\n\"string frame_id\\n\"\n\"\\n\"\n\"================================================================================\\n\"\n\"MSG: osi3_bridge/MovingObject\\n\"\n\"uint64 id\\n\"\n\"osi3_bridge/Dimension3d dimension\\n\"\n\"geometry_msgs/Vector3 position\\n\"\n\"osi3_bridge/Orientation3d orientation\\n\"\n\"geometry_msgs/Vector3 velocity\\n\"\n\"geometry_msgs/Vector3 acceleration\\n\"\n\"uint8 type\\n\"\n\"\\n\"\n\"uint8 TYPE_UNKNOWN = 0\\n\"\n\"uint8 TYPE_OTHER = 1\\n\"\n\"uint8 TYPE_CAR = 2\\n\"\n\"uint8 TYPE_PEDESTRIAN = 3\\n\"\n\"uint8 TYPE_ANIMAL = 4\\n\"\n\"uint8 TYPE_TRUCK = 5\\n\"\n\"uint8 TYPE_TRAILER = 6\\n\"\n\"uint8 TYPE_MOTORBIKE = 7\\n\"\n\"uint8 TYPE_BICYCLE = 8\\n\"\n\"uint8 TYPE_BUS = 9\\n\"\n\"uint8 TYPE_TRAM = 10\\n\"\n\"uint8 TYPE_TRAIN = 11\\n\"\n\"uint8 TYPE_WHEELCHAIR = 12\\n\"\n\"\\n\"\n\"\\n\"\n\"================================================================================\\n\"\n\"MSG: osi3_bridge/Dimension3d\\n\"\n\"float64 length\\n\"\n\"float64 width\\n\"\n\"float64 height\\n\"\n\"\\n\"\n\"================================================================================\\n\"\n\"MSG: geometry_msgs/Vector3\\n\"\n\"# This represents a vector in free space. \\n\"\n\"# It is only meant to represent a direction. Therefore, it does not\\n\"\n\"# make sense to apply a translation to it (e.g., when applying a \\n\"\n\"# generic rigid transformation to a Vector3, tf2 will only apply the\\n\"\n\"# rotation). 
If you want your data to be translatable too, use the\\n\"\n\"# geometry_msgs/Point message instead.\\n\"\n\"\\n\"\n\"float64 x\\n\"\n\"float64 y\\n\"\n\"float64 z\\n\"\n\"================================================================================\\n\"\n\"MSG: osi3_bridge/Orientation3d\\n\"\n\"float64 roll\\n\"\n\"float64 pitch\\n\"\n\"float64 yaw\\n\"\n;\n }\n\n static const char* value(const ::osi3_bridge::GroundTruthMovingObjects_<ContainerAllocator>&) { return value(); }\n};\n\n} // namespace message_traits\n} // namespace ros\n\nnamespace ros\n{\nnamespace serialization\n{\n\n template<class ContainerAllocator> struct Serializer< ::osi3_bridge::GroundTruthMovingObjects_<ContainerAllocator> >\n {\n template<typename Stream, typename T> inline static void allInOne(Stream& stream, T m)\n {\n stream.next(m.header);\n stream.next(m.objects);\n }\n\n ROS_DECLARE_ALLINONE_SERIALIZER\n }; // struct GroundTruthMovingObjects_\n\n} // namespace serialization\n} // namespace ros\n\nnamespace ros\n{\nnamespace message_operations\n{\n\ntemplate<class ContainerAllocator>\nstruct Printer< ::osi3_bridge::GroundTruthMovingObjects_<ContainerAllocator> >\n{\n template<typename Stream> static void stream(Stream& s, const std::string& indent, const ::osi3_bridge::GroundTruthMovingObjects_<ContainerAllocator>& v)\n {\n s << indent << \"header: \";\n s << std::endl;\n Printer< ::std_msgs::Header_<ContainerAllocator> >::stream(s, indent + \" \", v.header);\n s << indent << \"objects[]\" << std::endl;\n for (size_t i = 0; i < v.objects.size(); ++i)\n {\n s << indent << \" objects[\" << i << \"]: \";\n s << std::endl;\n s << indent;\n Printer< ::osi3_bridge::MovingObject_<ContainerAllocator> >::stream(s, indent + \" \", v.objects[i]);\n }\n }\n};\n\n} // namespace message_operations\n} // namespace ros\n\n#endif // OSI3_BRIDGE_MESSAGE_GROUNDTRUTHMOVINGOBJECTS_H\n"
},
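The generated header above embeds the full `.msg` definition of `osi3_bridge/GroundTruthMovingObjects`: a `std_msgs/Header` plus a `MovingObject[]` array whose type constants (such as `TYPE_CAR = 2`) come from `MovingObject.msg`. On the Python side the same message is filled like this; a minimal sketch using only fields named in the header's `Definition` string:

```python
# Minimal sketch: build the message whose C++ serialization the header defines.
import rospy
from osi3_bridge.msg import GroundTruthMovingObjects, MovingObject

def make_ground_truth(stamp, frame_id="world"):
    msg = GroundTruthMovingObjects()
    msg.header.stamp = stamp
    msg.header.frame_id = frame_id

    obj = MovingObject()
    obj.id = 1
    obj.type = MovingObject.TYPE_CAR   # uint8 constant from MovingObject.msg
    obj.dimension.length = 4.5         # osi3_bridge/Dimension3d
    obj.dimension.width = 1.8
    obj.dimension.height = 1.5
    obj.position.x = 10.0              # geometry_msgs/Vector3
    obj.orientation.yaw = 0.0          # osi3_bridge/Orientation3d (radians)
    msg.objects.append(obj)
    return msg
```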
{
"alpha_fraction": 0.6009417176246643,
"alphanum_fraction": 0.6195801496505737,
"avg_line_length": 34.38888931274414,
"blob_id": "39e1bbb8a3748d374dd3246cca67c9fef03978c9",
"content_id": "df293d9ef7157c66f08f373bb8988d26561ef17e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5097,
"license_type": "no_license",
"max_line_length": 318,
"num_lines": 144,
"path": "/src/object_list/scripts/CARLAOSIbridge.py",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n\n\nimport sys\nimport glob\nimport os\n\n\ntry:\n sys.path.append(glob.glob('/opt/carla-simulator/PythonAPI/carla/dist/carla-0.9.8-py2.7-linux-x86_64.egg')[0])\n\n# sys.path.append(glob.glob('../carla/dist/carla-*%d.%d-%s.egg' % (\n# sys.version_info.major,\n# sys.version_info.minor,\n# 'win-amd64' if os.name == 'nt' else 'linux-x86_64'))[0])\nexcept IndexError:\n pass\n\n# ==============================================================================\n# -- add PythonAPI for release mode --------------------------------------------\n# ==============================================================================\ntry:\n sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + '/carla')\nexcept IndexError:\n pass\n\n\n\nimport carla\nimport rospy\nimport numpy as np\nimport math\nimport time\nfrom cv_bridge import CvBridge\ncv_bridge = CvBridge()\n\nfrom sensor_msgs.msg import CameraInfo, Image\nfrom osi3_bridge.msg import GroundTruthMovingObjects, TrafficUpdateMovingObject, MovingObject\n\ntime.sleep(3) ## Wait the scenario setting\n\n\ndef CARLA_OSI_ROS():\n\n ## Connect with CARLA server\n #client = carla.Client('localhost', 2000) ### It starts the communication between server and client\n client = carla.Client('10.116.64.35', 2000)\n client.set_timeout(10.0) # seconds ### After 10 seconds without communication with CARLA server the script stops\n world = client.get_world()\n\n #Start ROS node\n pub = rospy.Publisher('osi3_moving_obj', GroundTruthMovingObjects, queue_size=10) #\n rospy.init_node('osi3_bridge',anonymous=False) # Initiate the node camera and anonymous true permit opening this node a lot of time including number in the end of the node name\n rate = rospy.Rate(rospy.get_param(\"freq\") ) # 100 hz\n #rate = rospy.Rate(0.1) # 100 hz\n\n cameras = world.get_actors().filter('sensor.camera.rgb')\n cc = carla.ColorConverter.CityScapesPalette\n for camera in cameras:\n #camera.listen(lambda image: image.save_to_disk('/home/drechsler/01.png', cc))\n camera.listen(lambda image: get_data(image))\n\n def get_data(image):\n camera_data_array = np.ndarray(shape=(image.height, image.width, 4), dtype=np.uint8, buffer=image.raw_data)\n img_msg = cv_bridge.cv2_to_imgmsg(camera_data_array, encoding='bgra8')\n pub = rospy.Publisher('imagetimer', Image, queue_size=10)\n pub.publish(img_msg )\n\n\n while not rospy.is_shutdown():\n b = GroundTruthMovingObjects()\n #actor_list = world.get_actors()\n vehicles = world.get_actors().filter('vehicle.*')\n walkers = world.get_actors().filter('walker.*')\n\n\n for vehicle in vehicles:\n a = get_OSI(vehicle)\n a.type = get_classification(vehicle.type_id)\n b.objects = np.append(b.objects, a)\n\n for walker in walkers:\n a = get_OSI(walker)\n a.type = 3 # Pesdestrian\n b.objects = np.append(b.objects, a)\n\n if b.objects !=[]:\n\n b.header.stamp = rospy.Time.now()\n b.header.frame_id = \"world\"\n #rospy.loginfo(b)\n pub.publish(b)\n rate.sleep()\n\ndef get_OSI(actor):\n location = actor.get_location()\n velocity = actor.get_velocity()\n acceleration = actor.get_acceleration()\n rotation = actor.get_transform()\n a = MovingObject()\n a.id = actor.id\n\n a.position.x = location.x\n a.position.y = - location.y\n a.position.z = location.z\n a.velocity.x = velocity.x\n a.velocity.y = - velocity.y\n a.velocity.z = velocity.z\n a.acceleration.x = acceleration.x\n a.acceleration.y = - acceleration.y\n a.acceleration.z = acceleration.z\n a.dimension.length = actor.bounding_box.extent.x * 2\n a.dimension.width = 
actor.bounding_box.extent.y * 2\n a.dimension.height = actor.bounding_box.extent.z * 2\n a.orientation.pitch = - rotation.rotation.pitch*math.pi/180\n a.orientation.yaw = - rotation.rotation.yaw*math.pi/180\n a.orientation.roll = rotation.rotation.roll*math.pi/180\n return a\n\n\n\n\ndef get_classification(name):\n\n # UNKNOWN = 0 #OTHER = 1 #CAR = 2 #PEDESTRIAN = 3 #ANIMAL = 4\n # TRUCK = 5 #TRAILER = 6 #MOTORBIKE = 7 #BICYCLE = 8 #BUS = 9\n # TRAM = 10 #TRAIN = 11 #WHEELCHAIR = 12\n if (\"audi\" in name or \"bmw\" in name or \"chevrolet\" in name or \"citroen\" in name or \"dodge\" in name or \"jeep\" in name or \"lincoln\" in name or \"mercedes-benz\" in name or \"mini.cooperst\" in name or \"mustang\" in name or \"nissan\" in name or \"seat\" in name or \"tesla\" in name or \"toyota\" in name or \"volkswagen\"in name):\n classi = 2 #car\n elif name ==(\"vehicle.carlamotors.carlacola\"):\n classi = 5 #Truck\n elif (\"harley-davidson\" in name or \"kawasaki\" in name or \"yamaha\" in name):\n classi = 7 #Motorbike\n elif (\"crossbike\" in name or \"diamondback\" in name or \"gazelle\" in name):\n classi = 8 #Bicicle\n else:\n classi = 1 #Other\n return classi\n\nif __name__ == '__main__':\n try:\n CARLA_OSI_ROS()\n except rospy.ROSInterruptException:\n pass\n\n"
},
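`get_OSI()` in the bridge above negates `y`, `pitch` and `yaw` and converts degrees to radians, because CARLA reports poses in a left-handed frame while the OSI messages here use the right-handed convention. The same conversion isolated as a standalone function; the name `carla_pose_to_osi` is ours:

```python
# Standalone version of the frame conversion performed in get_OSI().
import math

def carla_pose_to_osi(x, y, z, roll_deg, pitch_deg, yaw_deg):
    """Map a left-handed, degree-based CARLA pose to the right-handed,
    radian-based convention used by the osi3_bridge messages."""
    return (
        x, -y, z,
        math.radians(roll_deg),
        -math.radians(pitch_deg),
        -math.radians(yaw_deg),
    )

# A CARLA actor at y = 5 m heading 90 deg becomes y = -5 m, yaw = -pi/2.
print(carla_pose_to_osi(0.0, 5.0, 0.0, 0.0, 0.0, 90.0))
```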
{
"alpha_fraction": 0.8027210831642151,
"alphanum_fraction": 0.8027210831642151,
"avg_line_length": 35.75,
"blob_id": "3c4442dbacf62c9aa49a75a3d20862e28a5fa4eb",
"content_id": "882ac2fd86249ae2aa39734a2809175a5b1137c0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 294,
"license_type": "no_license",
"max_line_length": 117,
"num_lines": 8,
"path": "/build/vehicle_control/CMakeFiles/_vehicle_control_generate_messages_check_deps_Trajectory.dir/cmake_clean.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "file(REMOVE_RECURSE\n \"CMakeFiles/_vehicle_control_generate_messages_check_deps_Trajectory\"\n)\n\n# Per-language clean rules from dependency scanning.\nforeach(lang )\n include(CMakeFiles/_vehicle_control_generate_messages_check_deps_Trajectory.dir/cmake_clean_${lang}.cmake OPTIONAL)\nendforeach()\n"
},
{
"alpha_fraction": 0.6384527683258057,
"alphanum_fraction": 0.6433447003364563,
"avg_line_length": 56.20173645019531,
"blob_id": "572db176f438acd73a23338f611f72d4aebcab71",
"content_id": "fcfd2a1c902cc72984455f649fbac04ac93b07fc",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 26370,
"license_type": "no_license",
"max_line_length": 256,
"num_lines": 461,
"path": "/src/fusion/src/ClassExistance_Objects.py",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "import numpy as np\nimport rospy\nimport math\nimport message_filters\nfrom scipy.spatial import distance\nfrom scipy.stats import chi2\nfrom scipy.linalg import sqrtm\nfrom object_list.msg import ObjectList, ObjectsList\nfrom osi3_bridge.msg import GroundTruthMovingObjects, TrafficUpdateMovingObject\nimport sys\n# import function\n#from rotate import rotate\n#from scipy.spatial import distance\n\nclass GlobalObject:\n def __init__(self,obj):\n self.object_id = obj.obj_id\n #self.dimension_vector = copy.deepcopy(object.dimension_vector)\n self.existance_probability = obj.prop_existence\n self.persistance_probability = obj.prop_persistance\n #self.classification_vector = copy.deepcopy(object.classification_vector)\n #self.probability_object_moved = copy.deepcopy(object.probability_object_moved)\n #self.feature_vector = copy.deepcopy(object.feature_vector)\n\n self.mass_existance = None\n self.mass_nonexistance = None\n self.mass_uncertainity = None\n self.list_existance_mass_factor = None\n self.fused_probability_existance = None\n self.fused_probability_nonexistance = None\n self.global_predicted_mass_existance = None\n self.global_predicted_mass_nonexistance = None\n self.global_predicted_mass_uncertainity = None\n self.global_predicted_masslist = None\n\n self.mass_car = obj.classification_mass[0]\n self.mass_truck = obj.classification_mass[1]\n self.mass_motorcycle = obj.classification_mass[2]\n self.mass_bicycle = obj.classification_mass[3]\n self.mass_pedestrian = obj.classification_mass[4]\n self.mass_stationary = obj.classification_mass[5]\n self.mass_vehicle = obj.classification_mass[6]\n self.mass_vru = obj.classification_mass[7]\n self.mass_traffic = obj.classification_mass[8]\n self.mass_vehicle_stationary = obj.classification_mass[9]\n self.mass_vru_stationary = obj.classification_mass[10]\n self.mass_ignorance = obj.classification_mass[11]\n self.list_classification_mass_factor = obj.classification_mass\n\n def existance_mass_prediction(self , prediction_weight):\n \"\"\"\n Method to predict the existence mass factors for existence probability.\n\n :param prediction_weight: Defined in the fusion configuration.\n \"\"\"\n global_object_mass_existance = float(self.mass_existance)\n global_object_mass_nonexistance = float(self.mass_nonexistance)\n global_object_mass_uncertainity = float(self.mass_uncertainity)\n prediction_weight = float(prediction_weight)\n\n self.global_predicted_mass_existance = ((1 - prediction_weight) * global_object_mass_existance)\n self.global_predicted_mass_nonexistance = ((1 - prediction_weight) * global_object_mass_nonexistance)\n self.global_predicted_mass_uncertainity = ((global_object_mass_uncertainity) + (prediction_weight * (global_object_mass_existance + global_object_mass_nonexistance)))\n self.global_predicted_masslist = [self.global_predicted_mass_existance , self.global_predicted_mass_nonexistance , self.global_predicted_mass_uncertainity]\n\n\n def set_existance_probability_mass_factors(self, sensor_trust):\n\n \"\"\"\n Method to set the new existence probability mass factors after fusion.\n\n :param sensor:\n \"\"\"\n\n\n self.mass_existance = (self.persistance_probability * float(sensor_trust) * self.existance_probability)\n self.mass_nonexistance = (self.persistance_probability * float(sensor_trust) * (1 - self.existance_probability))\n self.mass_uncertainity = (1 - (float(self.mass_existance) + float(self.mass_nonexistance)))\n\n self.list_existance_mass_factor = [self.mass_existance, self.mass_nonexistance, 
self.mass_uncertainity]\n\n\n\n\nclass SensorObject:\n def __init__(self,obj,property):\n self.ids = obj.obj_id\n self.sensor_trust = property.trust_existance\n self.existance_probability = obj.prop_existence\n self.persistance_probability = obj.prop_persistance\n self.classification_vector = [obj.classification.car, obj.classification.truck, obj.classification.motorcycle, obj.classification.bicycle, obj.classification.pedestrian, obj.classification.stacionary, obj.classification.other]\n\n if (np.sqrt(np.square(obj.geometric.vx) + np.square(obj.geometric.vy)) > 0 and np.sqrt(np.square(obj.geometric.vx) + np.square(obj.geometric.vy)) < 1):\n\n self.probability_object_moved = 0.3\n elif np.sqrt(np.square(obj.geometric.vx) + np.square(obj.geometric.vy)) > 1:\n self.probabiliry_object_moved = 0.6\n else:\n self.probabiliry_object_moved = 0.01\n\n self.feature_vector = None\n self.sensor = property\n #self.sensor = None\n\n #self.state_vector_EGOFOR = None\n self.mass_existance = None\n self.mass_nonexistance = None\n self.mass_uncertainity = None\n self.list_existance_mass_factor = None\n\n self.mass_car = None\n self.mass_truck = None\n self.mass_motorcycle = None\n self.mass_bicycle = None\n self.mass_pedestrian = None\n self.mass_stationary = None\n self.mass_vehicle = None\n self.mass_vru = None\n self.mass_traffic = None\n self.mass_vehicle_stationary = None\n self.mass_vru_stationary = None\n self.mass_ignorance = None\n self.list_classification_mass_factor = None\n\n\n def set_existance_probability_mass_factors(self):\n \"\"\"\n Calculate the exitence probability mass factors for the sensor object using the existence vector.\n\n \"\"\"\n\n\n self.mass_existance = (float(self.persistance_probability) * float(self.sensor_trust) * float(\n self.existance_probability))\n self.mass_nonexistance = (float(self.persistance_probability) * float(self.sensor_trust) * float(\n (1 - self.existance_probability)))\n self.mass_uncertainity = (1 - (float(self.mass_existance) + float(self.mass_nonexistance)))\n\n self.list_existance_mass_factor = [self.mass_existance, self.mass_nonexistance, self.mass_uncertainity]\n\n def set_classification_mass_factors(self):\n \"\"\"\n Claculate the classification probability mass factors for the sensor object using the classification vector.\n \"\"\"\n object_probability_car = self.classification_vector[0]\n object_probability_truck = self.classification_vector[1]\n object_probability_motorcycle = self.classification_vector[2]\n object_probability_bicycle = self.classification_vector[3]\n object_probability_pedestrian = self.classification_vector[4]\n object_probability_stationary = self.classification_vector[5]\n object_probability_other = self.classification_vector[6]\n object_moved = 0.5#self.probability_object_moved #Need to determine\n\n self.mass_car = float(self.sensor.trust_car) * float(object_probability_car)\n self.mass_truck = float(self.sensor.trust_truck) * float(object_probability_truck)\n self.mass_motorcycle = float(self.sensor.trust_motorcycle) * float(object_probability_motorcycle)\n self.mass_bicycle = float(self.sensor.trust_bicycle) * float(object_probability_bicycle)\n self.mass_pedestrian = float(self.sensor.trust_pedestrian) * float(object_probability_pedestrian)\n self.mass_stationary = float(self.sensor.trust_stationary) * float(object_probability_stationary)\n\n mass_sum = float(self.mass_car) + float(self.mass_truck) + float(self.mass_motorcycle) + float(self.mass_bicycle) + float(self.mass_pedestrian)\n\n object_probability_vehicle = 
(float(self.mass_car) + float(self.mass_truck) + float(self.mass_motorcycle)) / (mass_sum)\n object_probability_vru = (float(self.mass_bicycle) + float(self.mass_pedestrian)) / (mass_sum)\n\n self.mass_vehicle = (float(object_moved)) * (float(object_probability_vehicle)) * (\n ((1 - float(self.sensor.trust_car)) * float(object_probability_car)) + (\n (1 - float(self.sensor.trust_truck)) * float(object_probability_truck)) + (\n (1 - float(self.sensor.trust_motorcycle)) * float(object_probability_motorcycle)))\n self.mass_vru = (float(object_moved)) * (float(object_probability_vru)) * (\n ((1 - float(self.sensor.trust_bicycle)) * float(object_probability_bicycle)) + (\n (1 - float(self.sensor.trust_pedestrian)) * float(object_probability_pedestrian)))\n self.mass_traffic = ((float(object_moved)) * (float(object_probability_vru)) * (\n ((1 - float(self.sensor.trust_car)) * float(object_probability_car)) + (\n (1 - float(self.sensor.trust_truck)) * float(object_probability_truck)) + (\n (1 - float(self.sensor.trust_motorcycle)) * float(object_probability_motorcycle)))) + (\n (float(object_moved)) * (float(object_probability_vehicle)) * (\n ((1 - float(self.sensor.trust_bicycle)) * float(object_probability_bicycle)) + (\n (1 - float(self.sensor.trust_pedestrian)) * float(object_probability_pedestrian))))\n\n self.mass_vehicle_stationary = (1 - float(object_moved)) * (float(object_probability_vehicle)) * (\n ((1 - float(self.sensor.trust_car)) * float(object_probability_car)) + (\n (1 - float(self.sensor.trust_truck)) * float(object_probability_truck)) + (\n (1 - float(self.sensor.trust_motorcycle)) * float(object_probability_motorcycle)))\n self.mass_vru_stationary = (1 - float(object_moved)) * (float(object_probability_vru)) * (\n ((1 - float(self.sensor.trust_bicycle)) * float(object_probability_bicycle)) + (\n (1 - float(self.sensor.trust_pedestrian)) * float(object_probability_pedestrian)))\n\n self.mass_ignorance = (1 - (\n self.mass_car + self.mass_truck + self.mass_motorcycle + self.mass_bicycle + self.mass_pedestrian + self.mass_stationary + self.mass_vehicle + self.mass_vru + self.mass_traffic + self.mass_vehicle_stationary + self.mass_vru_stationary))\n\n self.list_classification_mass_factor = [self.mass_car, self.mass_truck, self.mass_motorcycle, self.mass_bicycle,\n self.mass_pedestrian, self.mass_stationary, self.mass_vehicle,\n self.mass_vru, self.mass_traffic, self.mass_vehicle_stationary,\n self.mass_vru_stationary, self.mass_ignorance]\n\n\n\nclass ExistanceFusion:\n mass_factor_combination = [['e', 'n', 'e'],\n ['n', 'ne', 'ne'],\n ['e', 'ne', 'i']]\n\n\n def __init__(self,sensor_object,global_object):\n self.sensor_object = sensor_object # need to determine\n self.global_object = global_object # need to determine\n\n\n #self.time_difference = global_object.current_time - global_object.previous_time\n\n self.fused_mass_factor_existance = None\n self.fused_mass_factor_nonexistance = None\n self.fused_mass_factor_uncertainity = None\n\n self.fused_probability_existance = None\n self.fused_probability_nonexistance = None\n\n def existance_fusion_object_assosiated(self):\n \"\"\"\n Method to perfrom the existence fusion between the sensor object and the global object if the objects are associated.\n\n :return: Assigns the class object with the fusion results.\n \"\"\"\n sensor_existance_mass_factors = (self.sensor_object.list_existance_mass_factor) # need to determine\n global_existance_mass_factors = (self.global_object.global_predicted_masslist) # need to determine\n 
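# --- Editor's illustrative sketch (not part of the original source) ---\n        # get_fused_mass_factors() below applies Dempster's rule of combination to the two\n        # mass triplets [m(exist), m(nonexist), m(uncertain)] using the class-level\n        # mass_factor_combination table ('e' = existence, 'ne' = non-existence,\n        # 'i' = ignorance/uncertainty, 'n' = conflicting/null intersection).\n        # Hand-worked example with hypothetical masses sensor = [0.6, 0.1, 0.3]\n        # and predicted global = [0.5, 0.2, 0.3]:\n        #   conflict     = 0.6*0.2 + 0.1*0.5 = 0.17\n        #   m(exist)     = (0.6*0.5 + 0.6*0.3 + 0.3*0.5) / (1 - 0.17) = 0.63 / 0.83 ~ 0.759\n        #   m(nonexist)  = (0.1*0.2 + 0.1*0.3 + 0.3*0.2) / (1 - 0.17) = 0.11 / 0.83 ~ 0.133\n        #   m(uncertain) = (0.3*0.3) / (1 - 0.17) = 0.09 / 0.83 ~ 0.108\n        # The fused existence probability computed below then adds half of the remaining\n        # uncertainty mass: P(exist) = 0.759 + 0.5 * 0.108 ~ 0.813.\n        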
#print('inside fusion', sensor_existance_mass_factors, global_existance_mass_factors)\n        #a = sensor_existance_mass_factors\n        #b = global_existance_mass_factors\n        #c = ExistanceFusion.mass_factor_combination\n\n        self.fused_mass_factor_existance, self.fused_mass_factor_nonexistance, self.fused_mass_factor_uncertainity = self.get_fused_mass_factors()\n        print(\"out of fused mass and in exist fuse associated\")\n        self.fused_probability_existance = ((self.fused_mass_factor_existance) + ((1 / 2) * (self.fused_mass_factor_uncertainity)))\n        self.fused_probability_nonexistance = (1 - (self.fused_probability_existance))\n\n    #def get_fused_mass_factors(sensor_existance_mass_factors, global_existance_mass_factors , mass_factor_combination):\n    def get_fused_mass_factors(self):\n        \"\"\"\n        Method to compute the fused existence mass factors with Dempster's rule of combination.\n\n        The sensor and global mass factors are read from the associated sensor and global objects;\n        products that fall on conflicting ('n') combinations are normalised out.\n\n        :return: fused_mass_factor_existance, fused_mass_factor_nonexistance, fused_mass_factor_uncertainity\n        \"\"\"\n        sensor_existance_mass_factors = (self.sensor_object.list_existance_mass_factor) # need to determine\n        global_existance_mass_factors = (self.global_object.global_predicted_masslist) # need to determine\n        # print('inside fusion', sensor_existance_mass_factors, global_existance_mass_factors)\n        mass_factor_combination = ExistanceFusion.mass_factor_combination\n        sum_intersection_existance = 0\n        sum_intersection_nonexistance = 0\n        sum_intersection_uncertainity = 0\n        sum_intersection_null = 0\n\n        for row in range(len(sensor_existance_mass_factors)):\n            for column in range(len(global_existance_mass_factors)):\n                if mass_factor_combination[row][column] == 'e':\n                    sum_intersection_existance = sum_intersection_existance + (\n                            sensor_existance_mass_factors[row] * global_existance_mass_factors[column])\n\n                elif mass_factor_combination[row][column] == 'n':\n                    sum_intersection_null = sum_intersection_null + (\n                            sensor_existance_mass_factors[row] * global_existance_mass_factors[column])\n\n                elif mass_factor_combination[row][column] == 'ne':\n                    sum_intersection_nonexistance = sum_intersection_nonexistance + (\n                            sensor_existance_mass_factors[row] * global_existance_mass_factors[column])\n\n                elif mass_factor_combination[row][column] == 'i':\n                    sum_intersection_uncertainity = sum_intersection_uncertainity + (\n                            sensor_existance_mass_factors[row] * global_existance_mass_factors[column])\n\n        print('inside DST combination')\n        #print('mass factor combination', mass_factor_combination)\n        #print('sum of elements', sum_intersection_existance, sum_intersection_nonexistance, sum_intersection_null, sum_intersection_uncertainity)\n        fused_mass_factor_existance = ((sum_intersection_existance) / (1 - (sum_intersection_null)))\n        fused_mass_factor_nonexistance = ((sum_intersection_nonexistance) / (1 - (sum_intersection_null)))\n        fused_mass_factor_uncertainity = ((sum_intersection_uncertainity) / (1 - (sum_intersection_null)))\n\n        return (fused_mass_factor_existance, fused_mass_factor_nonexistance, fused_mass_factor_uncertainity)\n\n#def assign_Sensor_obj(obj, sensor_trust_probability):\n#    sensor_obj = SensorObject()\n#    sensor_obj.id = obj.obj_id\n#    sensor_obj.existance_probability = obj.prop_existence\n#    sensor_obj.persistance_probability = obj.prop_persistance\n#    sensor_obj.sensor_trust = sensor_trust_probability\n\n#    return sensor_obj\n\n#def assign_Global_obj(obj):\n#    global_obj = GlobalObject()\n#    global_obj.global_object_id = obj.obj_id\n#    global_obj.existance_probability = obj.prop_existence\n#    global_obj.persistance_probability = obj.prop_persistance\n\n\n#    return global_obj\n\n\nclass ClassificationFusion:\n    \"\"\"\n    Class to perform classification fusion between the associated objects.\n\n    Class Attributes:\n        mass_factor_combination\n\n    Class Methods:\n        __init__\n        classification_fusion\n    \"\"\"\n\n    mass_factor_combination = [\n        ['car', 'null', 'null', 'null', 'null', 'null', 'car', 'null', 'car', 'car', 'null', 'car'],\n        ['null', 'truck', 'null', 'null', 'null', 'null', 'truck', 'null', 'truck', 'truck', 'null', 'truck'],\n        ['null', 'null', 'moto', 'null', 'null', 'null', 'moto', 'null', 'moto', 'moto', 'null', 'moto'],\n        ['null', 'null', 'null', 'bicycle', 'null', 'null', 'null', 'bicycle', 'bicycle', 'null', 'bicycle', 'bicycle'],\n        ['null', 'null', 'null', 'null', 'ped', 'null', 'null', 'ped', 'ped', 'null', 'ped', 'ped'],\n        ['null', 'null', 'null', 'null', 'null', 'stat', 'null', 'null', 'null', 'stat', 'stat', 'stat'],\n        ['car', 'truck', 'moto', 'null', 'null', 'null', 'veh', 'null', 'veh', 'veh', 'null', 'veh'],\n        ['null', 'null', 'null', 'bicycle', 'ped', 'null', 'null', 'vru', 'vru', 'null', 'vru', 'vru'],\n        ['car', 'truck', 'moto', 'bicycle', 'ped', 'null', 'veh', 'vru', 'traffic', 'veh', 'vru', 'traffic'],\n        ['car', 'truck', 'moto', 'null', 'null', 'stat', 'veh', 'null', 'veh', 'vehstat', 'stat', 'vehstat'],\n        ['null', 'null', 'null', 'bicycle', 'ped', 'stat', 'null', 'vru', 'vru', 'stat', 'vrustat', 'vrustat'],\n        ['car', 'truck', 'moto', 'bicycle', 'ped', 'stat', 'veh', 'vru', 'traffic', 'vehstat', 'vrustat', 'all']]\n\n    def __init__(self, sensor_object, global_object):\n        \"\"\"\n        Class object constructor.\n\n        :param sensor_object: Sensor object involved in the classification fusion.\n        :param global_object: Global object involved in the classification fusion.\n        \"\"\"\n        self.sensor_object = sensor_object\n        self.global_object = global_object\n\n        self.fused_mass_factor_car = None\n        self.fused_mass_factor_truck = None\n        self.fused_mass_factor_motorcycle = None\n        self.fused_mass_factor_bicycle = None\n        self.fused_mass_factor_pedestrian = None\n        self.fused_mass_factor_stationary = None\n        self.fused_mass_factor_vehicle = None\n        self.fused_mass_factor_vru = None\n        self.fused_mass_factor_traffic = None\n        self.fused_mass_factor_statvehicle = None\n        self.fused_mass_factor_statvru = None\n        self.fused_mass_factor_ignorance = None\n\n        self.fused_probability_car = None\n        self.fused_probability_truck = None\n        self.fused_probability_motorcycle = None\n        self.fused_probability_bicycle = None\n        self.fused_probability_pedestrian = None\n        self.fused_probability_stationary = None\n        self.fused_probability_other = None\n\n    def classification_fusion(self):\n        \"\"\"\n        Method to perform the classification fusion.\n        Updates the classification fusion results in the class object.\n        \"\"\"\n        sensor_classification_mass_factors = (self.sensor_object.list_classification_mass_factor)\n        global_classification_mass_factors = (self.global_object.list_classification_mass_factor)\n\n        sum_intersection_car = 0\n        sum_intersection_truck = 0\n        sum_intersection_motorcycle = 0\n        sum_intersection_bicycle = 0\n        sum_intersection_pedestrian = 0\n        sum_intersection_stationary = 0\n        sum_intersection_vehicle = 0\n        sum_intersection_vru = 0\n        sum_intersection_traffic = 0\n        sum_intersection_statvehicle = 0\n        sum_intersection_statvru = 0\n        sum_intersection_null = 0\n        sum_intersection_all = 0\n        for row in range(len(sensor_classification_mass_factors)):\n            for column in range(len(global_classification_mass_factors)):\n                if ClassificationFusion.mass_factor_combination[row][column] == 'car':\n                    sum_intersection_car = sum_intersection_car + (\n                            sensor_classification_mass_factors[row] * global_classification_mass_factors[column])\n\n                elif ClassificationFusion.mass_factor_combination[row][column] == 'truck':\n                    sum_intersection_truck = sum_intersection_truck + (\n                            sensor_classification_mass_factors[row] * global_classification_mass_factors[column])\n\n                elif ClassificationFusion.mass_factor_combination[row][column] == 'moto':\n                    sum_intersection_motorcycle = sum_intersection_motorcycle + (\n                            sensor_classification_mass_factors[row] * global_classification_mass_factors[column])\n\n                elif ClassificationFusion.mass_factor_combination[row][column] == 'bicycle':\n                    sum_intersection_bicycle = sum_intersection_bicycle + (\n                            sensor_classification_mass_factors[row] * global_classification_mass_factors[column])\n\n                elif ClassificationFusion.mass_factor_combination[row][column] == 'ped':\n                    sum_intersection_pedestrian = sum_intersection_pedestrian + (\n                            sensor_classification_mass_factors[row] * global_classification_mass_factors[column])\n\n                elif ClassificationFusion.mass_factor_combination[row][column] == 'stat':\n                    sum_intersection_stationary = sum_intersection_stationary + (\n                            sensor_classification_mass_factors[row] * global_classification_mass_factors[column])\n\n                elif ClassificationFusion.mass_factor_combination[row][column] == 'veh':\n                    sum_intersection_vehicle = sum_intersection_vehicle + (\n                            sensor_classification_mass_factors[row] * global_classification_mass_factors[column])\n\n                elif ClassificationFusion.mass_factor_combination[row][column] == 'vru':\n                    sum_intersection_vru = sum_intersection_vru + (\n                            sensor_classification_mass_factors[row] * global_classification_mass_factors[column])\n\n                elif ClassificationFusion.mass_factor_combination[row][column] == 'traffic':\n                    sum_intersection_traffic = sum_intersection_traffic + (\n                            sensor_classification_mass_factors[row] * global_classification_mass_factors[column])\n\n                elif ClassificationFusion.mass_factor_combination[row][column] == 'vehstat':\n                    sum_intersection_statvehicle = sum_intersection_statvehicle + (\n                            sensor_classification_mass_factors[row] * global_classification_mass_factors[column])\n\n                elif ClassificationFusion.mass_factor_combination[row][column] == 'vrustat':\n                    
sum_intersection_statvru = sum_intersection_statvru + (\n sensor_classification_mass_factors[row] * global_classification_mass_factors[column])\n\n elif ClassificationFusion.mass_factor_combination[row][column] == 'all':\n sum_intersection_all = sum_intersection_all + (\n sensor_classification_mass_factors[row] * global_classification_mass_factors[column])\n\n elif ClassificationFusion.mass_factor_combination[row][column] == 'null':\n sum_intersection_null = sum_intersection_null + (\n sensor_classification_mass_factors[row] * global_classification_mass_factors[column])\n\n self.fused_mass_factor_car = (sum_intersection_car) / (1 - sum_intersection_null)\n self.fused_mass_factor_truck = (sum_intersection_truck) / (1 - sum_intersection_null)\n self.fused_mass_factor_motorcycle = (sum_intersection_motorcycle) / (1 - sum_intersection_null)\n self.fused_mass_factor_bicycle = (sum_intersection_bicycle) / (1 - sum_intersection_null)\n self.fused_mass_factor_pedestrian = (sum_intersection_pedestrian) / (1 - sum_intersection_null)\n self.fused_mass_factor_stationary = (sum_intersection_stationary) / (1 - sum_intersection_null)\n self.fused_mass_factor_vehicle = (sum_intersection_vehicle) / (1 - sum_intersection_null)\n self.fused_mass_factor_vru = (sum_intersection_vru) / (1 - sum_intersection_null)\n self.fused_mass_factor_traffic = (sum_intersection_traffic) / (1 - sum_intersection_null)\n self.fused_mass_factor_statvehicle = (sum_intersection_statvehicle) / (1 - sum_intersection_null)\n self.fused_mass_factor_statvru = (sum_intersection_statvru) / (1 - sum_intersection_null)\n self.fused_mass_factor_ignorance = (sum_intersection_all) / (1 - sum_intersection_null)\n\n self.fused_probability_car = (self.fused_mass_factor_car) + ((1 / 3) * self.fused_mass_factor_vehicle) + (\n (1 / 5) * self.fused_mass_factor_traffic) + ((1 / 4) * self.fused_mass_factor_statvehicle)\n self.fused_probability_truck = (self.fused_mass_factor_truck) + ((1 / 3) * self.fused_mass_factor_vehicle) + (\n (1 / 5) * self.fused_mass_factor_traffic) + ((1 / 4) * self.fused_mass_factor_statvehicle)\n self.fused_probability_motorcycle = (self.fused_mass_factor_motorcycle) + ((1 / 3) * self.fused_mass_factor_vehicle) + (\n (1 / 5) * self.fused_mass_factor_traffic) + ((1 / 4) * self.fused_mass_factor_statvehicle)\n self.fused_probability_bicycle = (self.fused_mass_factor_bicycle) + ((1 / 2) * self.fused_mass_factor_vru) + (\n (1 / 5) * self.fused_mass_factor_traffic) + ((1 / 3) * self.fused_mass_factor_statvru)\n self.fused_probability_pedestrian = (self.fused_mass_factor_pedestrian) + ((1 / 2) * self.fused_mass_factor_vru) + (\n (1 / 5) * self.fused_mass_factor_traffic) + ((1 / 3) * self.fused_mass_factor_statvru)\n self.fused_probability_stationary = (self.fused_mass_factor_stationary) + (\n (1 / 3) * self.fused_mass_factor_statvehicle) + ((1 / 3) * self.fused_mass_factor_statvru)\n self.fused_probability_other = 1 - (self.fused_probability_car + self.fused_probability_truck + self.fused_probability_motorcycle + self.fused_probability_bicycle + self.fused_probability_pedestrian + self.fused_probability_stationary)\n"
},
{
"alpha_fraction": 0.7986111044883728,
"alphanum_fraction": 0.7986111044883728,
"avg_line_length": 35,
"blob_id": "aba056a6c19189335f0274a0a639f770398ce24e",
"content_id": "738a99f5b49e2d6381b8616cfc202681767442b7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 288,
"license_type": "no_license",
"max_line_length": 114,
"num_lines": 8,
"path": "/build/object_list/CMakeFiles/_object_list_generate_messages_check_deps_ObjectsList.dir/cmake_clean.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "file(REMOVE_RECURSE\n \"CMakeFiles/_object_list_generate_messages_check_deps_ObjectsList\"\n)\n\n# Per-language clean rules from dependency scanning.\nforeach(lang )\n include(CMakeFiles/_object_list_generate_messages_check_deps_ObjectsList.dir/cmake_clean_${lang}.cmake OPTIONAL)\nendforeach()\n"
},
{
"alpha_fraction": 0.7946428656578064,
"alphanum_fraction": 0.8061224222183228,
"avg_line_length": 59.30769348144531,
"blob_id": "22b7f540dbc784c99ec8fae4830786796f6a5488",
"content_id": "6d3546a9b0c048e0242c360492cdc3edd7ce9303",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 784,
"license_type": "no_license",
"max_line_length": 113,
"num_lines": 13,
"path": "/build/osi3_bridge/CMakeFiles/osi3_bridge_generate_messages_nodejs.dir/cmake_clean.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "file(REMOVE_RECURSE\n \"CMakeFiles/osi3_bridge_generate_messages_nodejs\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/share/gennodejs/ros/osi3_bridge/msg/GroundTruthMovingObjects.js\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/share/gennodejs/ros/osi3_bridge/msg/MovingObject.js\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/share/gennodejs/ros/osi3_bridge/msg/Dimension3d.js\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/share/gennodejs/ros/osi3_bridge/msg/TrafficUpdateMovingObject.js\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/share/gennodejs/ros/osi3_bridge/msg/Orientation3d.js\"\n)\n\n# Per-language clean rules from dependency scanning.\nforeach(lang )\n include(CMakeFiles/osi3_bridge_generate_messages_nodejs.dir/cmake_clean_${lang}.cmake OPTIONAL)\nendforeach()\n"
},
{
"alpha_fraction": 0.7598726153373718,
"alphanum_fraction": 0.7598726153373718,
"avg_line_length": 40.31578826904297,
"blob_id": "0575481531bd1fee88f11d5fdb4c50c2dbf89e2c",
"content_id": "885f69960779f293d625719ba46b99fe9f97ac7c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 3140,
"license_type": "no_license",
"max_line_length": 90,
"num_lines": 76,
"path": "/build/osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/cmake_clean.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "file(REMOVE_RECURSE\n \"osi_version.pb.cc\"\n \"osi_version.pb.h\"\n \"osi_common.pb.cc\"\n \"osi_common.pb.h\"\n \"osi_datarecording.pb.cc\"\n \"osi_datarecording.pb.h\"\n \"osi_detectedtrafficsign.pb.cc\"\n \"osi_detectedtrafficsign.pb.h\"\n \"osi_detectedtrafficlight.pb.cc\"\n \"osi_detectedtrafficlight.pb.h\"\n \"osi_detectedroadmarking.pb.cc\"\n \"osi_detectedroadmarking.pb.h\"\n \"osi_detectedlane.pb.cc\"\n \"osi_detectedlane.pb.h\"\n \"osi_detectedobject.pb.cc\"\n \"osi_detectedobject.pb.h\"\n \"osi_detectedoccupant.pb.cc\"\n \"osi_detectedoccupant.pb.h\"\n \"osi_environment.pb.cc\"\n \"osi_environment.pb.h\"\n \"osi_groundtruth.pb.cc\"\n \"osi_groundtruth.pb.h\"\n \"osi_hostvehicledata.pb.cc\"\n \"osi_hostvehicledata.pb.h\"\n \"osi_trafficsign.pb.cc\"\n \"osi_trafficsign.pb.h\"\n \"osi_trafficlight.pb.cc\"\n \"osi_trafficlight.pb.h\"\n \"osi_roadmarking.pb.cc\"\n \"osi_roadmarking.pb.h\"\n \"osi_lane.pb.cc\"\n \"osi_lane.pb.h\"\n \"osi_featuredata.pb.cc\"\n \"osi_featuredata.pb.h\"\n \"osi_object.pb.cc\"\n \"osi_object.pb.h\"\n \"osi_occupant.pb.cc\"\n \"osi_occupant.pb.h\"\n \"osi_sensordata.pb.cc\"\n \"osi_sensordata.pb.h\"\n \"osi_sensorviewconfiguration.pb.cc\"\n \"osi_sensorviewconfiguration.pb.h\"\n \"osi_sensorspecific.pb.cc\"\n \"osi_sensorspecific.pb.h\"\n \"osi_sensorview.pb.cc\"\n \"osi_sensorview.pb.h\"\n \"CMakeFiles/open_simulation_interface_obj.dir/osi_version.pb.cc.o\"\n \"CMakeFiles/open_simulation_interface_obj.dir/osi_common.pb.cc.o\"\n \"CMakeFiles/open_simulation_interface_obj.dir/osi_datarecording.pb.cc.o\"\n \"CMakeFiles/open_simulation_interface_obj.dir/osi_detectedtrafficsign.pb.cc.o\"\n \"CMakeFiles/open_simulation_interface_obj.dir/osi_detectedtrafficlight.pb.cc.o\"\n \"CMakeFiles/open_simulation_interface_obj.dir/osi_detectedroadmarking.pb.cc.o\"\n \"CMakeFiles/open_simulation_interface_obj.dir/osi_detectedlane.pb.cc.o\"\n \"CMakeFiles/open_simulation_interface_obj.dir/osi_detectedobject.pb.cc.o\"\n \"CMakeFiles/open_simulation_interface_obj.dir/osi_detectedoccupant.pb.cc.o\"\n \"CMakeFiles/open_simulation_interface_obj.dir/osi_environment.pb.cc.o\"\n \"CMakeFiles/open_simulation_interface_obj.dir/osi_groundtruth.pb.cc.o\"\n \"CMakeFiles/open_simulation_interface_obj.dir/osi_hostvehicledata.pb.cc.o\"\n \"CMakeFiles/open_simulation_interface_obj.dir/osi_trafficsign.pb.cc.o\"\n \"CMakeFiles/open_simulation_interface_obj.dir/osi_trafficlight.pb.cc.o\"\n \"CMakeFiles/open_simulation_interface_obj.dir/osi_roadmarking.pb.cc.o\"\n \"CMakeFiles/open_simulation_interface_obj.dir/osi_lane.pb.cc.o\"\n \"CMakeFiles/open_simulation_interface_obj.dir/osi_featuredata.pb.cc.o\"\n \"CMakeFiles/open_simulation_interface_obj.dir/osi_object.pb.cc.o\"\n \"CMakeFiles/open_simulation_interface_obj.dir/osi_occupant.pb.cc.o\"\n \"CMakeFiles/open_simulation_interface_obj.dir/osi_sensordata.pb.cc.o\"\n \"CMakeFiles/open_simulation_interface_obj.dir/osi_sensorviewconfiguration.pb.cc.o\"\n \"CMakeFiles/open_simulation_interface_obj.dir/osi_sensorspecific.pb.cc.o\"\n \"CMakeFiles/open_simulation_interface_obj.dir/osi_sensorview.pb.cc.o\"\n)\n\n# Per-language clean rules from dependency scanning.\nforeach(lang CXX)\n include(CMakeFiles/open_simulation_interface_obj.dir/cmake_clean_${lang}.cmake OPTIONAL)\nendforeach()\n"
},
{
"alpha_fraction": 0.749187171459198,
"alphanum_fraction": 0.7566186785697937,
"avg_line_length": 54.20512771606445,
"blob_id": "fc80d109b1d33690ccf89cc575f38912dc632ca4",
"content_id": "9226ec218baba17fffe42d7a6dc3d2b79b2fd11c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 2153,
"license_type": "no_license",
"max_line_length": 137,
"num_lines": 39,
"path": "/build/osi3_bridge/open-simulation-interface/CMakeFiles/Export/lib/cmake/open_simulation_interface-3/open_simulation_interface_targets-release.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "#----------------------------------------------------------------\n# Generated CMake target import file for configuration \"Release\".\n#----------------------------------------------------------------\n\n# Commands may need to know the format version.\nset(CMAKE_IMPORT_FILE_VERSION 1)\n\n# Import target \"open_simulation_interface_static\" for configuration \"Release\"\nset_property(TARGET open_simulation_interface_static APPEND PROPERTY IMPORTED_CONFIGURATIONS RELEASE)\nset_target_properties(open_simulation_interface_static PROPERTIES\n IMPORTED_LINK_INTERFACE_LANGUAGES_RELEASE \"CXX\"\n IMPORTED_LOCATION_RELEASE \"${_IMPORT_PREFIX}/lib/osi3/libopen_simulation_interface_static.a\"\n )\n\nlist(APPEND _IMPORT_CHECK_TARGETS open_simulation_interface_static )\nlist(APPEND _IMPORT_CHECK_FILES_FOR_open_simulation_interface_static \"${_IMPORT_PREFIX}/lib/osi3/libopen_simulation_interface_static.a\" )\n\n# Import target \"open_simulation_interface_pic\" for configuration \"Release\"\nset_property(TARGET open_simulation_interface_pic APPEND PROPERTY IMPORTED_CONFIGURATIONS RELEASE)\nset_target_properties(open_simulation_interface_pic PROPERTIES\n IMPORTED_LINK_INTERFACE_LANGUAGES_RELEASE \"CXX\"\n IMPORTED_LOCATION_RELEASE \"${_IMPORT_PREFIX}/lib/osi3/libopen_simulation_interface_pic.a\"\n )\n\nlist(APPEND _IMPORT_CHECK_TARGETS open_simulation_interface_pic )\nlist(APPEND _IMPORT_CHECK_FILES_FOR_open_simulation_interface_pic \"${_IMPORT_PREFIX}/lib/osi3/libopen_simulation_interface_pic.a\" )\n\n# Import target \"open_simulation_interface\" for configuration \"Release\"\nset_property(TARGET open_simulation_interface APPEND PROPERTY IMPORTED_CONFIGURATIONS RELEASE)\nset_target_properties(open_simulation_interface PROPERTIES\n IMPORTED_LOCATION_RELEASE \"${_IMPORT_PREFIX}/lib/osi3/libopen_simulation_interface.so.3.0.1\"\n IMPORTED_SONAME_RELEASE \"libopen_simulation_interface.so.3.0.1\"\n )\n\nlist(APPEND _IMPORT_CHECK_TARGETS open_simulation_interface )\nlist(APPEND _IMPORT_CHECK_FILES_FOR_open_simulation_interface \"${_IMPORT_PREFIX}/lib/osi3/libopen_simulation_interface.so.3.0.1\" )\n\n# Commands beyond this point should not need to know the version.\nset(CMAKE_IMPORT_FILE_VERSION)\n"
},
{
"alpha_fraction": 0.688654363155365,
"alphanum_fraction": 0.7037094235420227,
"avg_line_length": 24.266666412353516,
"blob_id": "51dea47191a0efdb34eebde456c5a1d615edd8f4",
"content_id": "998b6eaa0cfbea07265013d5b9a714f216e10fe7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 6443,
"license_type": "no_license",
"max_line_length": 145,
"num_lines": 255,
"path": "/devel/include/object_list/Classification.h",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "// Generated by gencpp from file object_list/Classification.msg\n// DO NOT EDIT!\n\n\n#ifndef OBJECT_LIST_MESSAGE_CLASSIFICATION_H\n#define OBJECT_LIST_MESSAGE_CLASSIFICATION_H\n\n\n#include <string>\n#include <vector>\n#include <map>\n\n#include <ros/types.h>\n#include <ros/serialization.h>\n#include <ros/builtin_message_traits.h>\n#include <ros/message_operations.h>\n\n\nnamespace object_list\n{\ntemplate <class ContainerAllocator>\nstruct Classification_\n{\n typedef Classification_<ContainerAllocator> Type;\n\n Classification_()\n : car(0.0)\n , truck(0.0)\n , motorcycle(0.0)\n , bicycle(0.0)\n , pedestrian(0.0)\n , stacionary(0.0)\n , other(0.0) {\n }\n Classification_(const ContainerAllocator& _alloc)\n : car(0.0)\n , truck(0.0)\n , motorcycle(0.0)\n , bicycle(0.0)\n , pedestrian(0.0)\n , stacionary(0.0)\n , other(0.0) {\n (void)_alloc;\n }\n\n\n\n typedef float _car_type;\n _car_type car;\n\n typedef float _truck_type;\n _truck_type truck;\n\n typedef float _motorcycle_type;\n _motorcycle_type motorcycle;\n\n typedef float _bicycle_type;\n _bicycle_type bicycle;\n\n typedef float _pedestrian_type;\n _pedestrian_type pedestrian;\n\n typedef float _stacionary_type;\n _stacionary_type stacionary;\n\n typedef float _other_type;\n _other_type other;\n\n\n\n\n\n typedef boost::shared_ptr< ::object_list::Classification_<ContainerAllocator> > Ptr;\n typedef boost::shared_ptr< ::object_list::Classification_<ContainerAllocator> const> ConstPtr;\n\n}; // struct Classification_\n\ntypedef ::object_list::Classification_<std::allocator<void> > Classification;\n\ntypedef boost::shared_ptr< ::object_list::Classification > ClassificationPtr;\ntypedef boost::shared_ptr< ::object_list::Classification const> ClassificationConstPtr;\n\n// constants requiring out of line definition\n\n\n\ntemplate<typename ContainerAllocator>\nstd::ostream& operator<<(std::ostream& s, const ::object_list::Classification_<ContainerAllocator> & v)\n{\nros::message_operations::Printer< ::object_list::Classification_<ContainerAllocator> >::stream(s, \"\", v);\nreturn s;\n}\n\n\ntemplate<typename ContainerAllocator1, typename ContainerAllocator2>\nbool operator==(const ::object_list::Classification_<ContainerAllocator1> & lhs, const ::object_list::Classification_<ContainerAllocator2> & rhs)\n{\n return lhs.car == rhs.car &&\n lhs.truck == rhs.truck &&\n lhs.motorcycle == rhs.motorcycle &&\n lhs.bicycle == rhs.bicycle &&\n lhs.pedestrian == rhs.pedestrian &&\n lhs.stacionary == rhs.stacionary &&\n lhs.other == rhs.other;\n}\n\ntemplate<typename ContainerAllocator1, typename ContainerAllocator2>\nbool operator!=(const ::object_list::Classification_<ContainerAllocator1> & lhs, const ::object_list::Classification_<ContainerAllocator2> & rhs)\n{\n return !(lhs == rhs);\n}\n\n\n} // namespace object_list\n\nnamespace ros\n{\nnamespace message_traits\n{\n\n\n\n\n\ntemplate <class ContainerAllocator>\nstruct IsFixedSize< ::object_list::Classification_<ContainerAllocator> >\n : TrueType\n { };\n\ntemplate <class ContainerAllocator>\nstruct IsFixedSize< ::object_list::Classification_<ContainerAllocator> const>\n : TrueType\n { };\n\ntemplate <class ContainerAllocator>\nstruct IsMessage< ::object_list::Classification_<ContainerAllocator> >\n : TrueType\n { };\n\ntemplate <class ContainerAllocator>\nstruct IsMessage< ::object_list::Classification_<ContainerAllocator> const>\n : TrueType\n { };\n\ntemplate <class ContainerAllocator>\nstruct HasHeader< ::object_list::Classification_<ContainerAllocator> >\n : FalseType\n { 
};\n\ntemplate <class ContainerAllocator>\nstruct HasHeader< ::object_list::Classification_<ContainerAllocator> const>\n : FalseType\n { };\n\n\ntemplate<class ContainerAllocator>\nstruct MD5Sum< ::object_list::Classification_<ContainerAllocator> >\n{\n static const char* value()\n {\n return \"37b53ddc70d71a526ada035ab3f28e33\";\n }\n\n static const char* value(const ::object_list::Classification_<ContainerAllocator>&) { return value(); }\n static const uint64_t static_value1 = 0x37b53ddc70d71a52ULL;\n static const uint64_t static_value2 = 0x6ada035ab3f28e33ULL;\n};\n\ntemplate<class ContainerAllocator>\nstruct DataType< ::object_list::Classification_<ContainerAllocator> >\n{\n static const char* value()\n {\n return \"object_list/Classification\";\n }\n\n static const char* value(const ::object_list::Classification_<ContainerAllocator>&) { return value(); }\n};\n\ntemplate<class ContainerAllocator>\nstruct Definition< ::object_list::Classification_<ContainerAllocator> >\n{\n static const char* value()\n {\n return \"float32 car\\n\"\n\"float32 truck\\n\"\n\"float32 motorcycle\\n\"\n\"float32 bicycle\\n\"\n\"float32 pedestrian\\n\"\n\"float32 stacionary\\n\"\n\"float32 other\\n\"\n;\n }\n\n static const char* value(const ::object_list::Classification_<ContainerAllocator>&) { return value(); }\n};\n\n} // namespace message_traits\n} // namespace ros\n\nnamespace ros\n{\nnamespace serialization\n{\n\n template<class ContainerAllocator> struct Serializer< ::object_list::Classification_<ContainerAllocator> >\n {\n template<typename Stream, typename T> inline static void allInOne(Stream& stream, T m)\n {\n stream.next(m.car);\n stream.next(m.truck);\n stream.next(m.motorcycle);\n stream.next(m.bicycle);\n stream.next(m.pedestrian);\n stream.next(m.stacionary);\n stream.next(m.other);\n }\n\n ROS_DECLARE_ALLINONE_SERIALIZER\n }; // struct Classification_\n\n} // namespace serialization\n} // namespace ros\n\nnamespace ros\n{\nnamespace message_operations\n{\n\ntemplate<class ContainerAllocator>\nstruct Printer< ::object_list::Classification_<ContainerAllocator> >\n{\n template<typename Stream> static void stream(Stream& s, const std::string& indent, const ::object_list::Classification_<ContainerAllocator>& v)\n {\n s << indent << \"car: \";\n Printer<float>::stream(s, indent + \" \", v.car);\n s << indent << \"truck: \";\n Printer<float>::stream(s, indent + \" \", v.truck);\n s << indent << \"motorcycle: \";\n Printer<float>::stream(s, indent + \" \", v.motorcycle);\n s << indent << \"bicycle: \";\n Printer<float>::stream(s, indent + \" \", v.bicycle);\n s << indent << \"pedestrian: \";\n Printer<float>::stream(s, indent + \" \", v.pedestrian);\n s << indent << \"stacionary: \";\n Printer<float>::stream(s, indent + \" \", v.stacionary);\n s << indent << \"other: \";\n Printer<float>::stream(s, indent + \" \", v.other);\n }\n};\n\n} // namespace message_operations\n} // namespace ros\n\n#endif // OBJECT_LIST_MESSAGE_CLASSIFICATION_H\n"
},
{
"alpha_fraction": 0.7184750437736511,
"alphanum_fraction": 0.7243402004241943,
"avg_line_length": 41.6875,
"blob_id": "08c5f64485b94cdea337880f9f7f154a6bcdd8f6",
"content_id": "397fa2dd2ce4d7b8de1d3ad9e3a9d2879557c868",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 682,
"license_type": "no_license",
"max_line_length": 89,
"num_lines": 16,
"path": "/build/aeb/catkin_generated/package.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "set(_CATKIN_CURRENT_PACKAGE \"aeb\")\nset(aeb_VERSION \"0.0.0\")\nset(aeb_MAINTAINER \"drechsler <[email protected]>\")\nset(aeb_PACKAGE_FORMAT \"2\")\nset(aeb_BUILD_DEPENDS \"object_list\" \"roscpp\" \"rospy\" \"vehicle_control\" \"std_msgs\")\nset(aeb_BUILD_EXPORT_DEPENDS \"vehicle_control\" \"object_list\" \"roscpp\" \"rospy\" \"std_msgs\")\nset(aeb_BUILDTOOL_DEPENDS \"catkin\")\nset(aeb_BUILDTOOL_EXPORT_DEPENDS )\nset(aeb_EXEC_DEPENDS \"vehicle_control\" \"object_list\" \"roscpp\" \"rospy\" \"std_msgs\")\nset(aeb_RUN_DEPENDS \"vehicle_control\" \"object_list\" \"roscpp\" \"rospy\" \"std_msgs\")\nset(aeb_TEST_DEPENDS )\nset(aeb_DOC_DEPENDS )\nset(aeb_URL_WEBSITE \"\")\nset(aeb_URL_BUGTRACKER \"\")\nset(aeb_URL_REPOSITORY \"\")\nset(aeb_DEPRECATED \"\")"
},
{
"alpha_fraction": 0.8027397394180298,
"alphanum_fraction": 0.8027397394180298,
"avg_line_length": 39.55555725097656,
"blob_id": "12ea611da9371c3fe26384f1eac849d39eda37f3",
"content_id": "a776dd65b04fe41fa7d7e1897041d3dd5bd989ac",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 365,
"license_type": "no_license",
"max_line_length": 102,
"num_lines": 9,
"path": "/build/vehicle_control/CMakeFiles/vehicle_control_generate_messages_nodejs.dir/cmake_clean.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "file(REMOVE_RECURSE\n \"CMakeFiles/vehicle_control_generate_messages_nodejs\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/share/gennodejs/ros/vehicle_control/msg/Trajectory.js\"\n)\n\n# Per-language clean rules from dependency scanning.\nforeach(lang )\n include(CMakeFiles/vehicle_control_generate_messages_nodejs.dir/cmake_clean_${lang}.cmake OPTIONAL)\nendforeach()\n"
},
{
"alpha_fraction": 0.77105313539505,
"alphanum_fraction": 0.7724681496620178,
"avg_line_length": 47.027183532714844,
"blob_id": "a815620890470b1bff3e2dcc13ce45b9ab938a4d",
"content_id": "3b3f27deaf7f8b32e3300ee5a945e0bac99da002",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Makefile",
"length_bytes": 24735,
"license_type": "no_license",
"max_line_length": 247,
"num_lines": 515,
"path": "/build/object_list/Makefile",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "# CMAKE generated file: DO NOT EDIT!\n# Generated by \"Unix Makefiles\" Generator, CMake Version 3.10\n\n# Default target executed when no arguments are given to make.\ndefault_target: all\n\n.PHONY : default_target\n\n# Allow only one \"make -f Makefile2\" at a time, but pass parallelism.\n.NOTPARALLEL:\n\n\n#=============================================================================\n# Special targets provided by cmake.\n\n# Disable implicit rules so canonical targets will work.\n.SUFFIXES:\n\n\n# Remove some rules from gmake that .SUFFIXES does not remove.\nSUFFIXES =\n\n.SUFFIXES: .hpux_make_needs_suffix_list\n\n\n# Suppress display of executed commands.\n$(VERBOSE).SILENT:\n\n\n# A target that is always out of date.\ncmake_force:\n\n.PHONY : cmake_force\n\n#=============================================================================\n# Set environment variables for the build.\n\n# The shell in which to execute make rules.\nSHELL = /bin/sh\n\n# The CMake executable.\nCMAKE_COMMAND = /usr/bin/cmake\n\n# The command to remove a file.\nRM = /usr/bin/cmake -E remove -f\n\n# Escaping for special characters.\nEQUALS = =\n\n# The top-level source directory on which CMake was run.\nCMAKE_SOURCE_DIR = /home/student/Desktop/Redge_Thesis/vil/src\n\n# The top-level build directory on which CMake was run.\nCMAKE_BINARY_DIR = /home/student/Desktop/Redge_Thesis/vil/build\n\n#=============================================================================\n# Targets provided globally by CMake.\n\n# Special rule for the target install/strip\ninstall/strip: preinstall\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"Installing the project stripped...\"\n\t/usr/bin/cmake -DCMAKE_INSTALL_DO_STRIP=1 -P cmake_install.cmake\n.PHONY : install/strip\n\n# Special rule for the target install/strip\ninstall/strip/fast: preinstall/fast\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"Installing the project stripped...\"\n\t/usr/bin/cmake -DCMAKE_INSTALL_DO_STRIP=1 -P cmake_install.cmake\n.PHONY : install/strip/fast\n\n# Special rule for the target install/local\ninstall/local: preinstall\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"Installing only the local directory...\"\n\t/usr/bin/cmake -DCMAKE_INSTALL_LOCAL_ONLY=1 -P cmake_install.cmake\n.PHONY : install/local\n\n# Special rule for the target install/local\ninstall/local/fast: preinstall/fast\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"Installing only the local directory...\"\n\t/usr/bin/cmake -DCMAKE_INSTALL_LOCAL_ONLY=1 -P cmake_install.cmake\n.PHONY : install/local/fast\n\n# Special rule for the target install\ninstall: preinstall\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"Install the project...\"\n\t/usr/bin/cmake -P cmake_install.cmake\n.PHONY : install\n\n# Special rule for the target install\ninstall/fast: preinstall/fast\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"Install the project...\"\n\t/usr/bin/cmake -P cmake_install.cmake\n.PHONY : install/fast\n\n# Special rule for the target list_install_components\nlist_install_components:\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"Available install components are: \\\"Unspecified\\\" \\\"dev\\\" \\\"lib\\\"\"\n.PHONY : list_install_components\n\n# Special rule for the target list_install_components\nlist_install_components/fast: list_install_components\n\n.PHONY : list_install_components/fast\n\n# Special rule for the target 
edit_cache\nedit_cache:\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"No interactive CMake dialog available...\"\n\t/usr/bin/cmake -E echo No\\ interactive\\ CMake\\ dialog\\ available.\n.PHONY : edit_cache\n\n# Special rule for the target edit_cache\nedit_cache/fast: edit_cache\n\n.PHONY : edit_cache/fast\n\n# Special rule for the target test\ntest:\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"Running tests...\"\n\t/usr/bin/ctest --force-new-ctest-process $(ARGS)\n.PHONY : test\n\n# Special rule for the target test\ntest/fast: test\n\n.PHONY : test/fast\n\n# Special rule for the target rebuild_cache\nrebuild_cache:\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"Running CMake to regenerate build system...\"\n\t/usr/bin/cmake -H$(CMAKE_SOURCE_DIR) -B$(CMAKE_BINARY_DIR)\n.PHONY : rebuild_cache\n\n# Special rule for the target rebuild_cache\nrebuild_cache/fast: rebuild_cache\n\n.PHONY : rebuild_cache/fast\n\n# The main all target\nall: cmake_check_build_system\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(CMAKE_COMMAND) -E cmake_progress_start /home/student/Desktop/Redge_Thesis/vil/build/CMakeFiles /home/student/Desktop/Redge_Thesis/vil/build/object_list/CMakeFiles/progress.marks\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 object_list/all\n\t$(CMAKE_COMMAND) -E cmake_progress_start /home/student/Desktop/Redge_Thesis/vil/build/CMakeFiles 0\n.PHONY : all\n\n# The main clean target\nclean:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 object_list/clean\n.PHONY : clean\n\n# The main clean target\nclean/fast: clean\n\n.PHONY : clean/fast\n\n# Prepare targets for installation.\npreinstall: all\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 object_list/preinstall\n.PHONY : preinstall\n\n# Prepare targets for installation.\npreinstall/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 object_list/preinstall\n.PHONY : preinstall/fast\n\n# clear depends\ndepend:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(CMAKE_COMMAND) -H$(CMAKE_SOURCE_DIR) -B$(CMAKE_BINARY_DIR) --check-build-system CMakeFiles/Makefile.cmake 1\n.PHONY : depend\n\n# Convenience name for target.\nobject_list/CMakeFiles/object_list_generate_messages_py.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 object_list/CMakeFiles/object_list_generate_messages_py.dir/rule\n.PHONY : object_list/CMakeFiles/object_list_generate_messages_py.dir/rule\n\n# Convenience name for target.\nobject_list_generate_messages_py: object_list/CMakeFiles/object_list_generate_messages_py.dir/rule\n\n.PHONY : object_list_generate_messages_py\n\n# fast build rule for target.\nobject_list_generate_messages_py/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f object_list/CMakeFiles/object_list_generate_messages_py.dir/build.make object_list/CMakeFiles/object_list_generate_messages_py.dir/build\n.PHONY : object_list_generate_messages_py/fast\n\n# Convenience name for target.\nobject_list/CMakeFiles/_object_list_generate_messages_check_deps_ObjectList.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 object_list/CMakeFiles/_object_list_generate_messages_check_deps_ObjectList.dir/rule\n.PHONY : object_list/CMakeFiles/_object_list_generate_messages_check_deps_ObjectList.dir/rule\n\n# Convenience name for 
target.\n_object_list_generate_messages_check_deps_ObjectList: object_list/CMakeFiles/_object_list_generate_messages_check_deps_ObjectList.dir/rule\n\n.PHONY : _object_list_generate_messages_check_deps_ObjectList\n\n# fast build rule for target.\n_object_list_generate_messages_check_deps_ObjectList/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f object_list/CMakeFiles/_object_list_generate_messages_check_deps_ObjectList.dir/build.make object_list/CMakeFiles/_object_list_generate_messages_check_deps_ObjectList.dir/build\n.PHONY : _object_list_generate_messages_check_deps_ObjectList/fast\n\n# Convenience name for target.\nobject_list/CMakeFiles/_object_list_generate_messages_check_deps_SensorProperty.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 object_list/CMakeFiles/_object_list_generate_messages_check_deps_SensorProperty.dir/rule\n.PHONY : object_list/CMakeFiles/_object_list_generate_messages_check_deps_SensorProperty.dir/rule\n\n# Convenience name for target.\n_object_list_generate_messages_check_deps_SensorProperty: object_list/CMakeFiles/_object_list_generate_messages_check_deps_SensorProperty.dir/rule\n\n.PHONY : _object_list_generate_messages_check_deps_SensorProperty\n\n# fast build rule for target.\n_object_list_generate_messages_check_deps_SensorProperty/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f object_list/CMakeFiles/_object_list_generate_messages_check_deps_SensorProperty.dir/build.make object_list/CMakeFiles/_object_list_generate_messages_check_deps_SensorProperty.dir/build\n.PHONY : _object_list_generate_messages_check_deps_SensorProperty/fast\n\n# Convenience name for target.\nobject_list/CMakeFiles/_object_list_generate_messages_check_deps_ObjectsList.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 object_list/CMakeFiles/_object_list_generate_messages_check_deps_ObjectsList.dir/rule\n.PHONY : object_list/CMakeFiles/_object_list_generate_messages_check_deps_ObjectsList.dir/rule\n\n# Convenience name for target.\n_object_list_generate_messages_check_deps_ObjectsList: object_list/CMakeFiles/_object_list_generate_messages_check_deps_ObjectsList.dir/rule\n\n.PHONY : _object_list_generate_messages_check_deps_ObjectsList\n\n# fast build rule for target.\n_object_list_generate_messages_check_deps_ObjectsList/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f object_list/CMakeFiles/_object_list_generate_messages_check_deps_ObjectsList.dir/build.make object_list/CMakeFiles/_object_list_generate_messages_check_deps_ObjectsList.dir/build\n.PHONY : _object_list_generate_messages_check_deps_ObjectsList/fast\n\n# Convenience name for target.\nobject_list/CMakeFiles/_object_list_generate_messages_check_deps_Features.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 object_list/CMakeFiles/_object_list_generate_messages_check_deps_Features.dir/rule\n.PHONY : object_list/CMakeFiles/_object_list_generate_messages_check_deps_Features.dir/rule\n\n# Convenience name for target.\n_object_list_generate_messages_check_deps_Features: object_list/CMakeFiles/_object_list_generate_messages_check_deps_Features.dir/rule\n\n.PHONY : _object_list_generate_messages_check_deps_Features\n\n# fast build rule for target.\n_object_list_generate_messages_check_deps_Features/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f 
object_list/CMakeFiles/_object_list_generate_messages_check_deps_Features.dir/build.make object_list/CMakeFiles/_object_list_generate_messages_check_deps_Features.dir/build\n.PHONY : _object_list_generate_messages_check_deps_Features/fast\n\n# Convenience name for target.\nobject_list/CMakeFiles/object_list_generate_messages.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 object_list/CMakeFiles/object_list_generate_messages.dir/rule\n.PHONY : object_list/CMakeFiles/object_list_generate_messages.dir/rule\n\n# Convenience name for target.\nobject_list_generate_messages: object_list/CMakeFiles/object_list_generate_messages.dir/rule\n\n.PHONY : object_list_generate_messages\n\n# fast build rule for target.\nobject_list_generate_messages/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f object_list/CMakeFiles/object_list_generate_messages.dir/build.make object_list/CMakeFiles/object_list_generate_messages.dir/build\n.PHONY : object_list_generate_messages/fast\n\n# Convenience name for target.\nobject_list/CMakeFiles/_object_list_generate_messages_check_deps_Dimension.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 object_list/CMakeFiles/_object_list_generate_messages_check_deps_Dimension.dir/rule\n.PHONY : object_list/CMakeFiles/_object_list_generate_messages_check_deps_Dimension.dir/rule\n\n# Convenience name for target.\n_object_list_generate_messages_check_deps_Dimension: object_list/CMakeFiles/_object_list_generate_messages_check_deps_Dimension.dir/rule\n\n.PHONY : _object_list_generate_messages_check_deps_Dimension\n\n# fast build rule for target.\n_object_list_generate_messages_check_deps_Dimension/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f object_list/CMakeFiles/_object_list_generate_messages_check_deps_Dimension.dir/build.make object_list/CMakeFiles/_object_list_generate_messages_check_deps_Dimension.dir/build\n.PHONY : _object_list_generate_messages_check_deps_Dimension/fast\n\n# Convenience name for target.\nobject_list/CMakeFiles/object_list_genpy.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 object_list/CMakeFiles/object_list_genpy.dir/rule\n.PHONY : object_list/CMakeFiles/object_list_genpy.dir/rule\n\n# Convenience name for target.\nobject_list_genpy: object_list/CMakeFiles/object_list_genpy.dir/rule\n\n.PHONY : object_list_genpy\n\n# fast build rule for target.\nobject_list_genpy/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f object_list/CMakeFiles/object_list_genpy.dir/build.make object_list/CMakeFiles/object_list_genpy.dir/build\n.PHONY : object_list_genpy/fast\n\n# Convenience name for target.\nobject_list/CMakeFiles/_object_list_generate_messages_check_deps_Geometric.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 object_list/CMakeFiles/_object_list_generate_messages_check_deps_Geometric.dir/rule\n.PHONY : object_list/CMakeFiles/_object_list_generate_messages_check_deps_Geometric.dir/rule\n\n# Convenience name for target.\n_object_list_generate_messages_check_deps_Geometric: object_list/CMakeFiles/_object_list_generate_messages_check_deps_Geometric.dir/rule\n\n.PHONY : _object_list_generate_messages_check_deps_Geometric\n\n# fast build rule for target.\n_object_list_generate_messages_check_deps_Geometric/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f 
object_list/CMakeFiles/_object_list_generate_messages_check_deps_Geometric.dir/build.make object_list/CMakeFiles/_object_list_generate_messages_check_deps_Geometric.dir/build\n.PHONY : _object_list_generate_messages_check_deps_Geometric/fast\n\n# Convenience name for target.\nobject_list/CMakeFiles/_object_list_generate_messages_check_deps_EgoData.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 object_list/CMakeFiles/_object_list_generate_messages_check_deps_EgoData.dir/rule\n.PHONY : object_list/CMakeFiles/_object_list_generate_messages_check_deps_EgoData.dir/rule\n\n# Convenience name for target.\n_object_list_generate_messages_check_deps_EgoData: object_list/CMakeFiles/_object_list_generate_messages_check_deps_EgoData.dir/rule\n\n.PHONY : _object_list_generate_messages_check_deps_EgoData\n\n# fast build rule for target.\n_object_list_generate_messages_check_deps_EgoData/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f object_list/CMakeFiles/_object_list_generate_messages_check_deps_EgoData.dir/build.make object_list/CMakeFiles/_object_list_generate_messages_check_deps_EgoData.dir/build\n.PHONY : _object_list_generate_messages_check_deps_EgoData/fast\n\n# Convenience name for target.\nobject_list/CMakeFiles/object_list_generate_messages_cpp.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 object_list/CMakeFiles/object_list_generate_messages_cpp.dir/rule\n.PHONY : object_list/CMakeFiles/object_list_generate_messages_cpp.dir/rule\n\n# Convenience name for target.\nobject_list_generate_messages_cpp: object_list/CMakeFiles/object_list_generate_messages_cpp.dir/rule\n\n.PHONY : object_list_generate_messages_cpp\n\n# fast build rule for target.\nobject_list_generate_messages_cpp/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f object_list/CMakeFiles/object_list_generate_messages_cpp.dir/build.make object_list/CMakeFiles/object_list_generate_messages_cpp.dir/build\n.PHONY : object_list_generate_messages_cpp/fast\n\n# Convenience name for target.\nobject_list/CMakeFiles/_catkin_empty_exported_target.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 object_list/CMakeFiles/_catkin_empty_exported_target.dir/rule\n.PHONY : object_list/CMakeFiles/_catkin_empty_exported_target.dir/rule\n\n# Convenience name for target.\n_catkin_empty_exported_target: object_list/CMakeFiles/_catkin_empty_exported_target.dir/rule\n\n.PHONY : _catkin_empty_exported_target\n\n# fast build rule for target.\n_catkin_empty_exported_target/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f object_list/CMakeFiles/_catkin_empty_exported_target.dir/build.make object_list/CMakeFiles/_catkin_empty_exported_target.dir/build\n.PHONY : _catkin_empty_exported_target/fast\n\n# Convenience name for target.\nobject_list/CMakeFiles/object_list_generate_messages_lisp.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 object_list/CMakeFiles/object_list_generate_messages_lisp.dir/rule\n.PHONY : object_list/CMakeFiles/object_list_generate_messages_lisp.dir/rule\n\n# Convenience name for target.\nobject_list_generate_messages_lisp: object_list/CMakeFiles/object_list_generate_messages_lisp.dir/rule\n\n.PHONY : object_list_generate_messages_lisp\n\n# fast build rule for target.\nobject_list_generate_messages_lisp/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f 
object_list/CMakeFiles/object_list_generate_messages_lisp.dir/build.make object_list/CMakeFiles/object_list_generate_messages_lisp.dir/build\n.PHONY : object_list_generate_messages_lisp/fast\n\n# Convenience name for target.\nobject_list/CMakeFiles/object_list_generate_messages_nodejs.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 object_list/CMakeFiles/object_list_generate_messages_nodejs.dir/rule\n.PHONY : object_list/CMakeFiles/object_list_generate_messages_nodejs.dir/rule\n\n# Convenience name for target.\nobject_list_generate_messages_nodejs: object_list/CMakeFiles/object_list_generate_messages_nodejs.dir/rule\n\n.PHONY : object_list_generate_messages_nodejs\n\n# fast build rule for target.\nobject_list_generate_messages_nodejs/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f object_list/CMakeFiles/object_list_generate_messages_nodejs.dir/build.make object_list/CMakeFiles/object_list_generate_messages_nodejs.dir/build\n.PHONY : object_list_generate_messages_nodejs/fast\n\n# Convenience name for target.\nobject_list/CMakeFiles/object_list_gencpp.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 object_list/CMakeFiles/object_list_gencpp.dir/rule\n.PHONY : object_list/CMakeFiles/object_list_gencpp.dir/rule\n\n# Convenience name for target.\nobject_list_gencpp: object_list/CMakeFiles/object_list_gencpp.dir/rule\n\n.PHONY : object_list_gencpp\n\n# fast build rule for target.\nobject_list_gencpp/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f object_list/CMakeFiles/object_list_gencpp.dir/build.make object_list/CMakeFiles/object_list_gencpp.dir/build\n.PHONY : object_list_gencpp/fast\n\n# Convenience name for target.\nobject_list/CMakeFiles/_object_list_generate_messages_check_deps_Classification.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 object_list/CMakeFiles/_object_list_generate_messages_check_deps_Classification.dir/rule\n.PHONY : object_list/CMakeFiles/_object_list_generate_messages_check_deps_Classification.dir/rule\n\n# Convenience name for target.\n_object_list_generate_messages_check_deps_Classification: object_list/CMakeFiles/_object_list_generate_messages_check_deps_Classification.dir/rule\n\n.PHONY : _object_list_generate_messages_check_deps_Classification\n\n# fast build rule for target.\n_object_list_generate_messages_check_deps_Classification/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f object_list/CMakeFiles/_object_list_generate_messages_check_deps_Classification.dir/build.make object_list/CMakeFiles/_object_list_generate_messages_check_deps_Classification.dir/build\n.PHONY : _object_list_generate_messages_check_deps_Classification/fast\n\n# Convenience name for target.\nobject_list/CMakeFiles/object_list_generate_messages_eus.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 object_list/CMakeFiles/object_list_generate_messages_eus.dir/rule\n.PHONY : object_list/CMakeFiles/object_list_generate_messages_eus.dir/rule\n\n# Convenience name for target.\nobject_list_generate_messages_eus: object_list/CMakeFiles/object_list_generate_messages_eus.dir/rule\n\n.PHONY : object_list_generate_messages_eus\n\n# fast build rule for target.\nobject_list_generate_messages_eus/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f object_list/CMakeFiles/object_list_generate_messages_eus.dir/build.make 
object_list/CMakeFiles/object_list_generate_messages_eus.dir/build\n.PHONY : object_list_generate_messages_eus/fast\n\n# Convenience name for target.\nobject_list/CMakeFiles/object_list_gennodejs.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 object_list/CMakeFiles/object_list_gennodejs.dir/rule\n.PHONY : object_list/CMakeFiles/object_list_gennodejs.dir/rule\n\n# Convenience name for target.\nobject_list_gennodejs: object_list/CMakeFiles/object_list_gennodejs.dir/rule\n\n.PHONY : object_list_gennodejs\n\n# fast build rule for target.\nobject_list_gennodejs/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f object_list/CMakeFiles/object_list_gennodejs.dir/build.make object_list/CMakeFiles/object_list_gennodejs.dir/build\n.PHONY : object_list_gennodejs/fast\n\n# Convenience name for target.\nobject_list/CMakeFiles/object_list_geneus.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 object_list/CMakeFiles/object_list_geneus.dir/rule\n.PHONY : object_list/CMakeFiles/object_list_geneus.dir/rule\n\n# Convenience name for target.\nobject_list_geneus: object_list/CMakeFiles/object_list_geneus.dir/rule\n\n.PHONY : object_list_geneus\n\n# fast build rule for target.\nobject_list_geneus/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f object_list/CMakeFiles/object_list_geneus.dir/build.make object_list/CMakeFiles/object_list_geneus.dir/build\n.PHONY : object_list_geneus/fast\n\n# Convenience name for target.\nobject_list/CMakeFiles/object_list_genlisp.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 object_list/CMakeFiles/object_list_genlisp.dir/rule\n.PHONY : object_list/CMakeFiles/object_list_genlisp.dir/rule\n\n# Convenience name for target.\nobject_list_genlisp: object_list/CMakeFiles/object_list_genlisp.dir/rule\n\n.PHONY : object_list_genlisp\n\n# fast build rule for target.\nobject_list_genlisp/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f object_list/CMakeFiles/object_list_genlisp.dir/build.make object_list/CMakeFiles/object_list_genlisp.dir/build\n.PHONY : object_list_genlisp/fast\n\n# Help Target\nhelp:\n\t@echo \"The following are some of the valid targets for this Makefile:\"\n\t@echo \"... all (the default if no target is provided)\"\n\t@echo \"... clean\"\n\t@echo \"... depend\"\n\t@echo \"... install/strip\"\n\t@echo \"... install/local\"\n\t@echo \"... install\"\n\t@echo \"... list_install_components\"\n\t@echo \"... edit_cache\"\n\t@echo \"... test\"\n\t@echo \"... object_list_generate_messages_py\"\n\t@echo \"... _object_list_generate_messages_check_deps_ObjectList\"\n\t@echo \"... _object_list_generate_messages_check_deps_SensorProperty\"\n\t@echo \"... rebuild_cache\"\n\t@echo \"... _object_list_generate_messages_check_deps_ObjectsList\"\n\t@echo \"... _object_list_generate_messages_check_deps_Features\"\n\t@echo \"... object_list_generate_messages\"\n\t@echo \"... _object_list_generate_messages_check_deps_Dimension\"\n\t@echo \"... object_list_genpy\"\n\t@echo \"... _object_list_generate_messages_check_deps_Geometric\"\n\t@echo \"... _object_list_generate_messages_check_deps_EgoData\"\n\t@echo \"... object_list_generate_messages_cpp\"\n\t@echo \"... _catkin_empty_exported_target\"\n\t@echo \"... object_list_generate_messages_lisp\"\n\t@echo \"... object_list_generate_messages_nodejs\"\n\t@echo \"... object_list_gencpp\"\n\t@echo \"... 
_object_list_generate_messages_check_deps_Classification\"\n\t@echo \"... object_list_generate_messages_eus\"\n\t@echo \"... object_list_gennodejs\"\n\t@echo \"... object_list_geneus\"\n\t@echo \"... object_list_genlisp\"\n.PHONY : help\n\n\n\n#=============================================================================\n# Special targets to cleanup operation of make.\n\n# Special rule to run CMake to check the build system integrity.\n# No rule that depends on this can have commands that come from listfiles\n# because they might be regenerated.\ncmake_check_build_system:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(CMAKE_COMMAND) -H$(CMAKE_SOURCE_DIR) -B$(CMAKE_BINARY_DIR) --check-build-system CMakeFiles/Makefile.cmake 0\n.PHONY : cmake_check_build_system\n\n"
},
{
"alpha_fraction": 0.6533917188644409,
"alphanum_fraction": 0.6773273348808289,
"avg_line_length": 24.43523406982422,
"blob_id": "984dbd7063fe1710344290eee2aaa9b5dbffde50",
"content_id": "9a80e157f3df94cb9ee1aaf3db420be3a33aec80",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 9818,
"license_type": "no_license",
"max_line_length": 143,
"num_lines": 386,
"path": "/devel/include/osi3_bridge/MovingObject.h",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "// Generated by gencpp from file osi3_bridge/MovingObject.msg\n// DO NOT EDIT!\n\n\n#ifndef OSI3_BRIDGE_MESSAGE_MOVINGOBJECT_H\n#define OSI3_BRIDGE_MESSAGE_MOVINGOBJECT_H\n\n\n#include <string>\n#include <vector>\n#include <map>\n\n#include <ros/types.h>\n#include <ros/serialization.h>\n#include <ros/builtin_message_traits.h>\n#include <ros/message_operations.h>\n\n#include <osi3_bridge/Dimension3d.h>\n#include <geometry_msgs/Vector3.h>\n#include <osi3_bridge/Orientation3d.h>\n#include <geometry_msgs/Vector3.h>\n#include <geometry_msgs/Vector3.h>\n\nnamespace osi3_bridge\n{\ntemplate <class ContainerAllocator>\nstruct MovingObject_\n{\n typedef MovingObject_<ContainerAllocator> Type;\n\n MovingObject_()\n : id(0)\n , dimension()\n , position()\n , orientation()\n , velocity()\n , acceleration()\n , type(0) {\n }\n MovingObject_(const ContainerAllocator& _alloc)\n : id(0)\n , dimension(_alloc)\n , position(_alloc)\n , orientation(_alloc)\n , velocity(_alloc)\n , acceleration(_alloc)\n , type(0) {\n (void)_alloc;\n }\n\n\n\n typedef uint64_t _id_type;\n _id_type id;\n\n typedef ::osi3_bridge::Dimension3d_<ContainerAllocator> _dimension_type;\n _dimension_type dimension;\n\n typedef ::geometry_msgs::Vector3_<ContainerAllocator> _position_type;\n _position_type position;\n\n typedef ::osi3_bridge::Orientation3d_<ContainerAllocator> _orientation_type;\n _orientation_type orientation;\n\n typedef ::geometry_msgs::Vector3_<ContainerAllocator> _velocity_type;\n _velocity_type velocity;\n\n typedef ::geometry_msgs::Vector3_<ContainerAllocator> _acceleration_type;\n _acceleration_type acceleration;\n\n typedef uint8_t _type_type;\n _type_type type;\n\n\n\n// reducing the odds to have name collisions with Windows.h \n#if defined(_WIN32) && defined(TYPE_UNKNOWN)\n #undef TYPE_UNKNOWN\n#endif\n#if defined(_WIN32) && defined(TYPE_OTHER)\n #undef TYPE_OTHER\n#endif\n#if defined(_WIN32) && defined(TYPE_CAR)\n #undef TYPE_CAR\n#endif\n#if defined(_WIN32) && defined(TYPE_PEDESTRIAN)\n #undef TYPE_PEDESTRIAN\n#endif\n#if defined(_WIN32) && defined(TYPE_ANIMAL)\n #undef TYPE_ANIMAL\n#endif\n#if defined(_WIN32) && defined(TYPE_TRUCK)\n #undef TYPE_TRUCK\n#endif\n#if defined(_WIN32) && defined(TYPE_TRAILER)\n #undef TYPE_TRAILER\n#endif\n#if defined(_WIN32) && defined(TYPE_MOTORBIKE)\n #undef TYPE_MOTORBIKE\n#endif\n#if defined(_WIN32) && defined(TYPE_BICYCLE)\n #undef TYPE_BICYCLE\n#endif\n#if defined(_WIN32) && defined(TYPE_BUS)\n #undef TYPE_BUS\n#endif\n#if defined(_WIN32) && defined(TYPE_TRAM)\n #undef TYPE_TRAM\n#endif\n#if defined(_WIN32) && defined(TYPE_TRAIN)\n #undef TYPE_TRAIN\n#endif\n#if defined(_WIN32) && defined(TYPE_WHEELCHAIR)\n #undef TYPE_WHEELCHAIR\n#endif\n\n enum {\n TYPE_UNKNOWN = 0u,\n TYPE_OTHER = 1u,\n TYPE_CAR = 2u,\n TYPE_PEDESTRIAN = 3u,\n TYPE_ANIMAL = 4u,\n TYPE_TRUCK = 5u,\n TYPE_TRAILER = 6u,\n TYPE_MOTORBIKE = 7u,\n TYPE_BICYCLE = 8u,\n TYPE_BUS = 9u,\n TYPE_TRAM = 10u,\n TYPE_TRAIN = 11u,\n TYPE_WHEELCHAIR = 12u,\n };\n\n\n typedef boost::shared_ptr< ::osi3_bridge::MovingObject_<ContainerAllocator> > Ptr;\n typedef boost::shared_ptr< ::osi3_bridge::MovingObject_<ContainerAllocator> const> ConstPtr;\n\n}; // struct MovingObject_\n\ntypedef ::osi3_bridge::MovingObject_<std::allocator<void> > MovingObject;\n\ntypedef boost::shared_ptr< ::osi3_bridge::MovingObject > MovingObjectPtr;\ntypedef boost::shared_ptr< ::osi3_bridge::MovingObject const> MovingObjectConstPtr;\n\n// constants requiring out of line definition\n\n \n\n \n\n \n\n \n\n \n\n \n\n \n\n \n\n \n\n \n\n \n\n 
\n\n \n\n\n\ntemplate<typename ContainerAllocator>\nstd::ostream& operator<<(std::ostream& s, const ::osi3_bridge::MovingObject_<ContainerAllocator> & v)\n{\nros::message_operations::Printer< ::osi3_bridge::MovingObject_<ContainerAllocator> >::stream(s, \"\", v);\nreturn s;\n}\n\n\ntemplate<typename ContainerAllocator1, typename ContainerAllocator2>\nbool operator==(const ::osi3_bridge::MovingObject_<ContainerAllocator1> & lhs, const ::osi3_bridge::MovingObject_<ContainerAllocator2> & rhs)\n{\n return lhs.id == rhs.id &&\n lhs.dimension == rhs.dimension &&\n lhs.position == rhs.position &&\n lhs.orientation == rhs.orientation &&\n lhs.velocity == rhs.velocity &&\n lhs.acceleration == rhs.acceleration &&\n lhs.type == rhs.type;\n}\n\ntemplate<typename ContainerAllocator1, typename ContainerAllocator2>\nbool operator!=(const ::osi3_bridge::MovingObject_<ContainerAllocator1> & lhs, const ::osi3_bridge::MovingObject_<ContainerAllocator2> & rhs)\n{\n return !(lhs == rhs);\n}\n\n\n} // namespace osi3_bridge\n\nnamespace ros\n{\nnamespace message_traits\n{\n\n\n\n\n\ntemplate <class ContainerAllocator>\nstruct IsFixedSize< ::osi3_bridge::MovingObject_<ContainerAllocator> >\n : TrueType\n { };\n\ntemplate <class ContainerAllocator>\nstruct IsFixedSize< ::osi3_bridge::MovingObject_<ContainerAllocator> const>\n : TrueType\n { };\n\ntemplate <class ContainerAllocator>\nstruct IsMessage< ::osi3_bridge::MovingObject_<ContainerAllocator> >\n : TrueType\n { };\n\ntemplate <class ContainerAllocator>\nstruct IsMessage< ::osi3_bridge::MovingObject_<ContainerAllocator> const>\n : TrueType\n { };\n\ntemplate <class ContainerAllocator>\nstruct HasHeader< ::osi3_bridge::MovingObject_<ContainerAllocator> >\n : FalseType\n { };\n\ntemplate <class ContainerAllocator>\nstruct HasHeader< ::osi3_bridge::MovingObject_<ContainerAllocator> const>\n : FalseType\n { };\n\n\ntemplate<class ContainerAllocator>\nstruct MD5Sum< ::osi3_bridge::MovingObject_<ContainerAllocator> >\n{\n static const char* value()\n {\n return \"1d813c673962ef31735dd456446e05b5\";\n }\n\n static const char* value(const ::osi3_bridge::MovingObject_<ContainerAllocator>&) { return value(); }\n static const uint64_t static_value1 = 0x1d813c673962ef31ULL;\n static const uint64_t static_value2 = 0x735dd456446e05b5ULL;\n};\n\ntemplate<class ContainerAllocator>\nstruct DataType< ::osi3_bridge::MovingObject_<ContainerAllocator> >\n{\n static const char* value()\n {\n return \"osi3_bridge/MovingObject\";\n }\n\n static const char* value(const ::osi3_bridge::MovingObject_<ContainerAllocator>&) { return value(); }\n};\n\ntemplate<class ContainerAllocator>\nstruct Definition< ::osi3_bridge::MovingObject_<ContainerAllocator> >\n{\n static const char* value()\n {\n return \"uint64 id\\n\"\n\"osi3_bridge/Dimension3d dimension\\n\"\n\"geometry_msgs/Vector3 position\\n\"\n\"osi3_bridge/Orientation3d orientation\\n\"\n\"geometry_msgs/Vector3 velocity\\n\"\n\"geometry_msgs/Vector3 acceleration\\n\"\n\"uint8 type\\n\"\n\"\\n\"\n\"uint8 TYPE_UNKNOWN = 0\\n\"\n\"uint8 TYPE_OTHER = 1\\n\"\n\"uint8 TYPE_CAR = 2\\n\"\n\"uint8 TYPE_PEDESTRIAN = 3\\n\"\n\"uint8 TYPE_ANIMAL = 4\\n\"\n\"uint8 TYPE_TRUCK = 5\\n\"\n\"uint8 TYPE_TRAILER = 6\\n\"\n\"uint8 TYPE_MOTORBIKE = 7\\n\"\n\"uint8 TYPE_BICYCLE = 8\\n\"\n\"uint8 TYPE_BUS = 9\\n\"\n\"uint8 TYPE_TRAM = 10\\n\"\n\"uint8 TYPE_TRAIN = 11\\n\"\n\"uint8 TYPE_WHEELCHAIR = 12\\n\"\n\"\\n\"\n\"\\n\"\n\"================================================================================\\n\"\n\"MSG: osi3_bridge/Dimension3d\\n\"\n\"float64 
length\\n\"\n\"float64 width\\n\"\n\"float64 height\\n\"\n\"\\n\"\n\"================================================================================\\n\"\n\"MSG: geometry_msgs/Vector3\\n\"\n\"# This represents a vector in free space. \\n\"\n\"# It is only meant to represent a direction. Therefore, it does not\\n\"\n\"# make sense to apply a translation to it (e.g., when applying a \\n\"\n\"# generic rigid transformation to a Vector3, tf2 will only apply the\\n\"\n\"# rotation). If you want your data to be translatable too, use the\\n\"\n\"# geometry_msgs/Point message instead.\\n\"\n\"\\n\"\n\"float64 x\\n\"\n\"float64 y\\n\"\n\"float64 z\\n\"\n\"================================================================================\\n\"\n\"MSG: osi3_bridge/Orientation3d\\n\"\n\"float64 roll\\n\"\n\"float64 pitch\\n\"\n\"float64 yaw\\n\"\n;\n }\n\n static const char* value(const ::osi3_bridge::MovingObject_<ContainerAllocator>&) { return value(); }\n};\n\n} // namespace message_traits\n} // namespace ros\n\nnamespace ros\n{\nnamespace serialization\n{\n\n template<class ContainerAllocator> struct Serializer< ::osi3_bridge::MovingObject_<ContainerAllocator> >\n {\n template<typename Stream, typename T> inline static void allInOne(Stream& stream, T m)\n {\n stream.next(m.id);\n stream.next(m.dimension);\n stream.next(m.position);\n stream.next(m.orientation);\n stream.next(m.velocity);\n stream.next(m.acceleration);\n stream.next(m.type);\n }\n\n ROS_DECLARE_ALLINONE_SERIALIZER\n }; // struct MovingObject_\n\n} // namespace serialization\n} // namespace ros\n\nnamespace ros\n{\nnamespace message_operations\n{\n\ntemplate<class ContainerAllocator>\nstruct Printer< ::osi3_bridge::MovingObject_<ContainerAllocator> >\n{\n template<typename Stream> static void stream(Stream& s, const std::string& indent, const ::osi3_bridge::MovingObject_<ContainerAllocator>& v)\n {\n s << indent << \"id: \";\n Printer<uint64_t>::stream(s, indent + \" \", v.id);\n s << indent << \"dimension: \";\n s << std::endl;\n Printer< ::osi3_bridge::Dimension3d_<ContainerAllocator> >::stream(s, indent + \" \", v.dimension);\n s << indent << \"position: \";\n s << std::endl;\n Printer< ::geometry_msgs::Vector3_<ContainerAllocator> >::stream(s, indent + \" \", v.position);\n s << indent << \"orientation: \";\n s << std::endl;\n Printer< ::osi3_bridge::Orientation3d_<ContainerAllocator> >::stream(s, indent + \" \", v.orientation);\n s << indent << \"velocity: \";\n s << std::endl;\n Printer< ::geometry_msgs::Vector3_<ContainerAllocator> >::stream(s, indent + \" \", v.velocity);\n s << indent << \"acceleration: \";\n s << std::endl;\n Printer< ::geometry_msgs::Vector3_<ContainerAllocator> >::stream(s, indent + \" \", v.acceleration);\n s << indent << \"type: \";\n Printer<uint8_t>::stream(s, indent + \" \", v.type);\n }\n};\n\n} // namespace message_operations\n} // namespace ros\n\n#endif // OSI3_BRIDGE_MESSAGE_MOVINGOBJECT_H\n"
},
{
"alpha_fraction": 0.7768595218658447,
"alphanum_fraction": 0.7851239442825317,
"avg_line_length": 29.25,
"blob_id": "55ea8aa48959315499bfad238282af60f54894e9",
"content_id": "1451b3e5061b398abe3e652b2f2cb8196dc13ae9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 242,
"license_type": "no_license",
"max_line_length": 91,
"num_lines": 8,
"path": "/build/osi3_bridge/CMakeFiles/clean_test_results_osi3_bridge.dir/cmake_clean.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "file(REMOVE_RECURSE\n \"CMakeFiles/clean_test_results_osi3_bridge\"\n)\n\n# Per-language clean rules from dependency scanning.\nforeach(lang )\n include(CMakeFiles/clean_test_results_osi3_bridge.dir/cmake_clean_${lang}.cmake OPTIONAL)\nendforeach()\n"
},
{
"alpha_fraction": 0.7952127456665039,
"alphanum_fraction": 0.7952127456665039,
"avg_line_length": 40.77777862548828,
"blob_id": "9a6a84f985d01fc38a13a3dfcbf727333e54cd94",
"content_id": "cb1fff015e7049495d2e195550aca516ba58a93d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 376,
"license_type": "no_license",
"max_line_length": 90,
"num_lines": 9,
"path": "/build/osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_pic.dir/cmake_clean.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "file(REMOVE_RECURSE\n \"/home/student/Desktop/Redge_Thesis/vil/devel/lib/libopen_simulation_interface_pic.pdb\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/lib/libopen_simulation_interface_pic.a\"\n)\n\n# Per-language clean rules from dependency scanning.\nforeach(lang CXX)\n include(CMakeFiles/open_simulation_interface_pic.dir/cmake_clean_${lang}.cmake OPTIONAL)\nendforeach()\n"
},
{
"alpha_fraction": 0.7964601516723633,
"alphanum_fraction": 0.7964601516723633,
"avg_line_length": 36.66666793823242,
"blob_id": "38f4de13033d0b277f1c1c76719ce99cb8c0c05a",
"content_id": "78dd6cb1b2cb47f9a2097dd8ab714547b41facc9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 113,
"license_type": "no_license",
"max_line_length": 90,
"num_lines": 3,
"path": "/build/osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/cmake_clean_target.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "file(REMOVE_RECURSE\n \"/home/student/Desktop/Redge_Thesis/vil/devel/lib/libopen_simulation_interface_static.a\"\n)\n"
},
{
"alpha_fraction": 0.5988099575042725,
"alphanum_fraction": 0.6159394383430481,
"avg_line_length": 34.5512809753418,
"blob_id": "ba0de1df46155e66657c08ac5b1ad50a5ab16751",
"content_id": "948ad25f1702aab4f0e476b7d2c8eee08f14e1cb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5546,
"license_type": "no_license",
"max_line_length": 144,
"num_lines": 156,
"path": "/src/aeb/script/aeb_test.py",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\nimport rospy\nimport message_filters\nimport numpy as np\nimport math\nimport time\n\n# import ROS messages\nfrom object_list.msg import ObjectsList\nfrom osi3_bridge.msg import TrafficUpdateMovingObject\nfrom vehicle_control.msg import Trajectory\n\n# import class with aeb parameters\nfrom ClassAeb import Aeb\n\ndef aeb():\n\n # Node initialization\n rospy.init_node('aeb', anonymous=False) # Start node\n rate = rospy.Rate(25) # Define the node frequency 100hz\n time.sleep(5)\n print(\"Hallo\")\n rospy.Subscriber(\"/ego_data\", TrafficUpdateMovingObject, callback)\n rospy.spin() # spin() simply keeps python from exiting until this node is stopped\n\ndef callback(ego):\n\n Traj = Trajectory()\n Traj.header.stamp = rospy.Time.now()\n\n des_vel = 6\n ### final_time = rospy.get_param(\"final_time\") # 2 s ## amount of future time which data is sent to the controller####\n final_time = 4\n ### time_step = rospy.get_param(\"time_step\") # 200 ## Time step in seconds##\n time_step = 0.04 #s\n amount_data = int(final_time/time_step)\n\n Yaw = rospy.get_param(\"Veh_yaw\")*math.pi/180\n\n ## Considering just longitudinal movement\n #Traj.yaw = np.full((amount_data), ego.object.orientation.yaw)\n Traj.yaw = np.full((amount_data), Yaw)\n\n ## Fullfil Trajectory time with header time + steps\n Timesteparray = np.linspace(time_step, final_time, num=amount_data)\n Timenow = np.full(amount_data, ego.header.stamp.secs)\n Traj.time = Timesteparray + Timenow\n #Traj.time = Timesteparray\n\n #print(obj_list.obj_list[near_obj].geometric.vx)\n ## Calculate AEB\n #if near_obj != 9999:\n # [aeb,reldist] = calculate_aeb(egovx, obj_list.obj_list[near_obj])\n ## Definition of actual condition\n #if (abs(aeb.ttc) < aeb.stoptime.stage3) and (aeb.ttc < 0):\n # print(\"Stage 3 is on\")\n # vel_aux = velocity_calculation(egovx,aeb_data.acc.stage3,time_step,amount_data)\n #elif (abs(aeb.ttc) < aeb.stoptime.stage2) and (aeb.ttc < 0):\n # print(\"Stage 2 is on\")\n # vel_aux = velocity_calculation(egovx,aeb_data.acc.stage2,time_step,amount_data)\n #elif (abs(aeb.ttc) < aeb.stoptime.stage1) and (aeb.ttc < 0):\n # print(\"Stage 1 is on\")\n # vel_aux = velocity_calculation(egovx,aeb_data.acc.stage1,time_step,amount_data)\n #elif (abs(aeb.ttc) < aeb.stoptime.fw) and (aeb.ttc < 0):\n # print(\"FW is on\")\n # vel_aux = np.full(amount_data+1, des_vel)\n \n \n \n #if (abs(aeb.ttc) < aeb.stoptime.fw) and (aeb.ttc < 0):\n # print (\"Stop\")\n # vel_aux = np.full(amount_data+1,0)\n\n #elif aeb.offset >= abs(reldist) and obj_list.obj_list[near_obj].geometric.vx <=0:\n # print (\"Stop\")\n # vel_aux = np.full(amount_data + 1,0)\n # #print (\"Following\")\n #vel_aux = np.full(amount_data+1,obj_list.obj_list[near_obj].geometric.vx-egovx)\n if ego.header.seq >300:\n print('AEB is on')\n vel_aux = np.full(amount_data+1,0.0)\n\n else:\n print(\"AEB is off\")\n ## keep the expected velocity\n vel_aux = np.full(amount_data + 1, des_vel)\n\n egox = np.full(amount_data, ego.object.position.x)\n egoy = np.full(amount_data, ego.object.position.y)\n Traj = position_calculation(time_step, vel_aux, Traj,egox,egoy)\n Traj.v = vel_aux[0:amount_data-1]\n\n #return Traj\n #print(Traj)\n pub = rospy.Publisher('trajectory', Trajectory, queue_size=10,latch=True)\n pub.publish(Traj)\n #rate = rospy.Rate(25) # Define the node frequency 100hz\n #rate.sleep()\n\ndef position_calculation(step, vel, Traj, egox,egoy):\n pos = np.zeros(len(vel)-1)\n for i in range (1,len(vel)-1):\n pos[i] = pos[i-1] + (vel[i]+vel[i+1]) * 0.5 * 
step\n\n #Traj.x = egox+pos*math.cos(Traj.yaw[0])\n #Traj.y = egoy-pos*math.sin(Traj.yaw[0])\n Traj.x = pos*math.cos(Traj.yaw[0]) #####################\n Traj.y = pos*math.sin(Traj.yaw[0]) #####################\n\n return(Traj)\n\n\ndef velocity_calculation(egovx,acc,step,amount_data):\n v=egovx\n i=0\n vel = np.zeros(amount_data+1)\n while v >= 0 and (i) < amount_data:\n vel[i] = v\n v = v - acc * step\n i = i+1\n return(vel)\n\n\ndef find_target(obj_list):\n lat_rang = rospy.get_param(\"lat_rang\") #1.5 ## Lateral range of evaluation [m]\n near_x = 9999\n near_obj = 9999\n for i in range(len(obj_list.obj_list)):\n if abs(obj_list.obj_list[i].geometric.y) < lat_rang: ################# Lateral range of evaluation\n if obj_list.obj_list[i].geometric.x < near_x:\n near_x = obj_list.obj_list[i].geometric.x\n near_obj = i\n return near_obj\n\ndef calculate_aeb (egovx,obj):\n aeb=Aeb()\n rel_velx = obj.geometric.vx\n reldist = obj.geometric.x - abs(obj.dimension.length * math.cos(obj.geometric.yaw)) - abs(obj.dimension.width * math.sin(obj.geometric.yaw))\n #print(reldist)\n #ttc = distance - offset / relative velocity\n aeb.ttc = safe_div(reldist-aeb.offset, abs(rel_velx)) * safe_div(rel_velx, abs(rel_velx))\n aeb.stoptime.fw = egovx/aeb.acc.fw + aeb.react.driver\n aeb.stoptime.stage1 = egovx / aeb.acc.stage1 + aeb.react.system\n aeb.stoptime.stage2 = egovx / aeb.acc.stage2 + aeb.react.system\n aeb.stoptime.stage3 = egovx / aeb.acc.stage3 + aeb.react.system\n\n return [aeb,reldist]\n\ndef safe_div(x,y):\n try:\n return x/y\n except ZeroDivisionError:\n return 9999\n\nif __name__ == '__main__':\n aeb()\n"
},
{
"alpha_fraction": 0.7269230484962463,
"alphanum_fraction": 0.7525641322135925,
"avg_line_length": 47.8125,
"blob_id": "6fcb83d8cd1def86933b45c5ee7605d3c2bda6aa",
"content_id": "cb4bce35c3a0da2b3f92218b956aa0689b71cb52",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 780,
"license_type": "no_license",
"max_line_length": 99,
"num_lines": 16,
"path": "/build/osi3_bridge/catkin_generated/package.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "set(_CATKIN_CURRENT_PACKAGE \"osi3_bridge\")\nset(osi3_bridge_VERSION \"0.0.0\")\nset(osi3_bridge_MAINTAINER \"Georg Seifert <[email protected]>\")\nset(osi3_bridge_PACKAGE_FORMAT \"2\")\nset(osi3_bridge_BUILD_DEPENDS \"geometry_msgs\" \"roscpp\" \"std_msgs\" \"message_generation\" \"roslaunch\")\nset(osi3_bridge_BUILD_EXPORT_DEPENDS \"geometry_msgs\" \"roscpp\" \"std_msgs\")\nset(osi3_bridge_BUILDTOOL_DEPENDS \"catkin\")\nset(osi3_bridge_BUILDTOOL_EXPORT_DEPENDS )\nset(osi3_bridge_EXEC_DEPENDS \"geometry_msgs\" \"roscpp\" \"std_msgs\" \"message_runtime\")\nset(osi3_bridge_RUN_DEPENDS \"geometry_msgs\" \"roscpp\" \"std_msgs\" \"message_runtime\")\nset(osi3_bridge_TEST_DEPENDS )\nset(osi3_bridge_DOC_DEPENDS )\nset(osi3_bridge_URL_WEBSITE \"\")\nset(osi3_bridge_URL_BUGTRACKER \"\")\nset(osi3_bridge_URL_REPOSITORY \"\")\nset(osi3_bridge_DEPRECATED \"\")"
},
{
"alpha_fraction": 0.6530232429504395,
"alphanum_fraction": 0.6716278791427612,
"avg_line_length": 25.564102172851562,
"blob_id": "9ea6b2508aa17c46fab2f7748664110433f70b20",
"content_id": "b5e662c6fea545bfb5b59eb67bf752c65fd16706",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 2150,
"license_type": "no_license",
"max_line_length": 100,
"num_lines": 78,
"path": "/src/osi3_bridge/src/osi_protocol_header.c",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "#include \"osi_protocol_header.h\"\r\n\r\nsize_t osiph_get_header_size()\r\n{\r\n return sizeof(osi_protocol_header);\r\n}\r\n\r\nvoid osiph_set_magic_id(void * header)\r\n{\r\n ((osi_protocol_header*)header)->magic_id = SWAP_64(OSI_PROTOCOL_HEADER_MAGIC_ID);\r\n}\r\n\r\nbool osiph_check_magic_id(void * header)\r\n{\r\n return SWAP_64(((osi_protocol_header*)header)->magic_id) == OSI_PROTOCOL_HEADER_MAGIC_ID;\r\n}\r\n\r\nvoid osiph_set_protocol_version(void * header)\r\n{\r\n ((osi_protocol_header*)header)->protocol_version = SWAP_32(OSI_PROTOCOL_HEADER_VERSION);\r\n}\r\n\r\nuint32_t osiph_get_protocol_version(void * header)\r\n{\r\n return SWAP_32(((osi_protocol_header*)header)->protocol_version);\r\n}\r\n\r\nbool osiph_check_protocol_version(void * header)\r\n{\r\n return SWAP_32(((osi_protocol_header*)header)->protocol_version) == OSI_PROTOCOL_HEADER_VERSION;\r\n}\r\n\r\nvoid osiph_set_osi_version(void * header, uint16_t major, uint8_t minor, uint8_t patch)\r\n{\r\n ((osi_protocol_header*)header)->osi_version_major = SWAP_16(major);\r\n ((osi_protocol_header*)header)->osi_version_minor = minor;\r\n ((osi_protocol_header*)header)->osi_version_patch = patch;\r\n}\r\n\r\nuint16_t osiph_get_osi_version_major(void * header)\r\n{\r\n return SWAP_16(((osi_protocol_header*)header)->osi_version_major);\r\n}\r\n\r\nuint8_t osiph_get_osi_version_minor(void * header)\r\n{\r\n return ((osi_protocol_header*)header)->osi_version_minor;\r\n}\r\n\r\nuint8_t osiph_get_osi_version_patch(void * header)\r\n{\r\n return ((osi_protocol_header*)header)->osi_version_patch;\r\n}\r\n\r\nvoid osiph_set_payload_size(void * header, uint32_t size)\r\n{\r\n ((osi_protocol_header*)header)->payload_size = SWAP_32(size);\r\n}\r\n\r\nuint32_t osiph_get_payload_size(void * header)\r\n{\r\n return SWAP_32(((osi_protocol_header*)header)->payload_size);\r\n}\r\n\r\nvoid osiph_set_payload_type(void * header, uint32_t type)\r\n{\r\n ((osi_protocol_header*)header)->payload_type = SWAP_32(type);\r\n}\r\n\r\nuint32_t osiph_get_payload_type(void * header)\r\n{\r\n return SWAP_32(((osi_protocol_header*)header)->payload_type);\r\n}\r\n\r\nvoid * osiph_get_payload(void * header)\r\n{\r\n return (void *)(((uintptr_t) header) + sizeof(osi_protocol_header));\r\n}\r\n"
},
{
"alpha_fraction": 0.7872340679168701,
"alphanum_fraction": 0.7872340679168701,
"avg_line_length": 46,
"blob_id": "ed46ba18a5116fba9043e4879982054491b77c8a",
"content_id": "c8a4444c97c93f2b20471f7edda7616fef639477",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 94,
"license_type": "no_license",
"max_line_length": 55,
"num_lines": 2,
"path": "/build/vehicle_control/catkin_generated/installspace/vehicle_control-msg-extras.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "set(vehicle_control_MESSAGE_FILES \"msg/Trajectory.msg\")\nset(vehicle_control_SERVICE_FILES \"\")\n"
},
{
"alpha_fraction": 0.7909091114997864,
"alphanum_fraction": 0.7909091114997864,
"avg_line_length": 35.66666793823242,
"blob_id": "ecb5b614930503a0b60c1479a030770ca4a6e6d2",
"content_id": "af04f355e06d6b06665706e20d8c59294cdc5654",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 110,
"license_type": "no_license",
"max_line_length": 87,
"num_lines": 3,
"path": "/build/osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_pic.dir/cmake_clean_target.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "file(REMOVE_RECURSE\n \"/home/student/Desktop/Redge_Thesis/vil/devel/lib/libopen_simulation_interface_pic.a\"\n)\n"
},
{
"alpha_fraction": 0.6492243409156799,
"alphanum_fraction": 0.6672840714454651,
"avg_line_length": 26.422222137451172,
"blob_id": "c54885bd8f3d52d2074a03547bbca41dd0cab59d",
"content_id": "1022694d31e2ea4d09c53a50d7ec571a660a41c9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 8638,
"license_type": "no_license",
"max_line_length": 193,
"num_lines": 315,
"path": "/devel/include/object_list/ObjectsList.h",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "// Generated by gencpp from file object_list/ObjectsList.msg\n// DO NOT EDIT!\n\n\n#ifndef OBJECT_LIST_MESSAGE_OBJECTSLIST_H\n#define OBJECT_LIST_MESSAGE_OBJECTSLIST_H\n\n\n#include <string>\n#include <vector>\n#include <map>\n\n#include <ros/types.h>\n#include <ros/serialization.h>\n#include <ros/builtin_message_traits.h>\n#include <ros/message_operations.h>\n\n#include <std_msgs/Header.h>\n#include <object_list/ObjectList.h>\n#include <object_list/SensorProperty.h>\n\nnamespace object_list\n{\ntemplate <class ContainerAllocator>\nstruct ObjectsList_\n{\n typedef ObjectsList_<ContainerAllocator> Type;\n\n ObjectsList_()\n : header()\n , obj_list()\n , sensor_property() {\n }\n ObjectsList_(const ContainerAllocator& _alloc)\n : header(_alloc)\n , obj_list(_alloc)\n , sensor_property(_alloc) {\n (void)_alloc;\n }\n\n\n\n typedef ::std_msgs::Header_<ContainerAllocator> _header_type;\n _header_type header;\n\n typedef std::vector< ::object_list::ObjectList_<ContainerAllocator> , typename ContainerAllocator::template rebind< ::object_list::ObjectList_<ContainerAllocator> >::other > _obj_list_type;\n _obj_list_type obj_list;\n\n typedef ::object_list::SensorProperty_<ContainerAllocator> _sensor_property_type;\n _sensor_property_type sensor_property;\n\n\n\n\n\n typedef boost::shared_ptr< ::object_list::ObjectsList_<ContainerAllocator> > Ptr;\n typedef boost::shared_ptr< ::object_list::ObjectsList_<ContainerAllocator> const> ConstPtr;\n\n}; // struct ObjectsList_\n\ntypedef ::object_list::ObjectsList_<std::allocator<void> > ObjectsList;\n\ntypedef boost::shared_ptr< ::object_list::ObjectsList > ObjectsListPtr;\ntypedef boost::shared_ptr< ::object_list::ObjectsList const> ObjectsListConstPtr;\n\n// constants requiring out of line definition\n\n\n\ntemplate<typename ContainerAllocator>\nstd::ostream& operator<<(std::ostream& s, const ::object_list::ObjectsList_<ContainerAllocator> & v)\n{\nros::message_operations::Printer< ::object_list::ObjectsList_<ContainerAllocator> >::stream(s, \"\", v);\nreturn s;\n}\n\n\ntemplate<typename ContainerAllocator1, typename ContainerAllocator2>\nbool operator==(const ::object_list::ObjectsList_<ContainerAllocator1> & lhs, const ::object_list::ObjectsList_<ContainerAllocator2> & rhs)\n{\n return lhs.header == rhs.header &&\n lhs.obj_list == rhs.obj_list &&\n lhs.sensor_property == rhs.sensor_property;\n}\n\ntemplate<typename ContainerAllocator1, typename ContainerAllocator2>\nbool operator!=(const ::object_list::ObjectsList_<ContainerAllocator1> & lhs, const ::object_list::ObjectsList_<ContainerAllocator2> & rhs)\n{\n return !(lhs == rhs);\n}\n\n\n} // namespace object_list\n\nnamespace ros\n{\nnamespace message_traits\n{\n\n\n\n\n\ntemplate <class ContainerAllocator>\nstruct IsFixedSize< ::object_list::ObjectsList_<ContainerAllocator> >\n : FalseType\n { };\n\ntemplate <class ContainerAllocator>\nstruct IsFixedSize< ::object_list::ObjectsList_<ContainerAllocator> const>\n : FalseType\n { };\n\ntemplate <class ContainerAllocator>\nstruct IsMessage< ::object_list::ObjectsList_<ContainerAllocator> >\n : TrueType\n { };\n\ntemplate <class ContainerAllocator>\nstruct IsMessage< ::object_list::ObjectsList_<ContainerAllocator> const>\n : TrueType\n { };\n\ntemplate <class ContainerAllocator>\nstruct HasHeader< ::object_list::ObjectsList_<ContainerAllocator> >\n : TrueType\n { };\n\ntemplate <class ContainerAllocator>\nstruct HasHeader< ::object_list::ObjectsList_<ContainerAllocator> const>\n : TrueType\n { };\n\n\ntemplate<class ContainerAllocator>\nstruct 
MD5Sum< ::object_list::ObjectsList_<ContainerAllocator> >\n{\n static const char* value()\n {\n return \"d69ee6a3db897657f81853c8b3814ced\";\n }\n\n static const char* value(const ::object_list::ObjectsList_<ContainerAllocator>&) { return value(); }\n static const uint64_t static_value1 = 0xd69ee6a3db897657ULL;\n static const uint64_t static_value2 = 0xf81853c8b3814cedULL;\n};\n\ntemplate<class ContainerAllocator>\nstruct DataType< ::object_list::ObjectsList_<ContainerAllocator> >\n{\n static const char* value()\n {\n return \"object_list/ObjectsList\";\n }\n\n static const char* value(const ::object_list::ObjectsList_<ContainerAllocator>&) { return value(); }\n};\n\ntemplate<class ContainerAllocator>\nstruct Definition< ::object_list::ObjectsList_<ContainerAllocator> >\n{\n static const char* value()\n {\n return \"Header header\\n\"\n\"ObjectList[] obj_list\\n\"\n\"SensorProperty sensor_property\\n\"\n\"\\n\"\n\"================================================================================\\n\"\n\"MSG: std_msgs/Header\\n\"\n\"# Standard metadata for higher-level stamped data types.\\n\"\n\"# This is generally used to communicate timestamped data \\n\"\n\"# in a particular coordinate frame.\\n\"\n\"# \\n\"\n\"# sequence ID: consecutively increasing ID \\n\"\n\"uint32 seq\\n\"\n\"#Two-integer timestamp that is expressed as:\\n\"\n\"# * stamp.sec: seconds (stamp_secs) since epoch (in Python the variable is called 'secs')\\n\"\n\"# * stamp.nsec: nanoseconds since stamp_secs (in Python the variable is called 'nsecs')\\n\"\n\"# time-handling sugar is provided by the client library\\n\"\n\"time stamp\\n\"\n\"#Frame this data is associated with\\n\"\n\"string frame_id\\n\"\n\"\\n\"\n\"================================================================================\\n\"\n\"MSG: object_list/ObjectList\\n\"\n\"int32 obj_id\\n\"\n\"float64 time\\n\"\n\"Geometric geometric\\n\"\n\"float64[36] covariance\\n\"\n\"Dimension dimension\\n\"\n\"float64 prop_existence\\n\"\n\"float64 prop_nonexistence\\n\"\n\"float64 prop_persistance\\n\"\n\"float64 prop_mov \\n\"\n\"Classification classification\\n\"\n\"float64[12] classification_mass\\n\"\n\"Features features\\n\"\n\"int64[] sensors_fused\\n\"\n\"\\n\"\n\"================================================================================\\n\"\n\"MSG: object_list/Geometric\\n\"\n\"float64 x\\n\"\n\"float64 y\\n\"\n\"float64 vx\\n\"\n\"float64 vy\\n\"\n\"float64 ax\\n\"\n\"float64 ay\\n\"\n\"float64 yaw\\n\"\n\"float64 yawrate\\n\"\n\"\\n\"\n\"================================================================================\\n\"\n\"MSG: object_list/Dimension\\n\"\n\"float64 length\\n\"\n\"float64 width\\n\"\n\"float64 length_variance\\n\"\n\"float64 width_variance\\n\"\n\"\\n\"\n\"\\n\"\n\"================================================================================\\n\"\n\"MSG: object_list/Classification\\n\"\n\"float32 car\\n\"\n\"float32 truck\\n\"\n\"float32 motorcycle\\n\"\n\"float32 bicycle\\n\"\n\"float32 pedestrian\\n\"\n\"float32 stacionary\\n\"\n\"float32 other\\n\"\n\"\\n\"\n\"================================================================================\\n\"\n\"MSG: object_list/Features\\n\"\n\"uint8 FL\\n\"\n\"uint8 FM\\n\"\n\"uint8 FR\\n\"\n\"uint8 MR\\n\"\n\"uint8 RR\\n\"\n\"uint8 RM\\n\"\n\"uint8 RL\\n\"\n\"uint8 ML\\n\"\n\"\\n\"\n\"================================================================================\\n\"\n\"MSG: object_list/SensorProperty\\n\"\n\"int32 sensor_id\\n\"\n\"float64 sensortype\\n\"\n\"float64 
posx_variance\\n\"\n\"float64 posy_variance\\n\"\n\"float64 velx_variance\\n\"\n\"float64 vely_variance\\n\"\n\"float64 trust_existance\\n\"\n\"float64 trust_car\\n\"\n\"float64 trust_truck\\n\"\n\"float64 trust_motorcycle\\n\"\n\"float64 trust_bicycle\\n\"\n\"float64 trust_pedestrian\\n\"\n\"float64 trust_stationary\\n\"\n\"float64 trust_other \\n\"\n;\n }\n\n static const char* value(const ::object_list::ObjectsList_<ContainerAllocator>&) { return value(); }\n};\n\n} // namespace message_traits\n} // namespace ros\n\nnamespace ros\n{\nnamespace serialization\n{\n\n template<class ContainerAllocator> struct Serializer< ::object_list::ObjectsList_<ContainerAllocator> >\n {\n template<typename Stream, typename T> inline static void allInOne(Stream& stream, T m)\n {\n stream.next(m.header);\n stream.next(m.obj_list);\n stream.next(m.sensor_property);\n }\n\n ROS_DECLARE_ALLINONE_SERIALIZER\n }; // struct ObjectsList_\n\n} // namespace serialization\n} // namespace ros\n\nnamespace ros\n{\nnamespace message_operations\n{\n\ntemplate<class ContainerAllocator>\nstruct Printer< ::object_list::ObjectsList_<ContainerAllocator> >\n{\n template<typename Stream> static void stream(Stream& s, const std::string& indent, const ::object_list::ObjectsList_<ContainerAllocator>& v)\n {\n s << indent << \"header: \";\n s << std::endl;\n Printer< ::std_msgs::Header_<ContainerAllocator> >::stream(s, indent + \" \", v.header);\n s << indent << \"obj_list[]\" << std::endl;\n for (size_t i = 0; i < v.obj_list.size(); ++i)\n {\n s << indent << \" obj_list[\" << i << \"]: \";\n s << std::endl;\n s << indent;\n Printer< ::object_list::ObjectList_<ContainerAllocator> >::stream(s, indent + \" \", v.obj_list[i]);\n }\n s << indent << \"sensor_property: \";\n s << std::endl;\n Printer< ::object_list::SensorProperty_<ContainerAllocator> >::stream(s, indent + \" \", v.sensor_property);\n }\n};\n\n} // namespace message_operations\n} // namespace ros\n\n#endif // OBJECT_LIST_MESSAGE_OBJECTSLIST_H\n"
},
{
"alpha_fraction": 0.5167464017868042,
"alphanum_fraction": 0.5502392053604126,
"avg_line_length": 22.22222137451172,
"blob_id": "3802be5312c23c45de17d3a33c8ea6449f0d181e",
"content_id": "d299684f8d9dbcd228a843b6906da16d55a4c5da",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 418,
"license_type": "no_license",
"max_line_length": 53,
"num_lines": 18,
"path": "/src/sensor_model/scripts/rotate2.py",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "import math\nimport numpy as np\n\ndef rotate (x,y,angle):\n\n rot= np.zeros((2,2))\n rot[0,0] = math.cos(angle)\n rot[0,1] = -(math.sin(angle))\n rot[1,0] = math.sin(angle)\n rot[0,1]= math.cos(angle)\n pos = [x,y]\n pos = rot.dot(pos)\n rotx= pos[0,0]\n roty= pos[1,0]\n #rotx = x * math.cos(angle) - y * math.sin(angle)\n #roty = x * math.sin(angle) + y * math.cos(angle)\n\n return [rotx,roty]\n"
},
{
"alpha_fraction": 0.6352573037147522,
"alphanum_fraction": 0.6404728889465332,
"avg_line_length": 23.008695602416992,
"blob_id": "50d244439c5d6f1b13a5498d403950837e9aafc1",
"content_id": "65995e0275df77b17bab5a3f605fc9eb29a18cdd",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 2887,
"license_type": "no_license",
"max_line_length": 72,
"num_lines": 115,
"path": "/src/osi3_bridge/include/udp.h",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "/**\r\n * @file\r\n * @author Georg Seifert <[email protected]>\r\n * @version 1\r\n *\r\n * @section LICENSE\r\n *\r\n * For internal use only\r\n *\r\n * @section BESCHREIBUNG\r\n * \r\n * Plattformunabhängige (Windows und Linux) UDP Funktionen\r\n *\r\n */\r\n\r\n#ifndef UDP_CLIENT_HEADER\r\n#define UDP_CLIENT_HEADER\r\n\r\n#ifdef __linux__\r\n #include <sys/types.h>\r\n #include <sys/socket.h>\r\n #include <netinet/in.h>\r\n #include <arpa/inet.h>\r\n #include <netdb.h>\r\n #include <stdio.h>\r\n #include <unistd.h>\r\n #include <string.h>\r\n #include <stdlib.h>\r\n #include <sys/time.h>\r\n #include <errno.h>\r\n\r\n typedef struct sockaddr_in SOCKADDR_IN;\r\n typedef struct sockaddr SOCKADDR;\r\n typedef int SOCKET;\r\n #define ERROR_CODE (errno)\r\n#elif _WIN32\r\n #include <winsock2.h>\r\n #include <windows.h>\r\n #define ERROR_CODE (WSAGetLastError())\r\n typedef int socklen_t;\r\n#else\r\n #error \"OS not supported!\"\r\n#endif\r\n\r\n#include <stdbool.h>\r\n#include <stdint.h>\r\n#include <stdio.h>\r\n\r\n/**\r\n * Initialisierungsfunktion\r\n *\r\n * Initialisiert einen UDP-Socket\r\n *\r\n * @param s Socket, der initialisiert wird\r\n * @return Erfolg der Initialisierung\r\n */\r\nbool udp_init(SOCKET * s);\r\n\r\n/**\r\n * Bindefunktion\r\n *\r\n * @param s Socket, der initialisiert wird\r\n * @param port Port, an den der Socket gebunden wird\r\n * @return Erfolg des Binde\r\n */\r\nbool udp_bind(SOCKET * s, uint16_t port);\r\n\r\n/**\r\n * Sendefunktion\r\n *\r\n * @param s Socket, der initialisiert wird\r\n * @param addr Zieladdresse\r\n * @param port Zielport\r\n * @param data Daten, die versendet werden sollen\r\n * @param data_size Größe der Daten\r\n * @return Erfolg des Senden\r\n */\r\nbool udp_send(SOCKET * s, const char * addr, uint16_t port, \r\n const uint8_t * data, size_t data_size);\r\n\r\n/**\r\n * Empfangsfunktion\r\n *\r\n * @param s Socket, der initialisiert wird\r\n * @param remote_addr Quelladresse (oder NULL, falls nicht von Intresse)\r\n * @param remote_port Quellport\r\n * @param data Daten, die empfangen wurden\r\n * @param[inout] data_size Maximale größe (in), empfangene Größe (out)\r\n * @return Erfolg des Empfangen\r\n */\r\nbool udp_recv(SOCKET * s, uint8_t * data, size_t * data_size,\r\n char * remote_addr, uint16_t * remote_port);\r\n\r\n/**\r\n * Empfangsfunktion mit Timeout\r\n *\r\n * @param s Socket, der initialisiert wird\r\n * @param remote_addr Quelladresse (oder NULL, falls nicht von Intresse)\r\n * @param remote_port Quellport\r\n * @param data Daten, die empfangen wurden\r\n * @param[inout] data_size Maximale größe (in), empfangene Größe (out)\r\n * @return Erfolg des Empfangen\r\n */\r\nbool udp_recv_timeout(SOCKET * s, uint8_t * data, size_t * data_size,\r\n char * remote_addr, uint16_t * remote_port);\r\n \r\n/**\r\n * Deinitialiserungsfunktion\r\n *\r\n * @param s Socket, der deinitialisert wird\r\n * @return Erfolg der Deinitialiserung\r\n */\r\nbool udp_close(SOCKET * s);\r\n\r\n#endif\r\n"
},
{
"alpha_fraction": 0.6466666460037231,
"alphanum_fraction": 0.6466666460037231,
"avg_line_length": 49.02777862548828,
"blob_id": "9520662462879d7b9cc11893a1e8e7ec31234c57",
"content_id": "3b2a002a0c560d0a17cfe0ade58f412ce771dd54",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1800,
"license_type": "no_license",
"max_line_length": 170,
"num_lines": 36,
"path": "/src/sensor_model/scripts/get_sensor_properties.py",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "from configparser import ConfigParser\nfrom ClassSensor import Sensor\n\ndef get_sensor_properties():\n cfg = ConfigParser()\n cfg.read(\"ConfigFiles/sensorconfig.ini\") # Reading the config file\n\n sensors = cfg.sections() # List of sensors present in the config file\n #sensors = ['CameraFront','CameraLeft','CameraRear','CameraRight','RadarFront']\n #print(sensors)\n\n # To create objects and create transformation matrices using the class 'Transformation'\n sensorlist = []\n sensordict = {}\n # transformationdict = {}\n # trustprobabilityexistance = {}\n # trustprobabilityclassification = {}\n for sensorname in sensors:\n # print(sensorname)\n # print(cfg.get(sensorname, 'x-distance'))\n # sensorname = Sensor(cfg.get(sensors[index], 'x-distance'), cfg.get(sensors[index], 'y-distance'), cfg.get(sensors[index], 'rotation')) # Creating class object\n sensor = Sensor(cfg.get(sensorname, 'sensor_id'), cfg.get(sensorname, 'x-distance'),\n cfg.get(sensorname, 'y-distance'), cfg.get(sensorname, 'rotation'),\n cfg.get(sensorname, 'trustexistance'), cfg.get(sensorname, 'trustcar'),\n cfg.get(sensorname, 'trusttruck'), cfg.get(sensorname, 'trustmotorcycle'),\n cfg.get(sensorname, 'trustbicycle'), cfg.get(sensorname, 'trustpedestrian'),\n cfg.get(sensorname, 'truststationary'),\n cfg.get(sensorname, 'trustother')) # Creating class object\n sensor.set_sensor_properties()\n\n sensorlist.append(sensor)\n sensordict[sensor.sensor_id] = sensor\n # transformationdict[sensor.sensor_id] = sensor.transformation\n #print(sensorlist)\n #print(sensordict)\n return (sensorlist, sensordict)"
},
{
"alpha_fraction": 0.7957746386528015,
"alphanum_fraction": 0.7957746386528015,
"avg_line_length": 34.5,
"blob_id": "3fac3a252409f5a4cd0ee901f7fe176ca3d3001a",
"content_id": "15cbbd183cbe64d101f8768739983941e00b7eb6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 284,
"license_type": "no_license",
"max_line_length": 112,
"num_lines": 8,
"path": "/build/object_list/CMakeFiles/_object_list_generate_messages_check_deps_Dimension.dir/cmake_clean.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "file(REMOVE_RECURSE\n \"CMakeFiles/_object_list_generate_messages_check_deps_Dimension\"\n)\n\n# Per-language clean rules from dependency scanning.\nforeach(lang )\n include(CMakeFiles/_object_list_generate_messages_check_deps_Dimension.dir/cmake_clean_${lang}.cmake OPTIONAL)\nendforeach()\n"
},
{
"alpha_fraction": 0.6797900199890137,
"alphanum_fraction": 0.6896325349807739,
"avg_line_length": 31.446807861328125,
"blob_id": "46a030b69b82dd23c9e36130eaaf92dbad7da31e",
"content_id": "cf9695060d88270f76099b3845dfd4385c7c9c8e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1524,
"license_type": "no_license",
"max_line_length": 81,
"num_lines": 47,
"path": "/src/fusion/src/Ktesting.py",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\nimport numpy as np\nimport rospy\nimport math\nimport message_filters\n\n## Import Funtions\nfrom rotate import rotate\n\n## Import Objects\nfrom ClassKF import KF , rotatedata\nfrom ClassSens import Sens , Ego\n\n# import all necessary ROS messages\nfrom object_list.msg import ObjectList, ObjectsList\nfrom osi3_bridge.msg import GroundTruthMovingObjects, TrafficUpdateMovingObject\n\ndeviationsum = 0\ndeviation = 0\ncount = 0\ndeviationy = 0\ndef sensor_rotate():\n # Node initialization\n\n rospy.init_node('KFtesting', anonymous=False) # Start node\n rate = rospy.Rate(rospy.get_param(\"freq\"))\n # subscribe to sensor data and ego data with time synchronization\n objs1 = message_filters.Subscriber('/sensor0/obj_list_egoframe', ObjectsList)\n objsI = message_filters.Subscriber('/sensor5/obj_list_egoframe', ObjectsList)\n ts = message_filters.TimeSynchronizer([ego_data, objs_list],10)\n #ts = message_filters.TimeSynchronizer([ego_data, objs_list], 10)\n ts.registerCallback(callback)\n\n rospy.spin()\n\ndef callback(objs1,objsI):\n\n for i, obj in enumerate(objs1):\n for d, idealobj in enumerate(objsI):\n if idealobj.obj_id == obj.obj_id:\n deviation = idealobj.geometric.x - obj-geometric.x\n deviationsum += deviation\n deviationy = idealobj.geometric.y - obj-geometric.y\n deviationsumy += deviationy\n count += 1\n avg_deviation = deviationsum/count\n print(avg_deviation)"
},
{
"alpha_fraction": 0.8116169571876526,
"alphanum_fraction": 0.8131868243217468,
"avg_line_length": 56.90909194946289,
"blob_id": "629caca9f3c8a0b6f8f6c48fd22891cb4e1291ce",
"content_id": "701924381886306bfb8d7553ac49114963a2379d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 637,
"license_type": "no_license",
"max_line_length": 118,
"num_lines": 11,
"path": "/build/osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface-config.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "# Compute paths\nget_filename_component(OPEN_SIMULATION_INTERFACE_CMAKE_DIR \"${CMAKE_CURRENT_LIST_FILE}\" PATH)\nset(OPEN_SIMULATION_INTERFACE_INCLUDE_DIRS \"/home/student/Desktop/Redge_Thesis/vil/install/include/osi3;/usr/include\")\n\n# Our library dependencies (contains definitions for IMPORTED targets)\nif(NOT TARGET OPEN_SIMULATION_INTERFACE AND NOT OPEN_SIMULATION_INTERFACE_BINARY_DIR)\n include(\"${OPEN_SIMULATION_INTERFACE_CMAKE_DIR}/open_simulation_interface_targets.cmake\")\nendif()\n\n# These are IMPORTED targets created by open_simulation_interface_targets.cmake\nset(OPEN_SIMULATION_INTERFACE_LIBRARIES open_simulation_interface)\n"
},
{
"alpha_fraction": 0.6140424013137817,
"alphanum_fraction": 0.6331533193588257,
"avg_line_length": 32.887325286865234,
"blob_id": "eb6fa13225e1ce128bba479ff7a029848df1adbf",
"content_id": "a3f6d998b38d6365481a08d6d66195def2646319",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2407,
"license_type": "no_license",
"max_line_length": 169,
"num_lines": 71,
"path": "/src/object_list/scripts/CARLAOSIbridge_update.py",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n\n\nimport sys\nimport glob\nimport os\n\n\ntry:\n sys.path.append(glob.glob('/opt/carla-simulator/PythonAPI/carla/dist/carla-0.9.8-py2.7-linux-x86_64.egg')[0])\n\n# sys.path.append(glob.glob('../carla/dist/carla-*%d.%d-%s.egg' % (\n# sys.version_info.major,\n# sys.version_info.minor,\n# 'win-amd64' if os.name == 'nt' else 'linux-x86_64'))[0])\nexcept IndexError:\n pass\n\n# ==============================================================================\n# -- add PythonAPI for release mode --------------------------------------------\n# ==============================================================================\ntry:\n sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + '/carla')\nexcept IndexError:\n pass\n\nimport carla\nimport rospy\nimport numpy as np\nimport math\n\nfrom osi3_bridge.msg import TrafficUpdateMovingObject\n\n\n\ndef CARLA_OSI_ROS():\n\n ## Connect with CARLA server\n client = carla.Client('localhost', 2000) ### It starts the communication between server and client\n client.set_timeout(10.0) # seconds ### After 10 seconds without communication with CARLA server the script stops\n #client.set_timeout(10.0) # seconds ### After 10 seconds without communication with CARLA server the script stops\n world = client.get_world()\n global actor_list\n actor_list = world.get_actors()\n\n # Initiate ROS node\n rospy.init_node('CARLAOSI_update', anonymous=False) # Start node\n rate = rospy.Rate(rospy.get_param(\"freq\") ) # Define the node frequency 100hz\n rospy.Subscriber(\"/osi3_traffic_update\", TrafficUpdateMovingObject, set_ego)\n rospy.spin()\n\n\ndef set_ego(ego):\n\n actor = actor_list.find(ego.object.id)\n actor.set_simulate_physics(enabled=False)\n location = carla.Location(x=ego.object.position.x, y=-ego.object.position.y, z=ego.object.position.z)\n velocity = carla.Vector3D(x=ego.object.velocity.x, y=-ego.object.velocity.y, z=ego.object.velocity.z)\n rotation = carla.Rotation(pitch=-ego.object.orientation.pitch*180/math.pi, yaw=-ego.object.orientation.yaw*180/math.pi, roll=ego.object.orientation.roll*180/math.pi)\n transform = carla.Transform(location,rotation)\n\n actor.set_transform(transform)\n actor.set_velocity(velocity)\n\n return\n\nif __name__ == '__main__':\n try:\n CARLA_OSI_ROS()\n except rospy.ROSInterruptException:\n pass\n\n"
},
{
"alpha_fraction": 0.7707509994506836,
"alphanum_fraction": 0.7816205620765686,
"avg_line_length": 91,
"blob_id": "61b50d3e67e2a550aae8b21eeaef9920cd3b9c03",
"content_id": "82d87acc45c9e36c847724732956403679475c54",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1012,
"license_type": "no_license",
"max_line_length": 421,
"num_lines": 11,
"path": "/build/osi3_bridge/cmake/osi3_bridge-genmsg-context.py",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "# generated from genmsg/cmake/pkg-genmsg.context.in\n\nmessages_str = \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Dimension3d.msg;/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/Orientation3d.msg;/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/MovingObject.msg;/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/GroundTruthMovingObjects.msg;/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg/TrafficUpdateMovingObject.msg\"\nservices_str = \"\"\npkg_name = \"osi3_bridge\"\ndependencies_str = \"geometry_msgs;std_msgs\"\nlangs = \"gencpp;geneus;genlisp;gennodejs;genpy\"\ndep_include_paths_str = \"osi3_bridge;/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg;geometry_msgs;/opt/ros/melodic/share/geometry_msgs/cmake/../msg;std_msgs;/opt/ros/melodic/share/std_msgs/cmake/../msg\"\nPYTHON_EXECUTABLE = \"/usr/bin/python2\"\npackage_has_static_sources = '' == 'TRUE'\ngenmsg_check_deps_script = \"/opt/ros/melodic/share/genmsg/cmake/../../../lib/genmsg/genmsg_check_deps.py\"\n"
},
{
"alpha_fraction": 0.7463123202323914,
"alphanum_fraction": 0.7538148760795593,
"avg_line_length": 59.953487396240234,
"blob_id": "fb0498e47fa22d269704f29a10a87e42d12456dc",
"content_id": "6563c123f2a4015b037c798261b2e45dbc88534a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 7864,
"license_type": "no_license",
"max_line_length": 298,
"num_lines": 129,
"path": "/build/osi3_bridge/open-simulation-interface/cmake_install.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "# Install script for directory: /home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/open-simulation-interface\n\n# Set the install prefix\nif(NOT DEFINED CMAKE_INSTALL_PREFIX)\n set(CMAKE_INSTALL_PREFIX \"/home/student/Desktop/Redge_Thesis/vil/install\")\nendif()\nstring(REGEX REPLACE \"/$\" \"\" CMAKE_INSTALL_PREFIX \"${CMAKE_INSTALL_PREFIX}\")\n\n# Set the install configuration name.\nif(NOT DEFINED CMAKE_INSTALL_CONFIG_NAME)\n if(BUILD_TYPE)\n string(REGEX REPLACE \"^[^A-Za-z0-9_]+\" \"\"\n CMAKE_INSTALL_CONFIG_NAME \"${BUILD_TYPE}\")\n else()\n set(CMAKE_INSTALL_CONFIG_NAME \"Release\")\n endif()\n message(STATUS \"Install configuration: \\\"${CMAKE_INSTALL_CONFIG_NAME}\\\"\")\nendif()\n\n# Set the component getting installed.\nif(NOT CMAKE_INSTALL_COMPONENT)\n if(COMPONENT)\n message(STATUS \"Install component: \\\"${COMPONENT}\\\"\")\n set(CMAKE_INSTALL_COMPONENT \"${COMPONENT}\")\n else()\n set(CMAKE_INSTALL_COMPONENT)\n endif()\nendif()\n\n# Install shared libraries without execute permission?\nif(NOT DEFINED CMAKE_INSTALL_SO_NO_EXE)\n set(CMAKE_INSTALL_SO_NO_EXE \"1\")\nendif()\n\n# Is this installation the result of a crosscompile?\nif(NOT DEFINED CMAKE_CROSSCOMPILING)\n set(CMAKE_CROSSCOMPILING \"FALSE\")\nendif()\n\nif(\"x${CMAKE_INSTALL_COMPONENT}x\" STREQUAL \"xlibx\" OR NOT CMAKE_INSTALL_COMPONENT)\n file(INSTALL DESTINATION \"${CMAKE_INSTALL_PREFIX}/lib/osi3\" TYPE STATIC_LIBRARY FILES \"/home/student/Desktop/Redge_Thesis/vil/devel/lib/libopen_simulation_interface_static.a\")\nendif()\n\nif(\"x${CMAKE_INSTALL_COMPONENT}x\" STREQUAL \"xlibx\" OR NOT CMAKE_INSTALL_COMPONENT)\n file(INSTALL DESTINATION \"${CMAKE_INSTALL_PREFIX}/lib/osi3\" TYPE STATIC_LIBRARY FILES \"/home/student/Desktop/Redge_Thesis/vil/devel/lib/libopen_simulation_interface_pic.a\")\nendif()\n\nif(\"x${CMAKE_INSTALL_COMPONENT}x\" STREQUAL \"xlibx\" OR NOT CMAKE_INSTALL_COMPONENT)\n foreach(file\n \"$ENV{DESTDIR}${CMAKE_INSTALL_PREFIX}/lib/osi3/libopen_simulation_interface.so.3.0.1\"\n \"$ENV{DESTDIR}${CMAKE_INSTALL_PREFIX}/lib/osi3/libopen_simulation_interface.so\"\n )\n if(EXISTS \"${file}\" AND\n NOT IS_SYMLINK \"${file}\")\n file(RPATH_CHECK\n FILE \"${file}\"\n RPATH \"\")\n endif()\n endforeach()\n file(INSTALL DESTINATION \"${CMAKE_INSTALL_PREFIX}/lib/osi3\" TYPE SHARED_LIBRARY FILES\n \"/home/student/Desktop/Redge_Thesis/vil/devel/lib/libopen_simulation_interface.so.3.0.1\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/lib/libopen_simulation_interface.so\"\n )\n foreach(file\n \"$ENV{DESTDIR}${CMAKE_INSTALL_PREFIX}/lib/osi3/libopen_simulation_interface.so.3.0.1\"\n \"$ENV{DESTDIR}${CMAKE_INSTALL_PREFIX}/lib/osi3/libopen_simulation_interface.so\"\n )\n if(EXISTS \"${file}\" AND\n NOT IS_SYMLINK \"${file}\")\n if(CMAKE_INSTALL_DO_STRIP)\n execute_process(COMMAND \"/usr/bin/strip\" \"${file}\")\n endif()\n endif()\n endforeach()\nendif()\n\nif(\"x${CMAKE_INSTALL_COMPONENT}x\" STREQUAL \"xdevx\" OR NOT CMAKE_INSTALL_COMPONENT)\n file(INSTALL DESTINATION \"${CMAKE_INSTALL_PREFIX}/lib/cmake/open_simulation_interface-3\" TYPE FILE FILES\n \"/home/student/Desktop/Redge_Thesis/vil/build/osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface-config.cmake\"\n \"/home/student/Desktop/Redge_Thesis/vil/build/osi3_bridge/open-simulation-interface/open_simulation_interface-config-version.cmake\"\n )\nendif()\n\nif(\"x${CMAKE_INSTALL_COMPONENT}x\" STREQUAL \"xUnspecifiedx\" OR NOT CMAKE_INSTALL_COMPONENT)\n file(INSTALL DESTINATION \"${CMAKE_INSTALL_PREFIX}/include/osi3\" TYPE FILE 
FILES\n \"/home/student/Desktop/Redge_Thesis/vil/build/osi3_bridge/open-simulation-interface/osi_version.pb.h\"\n \"/home/student/Desktop/Redge_Thesis/vil/build/osi3_bridge/open-simulation-interface/osi_common.pb.h\"\n \"/home/student/Desktop/Redge_Thesis/vil/build/osi3_bridge/open-simulation-interface/osi_datarecording.pb.h\"\n \"/home/student/Desktop/Redge_Thesis/vil/build/osi3_bridge/open-simulation-interface/osi_detectedtrafficsign.pb.h\"\n \"/home/student/Desktop/Redge_Thesis/vil/build/osi3_bridge/open-simulation-interface/osi_detectedtrafficlight.pb.h\"\n \"/home/student/Desktop/Redge_Thesis/vil/build/osi3_bridge/open-simulation-interface/osi_detectedroadmarking.pb.h\"\n \"/home/student/Desktop/Redge_Thesis/vil/build/osi3_bridge/open-simulation-interface/osi_detectedlane.pb.h\"\n \"/home/student/Desktop/Redge_Thesis/vil/build/osi3_bridge/open-simulation-interface/osi_detectedobject.pb.h\"\n \"/home/student/Desktop/Redge_Thesis/vil/build/osi3_bridge/open-simulation-interface/osi_detectedoccupant.pb.h\"\n \"/home/student/Desktop/Redge_Thesis/vil/build/osi3_bridge/open-simulation-interface/osi_environment.pb.h\"\n \"/home/student/Desktop/Redge_Thesis/vil/build/osi3_bridge/open-simulation-interface/osi_groundtruth.pb.h\"\n \"/home/student/Desktop/Redge_Thesis/vil/build/osi3_bridge/open-simulation-interface/osi_hostvehicledata.pb.h\"\n \"/home/student/Desktop/Redge_Thesis/vil/build/osi3_bridge/open-simulation-interface/osi_trafficsign.pb.h\"\n \"/home/student/Desktop/Redge_Thesis/vil/build/osi3_bridge/open-simulation-interface/osi_trafficlight.pb.h\"\n \"/home/student/Desktop/Redge_Thesis/vil/build/osi3_bridge/open-simulation-interface/osi_roadmarking.pb.h\"\n \"/home/student/Desktop/Redge_Thesis/vil/build/osi3_bridge/open-simulation-interface/osi_lane.pb.h\"\n \"/home/student/Desktop/Redge_Thesis/vil/build/osi3_bridge/open-simulation-interface/osi_featuredata.pb.h\"\n \"/home/student/Desktop/Redge_Thesis/vil/build/osi3_bridge/open-simulation-interface/osi_object.pb.h\"\n \"/home/student/Desktop/Redge_Thesis/vil/build/osi3_bridge/open-simulation-interface/osi_occupant.pb.h\"\n \"/home/student/Desktop/Redge_Thesis/vil/build/osi3_bridge/open-simulation-interface/osi_sensordata.pb.h\"\n \"/home/student/Desktop/Redge_Thesis/vil/build/osi3_bridge/open-simulation-interface/osi_sensorviewconfiguration.pb.h\"\n \"/home/student/Desktop/Redge_Thesis/vil/build/osi3_bridge/open-simulation-interface/osi_sensorspecific.pb.h\"\n \"/home/student/Desktop/Redge_Thesis/vil/build/osi3_bridge/open-simulation-interface/osi_sensorview.pb.h\"\n )\nendif()\n\nif(\"x${CMAKE_INSTALL_COMPONENT}x\" STREQUAL \"xdevx\" OR NOT CMAKE_INSTALL_COMPONENT)\n if(EXISTS \"$ENV{DESTDIR}${CMAKE_INSTALL_PREFIX}/lib/cmake/open_simulation_interface-3/open_simulation_interface_targets.cmake\")\n file(DIFFERENT EXPORT_FILE_CHANGED FILES\n \"$ENV{DESTDIR}${CMAKE_INSTALL_PREFIX}/lib/cmake/open_simulation_interface-3/open_simulation_interface_targets.cmake\"\n \"/home/student/Desktop/Redge_Thesis/vil/build/osi3_bridge/open-simulation-interface/CMakeFiles/Export/lib/cmake/open_simulation_interface-3/open_simulation_interface_targets.cmake\")\n if(EXPORT_FILE_CHANGED)\n file(GLOB OLD_CONFIG_FILES \"$ENV{DESTDIR}${CMAKE_INSTALL_PREFIX}/lib/cmake/open_simulation_interface-3/open_simulation_interface_targets-*.cmake\")\n if(OLD_CONFIG_FILES)\n message(STATUS \"Old export file \\\"$ENV{DESTDIR}${CMAKE_INSTALL_PREFIX}/lib/cmake/open_simulation_interface-3/open_simulation_interface_targets.cmake\\\" will be replaced. 
Removing files [${OLD_CONFIG_FILES}].\")\n file(REMOVE ${OLD_CONFIG_FILES})\n endif()\n endif()\n endif()\n file(INSTALL DESTINATION \"${CMAKE_INSTALL_PREFIX}/lib/cmake/open_simulation_interface-3\" TYPE FILE FILES \"/home/student/Desktop/Redge_Thesis/vil/build/osi3_bridge/open-simulation-interface/CMakeFiles/Export/lib/cmake/open_simulation_interface-3/open_simulation_interface_targets.cmake\")\n if(\"${CMAKE_INSTALL_CONFIG_NAME}\" MATCHES \"^([Rr][Ee][Ll][Ee][Aa][Ss][Ee])$\")\n file(INSTALL DESTINATION \"${CMAKE_INSTALL_PREFIX}/lib/cmake/open_simulation_interface-3\" TYPE FILE FILES \"/home/student/Desktop/Redge_Thesis/vil/build/osi3_bridge/open-simulation-interface/CMakeFiles/Export/lib/cmake/open_simulation_interface-3/open_simulation_interface_targets-release.cmake\")\n endif()\nendif()\n\n"
},
{
"alpha_fraction": 0.4318181872367859,
"alphanum_fraction": 0.46136364340782166,
"avg_line_length": 23.5,
"blob_id": "d8fce475d6ba4b009c76792fa887cada8e871927",
"content_id": "7935dbb8d211f6ad7c120fa09f986b92ead60a8d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 440,
"license_type": "no_license",
"max_line_length": 31,
"num_lines": 18,
"path": "/src/fusion/src/ClassEgo.py",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "class Ego:\n def __init__(self):\n self.pos = Pos()\n self.pos.x = 0\n self.pos.y = 0\n self.vel = Vel()\n self.vel.x = 0\n self.vel.y = 0\n self.acc = Acc()\n self.acc.x = 0\n self.acc.y = 0\n self.neworientation = 0\n self.oldorientation = 0\n self.oldyaw = 0\n self.newyaw = 0\n self.yawrate = 0\n self.testyaw = 0\n self.testyawrate = 0"
},
{
"alpha_fraction": 0.7699260711669922,
"alphanum_fraction": 0.7847164869308472,
"avg_line_length": 70.5882339477539,
"blob_id": "9825941e71116a39c7cf955282291eebd2364414",
"content_id": "f6e2826f2d32b6ad287ca22619e3c8699a9dad4b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 1217,
"license_type": "no_license",
"max_line_length": 111,
"num_lines": 17,
"path": "/build/object_list/CMakeFiles/object_list_generate_messages_py.dir/cmake_clean.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "file(REMOVE_RECURSE\n \"CMakeFiles/object_list_generate_messages_py\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/lib/python2.7/dist-packages/object_list/msg/_Features.py\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/lib/python2.7/dist-packages/object_list/msg/_Geometric.py\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/lib/python2.7/dist-packages/object_list/msg/_SensorProperty.py\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/lib/python2.7/dist-packages/object_list/msg/_ObjectList.py\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/lib/python2.7/dist-packages/object_list/msg/_Classification.py\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/lib/python2.7/dist-packages/object_list/msg/_ObjectsList.py\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/lib/python2.7/dist-packages/object_list/msg/_Dimension.py\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/lib/python2.7/dist-packages/object_list/msg/_EgoData.py\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/lib/python2.7/dist-packages/object_list/msg/__init__.py\"\n)\n\n# Per-language clean rules from dependency scanning.\nforeach(lang )\n include(CMakeFiles/object_list_generate_messages_py.dir/cmake_clean_${lang}.cmake OPTIONAL)\nendforeach()\n"
},
{
"alpha_fraction": 0.7898089289665222,
"alphanum_fraction": 0.7898089289665222,
"avg_line_length": 63.64706039428711,
"blob_id": "aacd79a3401e79fdab40c3904362a26f8ca090f1",
"content_id": "89b37b8fb6c99c35d12622fc8a32af288567bda2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 1099,
"license_type": "no_license",
"max_line_length": 98,
"num_lines": 17,
"path": "/build/object_list/CMakeFiles/object_list_generate_messages_eus.dir/cmake_clean.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "file(REMOVE_RECURSE\n \"CMakeFiles/object_list_generate_messages_eus\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/share/roseus/ros/object_list/msg/Features.l\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/share/roseus/ros/object_list/msg/Geometric.l\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/share/roseus/ros/object_list/msg/SensorProperty.l\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/share/roseus/ros/object_list/msg/ObjectList.l\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/share/roseus/ros/object_list/msg/Classification.l\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/share/roseus/ros/object_list/msg/ObjectsList.l\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/share/roseus/ros/object_list/msg/Dimension.l\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/share/roseus/ros/object_list/msg/EgoData.l\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/share/roseus/ros/object_list/manifest.l\"\n)\n\n# Per-language clean rules from dependency scanning.\nforeach(lang )\n include(CMakeFiles/object_list_generate_messages_eus.dir/cmake_clean_${lang}.cmake OPTIONAL)\nendforeach()\n"
},
{
"alpha_fraction": 0.6096513867378235,
"alphanum_fraction": 0.6465920805931091,
"avg_line_length": 28.275590896606445,
"blob_id": "b78ab232f25fe30b1737338cf6173b0e3f1d6c83",
"content_id": "4da009dcad2e44704ff50642c59b566bd211c529",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 7731,
"license_type": "no_license",
"max_line_length": 99,
"num_lines": 254,
"path": "/src/osi3_bridge/include/osi_protocol_header.h",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "/**\r\n * @file\r\n * @author Georg Seifert <[email protected]>\r\n * @version 1\r\n *\r\n * @section LICENSE\r\n *\r\n * For internal use only\r\n *\r\n * @section BESCHREIBUNG\r\n * \r\n * Das \"osi network protocol\" definiert einen statischen Header, der vor das OSI \r\n * gesetzt wird um die dynamische Größe zu kodieren.\r\n *\r\n * \\latexonly\r\n * \\begin{bytefield}[bitwidth=0.8em]{32}\r\n * \t\\bitheader{0,15,16,23,24,31} \\\\\r\n * \t\\begin{rightwordgroup}{Header}\r\n * \t\t\\wordbox{2}{Magic ID} \\\\\r\n * \t\t\\wordbox{1}{Protokoll Version} \\\\\r\n * \t\t\\begin{leftwordgroup}{OSI Version}\r\n * \t\t\t\\bitbox{16}{Major} & \\bitbox{8}{Minor} & \\bitbox{8}{Patch}\r\n * \t\t\\end{leftwordgroup} \\\\\r\n * \t\t\\wordbox{1}{Payload Größe} \\\\\r\n * \t\t\\wordbox{1}{Payload Typ}\r\n * \t\\end{rightwordgroup}\\\\\r\n * \t\\begin{rightwordgroup}{Payload}\r\n * \t\\wordbox[lrt]{1}{OSI Daten} \\\\\r\n * \t\\skippedwords \\\\\r\n * \t\\wordbox[lrb]{1}{}\r\n * \t\\end{rightwordgroup}\r\n * \\end{bytefield}\r\n * \\endlatexonly\r\n * \r\n * Hierbei ist zu beachten, dass die versendeten Werte als Network/Big Endian\r\n * abgelegt werden. Um dies sicher zu stellen werden für den Zugriff auf die \r\n * Datenstruktur Getter und Setter bereitgestellt, die die Codierung intern \r\n * übernehmen.\r\n *\r\n */\r\n\r\n#ifndef OSI_NETWORK_PROTOCOL_HEADER\r\n#define OSI_NETWORK_PROTOCOL_HEADER\r\n\r\n#ifdef __cplusplus\r\nextern \"C\"\r\n{\r\n#endif\r\n\r\n#include <stddef.h>\r\n#include <stdint.h>\r\n#include <stdbool.h>\r\n#include \"win_dll_export.h\"\r\n\r\n#if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__\r\n /// 16-Bit Swap Function\r\n #define SWAP_16(x) \\\r\n ((((x) >> 8) & 0xffu) | (((x) & 0xffu) << 8))\r\n\r\n /// 32-Bit Swap Function\r\n #define SWAP_32(x) \\\r\n ((((x) & 0xff000000u) >> 24) | (((x) & 0x00ff0000u) >> 8) | \\\r\n (((x) & 0x0000ff00u) << 8) | (((x) & 0x000000ffu) << 24))\r\n\r\n /// 64-Bit Swap Function\r\n #define SWAP_64(x) \\\r\n ((((x) & 0xff00000000000000ull) >> 56) | \\\r\n (((x) & 0x00ff000000000000ull) >> 40) | \\\r\n (((x) & 0x0000ff0000000000ull) >> 24) | \\\r\n (((x) & 0x000000ff00000000ull) >> 8) | \\\r\n (((x) & 0x00000000ff000000ull) << 8) | \\\r\n (((x) & 0x0000000000ff0000ull) << 24) | \\\r\n (((x) & 0x000000000000ff00ull) << 40) | \\\r\n (((x) & 0x00000000000000ffull) << 56))\r\n\r\n#elif __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__\r\n #define SWAP_16(x) (x)\r\n #define SWAP_32(x) (x)\r\n #define SWAP_64(x) (x)\r\n#else\r\n #error Byteorder (\"__ORDER_BIG_ENDIAN__\" or \"__ORDER_LITTLE_ENDIAN__\") has to be defined\r\n#endif\r\n\r\n/// Magic ID um das Protokoll zu erkennen\r\n#define OSI_PROTOCOL_HEADER_MAGIC_ID (4846245148911422785lu)\r\n/// Aktuelle verwendete Version des Protokolls\r\n#define OSI_PROTOCOL_HEADER_VERSION (1u)\r\n\r\n/// OSI Datenpaket Typ\r\nenum osi_payloadDataType\r\n{\r\n osi_GroundTruth = 0, /**< OSI Paket ist vom Typ: Ground Truth */\r\n osi_SensorData = 1, /**< OSI Paket ist vom Typ: Sensor Data */\r\n osi_SensorView = 2, /**< OSI Paket ist vom Typ: Sensor View */\r\n osi_FeatureData = 3 /**< OSI Paket ist vom Typ: Feature Data */\r\n};\r\n\r\n/**\r\n * Struktur des Statischen Headers\r\n *\r\n * Die Struktur beschreibt die einzelnen Elemente des \"network protocol\".\r\n * Bei änderungen im Protokoll ist zu beachten, dass das das Alignment von\r\n * von 64-Bit eingehalten wird, und wenn nötig mit spare-Variablen gefüllt wird,\r\n * um Kompilerspezifika zu umgehen.\r\n *\r\n * @note Daten werden als Network bzw. 
Big Endian über das Netzwerk verschickt\r\n */\r\ntypedef struct _osi_protocol_header\r\n{\r\n uint64_t magic_id; /**< 64-Bit ID um das Protokoll zu erkennen */\r\n uint32_t protocol_version; /**< 32-Bit Aktuelle Version des Protokolls */\r\n uint16_t osi_version_major; /**< 16-Bit OSI Major Version */\r\n uint8_t osi_version_minor; /**< 8-Bit OSI Minor Version */\r\n uint8_t osi_version_patch; /**< 8-Bit OSI Patch Version */\r\n uint32_t payload_size; /**< 32-Bit Größe des Nachfolgenden OSI-Pakets */\r\n uint32_t payload_type; /**< 32-Bit OSI Typ @see enum payloadDataType */\r\n} osi_protocol_header;\r\n\r\n/**\r\n * Liefert die Größe der internen Struktur zurück\r\n *\r\n * @return Größe der Header-Struktur\r\n */\r\nMODULE_API size_t osiph_get_header_size();\r\n\r\n/**\r\n * Setter für Magic ID\r\n *\r\n * @param header Header, bei dem sie Magic ID gesetzt wird\r\n */\r\nMODULE_API void osiph_set_magic_id(void * header);\r\n\r\n/**\r\n * Überprüfen der Magic ID\r\n *\r\n * @param header Header, bei dem die Magic ID überprüft wird\r\n * @return Ergebnis der Überprüfung\r\n */\r\nMODULE_API bool osiph_check_magic_id(void * header);\r\n\r\n/**\r\n * Setter der Version\r\n *\r\n * @param header Header, bei dem die Version gesetzt wird\r\n */\r\nMODULE_API void osiph_set_protocol_version(void * header);\r\n\r\n/**\r\n * Getter der Version\r\n *\r\n * @param header Header, bei dem die Version zurückgeliefert wird\r\n * @return Version des Headers\r\n */\r\nMODULE_API uint32_t osiph_get_protocol_version(void * header);\r\n\r\n/**\r\n * Überprüfen der Magic ID\r\n *\r\n * @param header Header, bei dem sie Version geprüft wird\r\n * @return Ergebnis der Überprüfung\r\n */\r\nMODULE_API bool osiph_check_protocol_version(void * header);\r\n\r\n/**\r\n * Setter der OSI Version\r\n *\r\n * Versionierung siehe:\r\n * https://github.com/OpenSimulationInterface/open-simulation-interface/blob/master/README.md\r\n *\r\n * @param header Header, bei dem sie OSI Version gesetzt wird\r\n * @param major OSI Major-Version\r\n * @param minor OSI Minor-Version\r\n * @param patch OSI Patch-Version\r\n */\r\nMODULE_API void osiph_set_osi_version(void * header, uint16_t major, uint8_t minor, uint8_t patch);\r\n\r\n/**\r\n * Getter der OSI Major-Version\r\n *\r\n * Versionierung siehe:\r\n * https://github.com/OpenSimulationInterface/open-simulation-interface/blob/master/README.md\r\n *\r\n * @param header Header, bei dem die Version zurückgeliefert wird\r\n * @return OSI Major-Version des Headers\r\n */\r\nMODULE_API uint16_t osiph_get_osi_version_major(void * header);\r\n\r\n/**\r\n * Getter der OSI Minor-Version\r\n *\r\n * Versionierung siehe:\r\n * https://github.com/OpenSimulationInterface/open-simulation-interface/blob/master/README.md\r\n *\r\n * @param header Header, bei dem die Version zurückgeliefert wird\r\n * @return OSI Minor-Version des Headers\r\n */\r\nMODULE_API uint8_t osiph_get_osi_version_minor(void * header);\r\n\r\n/**\r\n * Getter der OSI Patch-Version\r\n *\r\n * Versionierung siehe:\r\n * https://github.com/OpenSimulationInterface/open-simulation-interface/blob/master/README.md\r\n *\r\n * @param header Header, bei dem die Version zurückgeliefert wird\r\n * @return OSI Patch-Version des Headers\r\n */\r\nMODULE_API uint8_t osiph_get_osi_version_patch(void * header);\r\n\r\n/**\r\n * Setter für OSI Payload Size\r\n *\r\n * @param header Header, bei dem die Version zurückgeliefert wird\r\n * @param size Payload Size\r\n */\r\nMODULE_API void osiph_set_payload_size(void * header, uint32_t 
size);\r\n\r\n/**\r\n * Getter für OSI Payload Size\r\n *\r\n * @param header Header, bei dem die Version zurückgeliefert wird\r\n * @return Payload Size\r\n */\r\nMODULE_API uint32_t osiph_get_payload_size(void * header);\r\n\r\n/**\r\n * Setter für OSI Payload Type\r\n *\r\n * @param header Header, bei dem die Version zurückgeliefert wird\r\n * @param type Payload Type\r\n */\r\nMODULE_API void osiph_set_payload_type(void * header, uint32_t type);\r\n\r\n/**\r\n * Getter für OSI Payload Type\r\n *\r\n * @param header Header, bei dem die Version zurückgeliefert wird\r\n * @return Payload Type\r\n */\r\nMODULE_API uint32_t osiph_get_payload_type(void * header);\r\n\r\n/**\r\n * Getter für OSI Payload\r\n *\r\n * @param header Header, bei dem die Version zurückgeliefert wird\r\n * @return Pointer auf OSI Payload\r\n */\r\nMODULE_API void * osiph_get_payload(void * header);\r\n\r\n#ifdef __cplusplus\r\n}\r\n#endif\r\n\r\n#endif // NETWORK_PROTOCOL_HEADER"
},
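The header file in the record above fully documents its wire format: a fixed 24-byte, big-endian layout (64-bit magic ID, 32-bit protocol version, a 16/8/8-bit OSI version triple, then 32-bit payload size and type). The following sketch is not part of the repository; it merely illustrates that documented layout from the Python side using `struct`. The constant values are copied from the macros in the header; the function names are hypothetical.

```python
import struct

# Constants taken from osi_protocol_header.h
OSI_MAGIC_ID = 4846245148911422785   # OSI_PROTOCOL_HEADER_MAGIC_ID
OSI_PROTOCOL_VERSION = 1             # OSI_PROTOCOL_HEADER_VERSION
OSI_SENSOR_DATA = 1                  # enum osi_payloadDataType

# Big-endian layout: magic(u64), protocol(u32), major(u16), minor(u8),
# patch(u8), payload_size(u32), payload_type(u32) -> 24 bytes total.
HEADER_FMT = ">QIHBBII"
HEADER_SIZE = struct.calcsize(HEADER_FMT)

def pack_frame(payload: bytes, major=3, minor=0, patch=1,
               ptype=OSI_SENSOR_DATA) -> bytes:
    """Prepend the static network-protocol header to an OSI payload."""
    return struct.pack(HEADER_FMT, OSI_MAGIC_ID, OSI_PROTOCOL_VERSION,
                       major, minor, patch, len(payload), ptype) + payload

def unpack_frame(frame: bytes):
    """Validate magic ID / protocol version, return (payload_type, payload)."""
    magic, proto, major, minor, patch, size, ptype = struct.unpack_from(HEADER_FMT, frame)
    if magic != OSI_MAGIC_ID or proto != OSI_PROTOCOL_VERSION:
        raise ValueError("not an OSI network-protocol frame")
    return ptype, frame[HEADER_SIZE:HEADER_SIZE + size]
```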
{
"alpha_fraction": 0.7977099418640137,
"alphanum_fraction": 0.7977099418640137,
"avg_line_length": 64.5,
"blob_id": "e05459c97525c158395569507c9a3610f4b281f6",
"content_id": "bed8251d0baaa36f48b0e4fcf64199f4378191c3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 1048,
"license_type": "no_license",
"max_line_length": 102,
"num_lines": 16,
"path": "/build/object_list/CMakeFiles/object_list_generate_messages_nodejs.dir/cmake_clean.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "file(REMOVE_RECURSE\n \"CMakeFiles/object_list_generate_messages_nodejs\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/share/gennodejs/ros/object_list/msg/Features.js\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/share/gennodejs/ros/object_list/msg/Geometric.js\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/share/gennodejs/ros/object_list/msg/SensorProperty.js\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/share/gennodejs/ros/object_list/msg/ObjectList.js\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/share/gennodejs/ros/object_list/msg/Classification.js\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/share/gennodejs/ros/object_list/msg/ObjectsList.js\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/share/gennodejs/ros/object_list/msg/Dimension.js\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/share/gennodejs/ros/object_list/msg/EgoData.js\"\n)\n\n# Per-language clean rules from dependency scanning.\nforeach(lang )\n include(CMakeFiles/object_list_generate_messages_nodejs.dir/cmake_clean_${lang}.cmake OPTIONAL)\nendforeach()\n"
},
{
"alpha_fraction": 0.7960288524627686,
"alphanum_fraction": 0.798435628414154,
"avg_line_length": 47.882354736328125,
"blob_id": "2fc625094e7c1a22fd5c8d481d06cd5f928e49c3",
"content_id": "4d094d3b6592456cf5eb7b46a3544d21ffeae314",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 1662,
"license_type": "no_license",
"max_line_length": 131,
"num_lines": 34,
"path": "/build/catkin_generated/order_packages.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "# generated from catkin/cmake/em/order_packages.cmake.em\n\nset(CATKIN_ORDERED_PACKAGES \"\")\nset(CATKIN_ORDERED_PACKAGE_PATHS \"\")\nset(CATKIN_ORDERED_PACKAGES_IS_META \"\")\nset(CATKIN_ORDERED_PACKAGES_BUILD_TYPE \"\")\nlist(APPEND CATKIN_ORDERED_PACKAGES \"osi3_bridge\")\nlist(APPEND CATKIN_ORDERED_PACKAGE_PATHS \"osi3_bridge\")\nlist(APPEND CATKIN_ORDERED_PACKAGES_IS_META \"False\")\nlist(APPEND CATKIN_ORDERED_PACKAGES_BUILD_TYPE \"catkin\")\nlist(APPEND CATKIN_ORDERED_PACKAGES \"object_list\")\nlist(APPEND CATKIN_ORDERED_PACKAGE_PATHS \"object_list\")\nlist(APPEND CATKIN_ORDERED_PACKAGES_IS_META \"False\")\nlist(APPEND CATKIN_ORDERED_PACKAGES_BUILD_TYPE \"catkin\")\nlist(APPEND CATKIN_ORDERED_PACKAGES \"sensor_model\")\nlist(APPEND CATKIN_ORDERED_PACKAGE_PATHS \"sensor_model\")\nlist(APPEND CATKIN_ORDERED_PACKAGES_IS_META \"False\")\nlist(APPEND CATKIN_ORDERED_PACKAGES_BUILD_TYPE \"catkin\")\nlist(APPEND CATKIN_ORDERED_PACKAGES \"vehicle_control\")\nlist(APPEND CATKIN_ORDERED_PACKAGE_PATHS \"vehicle_control\")\nlist(APPEND CATKIN_ORDERED_PACKAGES_IS_META \"False\")\nlist(APPEND CATKIN_ORDERED_PACKAGES_BUILD_TYPE \"catkin\")\nlist(APPEND CATKIN_ORDERED_PACKAGES \"aeb\")\nlist(APPEND CATKIN_ORDERED_PACKAGE_PATHS \"aeb\")\nlist(APPEND CATKIN_ORDERED_PACKAGES_IS_META \"False\")\nlist(APPEND CATKIN_ORDERED_PACKAGES_BUILD_TYPE \"catkin\")\nlist(APPEND CATKIN_ORDERED_PACKAGES \"fusion\")\nlist(APPEND CATKIN_ORDERED_PACKAGE_PATHS \"fusion\")\nlist(APPEND CATKIN_ORDERED_PACKAGES_IS_META \"False\")\nlist(APPEND CATKIN_ORDERED_PACKAGES_BUILD_TYPE \"catkin\")\n\nset(CATKIN_MESSAGE_GENERATORS )\n\nset(CATKIN_METAPACKAGE_CMAKE_TEMPLATE \"/home/student/.local/lib/python2.7/site-packages/catkin_pkg/templates/metapackage.cmake.in\")\n"
},
{
"alpha_fraction": 0.7880184054374695,
"alphanum_fraction": 0.8018433451652527,
"avg_line_length": 53.25,
"blob_id": "d31266543362cf8b51fba5b844d2cd1225317b31",
"content_id": "fefbc4d2eec35348648911e7d4a14179111e80ee",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 217,
"license_type": "no_license",
"max_line_length": 94,
"num_lines": 4,
"path": "/devel/share/osi3_bridge/cmake/osi3_bridge-msg-paths.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "# generated from genmsg/cmake/pkg-msg-paths.cmake.develspace.in\n\nset(osi3_bridge_MSG_INCLUDE_DIRS \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg\")\nset(osi3_bridge_MSG_DEPENDENCIES geometry_msgs;std_msgs)\n"
},
{
"alpha_fraction": 0.5704734921455383,
"alphanum_fraction": 0.5864583849906921,
"avg_line_length": 38.927860260009766,
"blob_id": "682f00ecd253c397e921446eb313d1272daa3d83",
"content_id": "6da42b5f7de8a13c5423f0ca7dacd073581f2e8e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 16453,
"license_type": "no_license",
"max_line_length": 193,
"num_lines": 402,
"path": "/src/fusion/src/Association.py",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Sun Dec 13 09:13:52 2020\r\n\r\n@author: Redge\r\n\"\"\"\r\n\r\nimport numpy as np\r\nimport rospy\r\nimport math\r\n\r\nimport message_filters\r\nfrom scipy.spatial import distance as di\r\nfrom scipy.stats import chi2\r\nfrom scipy.linalg import sqrtm\r\nfrom object_list.msg import ObjectList, ObjectsList\r\nfrom osi3_bridge.msg import GroundTruthMovingObjects, TrafficUpdateMovingObject\r\n\r\nimport sys\r\n# import function\r\n#from rotate import rotate\r\nfrom scipy.spatial import distance\r\n\r\nclass Features:\r\n def __init__(self):\r\n\r\n self.FL = 0.0\r\n self.FM = 0.0\r\n self.FR = 0.0\r\n self.MR = 0.0\r\n self.RR = 0.0\r\n self.RM = 0.0\r\n self.RL = 0.0\r\n self.ML = 0.0\r\nclass Pos:\r\n def __init__(self):\r\n self.y = 0 # m\r\n self.x = 0 # m\r\n\r\nclass Vel:\r\n def __init__(self):\r\n self.y = 0 # m/s\r\n self.x = 0 # m/s\r\n\r\nclass Acc:\r\n def __init__(self):\r\n self.y = 0 # m/s^2\r\n self.x = 0 # m/s^2\r\nclass Ego:\r\n def __init__(self):\r\n self.pos = Pos()\r\n self.pos.x = 0\r\n self.pos.y = 0\r\n self.vel = Vel()\r\n self.vel.x = 0\r\n self.vel.y = 0\r\n self.acc = Acc()\r\n self.acc.x = 0\r\n self.acc.y = 0\r\n self.neworientation = 0\r\n self.oldorientation = 0\r\n self.oldyaw = 0\r\n self.newyaw = 0\r\n self.yawrate = 0\r\n self.testyaw = 0\r\n self.testyawrate = 0\r\n self.t = 0\r\n\r\ndef feature_select(global_obj, sensor_obj):\r\n\r\n #glob_feat_x,glob_feat_y = calculate_x_y(global_obj)\r\n #sens_feat_x,sens_feat_y = calculate_x_y(sensor_obj)\r\n\r\n glob_feat_x, glob_feat_y = calculate_features(global_obj)\r\n sens_feat_x, sens_feat_y = calculate_features(sensor_obj)\r\n\r\n #scenario: 1 for common corner,2 for common side feature, 3 for features on common side, features unrelated\r\n if global_obj.features.FL == 1 and sensor_obj.features.FL == 1:\r\n scenario = 1\r\n globalxf = glob_feat_x.FL\r\n globalyf = glob_feat_y.FL\r\n sensorxf = sens_feat_x.FL\r\n sensoryf = sens_feat_y.FL\r\n geometric = [0,0]\r\n elif global_obj.features.FR == 1 and sensor_obj.features.FR == 1:\r\n scenario = 1\r\n globalxf = glob_feat_x.FR\r\n globalyf = glob_feat_y.FR\r\n sensorxf = sens_feat_x.FR\r\n sensoryf = sens_feat_y.FR\r\n geometric = [0,0]\r\n elif global_obj.features.RL == 1 and sensor_obj.features.RL == 1:\r\n scenario = 1\r\n globalxf = glob_feat_x.RL\r\n globalyf = glob_feat_y.RL\r\n sensorxf = sens_feat_x.RL\r\n sensoryf = sens_feat_y.RL\r\n geometric = [0,0]\r\n elif global_obj.features.RR == 1 and sensor_obj.features.RR == 1:\r\n scenario = 1\r\n globalxf = glob_feat_x.RR\r\n globalyf = glob_feat_y.RR\r\n sensorxf = sens_feat_x.RR\r\n sensoryf = sens_feat_y.RR\r\n geometric = [0,0]\r\n elif global_obj.features.FM == 1 and sensor_obj.features.FM == 1:\r\n scenario = 2\r\n if abs(global_obj.dimension.width - global_obj.dimension.width) < (np.sqrt(global_obj.dimension.width_variance) + np.sqrt(sensor_obj.dimension.width_variance)):\r\n globalxf = glob_feat_x.FM\r\n globalyf = glob_feat_y.FM\r\n sensorxf = sens_feat_x.FM\r\n sensoryf = sens_feat_y.FM\r\n geometric = [0,0]\r\n\r\n else :\r\n globalxf = glob_feat_x.FM\r\n globalyf = glob_feat_y.FM\r\n sensorxf = sens_feat_x.FM\r\n sensoryf = sens_feat_y.FM\r\n geometric = [0,1]\r\n\r\n elif global_obj.features.RM == 1 and sensor_obj.features.RM == 1:\r\n scenario = 2\r\n if abs(global_obj.dimension.width - sensor_obj.dimension.width) < (\r\n np.sqrt(global_obj.dimension.width_variance) + np.sqrt(sensor_obj.dimension.width_variance)):\r\n globalxf = 
glob_feat_x.RM\r\n globalyf = glob_feat_y.RM\r\n sensorxf = sens_feat_x.RM\r\n sensoryf = sens_feat_y.RM\r\n geometric = [0, 0]\r\n\r\n else:\r\n globalxf = glob_feat_x.RM\r\n globalyf = glob_feat_y.RM\r\n sensorxf = sens_feat_x.RM\r\n sensoryf = sens_feat_y.RM\r\n geometric = [0,1]\r\n\r\n elif global_obj.features.ML == 1 and sensor_obj.features.ML == 1:\r\n scenario = 2\r\n if abs(global_obj.dimension.length - sensor_obj.dimension.length) < (\r\n np.sqrt(global_obj.dimension.length_variance) + np.sqrt(sensor_obj.dimension.length_variance)):\r\n globalxf = glob_feat_x.ML\r\n globalyf = glob_feat_y.ML\r\n sensorxf = sens_feat_x.ML\r\n sensoryf = sens_feat_y.ML\r\n geometric = [1,0]\r\n\r\n else:\r\n globalxf = glob_feat_x.ML\r\n globalyf = glob_feat_y.ML\r\n sensorxf = sens_feat_x.ML\r\n sensoryf = sens_feat_y.ML\r\n geometric = [1,0]\r\n elif global_obj.features.MR == 1 and sensor_obj.features.MR == 1:\r\n scenario = 2\r\n if abs(global_obj.dimension.length - sensor_obj.dimension.length) < (\r\n np.sqrt(global_obj.dimension.length_variance) + np.sqrt(sensor_obj.dimension.length_variance)):\r\n globalxf = glob_feat_x.MR\r\n globalyf = glob_feat_y.MR\r\n sensorxf = sens_feat_x.MR\r\n sensoryf = sens_feat_y.MR\r\n geometric = [0, 1]\r\n\r\n else:\r\n globalxf = glob_feat_x.MR\r\n globalyf = glob_feat_y.MR\r\n sensorxf = sens_feat_x.MR\r\n sensoryf = sens_feat_y.MR\r\n geometric = [0, 1]\r\n elif global_obj.features.FL == 1 and sensor_obj.features.FR == 1:\r\n scenario = 3\r\n globalxf = glob_feat_x.FL\r\n globalyf = glob_feat_y.FL\r\n sensorxf = sens_feat_x.FR\r\n sensoryf = sens_feat_y.FR\r\n geometric = [0, 1] # delete y elements since only x is common\r\n elif global_obj.features.FL == 1 and sensor_obj.features.RL == 1:\r\n scenario = 3\r\n globalxf = glob_feat_x.FL\r\n globalyf = glob_feat_y.FL\r\n sensorxf = sens_feat_x.RL\r\n sensoryf = sens_feat_y.RL\r\n geometric = [1, 0] # delete x elements since only y is common\r\n\r\n elif global_obj.features.FR == 1 and sensor_obj.features.FL == 1:\r\n scenario = 3\r\n globalxf = glob_feat_x.FR\r\n globalyf = glob_feat_y.FR\r\n sensorxf = sens_feat_x.FL\r\n sensoryf = sens_feat_y.FL\r\n geometric = [0,1] # delete y elements since only x is common\r\n elif global_obj.features.FR == 1 and sensor_obj.features.RR == 1:\r\n scenario = 3\r\n globalxf = glob_feat_x.FR\r\n globalyf = glob_feat_y.FR\r\n sensorxf = sens_feat_x.RR\r\n sensoryf = sens_feat_y.RR\r\n geometric = [1, 0] # delete x elements since only y is common\r\n elif global_obj.features.RR == 1 and sensor_obj.features.FR == 1:\r\n scenario = 3\r\n globalxf = glob_feat_x.RR\r\n globalyf = glob_feat_y.RR\r\n sensorxf = sens_feat_x.RR\r\n sensoryf = sens_feat_x.RR\r\n geometric = [1, 0] # delete x elements since only y is common\r\n elif global_obj.features.RR == 1 and sensor_obj.features.RL == 1:\r\n scenario = 3\r\n globalxf = glob_feat_x.RR\r\n globalyf = glob_feat_y.RR\r\n sensorxf = sens_feat_x.RL\r\n sensoryf = sens_feat_y.RL\r\n geometric = [0, 1] # delete y elements since only x is common\r\n elif global_obj.features.RL == 1 and sensor_obj.features.RR == 1:\r\n scenario = 3\r\n globalxf = glob_feat_x.RL\r\n globalyf = glob_feat_y.RL\r\n sensorxf = sens_feat_x.RR\r\n sensoryf = sens_feat_y.RR\r\n geometric = [0, 1] # delete y elements since only x is common\r\n elif global_obj.features.RL == 1 and sensor_obj.features.FL == 1:\r\n scenario = 3\r\n globalxf = glob_feat_x.RL\r\n globalyf = glob_feat_y.RL\r\n sensorxf = sens_feat_x.FL\r\n sensoryf = sens_feat_y.FL\r\n geometric = [1, 0] # 
delete x elements since only y is common\r\n else:\r\n scenario = 4\r\n #return all feature coordinates\r\n globalxf = glob_feat_x\r\n globalyf = glob_feat_y\r\n sensorxf = sens_feat_x\r\n sensoryf = sens_feat_y\r\n geometric = [0, 0]\r\n\r\n return [scenario,globalxf,globalyf,sensorxf,sensoryf,geometric]\r\n\r\ndef calculate_features(obj):\r\n\r\n obj_list = ObjectList()\r\n features = obj_list.features # import a all features as bool\r\n x = Features() # Import a class with all features as float 0.0\r\n y = Features()\r\n # Version 1 do not include the verification of the hided features - Just take into account if it is inside the FOV\r\n\r\n # Calculate the features\r\n features_check = 0\r\n\r\n tg_wl = math.atan(obj.dimension.width / obj.dimension.length)\r\n hip_wl = 0.5 * math.sqrt(obj.dimension.width ** 2 + obj.dimension.length ** 2)\r\n beta = obj.geometric.yaw - tg_wl\r\n psi = obj.geometric.yaw + tg_wl\r\n\r\n x.FL = obj.geometric.x + hip_wl * math.cos(psi)\r\n y.FL = obj.geometric.y + hip_wl * math.sin(psi)\r\n\r\n #[features.FL, features_check] = evaluate_feature(x.FL, y.FL, sens, features_check)\r\n\r\n x.FR = obj.geometric.x + hip_wl * math.cos(beta)\r\n y.FR = obj.geometric.y + hip_wl * math.sin(beta)\r\n\r\n #[features.FR, features_check] = evaluate_feature(x.FR, y.FR, sens, features_check)\r\n\r\n x.RR = obj.geometric.x - hip_wl * math.cos(-psi)\r\n y.RR = obj.geometric.y - hip_wl * math.sin(psi)\r\n # [features.RR, features_check] = evaluate_feature(x.RR, y.RR, sens, features_check)\r\n\r\n x.RL = obj.geometric.x - hip_wl * math.cos(-beta)\r\n y.RL = obj.geometric.y - hip_wl * math.sin(beta)\r\n # [features.RL, features_check] = evaluate_feature(x.RL, y.RL, sens, features_check)\r\n\r\n x.FM = (x.FR + x.FL) / 2\r\n y.FM = (y.FR + y.FL) / 2\r\n # [features.FM, features_check] = evaluate_feature(x.FM, y.FM, sens, features_check)\r\n\r\n x.ML = (x.RL + x.FL) / 2\r\n y.ML = (y.RL + y.FL) / 2\r\n # [features.ML, features_check] = evaluate_feature(x.ML, y.ML, sens, features_check)\r\n\r\n x.MR = (x.RR + x.FR) / 2\r\n y.MR = (y.RR + y.FR) / 2\r\n # [features.MR, features_check] = evaluate_feature(x.MR, y.MR, sens, features_check)\r\n\r\n x.RM = (x.RR + x.RL) / 2\r\n y.RM = (y.RR + y.RL) / 2\r\n # [features.RM, features_check] = evaluate_feature(x.RM, y.RM, sens, features_check)\r\n\r\n #if more than two features are availabel\r\n X = np.asarray( [x.FL, x.FM, x.FR, x.MR, x.RR, x.RM, x.RL, x.ML]) # Vector of x position for the Features\r\n Y = np.asarray( [y.FL, y.FM, y.FR, y.MR, y.RR, y.RM, y.RL, y.ML]) # Vector of y position for the Features\r\n #FOV_features = np.asarray([features.FL, features.FM, features.FR, features.MR, features.RR, features.RM, features.RL, features.ML]) # Vector of y position for the Features\r\n #hidden_features = evaluate_hidden_features(X,Y)\r\n\r\n #features_list = FOV_features*hidden_features\r\n\r\n\r\n #[features.FL, features.FM, features.FR, features.MR, features.RR, features.RM, features.RL, features.ML] = features_list\r\n\r\n #plt.plot(y.RR, x.RR, 'g^', y.RL, x.RL, 'go', y.FR, x.FR, 'r^', y.FL, x.FL, 'ro', y.FM, x.FM, 'rs', y.ML, x.ML, 'bo', y.MR, x.MR, 'b^', y.RM, x.RM, 'gs' )\r\n #plt.show()\r\n\r\n return x,y\r\n\r\ndef statistical_distance(sensor_association_state, global_association_state, sensor_covariance, global_covariance):\r\n \"\"\"\r\n Fusion to calculate the statistical distance between the sensor object and global objects NOT based on feature points .\r\n calculated based on Central coordinates\r\n (FOR TESTING ONLY)\r\n\r\n 
\"\"\"\r\n\r\n C = np.array([[1, 0], [0, 1]])\r\n c_m = np.array([[0.1, 0], [0, 0.1]])\r\n innov_cov = (C.dot(sensor_covariance)).dot(C.transpose()) + c_m\r\n distance = di.mahalanobis(sensor_association_state, global_association_state, innov_cov)\r\n threshold = chi2.ppf(0.95, len(sensor_association_state))\r\n return distance,threshold\r\n\r\n\r\ndef get_statistical_distance(scenario, globalxf, globalyf, sensorxf, sensoryf, geometric, sensor_covariance, global_covariance):\r\n #from scipy.spatial import distance\r\n \"\"\"\r\n Fusion to calculate the statistical distance between the sensor object and global objects based on feature points .\r\n\r\n \"\"\"\r\n\r\n C = np.array([[1, 0], [0, 1]])\r\n c_m = np.array([[0.1, 0], [0, 0.1]])\r\n innov_cov = (C.dot(sensor_covariance)).dot(C.transpose()) + c_m\r\n\r\n\r\n\r\n\r\n if scenario == 1:\r\n global_association_state = np.array([[globalxf],[globalyf]])\r\n sensor_association_state = np.array([[sensorxf],[sensoryf]])\r\n distance = distance.mahalanobis(sensor_association_state, global_association_state, innov_cov) #+ 2 * np.log(\r\n #np.sqrt(np.linalg.det(innov_cov)))\r\n threshold = chi2.ppf(0.05, len(sensor_association_state))\r\n elif scenario == 2:\r\n if geometric[0] == 0 and geometric[1] == 0:\r\n global_association_state = np.array([[globalxf], [globalyf]])\r\n sensor_association_state = np.array([[sensorxf],[sensoryf]])\r\n elif geometric[0] == 1:\r\n global_association_state = float(globalyf)\r\n sensor_association_state = float(sensoryf)\r\n sensor_covariance =sensor_covariance[1,1]\r\n global_covariance = global_covariance[1,1]\r\n innov_cov = sensor_covariance + 0.1\r\n elif geometric[1] == 1:\r\n global_association_state = float(globalxf)\r\n sensor_association_state = float(sensorxf)\r\n sensor_covariance = sensor_covariance[0, 0]\r\n global_covariance = global_covariance[0, 0]\r\n innov_cov = sensor_covariance + 0.1\r\n distance = distance.mahalanobis(sensor_association_state, global_association_state, innov_cov)# + 2 * np.log(\r\n #np.sqrt(np.linalg.det(innov_cov)))\r\n try:\r\n threshold = chi2.ppf(0.95, len(sensor_association_state))\r\n except:\r\n threshold = chi2.ppf(0.95, 2)\r\n elif scenario == 3:\r\n if geometric[0] == 1:\r\n global_association_state = float(globalyf)\r\n sensor_association_state = float(sensoryf)\r\n sensor_covariance = sensor_covariance[1, 1]\r\n global_covariance = global_covariance[1, 1]\r\n innov_cov = sensor_covariance + 0.1\r\n elif geometric[1] == 1:\r\n global_association_state = float(globalxf)\r\n sensor_association_state = float(sensorxf)\r\n sensor_covariance = sensor_covariance[0, 0]\r\n global_covariance = global_covariance[0, 0]\r\n innov_cov = sensor_covariance + 0.1\r\n distance = distance.mahalanobis(sensor_association_state, global_association_state, innov_cov) #+ 2 * np.log(np.sqrt(innov_cov))\r\n\r\n try:\r\n threshold = chi2.ppf(0.95, len(sensor_association_state))\r\n except:\r\n threshold = chi2.ppf(0.95, 1)\r\n else:\r\n #globalx = [globalxf.FL, globalxf.FM, globalxf.FR, globalxf.MR, globalxf.RR, globalxf.RM, globalxf.RL,globalxf.ML] # Vector of x position for the Features / List of objects y(features)\r\n globalx = [globalxf.FL, globalxf.FM, globalxf.FR, globalxf.MR, globalxf.RR, globalxf.RM, globalxf.RL,globalxf.ML] # Vector of x position for the Features / List of objects y(features)\r\n globaly = [globalyf.FL, globalyf.FM, globalyf.FR, globalyf.MR, globalyf.RR, globalyf.RM, globalyf.RL, globalyf.ML] # Vector of y position for the Features\r\n\r\n sensorx = [sensorxf.FL, 
sensorxf.FM, sensorxf.FR, sensorxf.MR, sensorxf.RR, sensorxf.RM, sensorxf.RL,\r\n sensorxf.ML] # Vector of x position for the Features / List of objects y(features)\r\n sensory = [sensoryf.FL, sensoryf.FM, sensoryf.FR, sensoryf.MR, sensoryf.RR, sensoryf.RM, sensoryf.RL,\r\n sensoryf.ML]\r\n distance = 9999999\r\n for i in range(len(globalx)):\r\n global_association_state = np.array([[globalx[i]], [globaly[i]]])\r\n sensor_association_state = np.array([[sensorx[i]], [sensory[i]]])\r\n innov_cov = (C.dot(sensor_covariance)).dot(C.transpose()) + c_m\r\n\r\n d = distance.mahalanobis(sensor_association_state, global_association_state, innov_cov) #+ 2 * np.log(\r\n #np.sqrt(np.linalg.det(innov_cov)))\r\n if d < distance:\r\n distance = d\r\n threshold = chi2.ppf(0.95, len(sensor_association_state))\r\n return(distance, threshold)\r\n"
},
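As context for the association logic in the record above: every branch of `get_statistical_distance` reduces to the same gate — map the sensor covariance through the measurement matrix C, add the measurement noise c_m, take the Mahalanobis distance between the paired feature states, and compare against a chi-square quantile. A stripped-down sketch of that gate follows. It is illustrative only, not repository code; note that SciPy's `mahalanobis` expects 1-D vectors and the *inverse* of the covariance matrix, which the sketch handles explicitly.

```python
import numpy as np
from scipy.spatial import distance as di
from scipy.stats import chi2

def gate(sensor_state, global_state, sensor_cov, p=0.95):
    """Chi-square gating of one sensor/global pairing (2-D position)."""
    C = np.eye(2)                  # measurement matrix, as in the code above
    c_m = np.eye(2) * 0.1          # measurement noise, as in the code above
    innov_cov = C @ sensor_cov @ C.T + c_m
    # SciPy wants flat vectors and the inverse innovation covariance.
    d = di.mahalanobis(np.ravel(sensor_state), np.ravel(global_state),
                       np.linalg.inv(innov_cov))
    threshold = np.sqrt(chi2.ppf(p, df=2))   # gate on distance, not squared distance
    return d, threshold, d < threshold
```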
{
"alpha_fraction": 0.7924944758415222,
"alphanum_fraction": 0.7924944758415222,
"avg_line_length": 55.625,
"blob_id": "d46c9e8e9932cd02352c41136eee4c480e9b1384",
"content_id": "2f867ae13eac8fa2f62ae55fe334ed37f459d016",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 906,
"license_type": "no_license",
"max_line_length": 94,
"num_lines": 16,
"path": "/build/object_list/CMakeFiles/object_list_generate_messages_cpp.dir/cmake_clean.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "file(REMOVE_RECURSE\n \"CMakeFiles/object_list_generate_messages_cpp\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/include/object_list/Features.h\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/include/object_list/Geometric.h\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/include/object_list/SensorProperty.h\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/include/object_list/ObjectList.h\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/include/object_list/Classification.h\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/include/object_list/ObjectsList.h\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/include/object_list/Dimension.h\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/include/object_list/EgoData.h\"\n)\n\n# Per-language clean rules from dependency scanning.\nforeach(lang )\n include(CMakeFiles/object_list_generate_messages_cpp.dir/cmake_clean_${lang}.cmake OPTIONAL)\nendforeach()\n"
},
{
"alpha_fraction": 0.7863045930862427,
"alphanum_fraction": 0.7981109619140625,
"avg_line_length": 59.5,
"blob_id": "34cf397a48966ff5783711160888e773dd06ada9",
"content_id": "533c731ab4313eb0523d7d7d8cf5e59f5392db29",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 847,
"license_type": "no_license",
"max_line_length": 109,
"num_lines": 14,
"path": "/build/osi3_bridge/CMakeFiles/osi3_bridge_generate_messages_eus.dir/cmake_clean.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "file(REMOVE_RECURSE\n \"CMakeFiles/osi3_bridge_generate_messages_eus\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/share/roseus/ros/osi3_bridge/msg/GroundTruthMovingObjects.l\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/share/roseus/ros/osi3_bridge/msg/MovingObject.l\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/share/roseus/ros/osi3_bridge/msg/Dimension3d.l\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/share/roseus/ros/osi3_bridge/msg/TrafficUpdateMovingObject.l\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/share/roseus/ros/osi3_bridge/msg/Orientation3d.l\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/share/roseus/ros/osi3_bridge/manifest.l\"\n)\n\n# Per-language clean rules from dependency scanning.\nforeach(lang )\n include(CMakeFiles/osi3_bridge_generate_messages_eus.dir/cmake_clean_${lang}.cmake OPTIONAL)\nendforeach()\n"
},
{
"alpha_fraction": 0.5118794441223145,
"alphanum_fraction": 0.5356382727622986,
"avg_line_length": 49.93584060668945,
"blob_id": "878d719f02b6a8e674db83c3c574658fdb949fa7",
"content_id": "b4fb6cde39ba7e57ba6f2c495b6c8bd188d93125",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 23023,
"license_type": "no_license",
"max_line_length": 407,
"num_lines": 452,
"path": "/src/sensor_model/scripts/sensor2Ego_2.py",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\nimport numpy as np\nimport rospy\nimport math\nimport message_filters\n\n## Import Funtions\nfrom rotate import rotate\n\n## Import Objects\nfrom ClassKF import KF , rotatedata,Prob\nfrom ClassSens import Sens , Ego\n\n# import all necessary ROS messages\nfrom object_list.msg import ObjectList, ObjectsList\nfrom osi3_bridge.msg import GroundTruthMovingObjects, TrafficUpdateMovingObject\n\ncount1 = 0 # TO CALCULATE TIME - Objectlist related\negocount=0 # counter to calculate ego yawrate\ntime =0\n\nKFlist=[] # List of all objects KF (Every object in OL \"Object_managment\")\nrotateinfo=[] # List of all objects rotatedata (Every object in OL \"Object_managment\")\n\negoveh = Ego()\n\n\n#create a list o update kalman parameters\noldKFlist=[]\nprob_tracker = []\nfor i in range(1000):\n a = KF()\n b=rotatedata()\n KFlist.append(a)\n rotateinfo.append(b)\n oldKFlist.append(a)\n c=Prob()\n prob_tracker.append(c)\ndef sensor_rotate():\n # Node initialization\n\n rospy.init_node('sensor2ego', anonymous=False) # Start node\n rospy.Rate(rospy.get_param(\"freq\"))\n\n # subscribe to sensor data and ego data with time synchronization\n ego_data = message_filters.Subscriber('/ego_data', TrafficUpdateMovingObject) #maikol pls confirm msg name from matlab function\n objs_list = message_filters.Subscriber('objs_list', ObjectsList)\n #ts = message_filters.ApproximateTimeSynchronizer([ego_data, objs_list],10,0.1)\n ts = message_filters.TimeSynchronizer([ego_data, objs_list], 30)\n ts.registerCallback(callback)\n\n rospy.spin()\n\ndef callback(ego_data,objs_list):\n global egoveh # object containing ego parameters\n global egocount\n global time # Time step\n tic = rospy.Time.now()\n # provide ego data parameters to global variable ego_veh\n egoveh.vel.x = ego_data.object.velocity.x\n egoveh.vel.y = ego_data.object.velocity.y\n egoveh.acc.x = ego_data.object.acceleration.x\n egoveh.acc.y = ego_data.object.acceleration.y\n # calculate ego yaw (how much the car has rotated since last measurement in egoveh.newyaw)\n if egocount == 0 :\n egoveh.neworientation = ego_data.object.orientation.yaw\n egocount+=1\n egoveh.testyaw = egoveh.neworientation\n egoveh.newyaw = 0\n else:\n egoveh.oldorientation = egoveh.neworientation\n egoveh.neworientation = ego_data.object.orientation.yaw\n egoveh.newyaw = egoveh.oldorientation-egoveh.neworientation\n\n\n ## Start the ROS publisher\n pub=rospy.Publisher('obj_list_egoframe', ObjectsList, queue_size=10,latch=True)\n\n #Apply kalman filter to sensor data if not an ideal sensorori\n if rospy.get_param(\"sensortype\") != 5:\n #apply kalman filter function\n filtered_objs_list = kalman(objs_list)\n filtered_objs_list = calculate_probability(filtered_objs_list)\n # rotate objects list from sensor to ego frame\n new_objs_list = vector_rotate(filtered_objs_list)\n else:\n\n # rotate objects list from sensor to ego frame\n new_objs_list = vector_rotate(objs_list)\n for i, a in enumerate(objs_list.obj_list):\n #change relative to absolute velocities\n a.geometric.vx += egoveh.vel.x\n a.geometric.vy += egoveh.vel.y\n a.geometric.ax += egoveh.acc.x\n a.geometric.ay += egoveh.acc.y\n #for i , obj in enumerate(objs_list.obj_list):\n # if np.sqrt(np.square(obj.geometric.vx) + np.square(obj.geometric.vy)) > (rospy.get_param('posxerr')*2*rospy.get_param('freq')):\n # obj.prop_mov = 70\n\n toc = rospy.Time.now()\n time = toc.to_sec() - tic.to_sec()\n print('Sensor to EGO time', time)\n pub.publish(new_objs_list)\n\ndef vector_rotate(objs_list):\n #rotate 
objects list from sensor to ego frame\n global egoveh # object containing ego parameters\n global new #current objects list time\n global old # previous object list time\n global time # time step\n global count1 #count for time step calculation\n #global count\n global old_objs #previous objects list\n\n #calculate time\n if count1 ==0:\n new = objs_list.header.stamp.to_sec()\n time = 0.1\n count1 +=1\n else:\n old = new\n new = objs_list.header.stamp.to_sec()\n time = float((new-old))\n\n sens = Sens()\n\n for i, a in enumerate(objs_list.obj_list):\n objs_list.header.frame_id = \"EGOframe\"\n a.time = objs_list.header.stamp.to_sec()\n\n ## Change the sensor position/velocity/acc origin from sensor to ego\n [a.geometric.x, a.geometric.y] = rotate(a.geometric.x, a.geometric.y, sens.rot.yaw)\n [a.geometric.vx, a.geometric.vy] = rotate(a.geometric.vx, a.geometric.vy, sens.rot.yaw)\n [a.geometric.ax, a.geometric.ay] = rotate(a.geometric.ax, a.geometric.ay, sens.rot.yaw)\n a.geometric.x = a.geometric.x + sens.pos.x\n a.geometric.y = a.geometric.y + sens.pos.y\n a.geometric.yaw += sens.rot.yaw #confirm + or -\n return objs_list\n\n\n\ndef kalman(objs_list):\n global KF # kalman filter class object\n global oldtime\n global newtime\n global egoveh # object containing ego parameters\n global oldob # old objects list\n global xnm1 # current state vector x[n|n-1] estimate given all previous measurements (n = time instance)\n\n yaw = egoveh.newyaw\n #yaw = 0\n if rospy.get_param(\"sensortype\") == 1:\n #For Camera\n for i,a in enumerate(objs_list.obj_list):\n x = KFlist[a.obj_id]\n\n if x.track == 0: ## Element of Class KF and Work Similar to a counter\n x.track = 1\n x.newtime= objs_list.header.stamp.to_sec()\n x.oldtime= x.newtime\n t = 0.1\n\n #kinematic model #trying const yaw rate model\n x.a = np.array([[np.cos(yaw),t*np.cos(yaw),t*t*np.cos(yaw)/2,np.sin(yaw),t*np.sin(yaw),t*t*np.sin(yaw)/2],[0,np.cos(yaw),t*np.cos(yaw),0,np.sin(yaw),t*np.sin(yaw)],[0,0,np.cos(yaw),0,0,np.sin(yaw)],[-np.sin(yaw),-t*np.sin(yaw),-t*t*np.sin(yaw)/2,np.cos(yaw),t*np.cos(yaw),t*t*np.cos(yaw)/2],[0,-np.sin(yaw),-t*np.sin(yaw),0,np.cos(yaw),t*np.cos(yaw)],[0,0,-np.sin(yaw),0,0,np.cos(yaw)]])\n x.b = np.array([[-t*np.cos(yaw),-t*t*np.cos(yaw)/2,-t*np.sin(yaw),-t*t*np.sin(yaw)/2],[0,0,0,0],[0,0,0,0],[t*np.sin(yaw),t*t*np.sin(yaw),-t*np.cos(yaw),-t*t*np.cos(yaw)],[0,0,0,0],[0,0,0,0]])\n #state vector estimate givel all measurememnts including preent measurement\n x.xnn = np.array([[a.geometric.x], [a.geometric.vx], [a.geometric.ax], [a.geometric.y],[a.geometric.vy],[a.geometric.ay]])\n #error model #trying const yaw rate model\n x.g = np.array([[t * t * t * np.cos(yaw) / 6, -t * t * t * np.cos(yaw) / 6, t * t * t * np.sin(yaw) / 6,\n -t * t * t * np.sin(yaw) / 6],\n [t * t * np.cos(yaw) / 2, 0, t * t * np.sin(yaw) / 2, 0],\n [t * np.cos(yaw), 0, t * np.sin(yaw), 0],\n [-t * t * t * np.sin(yaw) / 6, t * t * t * np.sin(yaw) / 6,\n -t * t * t * np.cos(yaw) / 6, -t * t * t * np.cos(yaw) / 6],\n [-t * t * np.sin(yaw) / 2, 0, t * t * np.cos(yaw) / 2, 0],\n [-t * np.sin(yaw), 0, t * np.cos(yaw), 0]])\n x.u = np.array([[egoveh.vel.x], [egoveh.acc.x],[egoveh.vel.y],[egoveh.acc.y]])\n else:\n x.newtime = objs_list.header.stamp.to_sec()\n t = x.newtime-x.oldtime\n if t >= 1.5:\n x=KF()\n x.track = 1\n x.newtime= objs_list.header.stamp.to_sec()\n t=0.1\n x.a = 
np.array([[np.cos(yaw),t*np.cos(yaw),t*t*np.cos(yaw)/2,np.sin(yaw),t*np.sin(yaw),t*t*np.sin(yaw)/2],[0,np.cos(yaw),t*np.cos(yaw),0,np.sin(yaw),t*np.sin(yaw)],[0,0,np.cos(yaw),0,0,np.sin(yaw)],[-np.sin(yaw),-t*np.sin(yaw),-t*t*np.sin(yaw)/2,np.cos(yaw),t*np.cos(yaw),t*t*np.cos(yaw)/2],[0,-np.sin(yaw),-t*np.sin(yaw),0,np.cos(yaw),t*np.cos(yaw)],[0,0,-np.sin(yaw),0,0,np.cos(yaw)]])\n x.b = np.array(\n [[-t * np.cos(yaw), -t * t * np.cos(yaw) / 2, -t * np.sin(yaw), -t * t * np.sin(yaw) / 2],\n [0, 0, 0, 0], [0, 0, 0, 0],\n [t * np.sin(yaw), t * t * np.sin(yaw), -t * np.cos(yaw), -t * t * np.cos(yaw)], [0, 0, 0, 0],\n [0, 0, 0, 0]])\n x.xnn = np.array(\n [[a.geometric.x], [a.geometric.vx], [a.geometric.ax], [a.geometric.y], [a.geometric.vy],\n [a.geometric.ay]])\n x.g = np.array([[t*t*t*np.cos(yaw)/6,-t*t*t*np.cos(yaw)/6,t*t*t*np.sin(yaw)/6,-t*t*t*np.sin(yaw)/6],[t*t*np.cos(yaw)/2,0,t*t*np.sin(yaw)/2,0],[t*np.cos(yaw),0,t*np.sin(yaw),0],[-t*t*t*np.sin(yaw)/6,t*t*t*np.sin(yaw)/6,-t*t*t*np.cos(yaw)/6,-t*t*t*np.cos(yaw)/6],[-t*t*np.sin(yaw)/2,0,t*t*np.cos(yaw)/2,0],[-t*np.sin(yaw),0,t*np.cos(yaw),0]])\n\n x.a = np.array([[np.cos(yaw),t*np.cos(yaw),t*t*np.cos(yaw)/2,np.sin(yaw),t*np.sin(yaw),t*t*np.sin(yaw)/2],[0,np.cos(yaw),t*np.cos(yaw),0,np.sin(yaw),t*np.sin(yaw)],[0,0,np.cos(yaw),0,0,np.sin(yaw)],[-np.sin(yaw),-t*np.sin(yaw),-t*t*np.sin(yaw)/2,np.cos(yaw),t*np.cos(yaw),t*t*np.cos(yaw)/2],[0,-np.sin(yaw),-t*np.sin(yaw),0,np.cos(yaw),t*np.cos(yaw)],[0,0,-np.sin(yaw),0,0,np.cos(yaw)]])\n x.b = np.array(\n [[-t * np.cos(yaw), -t * t * np.cos(yaw) / 2, -t * np.sin(yaw), -t * t * np.sin(yaw) / 2],\n [0, 0, 0, 0], [0, 0, 0, 0],\n [t * np.sin(yaw), t * t * np.sin(yaw), -t * np.cos(yaw), -t * t * np.cos(yaw)], [0, 0, 0, 0],\n [0, 0, 0, 0]])\n x.g = np.array([[t*t*t*np.cos(yaw)/6,-t*t*t*np.cos(yaw)/6,t*t*t*np.sin(yaw)/6,-t*t*t*np.sin(yaw)/6],\n [t*t*np.cos(yaw)/2,0,t*t*np.sin(yaw)/2,0],\n [t*np.cos(yaw),0,t*np.sin(yaw),0],\n [-t*t*t*np.sin(yaw)/6,t*t*t*np.sin(yaw)/6,-t*t*t*np.cos(yaw)/6,-t*t*t*np.cos(yaw)/6],\n [-t*t*np.sin(yaw)/2,0,t*t*np.cos(yaw)/2,0],\n [-t*np.sin(yaw),0,t*np.cos(yaw),0]])\n x.u = np.array([[egoveh.vel.x], [egoveh.acc.x], [egoveh.vel.y], [egoveh.acc.y]])\n x.oldtime = x.newtime\n \n x.yn = np.array([[a.geometric.x], [a.geometric.y]]) # column vector\n noise = rospy.get_param(\"processnoise\")\n if abs(yaw) > 0.1:\n #x.c_s[0,0] = 100000000000\n \n #x.c_s[2,2]=100000000000\n noise = 10000000000\n x.a = np.array([[np.cos(yaw),0,0,np.sin(yaw),0,0],[0,np.cos(yaw),0,0,np.sin(yaw),0],[0,0,np.cos(yaw),0,0,np.sin(yaw)],[-np.sin(yaw),0,0,np.cos(yaw),0,0],[0,-np.sin(yaw),0,0,np.cos(yaw),0],[0,0,-np.sin(yaw),0,0,np.cos(yaw)]])\n x.b = np.array(\n [[-t * np.cos(yaw), -t * t * np.cos(yaw) / 2, -t * np.sin(yaw), -t * t * np.sin(yaw) / 2],\n [0, 0, 0, 0], [0, 0, 0, 0],\n [t * np.sin(yaw), t * t * np.sin(yaw), -t * np.cos(yaw), -t * t * np.cos(yaw)], [0, 0, 0, 0],\n [0, 0, 0, 0]])\n id1 = np.zeros((6,6))\n np.fill_diagonal(id1,noise)\n print('noise',noise)\n #else:\n # x.c_s[0,0] = rospy.get_param(\"processnoise\")\n # x.c_s[2, 2] = rospy.get_param(\"processnoise\")\n x.xn_nm1 =x.a.dot(x.xnn) + x.b.dot(x.u) #column vector\n #print(x.g)\n if abs(yaw) > 0.1:\n x.pn_nm1 = (x.a.dot(x.pnn)).dot(x.a.transpose()) + id1 #+(x.g.dot(x.c_s)).dot(x.g.transpose())\n else: \n x.pn_nm1 = (x.a.dot(x.pnn)).dot(x.a.transpose()) +(x.g.dot(x.c_s)).dot(x.g.transpose())\n x.gamma_n = x.yn -x.c.dot(x.xn_nm1) - x.d.dot(x.u)\n x.s_n = (x.c.dot(x.pn_nm1)).dot(x.c.transpose()) + x.c_m\n x.k_n = 
(x.pn_nm1.dot(x.c.transpose())).dot(np.linalg.inv(x.s_n))\n x.xnn = x.xn_nm1 + x.k_n.dot(x.gamma_n)\n #print( x.pn_nm1 )\n #print(\"YAW\",yaw)\n #print('X', 'Measured :',x.yn[0],'Predicted :' ,x.xn_nm1[0], 'state',x.xnn[0])\n #print('Y', 'Measured :',x.yn[1],'Predicted :' ,x.xn_nm1[1],'state',x.xnn[1])\n \n I = np.zeros((6,6),int)\n np.fill_diagonal(I,1)\n x.pnn = (I - x.k_n.dot(x.c)).dot(x.pn_nm1)\n KFlist[a.obj_id]=x\n a.geometric.x = float(x.xnn[0])\n a.geometric.vx = float(x.xnn[1])\n a.geometric.ax = float(x.xnn[2])\n a.geometric.y = float(x.xnn[3])\n a.geometric.vy = float(x.xnn[4])\n a.geometric.ay = float(x.xnn[5])\n a.covariance = x.pnn.flatten()\n #print(rospy.get_param(sensortype))\n\n oldob = objs_list\n return(objs_list)\n\n elif rospy.get_param(\"sensortype\") == 0:\n #FOR RADAR\n for i, a in enumerate(objs_list.obj_list):\n #convert relative velocities/acceleration to absolute as input for kalman filter\n #a.geometric.vx += egoveh.vel.x\n #a.geometric.vy += egoveh.vel.y\n #a.geometric.ax += egoveh.acc.x\n #a.geometric.ay += egoveh.acc.y\n\n x = KFlist[a.obj_id]\n\n if x.track == 0:\n\n x.track = 1\n x.newtime = objs_list.header.stamp.to_sec()\n x.oldtime = x.newtime\n t = 0.1\n #yaw = 0\n x.a = np.array([[np.cos(yaw),t*np.cos(yaw),t*t*np.cos(yaw)/2,np.sin(yaw),t*np.sin(yaw),t*t*np.sin(yaw)/2],[0,np.cos(yaw),t*np.cos(yaw),0,np.sin(yaw),t*np.sin(yaw)],[0,0,np.cos(yaw),0,0,np.sin(yaw)],[-np.sin(yaw),-t*np.sin(yaw),-t*t*np.sin(yaw)/2,np.cos(yaw),t*np.cos(yaw),t*t*np.cos(yaw)/2],[0,-np.sin(yaw),-t*np.sin(yaw),0,np.cos(yaw),t*np.cos(yaw)],[0,0,-np.sin(yaw),0,0,np.cos(yaw)]])\n x.b = np.array(\n [[-t * np.cos(yaw), -t * t * np.cos(yaw) / 2, -t * np.sin(yaw), -t * t * np.sin(yaw) / 2],\n [0, 0, 0, 0], [0, 0, 0, 0],\n [t * np.sin(yaw), t * t * np.sin(yaw), -t * np.cos(yaw), -t * t * np.cos(yaw)], [0, 0, 0, 0],\n [0, 0, 0, 0]])\n x.xnn = np.array([[a.geometric.x], [a.geometric.vx], [a.geometric.ax], [a.geometric.y], [a.geometric.vy],\n [a.geometric.ay]])\n x.g = np.array([[t*t*t*np.cos(yaw)/6,-t*t*t*np.cos(yaw)/6,t*t*t*np.sin(yaw)/6,-t*t*t*np.sin(yaw)/6],[t*t*np.cos(yaw)/2,0,t*t*np.sin(yaw)/2,0],[t*np.cos(yaw),0,t*np.sin(yaw),0],[-t*t*t*np.sin(yaw)/6,t*t*t*np.sin(yaw)/6,-t*t*t*np.cos(yaw)/6,-t*t*t*np.cos(yaw)/6],[-t*t*np.sin(yaw)/2,0,t*t*np.cos(yaw)/2,0],[-t*np.sin(yaw),0,t*np.cos(yaw),0]])\n x.u = np.array([[egoveh.vel.x], [egoveh.acc.x], [egoveh.vel.y], [egoveh.acc.y]])\n\n else:\n x.newtime = objs_list.header.stamp.to_sec()\n t = x.newtime - x.oldtime\n if t >= 1.5:\n x = KF()\n x.track = 1\n x.newtime = objs_list.header.stamp.to_sec()\n t = 0.1\n x.a = np.array([[np.cos(yaw),t*np.cos(yaw),t*t*np.cos(yaw)/2,np.sin(yaw),t*np.sin(yaw),t*t*np.sin(yaw)/2],[0,np.cos(yaw),t*np.cos(yaw),0,np.sin(yaw),t*np.sin(yaw)],[0,0,np.cos(yaw),0,0,np.sin(yaw)],[-np.sin(yaw),-t*np.sin(yaw),-t*t*np.sin(yaw)/2,np.cos(yaw),t*np.cos(yaw),t*t*np.cos(yaw)/2],[0,-np.sin(yaw),-t*np.sin(yaw),0,np.cos(yaw),t*np.cos(yaw)],[0,0,-np.sin(yaw),0,0,np.cos(yaw)]])\n x.b = np.array(\n [[-t * np.cos(yaw), -t * t * np.cos(yaw) / 2, -t * np.sin(yaw), -t * t * np.sin(yaw) / 2],\n [0, 0, 0, 0], [0, 0, 0, 0],\n [t * np.sin(yaw), t * t * np.sin(yaw), -t * np.cos(yaw), -t * t * np.cos(yaw)], [0, 0, 0, 0],\n [0, 0, 0, 0]])\n x.xnn = np.array(\n [[a.geometric.x], [a.geometric.vx], [a.geometric.ax], [a.geometric.y], [a.geometric.vy],\n [a.geometric.ay]])\n x.g = 
np.array([[t*t*t*np.cos(yaw)/6,-t*t*t*np.cos(yaw)/6,t*t*t*np.sin(yaw)/6,-t*t*t*np.sin(yaw)/6],[t*t*np.cos(yaw)/2,0,t*t*np.sin(yaw)/2,0],[t*np.cos(yaw),0,t*np.sin(yaw),0],[-t*t*t*np.sin(yaw)/6,t*t*t*np.sin(yaw)/6,-t*t*t*np.cos(yaw)/6,-t*t*t*np.cos(yaw)/6],[-t*t*np.sin(yaw)/2,0,t*t*np.cos(yaw)/2,0],[-t*np.sin(yaw),0,t*np.cos(yaw),0]])\n x.u = np.array([[egoveh.vel.x], [egoveh.acc.x], [egoveh.vel.y], [egoveh.acc.y]])\n x.a = np.array([[np.cos(yaw),t*np.cos(yaw),t*t*np.cos(yaw)/2,np.sin(yaw),t*np.sin(yaw),t*t*np.sin(yaw)/2],[0,np.cos(yaw),t*np.cos(yaw),0,np.sin(yaw),t*np.sin(yaw)],[0,0,np.cos(yaw),0,0,np.sin(yaw)],[-np.sin(yaw),-t*np.sin(yaw),-t*t*np.sin(yaw)/2,np.cos(yaw),t*np.cos(yaw),t*t*np.cos(yaw)/2],[0,-np.sin(yaw),-t*np.sin(yaw),0,np.cos(yaw),t*np.cos(yaw)],[0,0,-np.sin(yaw),0,0,np.cos(yaw)]])\n x.b = np.array(\n [[-t * np.cos(yaw), -t * t * np.cos(yaw) / 2, -t * np.sin(yaw), -t * t * np.sin(yaw) / 2],\n [0, 0, 0, 0], [0, 0, 0, 0],\n [t * np.sin(yaw), t * t * np.sin(yaw), -t * np.cos(yaw), -t * t * np.cos(yaw)], [0, 0, 0, 0],\n [0, 0, 0, 0]])\n # x.g = np.array([[0, 0], [0, 0], [t, 0], [0, 0], [0, 0], [0, 0]])\n x.g = np.array([[t*t*t*np.cos(yaw)/6,-t*t*t*np.cos(yaw)/6,t*t*t*np.sin(yaw)/6,-t*t*t*np.sin(yaw)/6],[t*t*np.cos(yaw)/2,0,t*t*np.sin(yaw)/2,0],[t*np.cos(yaw),0,t*np.sin(yaw),0],[-t*t*t*np.sin(yaw)/6,t*t*t*np.sin(yaw)/6,-t*t*t*np.cos(yaw)/6,-t*t*t*np.cos(yaw)/6],[-t*t*np.sin(yaw)/2,0,t*t*np.cos(yaw)/2,0],[-t*np.sin(yaw),0,t*np.cos(yaw),0]])\n x.u = np.array([[egoveh.vel.x], [egoveh.acc.x], [egoveh.vel.y], [egoveh.acc.y]])\n x.oldtime = x.newtime\n #print('time2',t)\n x.yn = np.array([[a.geometric.x],[a.geometric.vx], [a.geometric.y],[a.geometric.vy]]) # column vector\n\n x.xn_nm1 = x.a.dot(x.xnn) + x.b.dot(x.u) #column vector\n\n x.pn_nm1 = (x.a.dot(x.pnn)).dot(x.a.transpose()) + (x.g.dot(x.c_s)).dot(x.g.transpose())\n x.gamma_n = x.yn - x.c.dot(x.xn_nm1) - x.d.dot(x.u)\n x.s_n = (x.c.dot(x.pn_nm1)).dot(x.c.transpose()) + x.c_m\n\n x.k_n = (x.pn_nm1.dot(x.c.transpose())).dot(np.linalg.inv(x.s_n))\n x.xnn = x.xn_nm1 + x.k_n.dot(x.gamma_n)\n\n\n I = np.zeros((6, 6), int)\n np.fill_diagonal(I, 1)\n x.pnn = (I - x.k_n.dot(x.c)).dot(x.pn_nm1)\n\n KFlist[a.obj_id] = x\n a.geometric.x = float(x.xnn[0])\n a.geometric.vx = float(x.xnn[1])\n a.geometric.ax = float(x.xnn[2])\n a.geometric.y = float(x.xnn[3])\n a.geometric.vy = float(x.xnn[4])\n a.geometric.ay = float(x.xnn[5])\n a.covariance = x.pnn.flatten()\n\n\n\n #print(a.obj_id, (a.covariance[0]+a.covariance[18]))\n # oldob = objs_list\n #print('time2', t)\n return (objs_list)\n\ndef calculate_probability(objs_list):\n\n global KFlist\n global prob_tracker\n for i,obj in enumerate(objs_list.obj_list):\n # Calculate/Model Probabilitys\n # persistance_probability based on range\n #tracker = prob_tracker[obj.id]\n sensor_range = rospy.get_param('senrange')\n obj_range = np.sqrt(np.square(obj.geometric.x) + np.square(obj.geometric.y))\n #print(\"obj_range\",obj_range)\n max_persistance_range = rospy.get_param('range_max_persistance') # parameterize\n range_max_persistance = max_persistance_range * sensor_range\n\n # birth probability\n\n birth_probability = rospy.get_param('birth_existance') # parameterize\n detection_probability =rospy.get_param('detection_probability')\n clutter_probability =rospy.get_param('clutter_probability')\n\n\n\n alpha = 2\n max_persistance = rospy.get_param('max_persistance') # parameterize\n range_min_persistance = 0.1 # m\n if obj_range < range_min_persistance:\n peristance_probability_range = 0\n 
#print(\"below_min\")\n elif obj_range > sensor_range:\n peristance_probability_range = 0\n #print(\"obj_range\",obj_range)\n #print(\"beyond max\")\n elif obj_range < sensor_range and obj_range > range_max_persistance:\n #print(\"intermide\")\n peristance_probability_range = max_persistance * math.exp((math.log(alpha, 10) * (obj_range-sensor_range)) / (sensor_range-sensor_range*(1-max_persistance_range)))\n\n else:\n peristance_probability_range = max_persistance\n\n if KFlist[obj.obj_id].track2 == 0:\n probability_existance = birth_probability\n probability_nonexistance = birth_probability\n KFlist[obj.obj_id].track2 = 1\n #print(\"!!!!\")\n else:\n prev_existance = prob_tracker[obj.obj_id].existance\n prev_noneexistance = prob_tracker[obj.obj_id].nonexistance\n predicted_existance = prob_tracker[obj.obj_id].persistance * prev_existance + birth_probability * prev_noneexistance\n predicted_non_existance = (1-prob_tracker[obj.obj_id].persistance) * prev_existance + (1-birth_probability) * prev_noneexistance\n print(\"predicted_existance\",predicted_existance, predicted_non_existance)\n eta = 1/(detection_probability * predicted_existance + predicted_non_existance * clutter_probability)\n print (\"eta\",eta)\n\n probability_existance = eta * detection_probability * predicted_existance\n probability_nonexistance = eta * clutter_probability * predicted_non_existance\n if probability_existance > rospy.get_param('max_existance'):\n probability_existance = rospy.get_param('max_existance')\n if probability_nonexistance > rospy.get_param('max_existance'):\n probability_nonexistance = rospy.get_param('max_existance')\n #print(probability_existance)\n #print(\"______\")\n print ( \"final existance\",probability_existance, probability_nonexistance)\n\n obj.prop_existence = probability_existance\n obj.prop_nonexistence = probability_nonexistance\n prob_tracker[obj.obj_id].existance = probability_existance\n prob_tracker[obj.obj_id].nonexistance = probability_nonexistance\n prob_tracker[obj.obj_id].persistance = peristance_probability_range\n #print(\"persitance\",peristance_probability_range)\n obj.prop_persistance = peristance_probability_range\n\n return objs_list\n\n\n\n\n\n\n\n\n\n\n\n \n\n\n\n\n\n\n\n\n\n\n\n\n\n\nif __name__ == '__main__':\n sensor_rotate()\n"
},
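The tracking script in the record above runs a textbook Kalman predict/update cycle per object: predict with x.a/x.b, form the innovation gamma_n, its covariance s_n, the gain k_n, then update state and covariance. The sketch below restates that cycle on an illustrative 1-D constant-velocity model with made-up noise values; the node itself uses a 6-state, yaw-rotated constant-acceleration model and additionally feeds ego motion in through the b.u and d.u terms, which are omitted here.

# Minimal sketch of the predict/update cycle used in the tracking node above,
# on an illustrative 1-D constant-velocity model (variable names mirror the node).
import numpy as np

t = 0.1                                   # sample time [s]
a = np.array([[1.0, t], [0.0, 1.0]])      # state transition for (x, vx)
c = np.array([[1.0, 0.0]])                # only position is measured
g = np.array([[t * t / 2.0], [t]])        # process-noise input matrix
c_s = np.array([[0.5]])                   # process noise covariance (illustrative)
c_m = np.array([[0.1]])                   # measurement noise covariance (illustrative)

xnn = np.array([[0.0], [1.0]])            # state estimate
pnn = np.eye(2)                           # estimate covariance

for yn in ([[0.12]], [[0.21]], [[0.33]]): # fake position measurements
    yn = np.array(yn)
    # predict
    xn_nm1 = a.dot(xnn)
    pn_nm1 = a.dot(pnn).dot(a.T) + g.dot(c_s).dot(g.T)
    # update
    gamma_n = yn - c.dot(xn_nm1)                      # innovation
    s_n = c.dot(pn_nm1).dot(c.T) + c_m                # innovation covariance
    k_n = pn_nm1.dot(c.T).dot(np.linalg.inv(s_n))     # Kalman gain
    xnn = xn_nm1 + k_n.dot(gamma_n)
    pnn = (np.eye(2) - k_n.dot(c)).dot(pn_nm1)
    print(xnn.ravel())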
{
"alpha_fraction": 0.6476269960403442,
"alphanum_fraction": 0.6637802124023438,
"avg_line_length": 21.660377502441406,
"blob_id": "6a35cfd5c9ce2fe9dae78feaee17ae12270880f3",
"content_id": "78637948896b5e8e9e50d5f3d5c45dd2b2385f6c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 6005,
"license_type": "no_license",
"max_line_length": 139,
"num_lines": 265,
"path": "/devel/include/object_list/Features.h",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "// Generated by gencpp from file object_list/Features.msg\n// DO NOT EDIT!\n\n\n#ifndef OBJECT_LIST_MESSAGE_FEATURES_H\n#define OBJECT_LIST_MESSAGE_FEATURES_H\n\n\n#include <string>\n#include <vector>\n#include <map>\n\n#include <ros/types.h>\n#include <ros/serialization.h>\n#include <ros/builtin_message_traits.h>\n#include <ros/message_operations.h>\n\n\nnamespace object_list\n{\ntemplate <class ContainerAllocator>\nstruct Features_\n{\n typedef Features_<ContainerAllocator> Type;\n\n Features_()\n : FL(0)\n , FM(0)\n , FR(0)\n , MR(0)\n , RR(0)\n , RM(0)\n , RL(0)\n , ML(0) {\n }\n Features_(const ContainerAllocator& _alloc)\n : FL(0)\n , FM(0)\n , FR(0)\n , MR(0)\n , RR(0)\n , RM(0)\n , RL(0)\n , ML(0) {\n (void)_alloc;\n }\n\n\n\n typedef uint8_t _FL_type;\n _FL_type FL;\n\n typedef uint8_t _FM_type;\n _FM_type FM;\n\n typedef uint8_t _FR_type;\n _FR_type FR;\n\n typedef uint8_t _MR_type;\n _MR_type MR;\n\n typedef uint8_t _RR_type;\n _RR_type RR;\n\n typedef uint8_t _RM_type;\n _RM_type RM;\n\n typedef uint8_t _RL_type;\n _RL_type RL;\n\n typedef uint8_t _ML_type;\n _ML_type ML;\n\n\n\n\n\n typedef boost::shared_ptr< ::object_list::Features_<ContainerAllocator> > Ptr;\n typedef boost::shared_ptr< ::object_list::Features_<ContainerAllocator> const> ConstPtr;\n\n}; // struct Features_\n\ntypedef ::object_list::Features_<std::allocator<void> > Features;\n\ntypedef boost::shared_ptr< ::object_list::Features > FeaturesPtr;\ntypedef boost::shared_ptr< ::object_list::Features const> FeaturesConstPtr;\n\n// constants requiring out of line definition\n\n\n\ntemplate<typename ContainerAllocator>\nstd::ostream& operator<<(std::ostream& s, const ::object_list::Features_<ContainerAllocator> & v)\n{\nros::message_operations::Printer< ::object_list::Features_<ContainerAllocator> >::stream(s, \"\", v);\nreturn s;\n}\n\n\ntemplate<typename ContainerAllocator1, typename ContainerAllocator2>\nbool operator==(const ::object_list::Features_<ContainerAllocator1> & lhs, const ::object_list::Features_<ContainerAllocator2> & rhs)\n{\n return lhs.FL == rhs.FL &&\n lhs.FM == rhs.FM &&\n lhs.FR == rhs.FR &&\n lhs.MR == rhs.MR &&\n lhs.RR == rhs.RR &&\n lhs.RM == rhs.RM &&\n lhs.RL == rhs.RL &&\n lhs.ML == rhs.ML;\n}\n\ntemplate<typename ContainerAllocator1, typename ContainerAllocator2>\nbool operator!=(const ::object_list::Features_<ContainerAllocator1> & lhs, const ::object_list::Features_<ContainerAllocator2> & rhs)\n{\n return !(lhs == rhs);\n}\n\n\n} // namespace object_list\n\nnamespace ros\n{\nnamespace message_traits\n{\n\n\n\n\n\ntemplate <class ContainerAllocator>\nstruct IsFixedSize< ::object_list::Features_<ContainerAllocator> >\n : TrueType\n { };\n\ntemplate <class ContainerAllocator>\nstruct IsFixedSize< ::object_list::Features_<ContainerAllocator> const>\n : TrueType\n { };\n\ntemplate <class ContainerAllocator>\nstruct IsMessage< ::object_list::Features_<ContainerAllocator> >\n : TrueType\n { };\n\ntemplate <class ContainerAllocator>\nstruct IsMessage< ::object_list::Features_<ContainerAllocator> const>\n : TrueType\n { };\n\ntemplate <class ContainerAllocator>\nstruct HasHeader< ::object_list::Features_<ContainerAllocator> >\n : FalseType\n { };\n\ntemplate <class ContainerAllocator>\nstruct HasHeader< ::object_list::Features_<ContainerAllocator> const>\n : FalseType\n { };\n\n\ntemplate<class ContainerAllocator>\nstruct MD5Sum< ::object_list::Features_<ContainerAllocator> >\n{\n static const char* value()\n {\n return \"acfb5ca82687e271a6722833317ebf1a\";\n }\n\n static const char* 
value(const ::object_list::Features_<ContainerAllocator>&) { return value(); }\n static const uint64_t static_value1 = 0xacfb5ca82687e271ULL;\n static const uint64_t static_value2 = 0xa6722833317ebf1aULL;\n};\n\ntemplate<class ContainerAllocator>\nstruct DataType< ::object_list::Features_<ContainerAllocator> >\n{\n static const char* value()\n {\n return \"object_list/Features\";\n }\n\n static const char* value(const ::object_list::Features_<ContainerAllocator>&) { return value(); }\n};\n\ntemplate<class ContainerAllocator>\nstruct Definition< ::object_list::Features_<ContainerAllocator> >\n{\n static const char* value()\n {\n return \"uint8 FL\\n\"\n\"uint8 FM\\n\"\n\"uint8 FR\\n\"\n\"uint8 MR\\n\"\n\"uint8 RR\\n\"\n\"uint8 RM\\n\"\n\"uint8 RL\\n\"\n\"uint8 ML\\n\"\n;\n }\n\n static const char* value(const ::object_list::Features_<ContainerAllocator>&) { return value(); }\n};\n\n} // namespace message_traits\n} // namespace ros\n\nnamespace ros\n{\nnamespace serialization\n{\n\n template<class ContainerAllocator> struct Serializer< ::object_list::Features_<ContainerAllocator> >\n {\n template<typename Stream, typename T> inline static void allInOne(Stream& stream, T m)\n {\n stream.next(m.FL);\n stream.next(m.FM);\n stream.next(m.FR);\n stream.next(m.MR);\n stream.next(m.RR);\n stream.next(m.RM);\n stream.next(m.RL);\n stream.next(m.ML);\n }\n\n ROS_DECLARE_ALLINONE_SERIALIZER\n }; // struct Features_\n\n} // namespace serialization\n} // namespace ros\n\nnamespace ros\n{\nnamespace message_operations\n{\n\ntemplate<class ContainerAllocator>\nstruct Printer< ::object_list::Features_<ContainerAllocator> >\n{\n template<typename Stream> static void stream(Stream& s, const std::string& indent, const ::object_list::Features_<ContainerAllocator>& v)\n {\n s << indent << \"FL: \";\n Printer<uint8_t>::stream(s, indent + \" \", v.FL);\n s << indent << \"FM: \";\n Printer<uint8_t>::stream(s, indent + \" \", v.FM);\n s << indent << \"FR: \";\n Printer<uint8_t>::stream(s, indent + \" \", v.FR);\n s << indent << \"MR: \";\n Printer<uint8_t>::stream(s, indent + \" \", v.MR);\n s << indent << \"RR: \";\n Printer<uint8_t>::stream(s, indent + \" \", v.RR);\n s << indent << \"RM: \";\n Printer<uint8_t>::stream(s, indent + \" \", v.RM);\n s << indent << \"RL: \";\n Printer<uint8_t>::stream(s, indent + \" \", v.RL);\n s << indent << \"ML: \";\n Printer<uint8_t>::stream(s, indent + \" \", v.ML);\n }\n};\n\n} // namespace message_operations\n} // namespace ros\n\n#endif // OBJECT_LIST_MESSAGE_FEATURES_H\n"
},
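The generated Features message above carries one flag per reference point of an object's bounding box: the four corners FL/FR/RR/RL plus the four edge midpoints FM/MR/RM/ML. The sketch below computes where those eight points sit, reusing the corner geometry from calculate_x_y() in src/aeb/script/aeb.py further down; all numeric inputs are illustrative.

# Where the eight Features fields point on an object box (illustrative inputs).
import math

cx, cy, yaw = 10.0, 1.0, 0.0      # object centre and heading
length, width = 4.0, 2.0          # object dimensions [m]

tg_wl = math.atan(width / length)                      # half-diagonal angle
hip_wl = 0.5 * math.sqrt(width ** 2 + length ** 2)     # half-diagonal length
beta = yaw - tg_wl
psi = yaw + tg_wl

pts = {
    'FL': (cx + hip_wl * math.cos(psi),   cy - hip_wl * math.sin(psi)),
    'FR': (cx + hip_wl * math.cos(beta),  cy - hip_wl * math.sin(beta)),
    'RR': (cx - hip_wl * math.cos(-psi),  cy - hip_wl * math.sin(-psi)),
    'RL': (cx - hip_wl * math.cos(-beta), cy - hip_wl * math.sin(-beta)),
}
# the midpoints are averages of the adjacent corners, e.g. FM between FL and FR
pts['FM'] = ((pts['FL'][0] + pts['FR'][0]) / 2, (pts['FL'][1] + pts['FR'][1]) / 2)
print(pts['FL'], pts['FM'])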
{
"alpha_fraction": 0.7678185701370239,
"alphanum_fraction": 0.7915766835212708,
"avg_line_length": 65.14286041259766,
"blob_id": "cd4d9fea50daf0e1054345a1c7b151c22ba1679c",
"content_id": "3d6681a7f6e11a371876c383e205c279ae42a9b7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 926,
"license_type": "no_license",
"max_line_length": 122,
"num_lines": 14,
"path": "/build/osi3_bridge/CMakeFiles/osi3_bridge_generate_messages_py.dir/cmake_clean.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "file(REMOVE_RECURSE\n \"CMakeFiles/osi3_bridge_generate_messages_py\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/lib/python2.7/dist-packages/osi3_bridge/msg/_GroundTruthMovingObjects.py\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/lib/python2.7/dist-packages/osi3_bridge/msg/_MovingObject.py\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/lib/python2.7/dist-packages/osi3_bridge/msg/_Dimension3d.py\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/lib/python2.7/dist-packages/osi3_bridge/msg/_TrafficUpdateMovingObject.py\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/lib/python2.7/dist-packages/osi3_bridge/msg/_Orientation3d.py\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/lib/python2.7/dist-packages/osi3_bridge/msg/__init__.py\"\n)\n\n# Per-language clean rules from dependency scanning.\nforeach(lang )\n include(CMakeFiles/osi3_bridge_generate_messages_py.dir/cmake_clean_${lang}.cmake OPTIONAL)\nendforeach()\n"
},
{
"alpha_fraction": 0.5952380895614624,
"alphanum_fraction": 0.5952380895614624,
"avg_line_length": 20,
"blob_id": "1d8333205ea5ae53d44799a5eb171fb2a96e88fb",
"content_id": "85461743b6ef23294c480e5ea52950dd629ecb36",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 168,
"license_type": "no_license",
"max_line_length": 52,
"num_lines": 8,
"path": "/src/vehicle_control/scripts/rotate.py",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "import math\n\ndef rotate (x,y,angle):\n\n rotx = x * math.cos(angle) - y * math.sin(angle)\n roty = x * math.sin(angle) + y * math.cos(angle)\n\n return [rotx,roty]\n"
},
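rotate() above is a plain 2-D rotation by `angle`; rotating by the negated angle inverts it. A quick usage check, assuming the module is importable as `rotate`:

import math
from rotate import rotate  # path as in src/vehicle_control/scripts/rotate.py

# Rotating the unit x-axis by 90 degrees lands on the y-axis,
# and rotating back with -angle recovers the original point (up to float noise).
x, y = rotate(1.0, 0.0, math.pi / 2)
print(round(x, 6), round(y, 6))
xb, yb = rotate(x, y, -math.pi / 2)
print(round(xb, 6), round(yb, 6))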
{
"alpha_fraction": 0.49601390957832336,
"alphanum_fraction": 0.5309154391288757,
"avg_line_length": 41.30392074584961,
"blob_id": "c85e502304c87454fc325a30b143b3ede9b3b944",
"content_id": "3466188e55f254d8fc12bc0b00471cb14050e66b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 43150,
"license_type": "no_license",
"max_line_length": 178,
"num_lines": 1020,
"path": "/src/aeb/src/EURO_NCAP2.py",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\n\n# Copyright (c) 2018 Intel Labs.\n# authors: German Ros ([email protected])\n#\n# This work is licensed under the terms of the MIT license.\n# For a copy, see <https://opensource.org/licenses/MIT>.\n\n\"\"\"\n EURO NCAP Test Scenario\n\"\"\"\n\nfrom __future__ import print_function\n\nimport collections\nimport datetime\nimport glob\nimport logging\nimport math\nimport rospy\nimport os\nimport re\nimport sys\nimport weakref\n\ntry:\n import pygame\n from pygame.locals import KMOD_CTRL\n from pygame.locals import K_ESCAPE\n from pygame.locals import K_q\nexcept ImportError:\n raise RuntimeError('cannot import pygame, make sure pygame package is installed')\n\ntry:\n import numpy as np\nexcept ImportError:\n raise RuntimeError(\n 'cannot import numpy, make sure numpy package is installed')\n\n# ==============================================================================\n# -- find carla module ---------------------------------------------------------\n# ==============================================================================\n\ntry:\n # sys.path.append(glob.glob('/opt/carla-simulator/PythonAPI/carla/dist/carla-0.9.8-py2.7-linux-x86_64.egg')[0])\n\n # sys.path.append(glob.glob('../carla/dist/carla-*%d.%d-%s.egg' % (\n sys.path.append(glob.glob('/opt/carla-simulator/PythonAPI/carla/dist/carla-*%d.%d-%s.egg' % (\n sys.version_info.major,\n sys.version_info.minor,\n 'win-amd64' if os.name == 'nt' else 'linux-x86_64'))[0])\nexcept IndexError:\n pass\n\n# ==============================================================================\n# -- add PythonAPI for release mode --------------------------------------------\n# ==============================================================================\ntry:\n sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + '/carla')\nexcept IndexError:\n pass\n\nimport carla\nfrom carla import ColorConverter as cc\n\n\n# ==============================================================================\n# -- Global functions ----------------------------------------------------------\n# ==============================================================================\n\ndef find_weather_presets():\n rgx = re.compile('.+?(?:(?<=[a-z])(?=[A-Z])|(?<=[A-Z])(?=[A-Z][a-z])|$)')\n name = lambda x: ' '.join(m.group(0) for m in rgx.finditer(x))\n presets = [x for x in dir(carla.WeatherParameters) if re.match('[A-Z].+', x)]\n return [(getattr(carla.WeatherParameters, x), name(x)) for x in presets]\n\n\ndef get_actor_display_name(actor, truncate=250):\n name = ' '.join(actor.type_id.replace('_', '.').title().split('.')[1:])\n return (name[:truncate - 1] + u'\\u2026') if len(name) > truncate else name\n\n\n# ==============================================================================\n# -- World ---------------------------------------------------------------\n# ==============================================================================\n\nclass World(object):\n def __init__(self, carla_world, hud):\n #def __init__(self, carla_world, actor_filter):\n\n self.world = carla_world\n self.map = self.world.get_map()\n self.hud = hud\n\n self.blueprint_library = self.world.get_blueprint_library()\n self.player = None\n self.npc1 = None\n self.npc2 = None\n self.npc3 = None\n self.npc4 = None\n self.player_tranform = carla.Transform(carla.Location(x=29.4, y=7.2, z=1.843102), carla.Rotation(yaw=0))\n self.bp = self.blueprint_library.find('vehicle.chevrolet.impala')\n\n self.npc1_transform = 
carla.Transform(carla.Location(x=241.5, y=147, z=1.843102), carla.Rotation(yaw=270))\n        self.npc1_bp = self.blueprint_library.find('vehicle.chevrolet.impala')\n\n        self.npc2_transform = carla.Transform(carla.Location(x=134.5, y=-6.6, z=1.843102), carla.Rotation(yaw=180))\n        self.npc2_bp = self.blueprint_library.find('vehicle.kawasaki.ninja')\n\n        self.npc3_transform = carla.Transform(carla.Location(x=214.5, y=12.8, z=1.843102), carla.Rotation(yaw=0))\n        self.npc3_bp = self.blueprint_library.find('vehicle.mustang.mustang')\n\n        self.walker1 = None\n        # filter() takes a plain wildcard pattern and returns a library of matches, so pick the first one\n        self.walker1_bp = self.blueprint_library.filter('walker.pedestrian.*')[0]\n        self.walker1_transform = carla.Transform(carla.Location(x=225.3, y=19, z=2), carla.Rotation(yaw=270))\n\n        self.bicycle = None\n        self.bicycle_transform = carla.Transform(carla.Location(x=35.7, y=10, z=1.843102), carla.Rotation(yaw=0))\n        # blueprint, not transform: the spawn transform is already set on the line above\n        self.bicycle_bp = self.blueprint_library.find('vehicle.gazelle.omafiets')\n\n        self.walker1_control = carla.WalkerControl()\n        self.walker1_control.speed = 1.39\n        self.walker1_control.direction = carla.Vector3D(x=0.000000, y=-1.000000, z=0.000000)\n\n        self.control2 = carla.VehicleControl()\n        self.control2.throttle = 1\n\n        self.control3 = carla.VehicleControl()\n        self.control3.throttle = 0\n        self.control3.brake = 1\n        self.control3.hand_brake = True\n\n        self.collision_sensor = None\n        self.lane_invasion_sensor = None\n        self.gnss_sensor = None\n        self.camera_manager = None\n        self._weather_presets = find_weather_presets()\n        self._weather_index = 0\n        self.restart()\n        self.world.on_tick(hud.on_world_tick)\n        self.recording_enabled = False\n        self.recording_start = 0\n\n    def restart(self):\n\n        # Get a blueprint.\n        blueprint = self.world.get_blueprint_library().filter(\"vehicle.*\")\n\n        #0 ActorBlueprint(id=vehicle.audi.a2, tags=[a2, audi, vehicle])\n        #1 ActorBlueprint(id=vehicle.audi.tt, tags=[tt, audi, vehicle]),\n        #2 ActorBlueprint(id=vehicle.carlamotors.carlacola, tags=[carlacola, carlamotors, vehicle]),\n        #3 ActorBlueprint(id=vehicle.bmw.isetta, tags=[isetta, bmw, vehicle]),\n        #4 ActorBlueprint(id=vehicle.nissan.micra, tags=[micra, nissan, vehicle]),\n        #5 ActorBlueprint(id=vehicle.citroen.c3, tags=[c3, citroen, vehicle]),\n        #6 ActorBlueprint(id=vehicle.gazelle.omafiets, tags=[omafiets, gazelle, vehicle]),\n        #7 ActorBlueprint(id=vehicle.mercedes - benz.coupe, tags=[coupe, mercedes - benz, vehicle]),\n        #8 ActorBlueprint(id=vehicle.mini.cooperst, tags=[mini, cooperst, vehicle]),\n        #9 ActorBlueprint(id=vehicle.nissan.patrol, tags=[patrol, nissan, vehicle]),\n        #10 ActorBlueprint(id=vehicle.mustang.mustang, tags=[mustang, vehicle]),\n        #11 ActorBlueprint(id=vehicle.lincoln.mkz2017, tags=[mkz2017, lincoln, vehicle]),\n        #12 ActorBlueprint(id=vehicle.tesla.cybertruck, tags=[cybertruck, tesla, vehicle]),\n        #13 ActorBlueprint(id=vehicle.toyota.prius, tags=[prius, toyota, vehicle]),\n        #14 ActorBlueprint(id=vehicle.volkswagen.t2, tags=[volkswagen, t2, vehicle]),\n        #15 ActorBlueprint(id=vehicle.bmw.grandtourer, tags=[grandtourer, bmw, vehicle]),\n        #16 ActorBlueprint(id=vehicle.tesla.model3, tags=[tesla, model3, vehicle]),\n        #17 ActorBlueprint(id=vehicle.diamondback.century, tags=[century, diamondback, vehicle]),\n        #18 ActorBlueprint(id=vehicle.dodge_charger.police, tags=[police, dodge_charger, vehicle]),\n        #19 ActorBlueprint(id=vehicle.bh.crossbike, tags=[crossbike, bh, vehicle]),\n        #20 ActorBlueprint(id=vehicle.kawasaki.ninja, tags=[ninja, kawasaki, vehicle]),\n        #21 ActorBlueprint(id=vehicle.jeep.wrangler_rubicon, 
tags=[wrangler_rubicon, jeep, vehicle]),\n        #22 ActorBlueprint(id=vehicle.yamaha.yzf, tags=[yzf, yamaha, vehicle]),\n        #23 ActorBlueprint(id=vehicle.chevrolet.impala, tags=[impala, chevrolet, vehicle]),\n        #24 ActorBlueprint(id=vehicle.harley - davidson.low_rider, tags=[low_rider, harley - davidson, vehicle]),\n        #25 ActorBlueprint(id=vehicle.audi.etron, tags=[etron, audi, vehicle]),\n        #26 ActorBlueprint(id=vehicle.seat.leon, tags=[leon, seat, vehicle])\n\n        self.player = self.world.spawn_actor(self.bp, self.player_tranform)\n        self.npc1 = self.world.spawn_actor(self.npc1_bp, self.npc1_transform)\n        self.npc2 = self.world.spawn_actor(self.npc2_bp, self.npc2_transform)\n        self.npc3 = self.world.spawn_actor(self.npc3_bp, self.npc3_transform)\n        # npc4 has no blueprint or spawn transform defined in __init__, so it is not spawned here\n        self.walker1 = self.world.spawn_actor(self.walker1_bp, self.walker1_transform)\n        self.bicycle = self.world.spawn_actor(self.bicycle_bp, self.bicycle_transform)\n\n        self.walker1.apply_control(self.walker1_control)\n\n        self.npc1.apply_control(self.control2)\n        self.bicycle.apply_control(self.control2)\n        self.player.apply_control(self.control2)\n\n        # wait until npc1 has driven far enough down in y, then brake everything\n        while True:\n            location = self.npc1.get_location()\n            if location.y < 15:\n                print(location.y)\n                print('Braking')\n                break\n\n        self.npc1.apply_control(self.control3)\n        self.bicycle.apply_control(self.control3)\n        self.player.apply_control(self.control3)\n\n\n        \"\"\"\n\n        if rospy.get_param(\"scenario\") == 1:\n            print ('''\n\n            EURO NCAP CAR TO PEDESTRIAN NEAR SIDE CHILD - CPNC\n\n            ''')\n            blueprintPlayer = blueprint[0]\n            blueprintNpc1 = blueprint[1]\n            blueprintNpc2 = blueprint[25]\n\n            blueprintPlayer.set_attribute('role_name', 'hero')\n            blueprintNpc1.set_attribute('role_name', 'npc1')\n            blueprintNpc2.set_attribute('role_name', 'npc2')\n\n            blueprintPlayer.set_attribute('color','255,255,255')\n            blueprintNpc1.set_attribute('color', '10,10,10')\n            blueprintNpc2.set_attribute('color', '10,10,10')\n\n            while self.player is None:\n                spawn_point = carla.Transform(carla.Location(x=123.0845139, y=8.330196, z=1.843102),\n                                              carla.Rotation(pitch=0.000000, yaw=0.855823, roll=0.000000))\n                self.player = self.world.spawn_actor(blueprintPlayer, spawn_point)\n                self.player.set_target_velocity(carla.Vector3D(x=0.0000, y=0.000000, z=0.000000))\n\n            # Spawn the npc cars\n            spawn_point = carla.Transform(carla.Location(x=219.5872746, y=12.62206999, z=1.843102),\n                                          carla.Rotation(pitch=0.000000, yaw=0.855823, roll=0.000000))\n            self.npc1 = self.world.try_spawn_actor(blueprintNpc1, spawn_point)\n            spawn_point = carla.Transform(carla.Location(x=214.2239483, y=12.54190365, z=1.843102),\n                                          carla.Rotation(pitch=0.000000, yaw=0.855823, roll=0.000000))\n            self.npc2 = self.world.spawn_actor(blueprintNpc2, spawn_point)\n\n            # Spawn the walker\n            blueprintsWalkers = self.world.get_blueprint_library().filter(\"walker.*\")\n            blueprintsWalkers = blueprintsWalkers[25]\n            if blueprintsWalkers.has_attribute('is_invincible'):\n                blueprintsWalkers.set_attribute('is_invincible', 'false')\n            spawn_point = carla.Transform(carla.Location(x=223.013613, y=13.82338109, z=2.063102),\n                                          carla.Rotation(pitch=0.000000, yaw=270.855823, roll=0.000000))\n            self.walker1 = self.world.spawn_actor(blueprintsWalkers, spawn_point)\n\n            self.world.wait_for_tick()\n\n            # Set up walker control by direct input\n            self.control = carla.WalkerControl()\n            self.control.speed = 5 / 3.6\n            self.control.direction = carla.Vector3D(x=0.000000, y=-1.000000, z=0.000000)\n\n\n        elif rospy.get_param(\"scenario\") == 2:\n            print ('''\n\n            EURO NCAP 
CAR TO PEDESTRIAN LONGITUDINAL ADULT - CPLA \n\n ''')\n blueprintPlayer = blueprint[0]\n\n blueprintPlayer.set_attribute('role_name', 'hero')\n blueprintPlayer.set_attribute('color', '255,255,255')\n\n while self.player is None:\n spawn_point = carla.Transform(carla.Location(x=73.09, y=7.5833, z=1.843102),\n carla.Rotation(pitch=0.000000, yaw=0.855823, roll=0.000000))\n self.player = self.world.spawn_actor(blueprintPlayer, spawn_point)\n self.player.set_target_velocity(carla.Vector3D(x=0.0000, y=0.000000, z=0.000000))\n\n #\n # Spawn the walker\n blueprintsWalkers = self.world.get_blueprint_library().filter(\"walker.*\")\n blueprintsWalkers = blueprintsWalkers[14]\n if blueprintsWalkers.has_attribute('is_invincible'):\n blueprintsWalkers.set_attribute('is_invincible', 'false')\n spawn_point = carla.Transform(carla.Location(x=213.0744, y=9.6744, z=2.063102),\n carla.Rotation(pitch=0.000000, yaw=0.855823, roll=0.000000))\n self.walker1 = self.world.spawn_actor(blueprintsWalkers, spawn_point)\n self.world.wait_for_tick()\n\n # Set up walker control by direct input\n self.control = carla.WalkerControl()\n self.control.speed = 5 / 3.6\n self.control.direction = carla.Vector3D(x=1.000000, y=0.000000, z=0.000000)\n\n elif rospy.get_param(\"scenario\") == 3:\n print ('''\n\n EURO NCAP CAR TO BICYCLIST LONGITUDINAL ADULT - CBLA \n\n ''')\n blueprintPlayer = blueprint[0]\n blueprintNpc1 = blueprint[17]\n\n blueprintPlayer.set_attribute('role_name', 'hero')\n blueprintNpc1.set_attribute('role_name', 'npc1')\n self.vehicle_control = carla.VehicleControl()\n\n blueprintPlayer.set_attribute('color', '255,255,255')\n blueprintNpc1.set_attribute('color', '10,10,10')\n\n while self.player is None:\n spawn_point = carla.Transform(carla.Location(x=77.09, y=7.5833, z=1.843102),\n carla.Rotation(pitch=0.000000, yaw=0.855823, roll=0.000000))\n self.player = self.world.spawn_actor(blueprintPlayer, spawn_point)\n self.player.set_target_velocity(carla.Vector3D(x=0.0000, y=0.000000, z=0.000000))\n\n # Spawn the npc cars\n spawn_point = carla.Transform(carla.Location(x=195.0764, y=9.4056, z=1.843102),\n carla.Rotation(pitch=0.000000, yaw=0.855823, roll=0.000000))\n self.npc1 = self.world.try_spawn_actor(blueprintNpc1, spawn_point)\n\n # set PI-Controller for Bycicle\n self.npc1.kp= 1.0 # Proportional value\n self.npc1.tn = 0.5 # Integral value\n self.npc1.vsoll = 15 * 1.16 / 3.6 # set 15km/h multiplicator\n self.npc1.stop = True\n\n\n\n elif rospy.get_param(\"scenario\") == 4:\n print ('''\n\n EURO NCAP CAR TO CAR REAR STATIONARY - CCRS\n\n ''')\n blueprintPlayer = blueprint[0]\n blueprintNpc1 = blueprint[15]\n\n blueprintPlayer.set_attribute('role_name', 'hero')\n blueprintNpc1.set_attribute('role_name', 'npc1')\n\n blueprintPlayer.set_attribute('color', '255,255,255')\n blueprintNpc1.set_attribute('color', '192,192,192')\n\n while self.player is None:\n\n spawn_point = carla.Transform(carla.Location(x=77.09, y=7.5833, z=1.843102),\n carla.Rotation(pitch=0.000000, yaw=0.855823, roll=0.000000))\n self.player = self.world.spawn_actor(blueprintPlayer, spawn_point)\n self.player.set_target_velocity(carla.Vector3D(x=0.0000, y=0.000000, z=0.000000))\n\n # Spawn the npc cars\n spawn_point = carla.Transform(carla.Location(x=218.6299, y=9.7574, z=1.843102),\n carla.Rotation(pitch=0.000000, yaw=0.855823, roll=0.000000))\n self.npc1 = self.world.try_spawn_actor(blueprintNpc1, spawn_point)\n\n else:\n logging.info('''\n\n Unavailable Scenario selected\n \n 1 - EURO NCAP CPNC\n 2 - EURO NCAP CPLA\n 3 - EURO NCAP CBLA\n 4 - EURO 
NCAP CCRS\n\n ''')\n\n \"\"\"\n\n\n # Keep same camera config if the camera manager exists.\n cam_index = self.camera_manager.index if self.camera_manager is not None else 0\n cam_pos_index = self.camera_manager.transform_index if self.camera_manager is not None else 0\n\n # Set up the sensors.\n self.camera_manager = CameraManager(self.player, self.hud)\n self.camera_manager.transform_index = cam_pos_index\n self.camera_manager.set_sensor(cam_index, notify=False)\n\n\n def next_weather(self, reverse=False):\n self._weather_index += -1 if reverse else 1\n self._weather_index %= len(self._weather_presets)\n preset = self._weather_presets[self._weather_index]\n self.hud.notification('Weather: %s' % preset[1])\n self.player.get_world().set_weather(preset[0])\n\n def tick(self, clock):\n self.hud.tick(self, clock)\n\n def render(self, display):\n self.camera_manager.render(display)\n self.hud.render(display)\n\n def destroy_sensors(self):\n self.camera_manager.sensor.destroy()\n self.camera_manager.sensor = None\n self.camera_manager.index = None\n\n\n def destroy(self):\n if rospy.get_param(\"scenario\") == 1:\n actors = [\n self.camera_manager.sensor,\n #self.collision_sensor.sensor,\n #self.lane_invasion_sensor.sensor,\n #self.gnss_sensor.sensor,\n self.player,\n self.npc1,\n self.npc2,\n self.walker1]\n elif rospy.get_param(\"scenario\") == 2:\n actors = [\n self.camera_manager.sensor,\n self.player,\n self.walker1]\n elif rospy.get_param(\"scenario\") == 3 or rospy.get_param(\"scenario\") == 4:\n actors = [\n self.camera_manager.sensor,\n self.player,\n self.npc1]\n\n for actor in actors:\n if actor is not None:\n actor.destroy()\n\n# ==============================================================================\n# -- KeyboardControl -----------------------------------------------------------\n# ==============================================================================\n\n\nclass KeyboardControl(object):\n def __init__(self, world):\n world.hud.notification(\"Press 'H' or '?' 
for help.\", seconds=4.0)\n\n def parse_events(self):\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n return True\n elif event.type == pygame.KEYUP:\n if self._is_quit_shortcut(event.key):\n return True\n\n @staticmethod\n def _is_quit_shortcut(key):\n return (key == K_ESCAPE) or (key == K_q and pygame.key.get_mods() & KMOD_CTRL)\n\n# ==============================================================================\n# -- HUD -----------------------------------------------------------------------\n# ==============================================================================\n\n\nclass HUD(object):\n def __init__(self, width, height):\n self.dim = (width, height)\n font = pygame.font.Font(pygame.font.get_default_font(), 20)\n font_name = 'courier' if os.name == 'nt' else 'mono'\n fonts = [x for x in pygame.font.get_fonts() if font_name in x]\n default_font = 'ubuntumono'\n mono = default_font if default_font in fonts else fonts[0]\n mono = pygame.font.match_font(mono)\n self._font_mono = pygame.font.Font(mono, 12 if os.name == 'nt' else 14)\n self._notifications = FadingText(font, (width, 40), (0, height - 40))\n self.help = HelpText(pygame.font.Font(mono, 24), width, height)\n self.server_fps = 0\n self.frame = 0\n self.simulation_time = 0\n self._show_info = True\n self._info_text = []\n self._server_clock = pygame.time.Clock()\n if rospy.get_param(\"scenario\") == 1:\n self.scenario= \"EURO NCAP CPNC\"\n elif rospy.get_param(\"scenario\") == 2:\n self.scenario= \"EURO NCAP CPLA\"\n elif rospy.get_param(\"scenario\") == 3:\n self.scenario = \"EURO NCAP CBLA\"\n elif rospy.get_param(\"scenario\") == 4:\n self.scenario = \"EURO NCAP CCRS\"\n else:\n self.scenario = \"Unknown\"\n self.desvel = rospy.get_param(\"des_vel\")\n\n def on_world_tick(self, timestamp):\n self._server_clock.tick()\n self.server_fps = self._server_clock.get_fps()\n self.frame = timestamp.frame\n self.simulation_time = timestamp.elapsed_seconds\n\n def tick(self, world, clock):\n self._notifications.tick(world, clock)\n if not self._show_info:\n return\n t = world.player.get_transform()\n v = world.player.get_velocity()\n c = world.player.get_control()\n heading = 'N' if abs(t.rotation.yaw) < 89.5 else ''\n heading += 'S' if abs(t.rotation.yaw) > 90.5 else ''\n heading += 'E' if 179.5 > t.rotation.yaw > 0.5 else ''\n heading += 'W' if -0.5 > t.rotation.yaw > -179.5 else ''\n #colhist = world.collision_sensor.get_collision_history()\n #collision = [colhist[x + self.frame - 200] for x in range(0, 200)]\n #max_col = max(1.0, max(collision))\n #collision = [x / max_col for x in collision]\n vehicles = world.world.get_actors().filter('vehicle.*')\n pedestrians = world.world.get_actors().filter('walker.*')\n self._info_text = [\n 'Scenario: % 20s' % self.scenario,\n 'Desired Speed: % 9.0f km/h' % self.desvel,\n '',\n 'Vehicle: % 20s' % get_actor_display_name(world.player, truncate=20),\n 'Simulation time: % 12s' % datetime.timedelta(seconds=int(self.simulation_time)),\n '',\n 'Server: % 16.0f FPS' % self.server_fps,\n #'Client: % 16.0f FPS' % clock.get_fps(),\n #'',\n #'Map: % 20s' % world.map.name,\n '',\n\n 'Speed: % 15.0f km/h' % (3.6 * math.sqrt(v.x ** 2 + v.y ** 2 + v.z ** 2)),\n u'Heading:% 16.0f\\N{DEGREE SIGN} % 2s' % (t.rotation.yaw, heading),\n 'Location:% 20s' % ('(% 5.1f, % 5.1f)' % (t.location.x, t.location.y)),\n #'GNSS:% 24s' % ('(% 2.6f, % 3.6f)' % (world.gnss_sensor.lat, world.gnss_sensor.lon)),\n 'Height: % 18.0f m' % t.location.z,\n '']\n #if isinstance(c, carla.VehicleControl):\n # 
self._info_text += [\n # ('Throttle:', c.throttle, 0.0, 1.0),\n # ('Steer:', c.steer, -1.0, 1.0),\n # ('Brake:', c.brake, 0.0, 1.0),\n # ('Reverse:', c.reverse),\n # ('Hand brake:', c.hand_brake),\n # ('Manual:', c.manual_gear_shift),\n # 'Gear: %s' % {-1: 'R', 0: 'N'}.get(c.gear, c.gear)]\n #elif isinstance(c, carla.WalkerControl):\n # self._info_text += [\n # ('Speed:', c.speed, 0.0, 5.556),\n # ('Jump:', c.jump)]\n self._info_text += [\n #'',\n #'Collision:',\n #collision,\n '',\n 'Number of vehicles: % 8d' % len(vehicles)]\n\n if len(vehicles) > 1:\n self._info_text += ['Nearby vehicles:']\n def distance(l): return math.sqrt(\n (l.x - t.location.x) ** 2 + (l.y - t.location.y) ** 2 + (l.z - t.location.z) ** 2)\n vehicles = [(distance(x.get_location()), x) for x in vehicles if x.id != world.player.id]\n for d, vehicle in sorted(vehicles):\n if d > 200.0:\n break\n vehicle_type = get_actor_display_name(vehicle, truncate=22)\n self._info_text.append('% 4dm %s' % (int(d), vehicle_type))\n\n self._info_text += ['','Number of pedestrians: % 5d' % len(pedestrians)]\n if len(pedestrians) > 0:\n self._info_text += ['Nearby pedestrians:']\n\n def distance(l):\n return math.sqrt(\n (l.x - t.location.x) ** 2 + (l.y - t.location.y) ** 2 + (l.z - t.location.z) ** 2)\n pedestrians = [(distance(x.get_location()), x) for x in pedestrians if x.id != world.player.id]\n for d, pedestrian in sorted(pedestrians):\n if d > 200.0:\n break\n pedestrian_type = get_actor_display_name(pedestrian, truncate=22)\n self._info_text.append('% 4dm %s' % (int(d), pedestrian_type))\n\n def toggle_info(self):\n self._show_info = not self._show_info\n\n def notification(self, text, seconds=2.0):\n self._notifications.set_text(text, seconds=seconds)\n\n def error(self, text):\n self._notifications.set_text('Error: %s' % text, (255, 0, 0))\n\n def render(self, display):\n if self._show_info:\n info_surface = pygame.Surface((220, self.dim[1]))\n info_surface.set_alpha(100)\n display.blit(info_surface, (0, 0))\n v_offset = 4\n bar_h_offset = 100\n bar_width = 106\n for item in self._info_text:\n if v_offset + 18 > self.dim[1]:\n break\n if isinstance(item, list):\n if len(item) > 1:\n points = [(x + 8, v_offset + 8 + (1.0 - y) * 30) for x, y in enumerate(item)]\n pygame.draw.lines(display, (255, 136, 0), False, points, 2)\n item = None\n v_offset += 18\n elif isinstance(item, tuple):\n if isinstance(item[1], bool):\n rect = pygame.Rect((bar_h_offset, v_offset + 8), (6, 6))\n pygame.draw.rect(display, (255, 255, 255), rect, 0 if item[1] else 1)\n else:\n rect_border = pygame.Rect((bar_h_offset, v_offset + 8), (bar_width, 6))\n pygame.draw.rect(display, (255, 255, 255), rect_border, 1)\n f = (item[1] - item[2]) / (item[3] - item[2])\n if item[2] < 0.0:\n rect = pygame.Rect((bar_h_offset + f * (bar_width - 6), v_offset + 8), (6, 6))\n else:\n rect = pygame.Rect((bar_h_offset, v_offset + 8), (f * bar_width, 6))\n pygame.draw.rect(display, (255, 255, 255), rect)\n item = item[0]\n if item: # At this point has to be a str.\n surface = self._font_mono.render(item, True, (255, 255, 255))\n display.blit(surface, (8, v_offset))\n v_offset += 18\n self._notifications.render(display)\n self.help.render(display)\n\n# ==============================================================================\n# -- FadingText ----------------------------------------------------------------\n# ==============================================================================\n\n\nclass FadingText(object):\n def __init__(self, font, dim, pos):\n self.font = 
font\n        self.dim = dim\n        self.pos = pos\n        self.seconds_left = 0\n        self.surface = pygame.Surface(self.dim)\n\n    def set_text(self, text, color=(255, 255, 255), seconds=2.0):\n        text_texture = self.font.render(text, True, color)\n        self.surface = pygame.Surface(self.dim)\n        self.seconds_left = seconds\n        self.surface.fill((0, 0, 0, 0))\n        self.surface.blit(text_texture, (10, 11))\n\n    def tick(self, _, clock):\n        delta_seconds = 1e-3 * clock.get_time()\n        self.seconds_left = max(0.0, self.seconds_left - delta_seconds)\n        self.surface.set_alpha(500.0 * self.seconds_left)\n\n    def render(self, display):\n        display.blit(self.surface, self.pos)\n\n# ==============================================================================\n# -- HelpText ------------------------------------------------------------------\n# ==============================================================================\n\n\nclass HelpText(object):\n    def __init__(self, font, width, height):\n        lines = __doc__.split('\\n')\n        self.font = font\n        self.dim = (680, len(lines) * 22 + 12)\n        self.pos = (0.5 * width - 0.5 * self.dim[0], 0.5 * height - 0.5 * self.dim[1])\n        self.seconds_left = 0\n        self.surface = pygame.Surface(self.dim)\n        self.surface.fill((0, 0, 0, 0))\n        for n, line in enumerate(lines):\n            text_texture = self.font.render(line, True, (255, 255, 255))\n            self.surface.blit(text_texture, (22, n * 22))\n        self._render = False\n        self.surface.set_alpha(220)\n\n    def toggle(self):\n        self._render = not self._render\n\n    def render(self, display):\n        if self._render:\n            display.blit(self.surface, self.pos)\n\n# ==============================================================================\n# -- CollisionSensor -----------------------------------------------------------\n# ==============================================================================\n\n\nclass CollisionSensor(object):\n    def __init__(self, parent_actor, hud):\n        self.sensor = None\n        self.history = []\n        self._parent = parent_actor\n        self.hud = hud\n        world = self._parent.get_world()\n        bp = world.get_blueprint_library().find('sensor.other.collision')\n        self.sensor = world.spawn_actor(bp, carla.Transform(), attach_to=self._parent)\n        # We need to pass the lambda a weak reference to self to avoid circular\n        # reference.\n        weak_self = weakref.ref(self)\n        self.sensor.listen(lambda event: CollisionSensor._on_collision(weak_self, event))\n\n    def get_collision_history(self):\n        history = collections.defaultdict(int)\n        for frame, intensity in self.history:\n            history[frame] += intensity\n        return history\n\n    @staticmethod\n    def _on_collision(weak_self, event):\n        self = weak_self()\n        if not self:\n            return\n        actor_type = get_actor_display_name(event.other_actor)\n        self.hud.notification('Collision with %r' % actor_type)\n        impulse = event.normal_impulse\n        intensity = math.sqrt(impulse.x ** 2 + impulse.y ** 2 + impulse.z ** 2)\n        self.history.append((event.frame, intensity))\n        if len(self.history) > 4000:\n            self.history.pop(0)\n\n# ==============================================================================\n# -- LaneInvasionSensor --------------------------------------------------------\n# ==============================================================================\n\n\nclass LaneInvasionSensor(object):\n    def __init__(self, parent_actor, hud):\n        self.sensor = None\n        self._parent = parent_actor\n        self.hud = hud\n        world = self._parent.get_world()\n        bp = world.get_blueprint_library().find('sensor.other.lane_invasion')\n        self.sensor = world.spawn_actor(bp, carla.Transform(), 
attach_to=self._parent)\n # We need to pass the lambda a weak reference to self to avoid circular\n # reference.\n weak_self = weakref.ref(self)\n self.sensor.listen(lambda event: LaneInvasionSensor._on_invasion(weak_self, event))\n\n @staticmethod\n def _on_invasion(weak_self, event):\n self = weak_self()\n if not self:\n return\n lane_types = set(x.type for x in event.crossed_lane_markings)\n text = ['%r' % str(x).split()[-1] for x in lane_types]\n self.hud.notification('Crossed line %s' % ' and '.join(text))\n\n# ==============================================================================\n# -- GnssSensor --------------------------------------------------------\n# ==============================================================================\n\n\nclass GnssSensor(object):\n def __init__(self, parent_actor):\n self.sensor = None\n self._parent = parent_actor\n self.lat = 0.0\n self.lon = 0.0\n world = self._parent.get_world()\n bp = world.get_blueprint_library().find('sensor.other.gnss')\n self.sensor = world.spawn_actor(bp, carla.Transform(carla.Location(x=1.0, z=2.8)),\n attach_to=self._parent)\n # We need to pass the lambda a weak reference to self to avoid circular\n # reference.\n weak_self = weakref.ref(self)\n self.sensor.listen(lambda event: GnssSensor._on_gnss_event(weak_self, event))\n\n @staticmethod\n def _on_gnss_event(weak_self, event):\n self = weak_self()\n if not self:\n return\n self.lat = event.latitude\n self.lon = event.longitude\n\n# ==============================================================================\n# -- CameraManager -------------------------------------------------------------\n# ==============================================================================\n\n\nclass CameraManager(object):\n def __init__(self, parent_actor, hud):\n self.sensor = None\n self.surface = None\n self._parent = parent_actor\n self.hud = hud\n self.recording = False\n #self._camera_transforms = [carla.Transform(carla.Location(x=-5.5, z=2.8), carla.Rotation(pitch=-15)),carla.Transform(carla.Location(x=1.6, z=1.7))]\n self._camera_transforms = [carla.Transform(carla.Location(x=0.44,y=0, z=1.45), carla.Rotation(pitch=-5,yaw=0)),carla.Transform(carla.Location(x=1.6, z=1.7))]\n #self._camera_transforms = [carla.Transform(carla.Location(x=0.44,y=1.0, z=0.8), carla.Rotation(pitch=-15,yaw=-15,roll=10)),carla.Transform(carla.Location(x=1.6, z=1.7))]\n\n self.transform_index = 1\n self.sensors = [\n ['sensor.camera.rgb', cc.Raw, 'Camera RGB'],\n ['sensor.camera.depth', cc.Raw, 'Camera Depth (Raw)'],\n ['sensor.camera.depth', cc.Depth, 'Camera Depth (Gray Scale)'],\n ['sensor.camera.depth', cc.LogarithmicDepth, 'Camera Depth (Logarithmic Gray Scale)'],\n ['sensor.camera.semantic_segmentation', cc.Raw, 'Camera Semantic Segmentation (Raw)'],\n ['sensor.camera.semantic_segmentation', cc.CityScapesPalette,\n 'Camera Semantic Segmentation (CityScapes Palette)'],\n ['sensor.lidar.ray_cast', None, 'Lidar (Ray-Cast)']]\n world = self._parent.get_world()\n bp_library = world.get_blueprint_library()\n for item in self.sensors:\n bp = bp_library.find(item[0])\n if item[0].startswith('sensor.camera'):\n bp.set_attribute('image_size_x', str(hud.dim[0]))\n bp.set_attribute('image_size_y', str(hud.dim[1]))\n elif item[0].startswith('sensor.lidar'):\n bp.set_attribute('range', '50')\n item.append(bp)\n self.index = None\n\n def toggle_camera(self):\n self.transform_index = (self.transform_index + 1) % len(self._camera_transforms)\n 
self.sensor.set_transform(self._camera_transforms[self.transform_index])\n\n def set_sensor(self, index, notify=True):\n index = index % len(self.sensors)\n needs_respawn = True if self.index is None \\\n else self.sensors[index][0] != self.sensors[self.index][0]\n if needs_respawn:\n if self.sensor is not None:\n self.sensor.destroy()\n self.surface = None\n self.sensor = self._parent.get_world().spawn_actor(\n self.sensors[index][-1],\n self._camera_transforms[self.transform_index],\n attach_to=self._parent)\n # We need to pass the lambda a weak reference to self to avoid\n # circular reference.\n weak_self = weakref.ref(self)\n self.sensor.listen(lambda image: CameraManager._parse_image(weak_self, image))\n if notify:\n self.hud.notification(self.sensors[index][2])\n self.index = index\n\n def next_sensor(self):\n self.set_sensor(self.index + 1)\n\n def toggle_recording(self):\n self.recording = not self.recording\n self.hud.notification('Recording %s' % ('On' if self.recording else 'Off'))\n\n def render(self, display):\n if self.surface is not None:\n display.blit(self.surface, (0, 0))\n\n @staticmethod\n def _parse_image(weak_self, image):\n self = weak_self()\n if not self:\n return\n if self.sensors[self.index][0].startswith('sensor.lidar'):\n points = np.frombuffer(image.raw_data, dtype=np.dtype('f4'))\n points = np.reshape(points, (int(points.shape[0] / 3), 3))\n lidar_data = np.array(points[:, :2])\n lidar_data *= min(self.hud.dim) / 100.0\n lidar_data += (0.5 * self.hud.dim[0], 0.5 * self.hud.dim[1])\n lidar_data = np.fabs(lidar_data) # pylint: disable=E1111\n lidar_data = lidar_data.astype(np.int32)\n lidar_data = np.reshape(lidar_data, (-1, 2))\n lidar_img_size = (self.hud.dim[0], self.hud.dim[1], 3)\n lidar_img = np.zeros(lidar_img_size)\n lidar_img[tuple(lidar_data.T)] = (255, 255, 255)\n self.surface = pygame.surfarray.make_surface(lidar_img)\n else:\n image.convert(self.sensors[self.index][1])\n array = np.frombuffer(image.raw_data, dtype=np.dtype(\"uint8\"))\n array = np.reshape(array, (image.height, image.width, 4))\n array = array[:, :, :3]\n array = array[:, :, ::-1]\n self.surface = pygame.surfarray.make_surface(array.swapaxes(0, 1))\n if self.recording:\n image.save_to_disk('_out/%08d' % image.frame)\n\ndef run_scenario(scenario,world):\n\n empiric_delay = 0.075 ## empiric parameter in a way that the pedestrian collide in the middle of the car 'system delay'\n player_pos = world.player.get_location()\n player_size = world.player.bounding_box.extent.x\n crash_point = 223.0733\n\n if scenario == 1:\n walker_pos = world.walker1.get_location()\n walker_size = world.walker1.bounding_box.extent.y\n\n if player_pos.x > ((walker_pos.x - player_size - walker_size - rospy.get_param(\"des_vel\") * 4 / 5 - rospy.get_param(\"des_vel\") * empiric_delay) * math.cos(0.014937)):\n world.walker1.apply_control(world.control)\n\n elif scenario == 2:\n walker_size = world.walker1.bounding_box.extent.x\n walker_pos = world.walker1.get_location()\n if player_pos.x > (crash_point-player_size-walker_size-rospy.get_param(\"des_vel\")*10/5-rospy.get_param(\"des_vel\")*empiric_delay)*math.cos(0.014937):\n if walker_pos.x > 225:\n world.control.speed = 0\n world.walker1.apply_control(world.control)\n\n\n elif scenario == 3:\n npc1_size = world.npc1.bounding_box.extent.x\n npc1_pos = world.npc1.get_location()\n npc1_velocity = world.npc1.get_velocity()\n\n npc1_vel = math.sqrt(npc1_velocity.x ** 2 + npc1_velocity.y ** 2)\n\n if player_pos.x > (\n crash_point - player_size - npc1_size - 
rospy.get_param(\"des_vel\") * 28 / 15 - rospy.get_param(\"des_vel\") * empiric_delay) * math.cos(\n 0.014937):\n\n if npc1_pos.x > 225:\n # start braking\n world.vehicle_control.manual_gear_shift = False\n world.vehicle_control.throttle = 0.0\n world.vehicle_control.brake = 1\n world.npc1.apply_control(world.vehicle_control)\n else:\n if world.npc1.stop:\n world.npc1.set_target_velocity(carla.Vector3D(x=14.9983 / 3.6, y=0.2240 / 3.6, z=0.000000))\n npc1_vel = 15 / 3.6\n world.npc1.stop = False\n # accelerating to 15 Km/h and hold\n world.vehicle_control.manual_gear_shift = True\n world.vehicle_control.gear = 2\n E = world.npc1.vsoll - npc1_vel\n throttle = E * world.npc1.kp * (1 / world.npc1.tn)\n if throttle > 1:\n throttle = 1\n elif throttle < 0.2:\n throttle = 0.2\n world.vehicle_control.throttle = throttle\n world.npc1.apply_control(world.vehicle_control)\n\n\n\n\n# ==============================================================================\n# -- game_loop() ---------------------------------------------------------\n# ==============================================================================\n\ndef game_loop():\n pygame.init()\n pygame.font.init()\n world = None\n\n try:\n ## Connect the client with the Server according to the arguments and wait 4 seconds for Server response\n client = carla.Client(rospy.get_param(\"carla_host\"), int(rospy.get_param(\"carla_port\")))\n client.set_timeout(4.0)\n print(\"Connected to client\")\n ## Inicialize the display and Hud\n display = pygame.display.set_mode((rospy.get_param(\"cam_width\"), rospy.get_param(\"cam_height\")),pygame.HWSURFACE | pygame.DOUBLEBUF)\n hud = HUD(rospy.get_param(\"cam_width\"), rospy.get_param(\"cam_height\"))\n\n ## Initialise the world (Moving Object and Sensors)\n world = World(client.get_world(),hud)\n\n\n #world.restart()\n ## Initialise the clock\n clock = pygame.time.Clock()\n\n ## Initialize the Keyboard interaction\n controller = KeyboardControl(world)\n\n while True:\n if controller.parse_events():\n return\n\n\n\n\n # as soon as the server is ready continue!\n world.world.wait_for_tick(10.0)\n world.tick(clock)\n world.render(display)\n pygame.display.flip()\n world.restart()\n # run_scenario(rospy.get_param(\"scenario\"),world)\n\n\n finally:\n world.destroy()\n pygame.quit()\n\n# ==============================================================================\n# -- main() --------------------------------------------------------------\n# ==============================================================================\n\ndef main():\n\n game_loop()\n\n #log_level = logging.DEBUG if rospy.get_param(\"verbose\") else logging.INFO\n #logging.basicConfig(format='%(levelname)s: %(message)s', level=log_level)\n #logging.info('listening to server %s:%s', rospy.get_param(\"carla_host\"), rospy.get_param(\"carla_port\"))\n #try:\n # game_loop()\n #except KeyboardInterrupt:\n # print('\\nCancelled by user. Bye!')\n\n# ==============================================================================\n# ==============================================================================\nif __name__ == '__main__':\n main()\n"
},
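run_scenario() in the script above releases the walker (scenario 2) once the ego passes a trigger point built from the crash point, both actors' half-lengths and two speed-dependent margins. A worked version of that arithmetic, with purely illustrative values for the sizes and desired speed:

# Worked example of the scenario-2 trigger test in run_scenario() above.
# All numbers below are illustrative, not calibrated values.
import math

crash_point   = 223.0733   # x position where the paths cross [m] (from the script)
player_size   = 2.4        # bounding_box.extent.x of the ego [m] (illustrative)
walker_size   = 0.2        # bounding_box.extent.x of the walker [m] (illustrative)
des_vel       = 8.33       # desired ego speed (illustrative)
empiric_delay = 0.075      # empiric 'system delay' from the script

trigger_x = (crash_point - player_size - walker_size
             - des_vel * 10 / 5 - des_vel * empiric_delay) * math.cos(0.014937)
print('walker starts moving once ego x >', round(trigger_x, 2))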
{
"alpha_fraction": 0.6054980158805847,
"alphanum_fraction": 0.61985844373703,
"avg_line_length": 37.53359603881836,
"blob_id": "791b5157ea7d31e0e43dd33c0a682680c271e90b",
"content_id": "377e1dbc2e252795ec7aa689f68280e20474eb0f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 9749,
"license_type": "no_license",
"max_line_length": 146,
"num_lines": 253,
"path": "/src/aeb/script/aeb.py",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\nimport rospy\nimport message_filters\nimport numpy as np\nimport math\nimport time\n\n# import ROS messages\nfrom object_list.msg import ObjectsList\nfrom osi3_bridge.msg import TrafficUpdateMovingObject\nfrom vehicle_control.msg import Trajectory\nfrom std_msgs.msg import Int32\n\n# import class with aeb parameters\nfrom ClassAeb import Aeb\nfrom ClassAeb import Features\n\nglobal initial_time\ninitial_time = 0\nglobal is_1step\nis_1step= True\n\n\naeb_data=Aeb()\n\n\ndef aeb_funtion():\n\n # Node initialization\n rospy.init_node('aeb', anonymous=False) # Start node\n #rate = rospy.Rate(rospy.get_param(\"freq\") ) # Define the node frequency 100hz\n\n #Define the Publisher with time stamp combination based on ROS time\n ego_sub = message_filters.Subscriber(\"/ego_data\", TrafficUpdateMovingObject)\n objs_sub = message_filters.Subscriber(\"/sensor0/obj_list_egoframe\", ObjectsList)\n ts = message_filters.ApproximateTimeSynchronizer([ego_sub, objs_sub], 3,3)\n ts.registerCallback(callback)\n rospy.spin() # Make sure that every time a message is received the callback will be called again\n\ndef callback(ego, obj_list):\n\n ## Define initial Time because the controller cannot manage the high time values from ROS\n\n global initial_time\n global is_1step\n global aeb_data\n\n\n aeb_data.last_status = aeb_data.status\n if is_1step:\n is_1step = False\n initial_time = ego.header.stamp.secs + ego.header.stamp.nsecs * 1e-9\n aeb_data.yaw = ego.object.orientation.yaw\n\n # Find the Target\n near_obj = find_target(obj_list)\n\n ## Initialization variables\n #aeb_data = Aeb() # Object which contain all AEB Parameters\n egovx = ego.object.velocity.x # Ego velocity\n Traj = Trajectory() # Trajectory message\n Traj.header.stamp = rospy.Time.now() # Include actual time on the stamp\n Traj.reftime=initial_time ## Include the initial time on the trajectory message\n\n ## Considering just longitudinal movement the yaw of the car keeps always the same\n Traj.yaw = np.full((aeb_data.amount_data), aeb_data.yaw)\n\n ## Fullfil Trajectory time with header time + steps\n Timenow = np.full(aeb_data.amount_data, (ego.header.stamp.secs+ego.header.stamp.nsecs*1e-9-initial_time)) # Vector with the actual time\n Traj.time = aeb_data.timesteparray + Timenow\n\n #print(ego.object.position.x )\n ## Calculate AEB\n if near_obj != 9999:\n [aeb_data,reldist] = calculate_aeb(egovx, obj_list.obj_list[near_obj],aeb_data)\n ## Definition of actual condition\n if aeb_data.offset >= abs(reldist) and obj_list.obj_list[near_obj].geometric.vx <=0.5:\n print (\"Stop\")\n aeb_data.status = 3\n vel_aux = np.full(aeb_data.amount_data + 1, 0)\n elif aeb_data.offset >= abs(reldist) and obj_list.obj_list[near_obj].geometric.vx > 0.5:\n print (\"Following\")\n aeb_data.status = 4\n vel_aux = np.full(aeb_data.amount_data+1,obj_list.obj_list[near_obj].geometric.vx)\n elif (abs(aeb_data.ttc) < aeb_data.stoptime.stage3) and (aeb_data.ttc < 0):\n print(\"Stage 3 is on\")\n aeb_data.status = 3\n vel_aux = np.full(aeb_data.amount_data + 1, 0)\n #vel_aux = velocity_calculation(egovx,aeb_data.acc.stage3,aeb_data.time_step,aeb_data.amount_data)\n elif (abs(aeb_data.ttc) < aeb_data.stoptime.stage2) and (aeb_data.ttc < 0):\n print(\"Stage 2 is on\")\n aeb_data.status = 2\n vel_aux = velocity_calculation(egovx,aeb_data.acc.stage2,aeb_data.time_step,aeb_data.amount_data)\n elif (abs(aeb_data.ttc) < aeb_data.stoptime.stage1) and (aeb_data.ttc < 0):\n print(\"Stage 1 is on\")\n aeb_data.status = 1\n vel_aux = 
velocity_calculation(egovx,aeb_data.acc.stage1,aeb_data.time_step,aeb_data.amount_data)\n elif (abs(aeb_data.ttc) < aeb_data.stoptime.fw) and (aeb_data.ttc < 0):\n aeb_data.status = 5\n print(\"FW is on\")\n vel_aux = np.full(aeb_data.amount_data+1, aeb_data.des_vel)\n else:\n print(\"AEB is off\")\n aeb_data.status = 0\n ## keep the expected velocity\n vel_aux = np.full(aeb_data.amount_data + 1, aeb_data.des_vel)\n\n #print (\"TTC \", aeb_data.ttc)\n #print(\"relative distance \", reldist)\n #print(\"relative velocity \", obj_list.obj_list[near_obj].geometric.vx - egovx)\n elif ego.object.position.x > 240:\n print(\"End of the Test\")\n vel_aux = np.full(aeb_data.amount_data + 1, 0)\n aeb_data.status = 3\n else:\n print(\"AEB is off, out of range\")\n aeb_data.status = 0\n ## keep the expected velocity\n vel_aux = np.full(aeb_data.amount_data + 1, aeb_data.des_vel)\n\n #if aeb_data.status == 3 and aeb_data.last_status == 3:\n # egox = np.full(aeb_data.amount_data, aeb_data.x)\n # egoy = np.full(aeb_data.amount_data, aeb_data.y)\n # print ('Full brake')\n #elif aeb_data.status == 3:\n # aeb_data.x = ego.object.position.x\n # aeb_data.y = ego.object.position.y\n # egox = np.full(aeb_data.amount_data, ego.object.position.x)\n # egoy = np.full(aeb_data.amount_data, ego.object.position.y)\n #else:\n # egox = np.full(aeb_data.amount_data, ego.object.position.x)\n # egoy = np.full(aeb_data.amount_data, ego.object.position.y)\n\n\n egox = np.full(aeb_data.amount_data, ego.object.position.x)\n egoy = np.full(aeb_data.amount_data, ego.object.position.y)\n\n\n Traj = position_calculation(aeb_data, vel_aux, Traj,egox,egoy,egovx)\n Traj.v = vel_aux[0:aeb_data.amount_data]\n #return Traj\n pub = rospy.Publisher('trajectory', Trajectory, queue_size=10,latch=True)\n pub.publish(Traj)\n pub_aeb = rospy.Publisher ('aeb_status',Int32,queue_size=10)\n pub_aeb.publish(aeb_data.status)\n #rate = rospy.Rate(25) # Define the node frequency 100hz\n #rate.sleep()\n\ndef position_calculation(aeb_data, vel, Traj, egox,egoy,egovx):\n pos = np.zeros(len(vel)-1)\n if aeb_data.status == 3:\n vel_aux = velocity_calculation(egovx, 9.0, aeb_data.time_step, aeb_data.amount_data)\n for i in range (1,len(vel_aux)-1):\n pos[i] = pos[i - 1] + (vel_aux[i] + vel_aux[i + 1]) * 0.5 * aeb_data.time_step\n else:\n for i in range (1,len(vel)-1):\n pos[i] = pos[i-1] + (vel[i]+vel[i+1]) * 0.5 * aeb_data.time_step\n\n Traj.x = egox+pos*math.cos(Traj.yaw[0])\n Traj.y = egoy+pos*math.sin(Traj.yaw[0])\n #Traj.x = pos*math.cos(Traj.yaw[0])\n #Traj.y = pos*math.sin(Traj.yaw[0])\n\n return(Traj)\n\n\ndef velocity_calculation(egovx,acc,step,amount_data):\n v=egovx\n i=0\n vel = np.zeros(amount_data+1)\n while v >= 0 and (i) <= amount_data:\n vel[i] = v\n v = v - acc * step\n i = i+1\n return(vel)\n\n\ndef find_target(obj_list):\n\n near_x = 9999\n near_obj = 9999\n for i in range(len(obj_list.obj_list)):\n\n ### calculate near x and near y from each object###\n near_feature_x = calculate_x_y(obj_list.obj_list[i])\n if near_feature_x < near_x:\n near_x = near_feature_x\n near_obj = i\n return near_obj\n\ndef calculate_aeb (egovx,obj,aeb):\n rel_velx = obj.geometric.vx - egovx\n reldist = obj.geometric.x # - abs(obj.dimension.length * math.cos(obj.geometric.yaw)) - abs(obj.dimension.width * math.sin(obj.geometric.yaw))\n #reldist = calculate_rel_dist(obj)\n ##ttc = (distance - offset) / relative velocity * Signal of relative velocity\n aeb.ttc = safe_div(reldist-aeb.offset, abs(rel_velx)) * safe_div(rel_velx, abs(rel_velx))\n aeb.stoptime.fw = 
egovx/aeb.acc.fw + aeb.react.driver\n aeb.stoptime.stage1 = egovx / aeb.acc.stage1 + aeb.react.system\n aeb.stoptime.stage2 = egovx / aeb.acc.stage2 + aeb.react.system\n aeb.stoptime.stage3 = egovx / aeb.acc.stage3 + aeb.react.system\n\n return [aeb,reldist]\n\ndef safe_div(x,y):\n try:\n return x/y\n except ZeroDivisionError:\n return 9999\n\n\ndef calculate_x_y(obj):\n\n x = Features() # Import a class with all features as float 0.0\n y = Features()\n\n # Calculate the features\n tg_wl = math.atan(obj.dimension.width / obj.dimension.length)\n hip_wl = 0.5 * math.sqrt(obj.dimension.width ** 2 + obj.dimension.length ** 2)\n beta = obj.geometric.yaw - tg_wl\n psi = obj.geometric.yaw + tg_wl\n\n x.FL = obj.geometric.x + hip_wl * math.cos(psi)\n y.FL = obj.geometric.y - hip_wl * math.sin(psi)\n x.FR = obj.geometric.x + hip_wl * math.cos(beta)\n y.FR = obj.geometric.y - hip_wl * math.sin(beta)\n x.RR = obj.geometric.x - hip_wl * math.cos(-psi)\n y.RR = obj.geometric.y - hip_wl * math.sin(-psi)\n x.RL = obj.geometric.x - hip_wl * math.cos(-beta)\n y.RL = obj.geometric.y - hip_wl * math.sin(-beta)\n x.FM = (x.FR + x.FL) / 2\n y.FM = (y.FR + y.FL) / 2\n x.ML = (x.RL + x.FL) / 2\n y.ML = (y.RL + y.FL) / 2\n x.MR = (x.RR + x.FR) / 2\n y.MR = (y.RR + y.FR) / 2\n x.RM = (x.RR + x.RL) / 2\n y.RM = (y.RR + y.RL) / 2\n\n X = [x.FL, x.FM, x.FR, x.MR, x.RR, x.RM, x.RL, x.ML] # Vector of x position for the Features\n Y = [y.FL, y.FM, y.FR, y.MR, y.RR, y.RM, y.RL, y.ML] # Vector of y position for the Features\n\n ## Discover which Features are inside the lateral range of the AEB\n lat_rang = rospy.get_param(\"lat_rang\") # 1.5 ## Lateral range of evaluation [m]\n near_x=9999\n for i in range (8):\n if abs(Y[i]) < lat_rang: ################# Lateral range of evaluation\n ### calculate near x inside desired y###\n if X[i] < near_x:\n near_x = X[i]\n return near_x\n\nif __name__ == '__main__':\n aeb_funtion()\n"
},
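The file above hinges on two small computations: a signed time-to-collision (TTC) test and a trapezoidal integration of a constant-deceleration velocity ramp. Below is a minimal, self-contained sketch of both; the names ttc, velocity_ramp and integrate, as well as all numbers, are illustrative stand-ins and not part of the node's interface.

import numpy as np

def safe_div(x, y, sentinel=9999.0):
    # mirrors the node's safe_div: a large sentinel stands in for an infinite result
    return x / y if y != 0 else sentinel

def ttc(rel_dist, rel_vel, offset=0.0):
    # signed TTC: negative while the gap is closing (rel_vel < 0)
    return safe_div(rel_dist - offset, abs(rel_vel)) * safe_div(rel_vel, abs(rel_vel))

def velocity_ramp(v0, acc, step, n):
    # velocity profile from v0 down to 0 m/s under constant deceleration acc,
    # sampled at n + 1 support points spaced step seconds apart
    return np.maximum(v0 - acc * step * np.arange(n + 1), 0.0)

def integrate(vel, step):
    # trapezoidal integration of the profile into travelled distance;
    # pos[0] stays 0, matching the integration loop in position_calculation
    pos = np.zeros(len(vel) - 1)
    for i in range(1, len(vel) - 1):
        pos[i] = pos[i - 1] + (vel[i] + vel[i + 1]) * 0.5 * step
    return pos

if __name__ == "__main__":
    print(ttc(rel_dist=30.0, rel_vel=-20.0))  # -1.5 s: gap closes in 1.5 s if nobody brakes
    vel = velocity_ramp(20.0, 4.0, 0.04, 50)  # stage-1-like braking on a 25 Hz grid
    print(integrate(vel, 0.04)[-1])           # distance covered over the ~2 s horizon

The node compares abs(ttc) against the per-stage stopping times (ego speed divided by the stage deceleration, plus a reaction time) to decide which warning or braking stage to engage; only status 3 triggers the full-brake profile in position_calculation.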
{
"alpha_fraction": 0.7827242016792297,
"alphanum_fraction": 0.7879108190536499,
"avg_line_length": 60.36345672607422,
"blob_id": "d85cacac783924c16a60d661d73f38f376c41fa1",
"content_id": "9f6b8a7c2567ae6ecd66409128edc57177848e6c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Makefile",
"length_bytes": 62469,
"license_type": "no_license",
"max_line_length": 281,
"num_lines": 1018,
"path": "/build/osi3_bridge/open-simulation-interface/Makefile",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "# CMAKE generated file: DO NOT EDIT!\n# Generated by \"Unix Makefiles\" Generator, CMake Version 3.10\n\n# Default target executed when no arguments are given to make.\ndefault_target: all\n\n.PHONY : default_target\n\n# Allow only one \"make -f Makefile2\" at a time, but pass parallelism.\n.NOTPARALLEL:\n\n\n#=============================================================================\n# Special targets provided by cmake.\n\n# Disable implicit rules so canonical targets will work.\n.SUFFIXES:\n\n\n# Remove some rules from gmake that .SUFFIXES does not remove.\nSUFFIXES =\n\n.SUFFIXES: .hpux_make_needs_suffix_list\n\n\n# Suppress display of executed commands.\n$(VERBOSE).SILENT:\n\n\n# A target that is always out of date.\ncmake_force:\n\n.PHONY : cmake_force\n\n#=============================================================================\n# Set environment variables for the build.\n\n# The shell in which to execute make rules.\nSHELL = /bin/sh\n\n# The CMake executable.\nCMAKE_COMMAND = /usr/bin/cmake\n\n# The command to remove a file.\nRM = /usr/bin/cmake -E remove -f\n\n# Escaping for special characters.\nEQUALS = =\n\n# The top-level source directory on which CMake was run.\nCMAKE_SOURCE_DIR = /home/student/Desktop/Redge_Thesis/vil/src\n\n# The top-level build directory on which CMake was run.\nCMAKE_BINARY_DIR = /home/student/Desktop/Redge_Thesis/vil/build\n\n#=============================================================================\n# Targets provided globally by CMake.\n\n# Special rule for the target install/strip\ninstall/strip: preinstall\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"Installing the project stripped...\"\n\t/usr/bin/cmake -DCMAKE_INSTALL_DO_STRIP=1 -P cmake_install.cmake\n.PHONY : install/strip\n\n# Special rule for the target install/strip\ninstall/strip/fast: preinstall/fast\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"Installing the project stripped...\"\n\t/usr/bin/cmake -DCMAKE_INSTALL_DO_STRIP=1 -P cmake_install.cmake\n.PHONY : install/strip/fast\n\n# Special rule for the target install\ninstall: preinstall\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"Install the project...\"\n\t/usr/bin/cmake -P cmake_install.cmake\n.PHONY : install\n\n# Special rule for the target install\ninstall/fast: preinstall/fast\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"Install the project...\"\n\t/usr/bin/cmake -P cmake_install.cmake\n.PHONY : install/fast\n\n# Special rule for the target rebuild_cache\nrebuild_cache:\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"Running CMake to regenerate build system...\"\n\t/usr/bin/cmake -H$(CMAKE_SOURCE_DIR) -B$(CMAKE_BINARY_DIR)\n.PHONY : rebuild_cache\n\n# Special rule for the target rebuild_cache\nrebuild_cache/fast: rebuild_cache\n\n.PHONY : rebuild_cache/fast\n\n# Special rule for the target list_install_components\nlist_install_components:\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"Available install components are: \\\"Unspecified\\\" \\\"dev\\\" \\\"lib\\\"\"\n.PHONY : list_install_components\n\n# Special rule for the target list_install_components\nlist_install_components/fast: list_install_components\n\n.PHONY : list_install_components/fast\n\n# Special rule for the target edit_cache\nedit_cache:\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"No interactive CMake dialog available...\"\n\t/usr/bin/cmake -E echo No\\ interactive\\ CMake\\ dialog\\ 
available.\n.PHONY : edit_cache\n\n# Special rule for the target edit_cache\nedit_cache/fast: edit_cache\n\n.PHONY : edit_cache/fast\n\n# Special rule for the target install/local\ninstall/local: preinstall\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"Installing only the local directory...\"\n\t/usr/bin/cmake -DCMAKE_INSTALL_LOCAL_ONLY=1 -P cmake_install.cmake\n.PHONY : install/local\n\n# Special rule for the target install/local\ninstall/local/fast: preinstall/fast\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"Installing only the local directory...\"\n\t/usr/bin/cmake -DCMAKE_INSTALL_LOCAL_ONLY=1 -P cmake_install.cmake\n.PHONY : install/local/fast\n\n# Special rule for the target test\ntest:\n\t@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan \"Running tests...\"\n\t/usr/bin/ctest --force-new-ctest-process $(ARGS)\n.PHONY : test\n\n# Special rule for the target test\ntest/fast: test\n\n.PHONY : test/fast\n\n# The main all target\nall: cmake_check_build_system\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(CMAKE_COMMAND) -E cmake_progress_start /home/student/Desktop/Redge_Thesis/vil/build/CMakeFiles /home/student/Desktop/Redge_Thesis/vil/build/osi3_bridge/open-simulation-interface/CMakeFiles/progress.marks\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/open-simulation-interface/all\n\t$(CMAKE_COMMAND) -E cmake_progress_start /home/student/Desktop/Redge_Thesis/vil/build/CMakeFiles 0\n.PHONY : all\n\n# The main clean target\nclean:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/open-simulation-interface/clean\n.PHONY : clean\n\n# The main clean target\nclean/fast: clean\n\n.PHONY : clean/fast\n\n# Prepare targets for installation.\npreinstall: all\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/open-simulation-interface/preinstall\n.PHONY : preinstall\n\n# Prepare targets for installation.\npreinstall/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/open-simulation-interface/preinstall\n.PHONY : preinstall/fast\n\n# clear depends\ndepend:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(CMAKE_COMMAND) -H$(CMAKE_SOURCE_DIR) -B$(CMAKE_BINARY_DIR) --check-build-system CMakeFiles/Makefile.cmake 1\n.PHONY : depend\n\n# Convenience name for target.\nosi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/rule\n.PHONY : osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/rule\n\n# Convenience name for target.\nopen_simulation_interface_obj: osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/rule\n\n.PHONY : open_simulation_interface_obj\n\n# fast build rule for target.\nopen_simulation_interface_obj/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build\n.PHONY : open_simulation_interface_obj/fast\n\n# Convenience name for target.\nosi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/rule:\n\tcd 
/home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/rule\n.PHONY : osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/rule\n\n# Convenience name for target.\nopen_simulation_interface_static: osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/rule\n\n.PHONY : open_simulation_interface_static\n\n# fast build rule for target.\nopen_simulation_interface_static/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build\n.PHONY : open_simulation_interface_static/fast\n\n# Convenience name for target.\nosi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_pic.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_pic.dir/rule\n.PHONY : osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_pic.dir/rule\n\n# Convenience name for target.\nopen_simulation_interface_pic: osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_pic.dir/rule\n\n.PHONY : open_simulation_interface_pic\n\n# fast build rule for target.\nopen_simulation_interface_pic/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_pic.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_pic.dir/build\n.PHONY : open_simulation_interface_pic/fast\n\n# Convenience name for target.\nosi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface.dir/rule:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f CMakeFiles/Makefile2 osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface.dir/rule\n.PHONY : osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface.dir/rule\n\n# Convenience name for target.\nopen_simulation_interface: osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface.dir/rule\n\n.PHONY : open_simulation_interface\n\n# fast build rule for target.\nopen_simulation_interface/fast:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface.dir/build\n.PHONY : open_simulation_interface/fast\n\nosi_common.pb.o: osi_common.pb.cc.o\n\n.PHONY : osi_common.pb.o\n\n# target to build an object file\nosi_common.pb.cc.o:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_common.pb.cc.o\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_common.pb.cc.o\n.PHONY : osi_common.pb.cc.o\n\nosi_common.pb.i: osi_common.pb.cc.i\n\n.PHONY : osi_common.pb.i\n\n# target to preprocess a source file\nosi_common.pb.cc.i:\n\tcd 
/home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_common.pb.cc.i\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_common.pb.cc.i\n.PHONY : osi_common.pb.cc.i\n\nosi_common.pb.s: osi_common.pb.cc.s\n\n.PHONY : osi_common.pb.s\n\n# target to generate assembly for a file\nosi_common.pb.cc.s:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_common.pb.cc.s\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_common.pb.cc.s\n.PHONY : osi_common.pb.cc.s\n\nosi_datarecording.pb.o: osi_datarecording.pb.cc.o\n\n.PHONY : osi_datarecording.pb.o\n\n# target to build an object file\nosi_datarecording.pb.cc.o:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_datarecording.pb.cc.o\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_datarecording.pb.cc.o\n.PHONY : osi_datarecording.pb.cc.o\n\nosi_datarecording.pb.i: osi_datarecording.pb.cc.i\n\n.PHONY : osi_datarecording.pb.i\n\n# target to preprocess a source file\nosi_datarecording.pb.cc.i:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_datarecording.pb.cc.i\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_datarecording.pb.cc.i\n.PHONY : osi_datarecording.pb.cc.i\n\nosi_datarecording.pb.s: osi_datarecording.pb.cc.s\n\n.PHONY : osi_datarecording.pb.s\n\n# target to generate assembly for a file\nosi_datarecording.pb.cc.s:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_datarecording.pb.cc.s\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_datarecording.pb.cc.s\n.PHONY : osi_datarecording.pb.cc.s\n\nosi_detectedlane.pb.o: osi_detectedlane.pb.cc.o\n\n.PHONY : osi_detectedlane.pb.o\n\n# target to build an 
object file\nosi_detectedlane.pb.cc.o:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_detectedlane.pb.cc.o\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_detectedlane.pb.cc.o\n.PHONY : osi_detectedlane.pb.cc.o\n\nosi_detectedlane.pb.i: osi_detectedlane.pb.cc.i\n\n.PHONY : osi_detectedlane.pb.i\n\n# target to preprocess a source file\nosi_detectedlane.pb.cc.i:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_detectedlane.pb.cc.i\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_detectedlane.pb.cc.i\n.PHONY : osi_detectedlane.pb.cc.i\n\nosi_detectedlane.pb.s: osi_detectedlane.pb.cc.s\n\n.PHONY : osi_detectedlane.pb.s\n\n# target to generate assembly for a file\nosi_detectedlane.pb.cc.s:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_detectedlane.pb.cc.s\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_detectedlane.pb.cc.s\n.PHONY : osi_detectedlane.pb.cc.s\n\nosi_detectedobject.pb.o: osi_detectedobject.pb.cc.o\n\n.PHONY : osi_detectedobject.pb.o\n\n# target to build an object file\nosi_detectedobject.pb.cc.o:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_detectedobject.pb.cc.o\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_detectedobject.pb.cc.o\n.PHONY : osi_detectedobject.pb.cc.o\n\nosi_detectedobject.pb.i: osi_detectedobject.pb.cc.i\n\n.PHONY : osi_detectedobject.pb.i\n\n# target to preprocess a source file\nosi_detectedobject.pb.cc.i:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_detectedobject.pb.cc.i\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_detectedobject.pb.cc.i\n.PHONY : 
osi_detectedobject.pb.cc.i\n\nosi_detectedobject.pb.s: osi_detectedobject.pb.cc.s\n\n.PHONY : osi_detectedobject.pb.s\n\n# target to generate assembly for a file\nosi_detectedobject.pb.cc.s:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_detectedobject.pb.cc.s\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_detectedobject.pb.cc.s\n.PHONY : osi_detectedobject.pb.cc.s\n\nosi_detectedoccupant.pb.o: osi_detectedoccupant.pb.cc.o\n\n.PHONY : osi_detectedoccupant.pb.o\n\n# target to build an object file\nosi_detectedoccupant.pb.cc.o:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_detectedoccupant.pb.cc.o\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_detectedoccupant.pb.cc.o\n.PHONY : osi_detectedoccupant.pb.cc.o\n\nosi_detectedoccupant.pb.i: osi_detectedoccupant.pb.cc.i\n\n.PHONY : osi_detectedoccupant.pb.i\n\n# target to preprocess a source file\nosi_detectedoccupant.pb.cc.i:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_detectedoccupant.pb.cc.i\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_detectedoccupant.pb.cc.i\n.PHONY : osi_detectedoccupant.pb.cc.i\n\nosi_detectedoccupant.pb.s: osi_detectedoccupant.pb.cc.s\n\n.PHONY : osi_detectedoccupant.pb.s\n\n# target to generate assembly for a file\nosi_detectedoccupant.pb.cc.s:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_detectedoccupant.pb.cc.s\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_detectedoccupant.pb.cc.s\n.PHONY : osi_detectedoccupant.pb.cc.s\n\nosi_detectedroadmarking.pb.o: osi_detectedroadmarking.pb.cc.o\n\n.PHONY : osi_detectedroadmarking.pb.o\n\n# target to build an object file\nosi_detectedroadmarking.pb.cc.o:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_detectedroadmarking.pb.cc.o\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f 
osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_detectedroadmarking.pb.cc.o\n.PHONY : osi_detectedroadmarking.pb.cc.o\n\nosi_detectedroadmarking.pb.i: osi_detectedroadmarking.pb.cc.i\n\n.PHONY : osi_detectedroadmarking.pb.i\n\n# target to preprocess a source file\nosi_detectedroadmarking.pb.cc.i:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_detectedroadmarking.pb.cc.i\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_detectedroadmarking.pb.cc.i\n.PHONY : osi_detectedroadmarking.pb.cc.i\n\nosi_detectedroadmarking.pb.s: osi_detectedroadmarking.pb.cc.s\n\n.PHONY : osi_detectedroadmarking.pb.s\n\n# target to generate assembly for a file\nosi_detectedroadmarking.pb.cc.s:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_detectedroadmarking.pb.cc.s\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_detectedroadmarking.pb.cc.s\n.PHONY : osi_detectedroadmarking.pb.cc.s\n\nosi_detectedtrafficlight.pb.o: osi_detectedtrafficlight.pb.cc.o\n\n.PHONY : osi_detectedtrafficlight.pb.o\n\n# target to build an object file\nosi_detectedtrafficlight.pb.cc.o:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_detectedtrafficlight.pb.cc.o\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_detectedtrafficlight.pb.cc.o\n.PHONY : osi_detectedtrafficlight.pb.cc.o\n\nosi_detectedtrafficlight.pb.i: osi_detectedtrafficlight.pb.cc.i\n\n.PHONY : osi_detectedtrafficlight.pb.i\n\n# target to preprocess a source file\nosi_detectedtrafficlight.pb.cc.i:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_detectedtrafficlight.pb.cc.i\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_detectedtrafficlight.pb.cc.i\n.PHONY : osi_detectedtrafficlight.pb.cc.i\n\nosi_detectedtrafficlight.pb.s: osi_detectedtrafficlight.pb.cc.s\n\n.PHONY : osi_detectedtrafficlight.pb.s\n\n# target to generate assembly for a 
file\nosi_detectedtrafficlight.pb.cc.s:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_detectedtrafficlight.pb.cc.s\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_detectedtrafficlight.pb.cc.s\n.PHONY : osi_detectedtrafficlight.pb.cc.s\n\nosi_detectedtrafficsign.pb.o: osi_detectedtrafficsign.pb.cc.o\n\n.PHONY : osi_detectedtrafficsign.pb.o\n\n# target to build an object file\nosi_detectedtrafficsign.pb.cc.o:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_detectedtrafficsign.pb.cc.o\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_detectedtrafficsign.pb.cc.o\n.PHONY : osi_detectedtrafficsign.pb.cc.o\n\nosi_detectedtrafficsign.pb.i: osi_detectedtrafficsign.pb.cc.i\n\n.PHONY : osi_detectedtrafficsign.pb.i\n\n# target to preprocess a source file\nosi_detectedtrafficsign.pb.cc.i:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_detectedtrafficsign.pb.cc.i\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_detectedtrafficsign.pb.cc.i\n.PHONY : osi_detectedtrafficsign.pb.cc.i\n\nosi_detectedtrafficsign.pb.s: osi_detectedtrafficsign.pb.cc.s\n\n.PHONY : osi_detectedtrafficsign.pb.s\n\n# target to generate assembly for a file\nosi_detectedtrafficsign.pb.cc.s:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_detectedtrafficsign.pb.cc.s\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_detectedtrafficsign.pb.cc.s\n.PHONY : osi_detectedtrafficsign.pb.cc.s\n\nosi_environment.pb.o: osi_environment.pb.cc.o\n\n.PHONY : osi_environment.pb.o\n\n# target to build an object file\nosi_environment.pb.cc.o:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_environment.pb.cc.o\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make 
osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_environment.pb.cc.o\n.PHONY : osi_environment.pb.cc.o\n\nosi_environment.pb.i: osi_environment.pb.cc.i\n\n.PHONY : osi_environment.pb.i\n\n# target to preprocess a source file\nosi_environment.pb.cc.i:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_environment.pb.cc.i\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_environment.pb.cc.i\n.PHONY : osi_environment.pb.cc.i\n\nosi_environment.pb.s: osi_environment.pb.cc.s\n\n.PHONY : osi_environment.pb.s\n\n# target to generate assembly for a file\nosi_environment.pb.cc.s:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_environment.pb.cc.s\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_environment.pb.cc.s\n.PHONY : osi_environment.pb.cc.s\n\nosi_featuredata.pb.o: osi_featuredata.pb.cc.o\n\n.PHONY : osi_featuredata.pb.o\n\n# target to build an object file\nosi_featuredata.pb.cc.o:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_featuredata.pb.cc.o\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_featuredata.pb.cc.o\n.PHONY : osi_featuredata.pb.cc.o\n\nosi_featuredata.pb.i: osi_featuredata.pb.cc.i\n\n.PHONY : osi_featuredata.pb.i\n\n# target to preprocess a source file\nosi_featuredata.pb.cc.i:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_featuredata.pb.cc.i\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_featuredata.pb.cc.i\n.PHONY : osi_featuredata.pb.cc.i\n\nosi_featuredata.pb.s: osi_featuredata.pb.cc.s\n\n.PHONY : osi_featuredata.pb.s\n\n# target to generate assembly for a file\nosi_featuredata.pb.cc.s:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_featuredata.pb.cc.s\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f 
osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_featuredata.pb.cc.s\n.PHONY : osi_featuredata.pb.cc.s\n\nosi_groundtruth.pb.o: osi_groundtruth.pb.cc.o\n\n.PHONY : osi_groundtruth.pb.o\n\n# target to build an object file\nosi_groundtruth.pb.cc.o:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_groundtruth.pb.cc.o\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_groundtruth.pb.cc.o\n.PHONY : osi_groundtruth.pb.cc.o\n\nosi_groundtruth.pb.i: osi_groundtruth.pb.cc.i\n\n.PHONY : osi_groundtruth.pb.i\n\n# target to preprocess a source file\nosi_groundtruth.pb.cc.i:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_groundtruth.pb.cc.i\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_groundtruth.pb.cc.i\n.PHONY : osi_groundtruth.pb.cc.i\n\nosi_groundtruth.pb.s: osi_groundtruth.pb.cc.s\n\n.PHONY : osi_groundtruth.pb.s\n\n# target to generate assembly for a file\nosi_groundtruth.pb.cc.s:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_groundtruth.pb.cc.s\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_groundtruth.pb.cc.s\n.PHONY : osi_groundtruth.pb.cc.s\n\nosi_hostvehicledata.pb.o: osi_hostvehicledata.pb.cc.o\n\n.PHONY : osi_hostvehicledata.pb.o\n\n# target to build an object file\nosi_hostvehicledata.pb.cc.o:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_hostvehicledata.pb.cc.o\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_hostvehicledata.pb.cc.o\n.PHONY : osi_hostvehicledata.pb.cc.o\n\nosi_hostvehicledata.pb.i: osi_hostvehicledata.pb.cc.i\n\n.PHONY : osi_hostvehicledata.pb.i\n\n# target to preprocess a source file\nosi_hostvehicledata.pb.cc.i:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make 
osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_hostvehicledata.pb.cc.i\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_hostvehicledata.pb.cc.i\n.PHONY : osi_hostvehicledata.pb.cc.i\n\nosi_hostvehicledata.pb.s: osi_hostvehicledata.pb.cc.s\n\n.PHONY : osi_hostvehicledata.pb.s\n\n# target to generate assembly for a file\nosi_hostvehicledata.pb.cc.s:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_hostvehicledata.pb.cc.s\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_hostvehicledata.pb.cc.s\n.PHONY : osi_hostvehicledata.pb.cc.s\n\nosi_lane.pb.o: osi_lane.pb.cc.o\n\n.PHONY : osi_lane.pb.o\n\n# target to build an object file\nosi_lane.pb.cc.o:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_lane.pb.cc.o\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_lane.pb.cc.o\n.PHONY : osi_lane.pb.cc.o\n\nosi_lane.pb.i: osi_lane.pb.cc.i\n\n.PHONY : osi_lane.pb.i\n\n# target to preprocess a source file\nosi_lane.pb.cc.i:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_lane.pb.cc.i\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_lane.pb.cc.i\n.PHONY : osi_lane.pb.cc.i\n\nosi_lane.pb.s: osi_lane.pb.cc.s\n\n.PHONY : osi_lane.pb.s\n\n# target to generate assembly for a file\nosi_lane.pb.cc.s:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_lane.pb.cc.s\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_lane.pb.cc.s\n.PHONY : osi_lane.pb.cc.s\n\nosi_object.pb.o: osi_object.pb.cc.o\n\n.PHONY : osi_object.pb.o\n\n# target to build an object file\nosi_object.pb.cc.o:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make 
osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_object.pb.cc.o\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_object.pb.cc.o\n.PHONY : osi_object.pb.cc.o\n\nosi_object.pb.i: osi_object.pb.cc.i\n\n.PHONY : osi_object.pb.i\n\n# target to preprocess a source file\nosi_object.pb.cc.i:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_object.pb.cc.i\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_object.pb.cc.i\n.PHONY : osi_object.pb.cc.i\n\nosi_object.pb.s: osi_object.pb.cc.s\n\n.PHONY : osi_object.pb.s\n\n# target to generate assembly for a file\nosi_object.pb.cc.s:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_object.pb.cc.s\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_object.pb.cc.s\n.PHONY : osi_object.pb.cc.s\n\nosi_occupant.pb.o: osi_occupant.pb.cc.o\n\n.PHONY : osi_occupant.pb.o\n\n# target to build an object file\nosi_occupant.pb.cc.o:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_occupant.pb.cc.o\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_occupant.pb.cc.o\n.PHONY : osi_occupant.pb.cc.o\n\nosi_occupant.pb.i: osi_occupant.pb.cc.i\n\n.PHONY : osi_occupant.pb.i\n\n# target to preprocess a source file\nosi_occupant.pb.cc.i:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_occupant.pb.cc.i\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_occupant.pb.cc.i\n.PHONY : osi_occupant.pb.cc.i\n\nosi_occupant.pb.s: osi_occupant.pb.cc.s\n\n.PHONY : osi_occupant.pb.s\n\n# target to generate assembly for a file\nosi_occupant.pb.cc.s:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make 
osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_occupant.pb.cc.s\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_occupant.pb.cc.s\n.PHONY : osi_occupant.pb.cc.s\n\nosi_roadmarking.pb.o: osi_roadmarking.pb.cc.o\n\n.PHONY : osi_roadmarking.pb.o\n\n# target to build an object file\nosi_roadmarking.pb.cc.o:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_roadmarking.pb.cc.o\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_roadmarking.pb.cc.o\n.PHONY : osi_roadmarking.pb.cc.o\n\nosi_roadmarking.pb.i: osi_roadmarking.pb.cc.i\n\n.PHONY : osi_roadmarking.pb.i\n\n# target to preprocess a source file\nosi_roadmarking.pb.cc.i:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_roadmarking.pb.cc.i\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_roadmarking.pb.cc.i\n.PHONY : osi_roadmarking.pb.cc.i\n\nosi_roadmarking.pb.s: osi_roadmarking.pb.cc.s\n\n.PHONY : osi_roadmarking.pb.s\n\n# target to generate assembly for a file\nosi_roadmarking.pb.cc.s:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_roadmarking.pb.cc.s\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_roadmarking.pb.cc.s\n.PHONY : osi_roadmarking.pb.cc.s\n\nosi_sensordata.pb.o: osi_sensordata.pb.cc.o\n\n.PHONY : osi_sensordata.pb.o\n\n# target to build an object file\nosi_sensordata.pb.cc.o:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_sensordata.pb.cc.o\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_sensordata.pb.cc.o\n.PHONY : osi_sensordata.pb.cc.o\n\nosi_sensordata.pb.i: osi_sensordata.pb.cc.i\n\n.PHONY : osi_sensordata.pb.i\n\n# target to preprocess a source file\nosi_sensordata.pb.cc.i:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f 
osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_sensordata.pb.cc.i\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_sensordata.pb.cc.i\n.PHONY : osi_sensordata.pb.cc.i\n\nosi_sensordata.pb.s: osi_sensordata.pb.cc.s\n\n.PHONY : osi_sensordata.pb.s\n\n# target to generate assembly for a file\nosi_sensordata.pb.cc.s:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_sensordata.pb.cc.s\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_sensordata.pb.cc.s\n.PHONY : osi_sensordata.pb.cc.s\n\nosi_sensorspecific.pb.o: osi_sensorspecific.pb.cc.o\n\n.PHONY : osi_sensorspecific.pb.o\n\n# target to build an object file\nosi_sensorspecific.pb.cc.o:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_sensorspecific.pb.cc.o\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_sensorspecific.pb.cc.o\n.PHONY : osi_sensorspecific.pb.cc.o\n\nosi_sensorspecific.pb.i: osi_sensorspecific.pb.cc.i\n\n.PHONY : osi_sensorspecific.pb.i\n\n# target to preprocess a source file\nosi_sensorspecific.pb.cc.i:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_sensorspecific.pb.cc.i\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_sensorspecific.pb.cc.i\n.PHONY : osi_sensorspecific.pb.cc.i\n\nosi_sensorspecific.pb.s: osi_sensorspecific.pb.cc.s\n\n.PHONY : osi_sensorspecific.pb.s\n\n# target to generate assembly for a file\nosi_sensorspecific.pb.cc.s:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_sensorspecific.pb.cc.s\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_sensorspecific.pb.cc.s\n.PHONY : osi_sensorspecific.pb.cc.s\n\nosi_sensorview.pb.o: osi_sensorview.pb.cc.o\n\n.PHONY : osi_sensorview.pb.o\n\n# target to build an object 
file\nosi_sensorview.pb.cc.o:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_sensorview.pb.cc.o\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_sensorview.pb.cc.o\n.PHONY : osi_sensorview.pb.cc.o\n\nosi_sensorview.pb.i: osi_sensorview.pb.cc.i\n\n.PHONY : osi_sensorview.pb.i\n\n# target to preprocess a source file\nosi_sensorview.pb.cc.i:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_sensorview.pb.cc.i\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_sensorview.pb.cc.i\n.PHONY : osi_sensorview.pb.cc.i\n\nosi_sensorview.pb.s: osi_sensorview.pb.cc.s\n\n.PHONY : osi_sensorview.pb.s\n\n# target to generate assembly for a file\nosi_sensorview.pb.cc.s:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_sensorview.pb.cc.s\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_sensorview.pb.cc.s\n.PHONY : osi_sensorview.pb.cc.s\n\nosi_sensorviewconfiguration.pb.o: osi_sensorviewconfiguration.pb.cc.o\n\n.PHONY : osi_sensorviewconfiguration.pb.o\n\n# target to build an object file\nosi_sensorviewconfiguration.pb.cc.o:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_sensorviewconfiguration.pb.cc.o\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_sensorviewconfiguration.pb.cc.o\n.PHONY : osi_sensorviewconfiguration.pb.cc.o\n\nosi_sensorviewconfiguration.pb.i: osi_sensorviewconfiguration.pb.cc.i\n\n.PHONY : osi_sensorviewconfiguration.pb.i\n\n# target to preprocess a source file\nosi_sensorviewconfiguration.pb.cc.i:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_sensorviewconfiguration.pb.cc.i\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make 
osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_sensorviewconfiguration.pb.cc.i\n.PHONY : osi_sensorviewconfiguration.pb.cc.i\n\nosi_sensorviewconfiguration.pb.s: osi_sensorviewconfiguration.pb.cc.s\n\n.PHONY : osi_sensorviewconfiguration.pb.s\n\n# target to generate assembly for a file\nosi_sensorviewconfiguration.pb.cc.s:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_sensorviewconfiguration.pb.cc.s\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_sensorviewconfiguration.pb.cc.s\n.PHONY : osi_sensorviewconfiguration.pb.cc.s\n\nosi_trafficlight.pb.o: osi_trafficlight.pb.cc.o\n\n.PHONY : osi_trafficlight.pb.o\n\n# target to build an object file\nosi_trafficlight.pb.cc.o:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_trafficlight.pb.cc.o\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_trafficlight.pb.cc.o\n.PHONY : osi_trafficlight.pb.cc.o\n\nosi_trafficlight.pb.i: osi_trafficlight.pb.cc.i\n\n.PHONY : osi_trafficlight.pb.i\n\n# target to preprocess a source file\nosi_trafficlight.pb.cc.i:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_trafficlight.pb.cc.i\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_trafficlight.pb.cc.i\n.PHONY : osi_trafficlight.pb.cc.i\n\nosi_trafficlight.pb.s: osi_trafficlight.pb.cc.s\n\n.PHONY : osi_trafficlight.pb.s\n\n# target to generate assembly for a file\nosi_trafficlight.pb.cc.s:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_trafficlight.pb.cc.s\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_trafficlight.pb.cc.s\n.PHONY : osi_trafficlight.pb.cc.s\n\nosi_trafficsign.pb.o: osi_trafficsign.pb.cc.o\n\n.PHONY : osi_trafficsign.pb.o\n\n# target to build an object file\nosi_trafficsign.pb.cc.o:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make 
osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_trafficsign.pb.cc.o\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_trafficsign.pb.cc.o\n.PHONY : osi_trafficsign.pb.cc.o\n\nosi_trafficsign.pb.i: osi_trafficsign.pb.cc.i\n\n.PHONY : osi_trafficsign.pb.i\n\n# target to preprocess a source file\nosi_trafficsign.pb.cc.i:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_trafficsign.pb.cc.i\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_trafficsign.pb.cc.i\n.PHONY : osi_trafficsign.pb.cc.i\n\nosi_trafficsign.pb.s: osi_trafficsign.pb.cc.s\n\n.PHONY : osi_trafficsign.pb.s\n\n# target to generate assembly for a file\nosi_trafficsign.pb.cc.s:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_trafficsign.pb.cc.s\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_trafficsign.pb.cc.s\n.PHONY : osi_trafficsign.pb.cc.s\n\nosi_version.pb.o: osi_version.pb.cc.o\n\n.PHONY : osi_version.pb.o\n\n# target to build an object file\nosi_version.pb.cc.o:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_version.pb.cc.o\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_version.pb.cc.o\n.PHONY : osi_version.pb.cc.o\n\nosi_version.pb.i: osi_version.pb.cc.i\n\n.PHONY : osi_version.pb.i\n\n# target to preprocess a source file\nosi_version.pb.cc.i:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_version.pb.cc.i\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_version.pb.cc.i\n.PHONY : osi_version.pb.cc.i\n\nosi_version.pb.s: osi_version.pb.cc.s\n\n.PHONY : osi_version.pb.s\n\n# target to generate assembly for a file\nosi_version.pb.cc.s:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/build.make 
osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_obj.dir/osi_version.pb.cc.s\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(MAKE) -f osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/build.make osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/osi_version.pb.cc.s\n.PHONY : osi_version.pb.cc.s\n\n# Help Target\nhelp:\n\t@echo \"The following are some of the valid targets for this Makefile:\"\n\t@echo \"... all (the default if no target is provided)\"\n\t@echo \"... clean\"\n\t@echo \"... depend\"\n\t@echo \"... install/strip\"\n\t@echo \"... install\"\n\t@echo \"... rebuild_cache\"\n\t@echo \"... open_simulation_interface_obj\"\n\t@echo \"... list_install_components\"\n\t@echo \"... open_simulation_interface_static\"\n\t@echo \"... edit_cache\"\n\t@echo \"... open_simulation_interface_pic\"\n\t@echo \"... open_simulation_interface\"\n\t@echo \"... install/local\"\n\t@echo \"... test\"\n\t@echo \"... osi_common.pb.o\"\n\t@echo \"... osi_common.pb.i\"\n\t@echo \"... osi_common.pb.s\"\n\t@echo \"... osi_datarecording.pb.o\"\n\t@echo \"... osi_datarecording.pb.i\"\n\t@echo \"... osi_datarecording.pb.s\"\n\t@echo \"... osi_detectedlane.pb.o\"\n\t@echo \"... osi_detectedlane.pb.i\"\n\t@echo \"... osi_detectedlane.pb.s\"\n\t@echo \"... osi_detectedobject.pb.o\"\n\t@echo \"... osi_detectedobject.pb.i\"\n\t@echo \"... osi_detectedobject.pb.s\"\n\t@echo \"... osi_detectedoccupant.pb.o\"\n\t@echo \"... osi_detectedoccupant.pb.i\"\n\t@echo \"... osi_detectedoccupant.pb.s\"\n\t@echo \"... osi_detectedroadmarking.pb.o\"\n\t@echo \"... osi_detectedroadmarking.pb.i\"\n\t@echo \"... osi_detectedroadmarking.pb.s\"\n\t@echo \"... osi_detectedtrafficlight.pb.o\"\n\t@echo \"... osi_detectedtrafficlight.pb.i\"\n\t@echo \"... osi_detectedtrafficlight.pb.s\"\n\t@echo \"... osi_detectedtrafficsign.pb.o\"\n\t@echo \"... osi_detectedtrafficsign.pb.i\"\n\t@echo \"... osi_detectedtrafficsign.pb.s\"\n\t@echo \"... osi_environment.pb.o\"\n\t@echo \"... osi_environment.pb.i\"\n\t@echo \"... osi_environment.pb.s\"\n\t@echo \"... osi_featuredata.pb.o\"\n\t@echo \"... osi_featuredata.pb.i\"\n\t@echo \"... osi_featuredata.pb.s\"\n\t@echo \"... osi_groundtruth.pb.o\"\n\t@echo \"... osi_groundtruth.pb.i\"\n\t@echo \"... osi_groundtruth.pb.s\"\n\t@echo \"... osi_hostvehicledata.pb.o\"\n\t@echo \"... osi_hostvehicledata.pb.i\"\n\t@echo \"... osi_hostvehicledata.pb.s\"\n\t@echo \"... osi_lane.pb.o\"\n\t@echo \"... osi_lane.pb.i\"\n\t@echo \"... osi_lane.pb.s\"\n\t@echo \"... osi_object.pb.o\"\n\t@echo \"... osi_object.pb.i\"\n\t@echo \"... osi_object.pb.s\"\n\t@echo \"... osi_occupant.pb.o\"\n\t@echo \"... osi_occupant.pb.i\"\n\t@echo \"... osi_occupant.pb.s\"\n\t@echo \"... osi_roadmarking.pb.o\"\n\t@echo \"... osi_roadmarking.pb.i\"\n\t@echo \"... osi_roadmarking.pb.s\"\n\t@echo \"... osi_sensordata.pb.o\"\n\t@echo \"... osi_sensordata.pb.i\"\n\t@echo \"... osi_sensordata.pb.s\"\n\t@echo \"... osi_sensorspecific.pb.o\"\n\t@echo \"... osi_sensorspecific.pb.i\"\n\t@echo \"... osi_sensorspecific.pb.s\"\n\t@echo \"... osi_sensorview.pb.o\"\n\t@echo \"... osi_sensorview.pb.i\"\n\t@echo \"... osi_sensorview.pb.s\"\n\t@echo \"... osi_sensorviewconfiguration.pb.o\"\n\t@echo \"... osi_sensorviewconfiguration.pb.i\"\n\t@echo \"... osi_sensorviewconfiguration.pb.s\"\n\t@echo \"... osi_trafficlight.pb.o\"\n\t@echo \"... osi_trafficlight.pb.i\"\n\t@echo \"... osi_trafficlight.pb.s\"\n\t@echo \"... osi_trafficsign.pb.o\"\n\t@echo \"... 
osi_trafficsign.pb.i\"\n\t@echo \"... osi_trafficsign.pb.s\"\n\t@echo \"... osi_version.pb.o\"\n\t@echo \"... osi_version.pb.i\"\n\t@echo \"... osi_version.pb.s\"\n.PHONY : help\n\n\n\n#=============================================================================\n# Special targets to cleanup operation of make.\n\n# Special rule to run CMake to check the build system integrity.\n# No rule that depends on this can have commands that come from listfiles\n# because they might be regenerated.\ncmake_check_build_system:\n\tcd /home/student/Desktop/Redge_Thesis/vil/build && $(CMAKE_COMMAND) -H$(CMAKE_SOURCE_DIR) -B$(CMAKE_BINARY_DIR) --check-build-system CMakeFiles/Makefile.cmake 0\n.PHONY : cmake_check_build_system\n\n"
},
{
"alpha_fraction": 0.7925000190734863,
"alphanum_fraction": 0.8037499785423279,
"avg_line_length": 60.53845977783203,
"blob_id": "825e5c7ea46434e13a14b55fff0ab59c9299156d",
"content_id": "1060ea485a3d7c2b32767eda6364800da299e677",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 800,
"license_type": "no_license",
"max_line_length": 117,
"num_lines": 13,
"path": "/build/osi3_bridge/CMakeFiles/osi3_bridge_generate_messages_lisp.dir/cmake_clean.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "file(REMOVE_RECURSE\n \"CMakeFiles/osi3_bridge_generate_messages_lisp\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/share/common-lisp/ros/osi3_bridge/msg/GroundTruthMovingObjects.lisp\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/share/common-lisp/ros/osi3_bridge/msg/MovingObject.lisp\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/share/common-lisp/ros/osi3_bridge/msg/Dimension3d.lisp\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/share/common-lisp/ros/osi3_bridge/msg/TrafficUpdateMovingObject.lisp\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/share/common-lisp/ros/osi3_bridge/msg/Orientation3d.lisp\"\n)\n\n# Per-language clean rules from dependency scanning.\nforeach(lang )\n include(CMakeFiles/osi3_bridge_generate_messages_lisp.dir/cmake_clean_${lang}.cmake OPTIONAL)\nendforeach()\n"
},
{
"alpha_fraction": 0.6198433637619019,
"alphanum_fraction": 0.6313315629959106,
"avg_line_length": 43.034481048583984,
"blob_id": "5a9317ec492b17cf7b7f5d4649c84741b3a93887",
"content_id": "919a8960e582880633e9a10ec3b3f103e283d0d6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3830,
"license_type": "no_license",
"max_line_length": 138,
"num_lines": 87,
"path": "/src/sensor_model/scripts/ClassSensor.py",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "import math\nimport numpy\n\n\nclass Sensor:\n \"\"\"\n This class is used to generate the transformation matrix for the transformation from sensor coordinated to EGO coordinates.\n The source for the sensor position and orientation is configured in config file: '/SensorFusion/ConfigFiles/sensorconfig.ini'.\n\n The config file consists of sensor information which is read and transformation matrices are created in the class.\n\n Attributes:\n\n Mathods:\n '__init__()': class constructor\n 'set_sensor_properties()': using sensor properties creates transformation matricies for all the sensors\n \"\"\"\n\n\n def __init__(self, sensor_id, sensor_x_distance, sensor_y_distance, sensor_rotation, trust_existance, trust_car,\n trust_truck, trust_motorcycle, trust_bicycle, trust_pedestrian, trust_stationary, trust_other):\n \"\"\"\n This is a class constructor where it contains all the instance attributes.\n The objects are the different sensors whose parameters are read from the config file: '/SensorFusion/ConfigFiles/sensorconfig.ini'\n\n :param sensor_id:\n :param sensor_x_distance:\n :param sensor_y_distance:\n :param sensor_rotation:\n :param trust_existance:\n :param trust_car:\n :param trust_truck:\n :param trust_motorcycle:\n :param trust_bicycle:\n :param trust_pedestrian:\n :param trust_stationary:\n :param trust_other:\n \"\"\"\n\n self.sensor_id = sensor_id\n self.sensor_x_distance = sensor_x_distance\n self.sensor_y_distance = sensor_y_distance\n self.sensor_rotation = sensor_rotation\n self.trust_existance = trust_existance\n self.trust_car = trust_car\n self.trust_truck = trust_truck\n self.trust_motorcycle = trust_motorcycle\n self.trust_bicycle = trust_bicycle\n self.trust_pedestrian = trust_pedestrian\n self.trust_stationary = trust_stationary\n self.trust_other = trust_other\n self.transformation_matrix = None\n\n\n def set_sensor_properties(self):\n \"\"\"\n This function is used to create the transformation matrix for each sensor using the distance and rotation values.\n The distances and rotation are specified to instance attributes in class constructor\n The transformation matrices are appended to the class attribute 'transformation_matrices'.\n\n :return:\n \"\"\"\n\n # Converting the string values to float to use with math library\n theta = float(self.sensor_rotation)\n delta_x = float(self.sensor_x_distance)\n delta_y = float(self.sensor_y_distance)\n\n # Calculating the mathematical values which are used to create the transformation matrix\n cosine_theta = math.cos(theta * math.pi / 180)\n sine_theta = math.sin(theta * math.pi / 180)\n minus_sine_theta = -(math.sin(theta * math.pi / 180))\n\n # The transformation matrix in the form of nested list\n transformation_matrix_list = [[cosine_theta, minus_sine_theta, 0, 0, 0, 0, delta_x],\n [sine_theta, cosine_theta, 0, 0, 0, 0, delta_y],\n [0, 0, cosine_theta, minus_sine_theta, 0, 0, 0],\n [0, 0, sine_theta, cosine_theta, 0, 0, 0],\n [0, 0, 0, 0, cosine_theta, minus_sine_theta, 0],\n [0, 0, 0, 0, sine_theta, cosine_theta, 0],\n [0, 0, 0, 0, 0, 0, 1]]\n\n # Transformation matrix list to numpy array (python matrix)\n transformation_matrix = numpy.array(transformation_matrix_list)\n\n # Set transformation matrix to the object instance variable\n self.transformation_matrix = transformation_matrix"
},
{
"alpha_fraction": 0.7431192398071289,
"alphanum_fraction": 0.7431192398071289,
"avg_line_length": 26.25,
"blob_id": "3fda76941ffefb5adc2f2b20ac8642c19f249d29",
"content_id": "5b71d451fa740d368c44ad29dfe37a41751058a4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 218,
"license_type": "no_license",
"max_line_length": 30,
"num_lines": 8,
"path": "/devel/lib/python2.7/dist-packages/object_list/msg/__init__.py",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "from ._Classification import *\nfrom ._Dimension import *\nfrom ._EgoData import *\nfrom ._Features import *\nfrom ._Geometric import *\nfrom ._ObjectList import *\nfrom ._ObjectsList import *\nfrom ._SensorProperty import *\n"
},
{
"alpha_fraction": 0.8039215803146362,
"alphanum_fraction": 0.8039215803146362,
"avg_line_length": 75.5,
"blob_id": "5f2e6f77d9e236beff744ec8954b260d672df2e7",
"content_id": "8420533982e91f273e768eda4b09bc18511a1918",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 153,
"license_type": "no_license",
"max_line_length": 114,
"num_lines": 2,
"path": "/build/vehicle_control/catkin_generated/vehicle_control-msg-extras.cmake.develspace.in",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "set(vehicle_control_MESSAGE_FILES \"/home/student/Desktop/Redge_Thesis/vil/src/vehicle_control/msg/Trajectory.msg\")\nset(vehicle_control_SERVICE_FILES \"\")\n"
},
{
"alpha_fraction": 0.7545126080513,
"alphanum_fraction": 0.7593261003494263,
"avg_line_length": 51,
"blob_id": "048b6859a8f67e9e1d705bdf6066372ef65ac85c",
"content_id": "fbefcd06dedec207dff027ab046510a04b758bef",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 831,
"license_type": "no_license",
"max_line_length": 99,
"num_lines": 16,
"path": "/build/sensor_model/catkin_generated/package.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "set(_CATKIN_CURRENT_PACKAGE \"sensor_model\")\nset(sensor_model_VERSION \"0.0.0\")\nset(sensor_model_MAINTAINER \"drechsler <[email protected]>\")\nset(sensor_model_PACKAGE_FORMAT \"2\")\nset(sensor_model_BUILD_DEPENDS \"message_generation\" \"geometry_msgs\" \"object_list\" \"roscpp\" \"rospy\")\nset(sensor_model_BUILD_EXPORT_DEPENDS \"geometry_msgs\" \"object_list\" \"roscpp\" \"rospy\")\nset(sensor_model_BUILDTOOL_DEPENDS \"catkin\")\nset(sensor_model_BUILDTOOL_EXPORT_DEPENDS )\nset(sensor_model_EXEC_DEPENDS \"geometry_msgs\" \"object_list\" \"roscpp\" \"rospy\" \"message_runtime\")\nset(sensor_model_RUN_DEPENDS \"geometry_msgs\" \"object_list\" \"roscpp\" \"rospy\" \"message_runtime\")\nset(sensor_model_TEST_DEPENDS )\nset(sensor_model_DOC_DEPENDS )\nset(sensor_model_URL_WEBSITE \"\")\nset(sensor_model_URL_BUGTRACKER \"\")\nset(sensor_model_URL_REPOSITORY \"\")\nset(sensor_model_DEPRECATED \"\")"
},
{
"alpha_fraction": 0.4477798640727997,
"alphanum_fraction": 0.47779861092567444,
"avg_line_length": 21.22222137451172,
"blob_id": "f84bfbcb8e8fa57e37be3cfdff42b015c7648756",
"content_id": "b4d0860a1ebf1d440c90c8387cd1df2980635715",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1599,
"license_type": "no_license",
"max_line_length": 62,
"num_lines": 72,
"path": "/src/sensor_model/scripts/ClassSens.py",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "import math\nimport rospy\nimport math\n\nclass Fov:\n def __init__(self):\n self.angle = 2 * math.pi # rad\n self.r = 100 # m\n\nclass Pos:\n def __init__(self):\n self.y = 0 # m\n self.x = 0 # m\n\nclass Rot:\n def __init__(self):\n self.yaw = 0 # m\n\nclass Sens:\n def __init__(self):\n self.fov = Fov()\n self.fov.angle= rospy.get_param(\"angle\") * math.pi/180\n self.fov.r = rospy.get_param(\"senrange\")\n #self.fov.angle= rospy.get_param(\"angle\")\n #self.fov.r = rospy.get_param(\"senrange\")\n self.pos = Pos()\n self.pos.x = rospy.get_param(\"posx\")\n self.pos.y = rospy.get_param(\"posy\")\n\n self.rot = Rot()\n self.rot.yaw = rospy.get_param(\"yaw\")* math.pi/180\n\nclass Vel:\n def __init__(self):\n self.y = 0 # m/s\n self.x = 0 # m/s\n\nclass Acc:\n def __init__(self):\n self.y = 0 # m/s^2\n self.x = 0 # m/s^2\n\nclass Ego:\n def __init__(self):\n self.pos = Pos()\n self.pos.x = 0\n self.pos.y = 0\n self.vel = Vel()\n self.vel.x = 0\n self.vel.y = 0\n self.acc = Acc()\n self.acc.x = 0\n self.acc.y = 0\n self.neworientation = 0\n self.oldorientation = 0\n self.oldyaw = 0\n self.newyaw = 0\n self.yawrate = 0\n self.testyaw = 0\n self.testyawrate = 0\n\nclass Features:\n def __init__(self):\n\n self.FL = 0.0\n self.FM = 0.0\n self.FR = 0.0\n self.MR = 0.0\n self.RR = 0.0\n self.RM = 0.0\n self.RL = 0.0\n self.ML = 0.0"
},
{
"alpha_fraction": 0.7807680368423462,
"alphanum_fraction": 0.781078577041626,
"avg_line_length": 37.64400100708008,
"blob_id": "cfcceb5abc07b18c527955b779c95c819d2c7280",
"content_id": "278d9982f34a1a8e929e2aec00a251d43a0130e5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 9661,
"license_type": "no_license",
"max_line_length": 188,
"num_lines": 250,
"path": "/build/vehicle_control/cmake/vehicle_control-genmsg.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "# generated from genmsg/cmake/pkg-genmsg.cmake.em\n\nmessage(STATUS \"vehicle_control: 1 messages, 0 services\")\n\nset(MSG_I_FLAGS \"-Ivehicle_control:/home/student/Desktop/Redge_Thesis/vil/src/vehicle_control/msg;-Istd_msgs:/opt/ros/melodic/share/std_msgs/cmake/../msg\")\n\n# Find all generators\nfind_package(gencpp REQUIRED)\nfind_package(geneus REQUIRED)\nfind_package(genlisp REQUIRED)\nfind_package(gennodejs REQUIRED)\nfind_package(genpy REQUIRED)\n\nadd_custom_target(vehicle_control_generate_messages ALL)\n\n# verify that message/service dependencies have not changed since configure\n\n\n\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/vehicle_control/msg/Trajectory.msg\" NAME_WE)\nadd_custom_target(_vehicle_control_generate_messages_check_deps_${_filename}\n COMMAND ${CATKIN_ENV} ${PYTHON_EXECUTABLE} ${GENMSG_CHECK_DEPS_SCRIPT} \"vehicle_control\" \"/home/student/Desktop/Redge_Thesis/vil/src/vehicle_control/msg/Trajectory.msg\" \"std_msgs/Header\"\n)\n\n#\n# langs = gencpp;geneus;genlisp;gennodejs;genpy\n#\n\n### Section generating for lang: gencpp\n### Generating Messages\n_generate_msg_cpp(vehicle_control\n \"/home/student/Desktop/Redge_Thesis/vil/src/vehicle_control/msg/Trajectory.msg\"\n \"${MSG_I_FLAGS}\"\n \"/opt/ros/melodic/share/std_msgs/cmake/../msg/Header.msg\"\n ${CATKIN_DEVEL_PREFIX}/${gencpp_INSTALL_DIR}/vehicle_control\n)\n\n### Generating Services\n\n### Generating Module File\n_generate_module_cpp(vehicle_control\n ${CATKIN_DEVEL_PREFIX}/${gencpp_INSTALL_DIR}/vehicle_control\n \"${ALL_GEN_OUTPUT_FILES_cpp}\"\n)\n\nadd_custom_target(vehicle_control_generate_messages_cpp\n DEPENDS ${ALL_GEN_OUTPUT_FILES_cpp}\n)\nadd_dependencies(vehicle_control_generate_messages vehicle_control_generate_messages_cpp)\n\n# add dependencies to all check dependencies targets\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/vehicle_control/msg/Trajectory.msg\" NAME_WE)\nadd_dependencies(vehicle_control_generate_messages_cpp _vehicle_control_generate_messages_check_deps_${_filename})\n\n# target for backward compatibility\nadd_custom_target(vehicle_control_gencpp)\nadd_dependencies(vehicle_control_gencpp vehicle_control_generate_messages_cpp)\n\n# register target for catkin_package(EXPORTED_TARGETS)\nlist(APPEND ${PROJECT_NAME}_EXPORTED_TARGETS vehicle_control_generate_messages_cpp)\n\n### Section generating for lang: geneus\n### Generating Messages\n_generate_msg_eus(vehicle_control\n \"/home/student/Desktop/Redge_Thesis/vil/src/vehicle_control/msg/Trajectory.msg\"\n \"${MSG_I_FLAGS}\"\n \"/opt/ros/melodic/share/std_msgs/cmake/../msg/Header.msg\"\n ${CATKIN_DEVEL_PREFIX}/${geneus_INSTALL_DIR}/vehicle_control\n)\n\n### Generating Services\n\n### Generating Module File\n_generate_module_eus(vehicle_control\n ${CATKIN_DEVEL_PREFIX}/${geneus_INSTALL_DIR}/vehicle_control\n \"${ALL_GEN_OUTPUT_FILES_eus}\"\n)\n\nadd_custom_target(vehicle_control_generate_messages_eus\n DEPENDS ${ALL_GEN_OUTPUT_FILES_eus}\n)\nadd_dependencies(vehicle_control_generate_messages vehicle_control_generate_messages_eus)\n\n# add dependencies to all check dependencies targets\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/vehicle_control/msg/Trajectory.msg\" NAME_WE)\nadd_dependencies(vehicle_control_generate_messages_eus _vehicle_control_generate_messages_check_deps_${_filename})\n\n# target for backward compatibility\nadd_custom_target(vehicle_control_geneus)\nadd_dependencies(vehicle_control_geneus 
vehicle_control_generate_messages_eus)\n\n# register target for catkin_package(EXPORTED_TARGETS)\nlist(APPEND ${PROJECT_NAME}_EXPORTED_TARGETS vehicle_control_generate_messages_eus)\n\n### Section generating for lang: genlisp\n### Generating Messages\n_generate_msg_lisp(vehicle_control\n \"/home/student/Desktop/Redge_Thesis/vil/src/vehicle_control/msg/Trajectory.msg\"\n \"${MSG_I_FLAGS}\"\n \"/opt/ros/melodic/share/std_msgs/cmake/../msg/Header.msg\"\n ${CATKIN_DEVEL_PREFIX}/${genlisp_INSTALL_DIR}/vehicle_control\n)\n\n### Generating Services\n\n### Generating Module File\n_generate_module_lisp(vehicle_control\n ${CATKIN_DEVEL_PREFIX}/${genlisp_INSTALL_DIR}/vehicle_control\n \"${ALL_GEN_OUTPUT_FILES_lisp}\"\n)\n\nadd_custom_target(vehicle_control_generate_messages_lisp\n DEPENDS ${ALL_GEN_OUTPUT_FILES_lisp}\n)\nadd_dependencies(vehicle_control_generate_messages vehicle_control_generate_messages_lisp)\n\n# add dependencies to all check dependencies targets\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/vehicle_control/msg/Trajectory.msg\" NAME_WE)\nadd_dependencies(vehicle_control_generate_messages_lisp _vehicle_control_generate_messages_check_deps_${_filename})\n\n# target for backward compatibility\nadd_custom_target(vehicle_control_genlisp)\nadd_dependencies(vehicle_control_genlisp vehicle_control_generate_messages_lisp)\n\n# register target for catkin_package(EXPORTED_TARGETS)\nlist(APPEND ${PROJECT_NAME}_EXPORTED_TARGETS vehicle_control_generate_messages_lisp)\n\n### Section generating for lang: gennodejs\n### Generating Messages\n_generate_msg_nodejs(vehicle_control\n \"/home/student/Desktop/Redge_Thesis/vil/src/vehicle_control/msg/Trajectory.msg\"\n \"${MSG_I_FLAGS}\"\n \"/opt/ros/melodic/share/std_msgs/cmake/../msg/Header.msg\"\n ${CATKIN_DEVEL_PREFIX}/${gennodejs_INSTALL_DIR}/vehicle_control\n)\n\n### Generating Services\n\n### Generating Module File\n_generate_module_nodejs(vehicle_control\n ${CATKIN_DEVEL_PREFIX}/${gennodejs_INSTALL_DIR}/vehicle_control\n \"${ALL_GEN_OUTPUT_FILES_nodejs}\"\n)\n\nadd_custom_target(vehicle_control_generate_messages_nodejs\n DEPENDS ${ALL_GEN_OUTPUT_FILES_nodejs}\n)\nadd_dependencies(vehicle_control_generate_messages vehicle_control_generate_messages_nodejs)\n\n# add dependencies to all check dependencies targets\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/vehicle_control/msg/Trajectory.msg\" NAME_WE)\nadd_dependencies(vehicle_control_generate_messages_nodejs _vehicle_control_generate_messages_check_deps_${_filename})\n\n# target for backward compatibility\nadd_custom_target(vehicle_control_gennodejs)\nadd_dependencies(vehicle_control_gennodejs vehicle_control_generate_messages_nodejs)\n\n# register target for catkin_package(EXPORTED_TARGETS)\nlist(APPEND ${PROJECT_NAME}_EXPORTED_TARGETS vehicle_control_generate_messages_nodejs)\n\n### Section generating for lang: genpy\n### Generating Messages\n_generate_msg_py(vehicle_control\n \"/home/student/Desktop/Redge_Thesis/vil/src/vehicle_control/msg/Trajectory.msg\"\n \"${MSG_I_FLAGS}\"\n \"/opt/ros/melodic/share/std_msgs/cmake/../msg/Header.msg\"\n ${CATKIN_DEVEL_PREFIX}/${genpy_INSTALL_DIR}/vehicle_control\n)\n\n### Generating Services\n\n### Generating Module File\n_generate_module_py(vehicle_control\n ${CATKIN_DEVEL_PREFIX}/${genpy_INSTALL_DIR}/vehicle_control\n \"${ALL_GEN_OUTPUT_FILES_py}\"\n)\n\nadd_custom_target(vehicle_control_generate_messages_py\n DEPENDS 
${ALL_GEN_OUTPUT_FILES_py}\n)\nadd_dependencies(vehicle_control_generate_messages vehicle_control_generate_messages_py)\n\n# add dependencies to all check dependencies targets\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/vehicle_control/msg/Trajectory.msg\" NAME_WE)\nadd_dependencies(vehicle_control_generate_messages_py _vehicle_control_generate_messages_check_deps_${_filename})\n\n# target for backward compatibility\nadd_custom_target(vehicle_control_genpy)\nadd_dependencies(vehicle_control_genpy vehicle_control_generate_messages_py)\n\n# register target for catkin_package(EXPORTED_TARGETS)\nlist(APPEND ${PROJECT_NAME}_EXPORTED_TARGETS vehicle_control_generate_messages_py)\n\n\n\nif(gencpp_INSTALL_DIR AND EXISTS ${CATKIN_DEVEL_PREFIX}/${gencpp_INSTALL_DIR}/vehicle_control)\n # install generated code\n install(\n DIRECTORY ${CATKIN_DEVEL_PREFIX}/${gencpp_INSTALL_DIR}/vehicle_control\n DESTINATION ${gencpp_INSTALL_DIR}\n )\nendif()\nif(TARGET std_msgs_generate_messages_cpp)\n add_dependencies(vehicle_control_generate_messages_cpp std_msgs_generate_messages_cpp)\nendif()\n\nif(geneus_INSTALL_DIR AND EXISTS ${CATKIN_DEVEL_PREFIX}/${geneus_INSTALL_DIR}/vehicle_control)\n # install generated code\n install(\n DIRECTORY ${CATKIN_DEVEL_PREFIX}/${geneus_INSTALL_DIR}/vehicle_control\n DESTINATION ${geneus_INSTALL_DIR}\n )\nendif()\nif(TARGET std_msgs_generate_messages_eus)\n add_dependencies(vehicle_control_generate_messages_eus std_msgs_generate_messages_eus)\nendif()\n\nif(genlisp_INSTALL_DIR AND EXISTS ${CATKIN_DEVEL_PREFIX}/${genlisp_INSTALL_DIR}/vehicle_control)\n # install generated code\n install(\n DIRECTORY ${CATKIN_DEVEL_PREFIX}/${genlisp_INSTALL_DIR}/vehicle_control\n DESTINATION ${genlisp_INSTALL_DIR}\n )\nendif()\nif(TARGET std_msgs_generate_messages_lisp)\n add_dependencies(vehicle_control_generate_messages_lisp std_msgs_generate_messages_lisp)\nendif()\n\nif(gennodejs_INSTALL_DIR AND EXISTS ${CATKIN_DEVEL_PREFIX}/${gennodejs_INSTALL_DIR}/vehicle_control)\n # install generated code\n install(\n DIRECTORY ${CATKIN_DEVEL_PREFIX}/${gennodejs_INSTALL_DIR}/vehicle_control\n DESTINATION ${gennodejs_INSTALL_DIR}\n )\nendif()\nif(TARGET std_msgs_generate_messages_nodejs)\n add_dependencies(vehicle_control_generate_messages_nodejs std_msgs_generate_messages_nodejs)\nendif()\n\nif(genpy_INSTALL_DIR AND EXISTS ${CATKIN_DEVEL_PREFIX}/${genpy_INSTALL_DIR}/vehicle_control)\n install(CODE \"execute_process(COMMAND \\\"/usr/bin/python2\\\" -m compileall \\\"${CATKIN_DEVEL_PREFIX}/${genpy_INSTALL_DIR}/vehicle_control\\\")\")\n # install generated code\n install(\n DIRECTORY ${CATKIN_DEVEL_PREFIX}/${genpy_INSTALL_DIR}/vehicle_control\n DESTINATION ${genpy_INSTALL_DIR}\n )\nendif()\nif(TARGET std_msgs_generate_messages_py)\n add_dependencies(vehicle_control_generate_messages_py std_msgs_generate_messages_py)\nendif()\n"
},
{
"alpha_fraction": 0.7551912665367126,
"alphanum_fraction": 0.7639344334602356,
"avg_line_length": 56.25,
"blob_id": "e1d33508d9aa183dc5db8a87208567d4df020ab0",
"content_id": "52ec6df271ea823d0c0691cf2a96b4ffb0f9240a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 915,
"license_type": "no_license",
"max_line_length": 111,
"num_lines": 16,
"path": "/build/vehicle_control/catkin_generated/package.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "set(_CATKIN_CURRENT_PACKAGE \"vehicle_control\")\nset(vehicle_control_VERSION \"0.0.0\")\nset(vehicle_control_MAINTAINER \"drechsler <[email protected]>\")\nset(vehicle_control_PACKAGE_FORMAT \"2\")\nset(vehicle_control_BUILD_DEPENDS \"message_generation\" \"object_list\" \"osi3_bridge\" \"rospy\" \"roscpp\" \"std_msgs\")\nset(vehicle_control_BUILD_EXPORT_DEPENDS \"object_list\" \"osi3_bridge\" \"rospy\" \"roscpp\" \"std_msgs\")\nset(vehicle_control_BUILDTOOL_DEPENDS \"catkin\")\nset(vehicle_control_BUILDTOOL_EXPORT_DEPENDS )\nset(vehicle_control_EXEC_DEPENDS \"object_list\" \"osi3_bridge\" \"rospy\" \"std_msgs\" \"roscpp\" \"message_runtime\")\nset(vehicle_control_RUN_DEPENDS \"object_list\" \"osi3_bridge\" \"rospy\" \"std_msgs\" \"roscpp\" \"message_runtime\")\nset(vehicle_control_TEST_DEPENDS )\nset(vehicle_control_DOC_DEPENDS )\nset(vehicle_control_URL_WEBSITE \"\")\nset(vehicle_control_URL_BUGTRACKER \"\")\nset(vehicle_control_URL_REPOSITORY \"\")\nset(vehicle_control_DEPRECATED \"\")"
},
{
"alpha_fraction": 0.736121654510498,
"alphanum_fraction": 0.7452471256256104,
"avg_line_length": 19.53125,
"blob_id": "b39a806f33b4d4884ce557cfffe4496bff8d1098",
"content_id": "52593d0dde95d0b61b0b58dba43c09c02d7418e7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 1315,
"license_type": "no_license",
"max_line_length": 67,
"num_lines": 64,
"path": "/src/osi3_bridge/CMakeLists.txt",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "cmake_minimum_required(VERSION 2.8.3)\nproject(osi3_bridge)\n\nset(CMAKE_CXX_STANDARD 11)\nset(CMAKE_CXX_STANDARD_REQUIRED ON)\n\nfind_package(Protobuf 2.6.1 REQUIRED)\n\nadd_subdirectory(open-simulation-interface)\ninclude_directories(open-simulation-interface)\n\nset(NETWORK_C_FILES\n src/udp.c\n src/osi_protocol_header.c\n)\n\nfind_package(catkin REQUIRED COMPONENTS\n geometry_msgs\n roscpp\n std_msgs\n message_generation\n roslaunch\n)\n\nadd_message_files(\n FILES\n Dimension3d.msg\n Orientation3d.msg\n MovingObject.msg\n GroundTruthMovingObjects.msg\n TrafficUpdateMovingObject.msg\n)\n\ngenerate_messages(\n DEPENDENCIES\n geometry_msgs\n std_msgs\n)\n\ncatkin_package(\n# INCLUDE_DIRS include\n CATKIN_DEPENDS message_runtime\n# CATKIN_DEPENDS geometry_msgs roscpp std_msgs\n# DEPENDS system_lib\n)\n\ninclude_directories(include ${catkin_INCLUDE_DIRS})\n\nadd_executable(${PROJECT_NAME}_publisher src/osi3_publisher.cpp \n ${NETWORK_C_FILES}) \n\nadd_dependencies(${PROJECT_NAME}_publisher\n ${${PROJECT_NAME}_EXPORTED_TARGETS}\n ${catkin_EXPORTED_TARGETS}\n)\n\n## Specify libraries to link a library or executable target against\ntarget_link_libraries(${PROJECT_NAME}_publisher\n open_simulation_interface\n ${catkin_LIBRARIES}\n ${Protobuf_LIBRARIES}\n)\n\nroslaunch_add_file_check(launch)\n\n"
},
{
"alpha_fraction": 0.6675786375999451,
"alphanum_fraction": 0.6744186282157898,
"avg_line_length": 35.5,
"blob_id": "ff02e9848f66c2d9fe799b4ebca7cd0b2ededa3b",
"content_id": "006e44443ea27b71fa635844d252d66155475d6e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 731,
"license_type": "no_license",
"max_line_length": 82,
"num_lines": 20,
"path": "/build/osi3_bridge/open-simulation-interface/open_simulation_interface-config-version.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "\nset(PACKAGE_VERSION_MAJOR \"3\")\n\nset(PACKAGE_VERSION \"3.0.1\")\nif(\"${PACKAGE_FIND_VERSION}\" VERSION_EQUAL \"0\")\n message(FATAL_ERROR \"Please select at least the major version you want to use!\")\nelse()\n # Check whether the requested PACKAGE_FIND_VERSION is compatible\n if(\"${PACKAGE_VERSION_MAJOR}\" VERSION_EQUAL \"${PACKAGE_FIND_VERSION_MAJOR}\")\n if(\"${PACKAGE_VERSION}\" VERSION_LESS \"${PACKAGE_FIND_VERSION}\")\n set(PACKAGE_VERSION_COMPATIBLE FALSE)\n else()\n set(PACKAGE_VERSION_COMPATIBLE TRUE)\n if (\"${PACKAGE_VERSION}\" VERSION_EQUAL \"${PACKAGE_FIND_VERSION}\")\n set(PACKAGE_VERSION_EXACT TRUE)\n endif()\n endif()\n else()\n set(PACKAGE_VERSION_COMPATIBLE FALSE)\n endif()\nendif()\n"
},
{
"alpha_fraction": 0.6213337182998657,
"alphanum_fraction": 0.6284838914871216,
"avg_line_length": 47.609928131103516,
"blob_id": "282e930ac4a66b6b026f2f642fba431886aaa3ec",
"content_id": "ad18353db4eb8fbdabb1755c07b654c27c692196",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 6853,
"license_type": "no_license",
"max_line_length": 206,
"num_lines": 141,
"path": "/src/aeb/launch/ClassBoss2.py",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "import numpy as np\nimport rospy\nimport math\nimport message_filters\nfrom scipy.spatial import distance\nfrom scipy.stats import chi2\nfrom scipy.linalg import sqrtm\nfrom object_list.msg import ObjectList, ObjectsList\nfrom osi3_bridge.msg import GroundTruthMovingObjects, TrafficUpdateMovingObject\nimport sys\n# import function\nfrom rotate import rotate\n\n\n\nclass boss:\n def __init__(self):\n self.egoveh = Ego() #consists of updated ego parameters\n\n self.globaltrack = ObjectsList() #Objects list of fused sensor data/ Global track\n\n self.globaltrack_predicted= ObjectsList() #global track predicted to current time\n\n self.sensorslist = [] # list of sensor objectlists\n self.sensorlist_previous = [] # list of sensor objectlists from previous update\n self.sensorslist_predicted = [] # list of sensor objectlists predicted to current time\n\n\n self.CostMatrixA = None\n self.CostMatrixB = None\n self.CostMatrix = None\n self.AssociationMatrix = None\n self.ThresholdMatrix = None\n self.AssignmentMatrix = None\n self.AssignmentList = []\n self.tolerance = 1 #tolerance for auction algorithm\n self.breakr = 0\n\n\n\n def associate(self):\n '''method that performs association of objects from sensor list to objects from global track '''\n ''' Output is an Association matrix (vector) that indicates which object of the sensor object list needs to be fused with which object from the global object list'''\n #print('assciate runs')\n self.AssignmentList = []\n global fused_track\n\n if len(self.globaltrack.obj_list) == 0:\n self.globaltrack = self.sensorslist[0]\n a=0\n for i, obj in enumerate(self.globaltrack.obj_list):\n\n obj.obj_id = int(a)\n obj.sensors_fused= [self.sensorslist[0].sensor_property.sensor_id]\n a+=1\n\n for m,sensor in enumerate(self.sensorslist):\n\n self.AssociationMatrix = np.zeros((len(sensor.obj_list), len(\n self.globaltrack.obj_list))) # intialize Association matrix (M*N) M - objs in sensor track, N - objs in global track\n self.CostMatrixA = np.zeros((len(sensor.obj_list), len(\n self.globaltrack.obj_list))) # intialize Cost matrixA (M*N) M - objs in sensor track, N - objs in global track\n self.ThresholdMatrix = np.zeros((len(sensor.obj_list), len(\n self.globaltrack.obj_list))) # intialize Threshold matrixA (M*N) M - objs in sensor track, N - objs in global track\n threshold = chi2.ppf(0.95, 6) # select threshold from chi distribution usinf 2 degrees of freedom\n\n self.CostMatrixB = np.zeros((len(sensor.obj_list), len(sensor.obj_list))) # intialize Cost matrixB (M*M) M - objs in sensor track\n np.fill_diagonal(self.CostMatrixB, threshold)\n\n for c,globalobj in enumerate(self.globaltrack.obj_list):\n for i, sensorobj in enumerate(sensor.obj_list):\n\n #[scenario,globalxf,globalyf,sensorxf,sensoryf,geometric] = feature_select(globalobj, sensorobj)\n #print('GLOBAL ID',globalobj.obj_id)\n # print('SENSOR ID',sensorobj.obj_id)\n #maha_distance,threshold = StateAssociation(scenario,globalxf,globalyf,sensorxf,sensoryf,globalobj,sensorobj,sigma2omegax,sigma2omegay,geometric)\n #global_association_state = np.array([[globalobj.geometric.x],[globalobj.geometric.y]])\n global_association_state = np.array([[globalobj.geometric.x],[globalobj.geometric.vx],[globalobj.geometric.ax],[globalobj.geometric.y],[globalobj.geometric.vy],[globalobj.geometric.ay]])\n\n #sensor_association_state = np.array([[sensorobj.geometric.x],[sensorobj.geometric.y]])\n sensor_association_state = 
np.array([[sensorobj.geometric.x],[sensorobj.geometric.vx],[sensorobj.geometric.ax],[sensorobj.geometric.y],[sensorobj.geometric.vy],[sensorobj.geometric.ay]])\n                    #globalobjcovariance = globalobj.covariance.flatten()\n                    #sensorobj.covariance = sensorobj.covariance.flatten()\n                    #print(globalobj.covariance)\n                    #print(sensorobj.covariance)\n                    #global_covariance = np.array ([[globalobj.covariance[0],globalobj.covariance[3]],[globalobj.covariance[18],globalobj.covariance[21]]])\n                    #sensor_covariance = np.array([[sensorobj.covariance[0], sensorobj.covariance[3]],\n                    #                              [sensorobj.covariance[18], sensorobj.covariance[21]]])\n                    global_covariance = np.reshape(globalobj.covariance,(6,6))\n                    sensor_covariance = np.reshape(sensorobj.covariance,(6,6))\n\n                    maha_distance,threshold = get_statistical_distance(sensor_association_state , global_association_state , sensor_covariance , global_covariance )\n                    #maha_distance = distance.euclidean((globalobj.geometric.x,globalobj.geometric.y),(sensorobj.geometric.x,sensorobj.geometric.y))\n\n\n                    # maha_distance - Mahalanobis distance\n                    if maha_distance > threshold:\n                        #print('maha',maha_distance,'thresh',threshold)\n                        maha_distance = 9999\n\n                    self.AssociationMatrix[i,c] = maha_distance\n                    self.ThresholdMatrix[i,c] = threshold\n                    self.CostMatrixB[i,i] = threshold\n            print('associate matrix')\n            #print(np.shape(self.AssociationMatrix))\n            print(self.AssociationMatrix)\n            sensorobjs,globalobjs = np.shape(self.AssociationMatrix)\n            for i in range(sensorobjs):\n                for j in range(globalobjs):\n                    if self.AssociationMatrix[i,j] == 9999:\n                        self.CostMatrixA[i, j] = 0\n                    else:\n                        self.CostMatrixA[i,j] = 2*self.ThresholdMatrix[i,j] - self.AssociationMatrix[i,j]\n\n            self.CostMatrix = np.concatenate((self.CostMatrixA, self.CostMatrixB), axis=1)\n            self.AssignmentMatrix = boss.auction_algorithm(self)[0]\n            if self.breakr == 1:\n                self.breakr = 0\n                continue\n            print('ASSIGNED',self.AssignmentMatrix)\n\n        self.globaltrack_predicted = temp_alignment(self.globaltrack, self.egoveh)\n\n\n\n\n\ndef evaluate_time(globaltrack,sensor):\n\n    time_stamp = rospy.Time.now()\n    time_elapsed = float(time_stamp.to_sec())\n\n\n    for i,obj in enumerate(globaltrack.obj_list):\n        ids = [j.obj_id for j in sensor.obj_list]\n        time = time_elapsed - float(obj.time)\n\n        if time > 0.2:\n            #print('this happened')\n            globaltrack.obj_list.remove(obj)"
},
{
"alpha_fraction": 0.6526711583137512,
"alphanum_fraction": 0.670151948928833,
"avg_line_length": 22.098114013671875,
"blob_id": "be3f8d07766de2c19f68908ce77fc4277d17920d",
"content_id": "c87d348487f32756e6d579e1ea03760769c56c2a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 6121,
"license_type": "no_license",
"max_line_length": 140,
"num_lines": 265,
"path": "/devel/include/object_list/Geometric.h",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "// Generated by gencpp from file object_list/Geometric.msg\n// DO NOT EDIT!\n\n\n#ifndef OBJECT_LIST_MESSAGE_GEOMETRIC_H\n#define OBJECT_LIST_MESSAGE_GEOMETRIC_H\n\n\n#include <string>\n#include <vector>\n#include <map>\n\n#include <ros/types.h>\n#include <ros/serialization.h>\n#include <ros/builtin_message_traits.h>\n#include <ros/message_operations.h>\n\n\nnamespace object_list\n{\ntemplate <class ContainerAllocator>\nstruct Geometric_\n{\n typedef Geometric_<ContainerAllocator> Type;\n\n Geometric_()\n : x(0.0)\n , y(0.0)\n , vx(0.0)\n , vy(0.0)\n , ax(0.0)\n , ay(0.0)\n , yaw(0.0)\n , yawrate(0.0) {\n }\n Geometric_(const ContainerAllocator& _alloc)\n : x(0.0)\n , y(0.0)\n , vx(0.0)\n , vy(0.0)\n , ax(0.0)\n , ay(0.0)\n , yaw(0.0)\n , yawrate(0.0) {\n (void)_alloc;\n }\n\n\n\n typedef double _x_type;\n _x_type x;\n\n typedef double _y_type;\n _y_type y;\n\n typedef double _vx_type;\n _vx_type vx;\n\n typedef double _vy_type;\n _vy_type vy;\n\n typedef double _ax_type;\n _ax_type ax;\n\n typedef double _ay_type;\n _ay_type ay;\n\n typedef double _yaw_type;\n _yaw_type yaw;\n\n typedef double _yawrate_type;\n _yawrate_type yawrate;\n\n\n\n\n\n typedef boost::shared_ptr< ::object_list::Geometric_<ContainerAllocator> > Ptr;\n typedef boost::shared_ptr< ::object_list::Geometric_<ContainerAllocator> const> ConstPtr;\n\n}; // struct Geometric_\n\ntypedef ::object_list::Geometric_<std::allocator<void> > Geometric;\n\ntypedef boost::shared_ptr< ::object_list::Geometric > GeometricPtr;\ntypedef boost::shared_ptr< ::object_list::Geometric const> GeometricConstPtr;\n\n// constants requiring out of line definition\n\n\n\ntemplate<typename ContainerAllocator>\nstd::ostream& operator<<(std::ostream& s, const ::object_list::Geometric_<ContainerAllocator> & v)\n{\nros::message_operations::Printer< ::object_list::Geometric_<ContainerAllocator> >::stream(s, \"\", v);\nreturn s;\n}\n\n\ntemplate<typename ContainerAllocator1, typename ContainerAllocator2>\nbool operator==(const ::object_list::Geometric_<ContainerAllocator1> & lhs, const ::object_list::Geometric_<ContainerAllocator2> & rhs)\n{\n return lhs.x == rhs.x &&\n lhs.y == rhs.y &&\n lhs.vx == rhs.vx &&\n lhs.vy == rhs.vy &&\n lhs.ax == rhs.ax &&\n lhs.ay == rhs.ay &&\n lhs.yaw == rhs.yaw &&\n lhs.yawrate == rhs.yawrate;\n}\n\ntemplate<typename ContainerAllocator1, typename ContainerAllocator2>\nbool operator!=(const ::object_list::Geometric_<ContainerAllocator1> & lhs, const ::object_list::Geometric_<ContainerAllocator2> & rhs)\n{\n return !(lhs == rhs);\n}\n\n\n} // namespace object_list\n\nnamespace ros\n{\nnamespace message_traits\n{\n\n\n\n\n\ntemplate <class ContainerAllocator>\nstruct IsFixedSize< ::object_list::Geometric_<ContainerAllocator> >\n : TrueType\n { };\n\ntemplate <class ContainerAllocator>\nstruct IsFixedSize< ::object_list::Geometric_<ContainerAllocator> const>\n : TrueType\n { };\n\ntemplate <class ContainerAllocator>\nstruct IsMessage< ::object_list::Geometric_<ContainerAllocator> >\n : TrueType\n { };\n\ntemplate <class ContainerAllocator>\nstruct IsMessage< ::object_list::Geometric_<ContainerAllocator> const>\n : TrueType\n { };\n\ntemplate <class ContainerAllocator>\nstruct HasHeader< ::object_list::Geometric_<ContainerAllocator> >\n : FalseType\n { };\n\ntemplate <class ContainerAllocator>\nstruct HasHeader< ::object_list::Geometric_<ContainerAllocator> const>\n : FalseType\n { };\n\n\ntemplate<class ContainerAllocator>\nstruct MD5Sum< ::object_list::Geometric_<ContainerAllocator> >\n{\n static const char* 
value()\n {\n return \"74a252effe5544c6405c61fc1ab21633\";\n }\n\n static const char* value(const ::object_list::Geometric_<ContainerAllocator>&) { return value(); }\n static const uint64_t static_value1 = 0x74a252effe5544c6ULL;\n static const uint64_t static_value2 = 0x405c61fc1ab21633ULL;\n};\n\ntemplate<class ContainerAllocator>\nstruct DataType< ::object_list::Geometric_<ContainerAllocator> >\n{\n static const char* value()\n {\n return \"object_list/Geometric\";\n }\n\n static const char* value(const ::object_list::Geometric_<ContainerAllocator>&) { return value(); }\n};\n\ntemplate<class ContainerAllocator>\nstruct Definition< ::object_list::Geometric_<ContainerAllocator> >\n{\n static const char* value()\n {\n return \"float64 x\\n\"\n\"float64 y\\n\"\n\"float64 vx\\n\"\n\"float64 vy\\n\"\n\"float64 ax\\n\"\n\"float64 ay\\n\"\n\"float64 yaw\\n\"\n\"float64 yawrate\\n\"\n;\n }\n\n static const char* value(const ::object_list::Geometric_<ContainerAllocator>&) { return value(); }\n};\n\n} // namespace message_traits\n} // namespace ros\n\nnamespace ros\n{\nnamespace serialization\n{\n\n template<class ContainerAllocator> struct Serializer< ::object_list::Geometric_<ContainerAllocator> >\n {\n template<typename Stream, typename T> inline static void allInOne(Stream& stream, T m)\n {\n stream.next(m.x);\n stream.next(m.y);\n stream.next(m.vx);\n stream.next(m.vy);\n stream.next(m.ax);\n stream.next(m.ay);\n stream.next(m.yaw);\n stream.next(m.yawrate);\n }\n\n ROS_DECLARE_ALLINONE_SERIALIZER\n }; // struct Geometric_\n\n} // namespace serialization\n} // namespace ros\n\nnamespace ros\n{\nnamespace message_operations\n{\n\ntemplate<class ContainerAllocator>\nstruct Printer< ::object_list::Geometric_<ContainerAllocator> >\n{\n template<typename Stream> static void stream(Stream& s, const std::string& indent, const ::object_list::Geometric_<ContainerAllocator>& v)\n {\n s << indent << \"x: \";\n Printer<double>::stream(s, indent + \" \", v.x);\n s << indent << \"y: \";\n Printer<double>::stream(s, indent + \" \", v.y);\n s << indent << \"vx: \";\n Printer<double>::stream(s, indent + \" \", v.vx);\n s << indent << \"vy: \";\n Printer<double>::stream(s, indent + \" \", v.vy);\n s << indent << \"ax: \";\n Printer<double>::stream(s, indent + \" \", v.ax);\n s << indent << \"ay: \";\n Printer<double>::stream(s, indent + \" \", v.ay);\n s << indent << \"yaw: \";\n Printer<double>::stream(s, indent + \" \", v.yaw);\n s << indent << \"yawrate: \";\n Printer<double>::stream(s, indent + \" \", v.yawrate);\n }\n};\n\n} // namespace message_operations\n} // namespace ros\n\n#endif // OBJECT_LIST_MESSAGE_GEOMETRIC_H\n"
},
{
"alpha_fraction": 0.7688171863555908,
"alphanum_fraction": 0.7849462628364563,
"avg_line_length": 45.5,
"blob_id": "a003c03eda4b02911e5febad882d50964825bfc5",
"content_id": "758e2f360e335790cc70ce326866e57a8a882966",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 558,
"license_type": "no_license",
"max_line_length": 90,
"num_lines": 12,
"path": "/build/osi3_bridge/CMakeFiles/osi3_bridge_publisher.dir/cmake_clean.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "file(REMOVE_RECURSE\n \"CMakeFiles/osi3_bridge_publisher.dir/src/osi3_publisher.cpp.o\"\n \"CMakeFiles/osi3_bridge_publisher.dir/src/udp.c.o\"\n \"CMakeFiles/osi3_bridge_publisher.dir/src/osi_protocol_header.c.o\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/lib/osi3_bridge/osi3_bridge_publisher.pdb\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/lib/osi3_bridge/osi3_bridge_publisher\"\n)\n\n# Per-language clean rules from dependency scanning.\nforeach(lang C CXX)\n include(CMakeFiles/osi3_bridge_publisher.dir/cmake_clean_${lang}.cmake OPTIONAL)\nendforeach()\n"
},
{
"alpha_fraction": 0.7673733830451965,
"alphanum_fraction": 0.7673733830451965,
"avg_line_length": 42.53845977783203,
"blob_id": "b65b5a23d7d6449d02ac96fa447753596c40a859",
"content_id": "81029ac96e589d5bcb09d6da5289988236b9383d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 3396,
"license_type": "no_license",
"max_line_length": 93,
"num_lines": 78,
"path": "/build/osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface_static.dir/cmake_clean.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "file(REMOVE_RECURSE\n \"osi_version.pb.cc\"\n \"osi_version.pb.h\"\n \"osi_common.pb.cc\"\n \"osi_common.pb.h\"\n \"osi_datarecording.pb.cc\"\n \"osi_datarecording.pb.h\"\n \"osi_detectedtrafficsign.pb.cc\"\n \"osi_detectedtrafficsign.pb.h\"\n \"osi_detectedtrafficlight.pb.cc\"\n \"osi_detectedtrafficlight.pb.h\"\n \"osi_detectedroadmarking.pb.cc\"\n \"osi_detectedroadmarking.pb.h\"\n \"osi_detectedlane.pb.cc\"\n \"osi_detectedlane.pb.h\"\n \"osi_detectedobject.pb.cc\"\n \"osi_detectedobject.pb.h\"\n \"osi_detectedoccupant.pb.cc\"\n \"osi_detectedoccupant.pb.h\"\n \"osi_environment.pb.cc\"\n \"osi_environment.pb.h\"\n \"osi_groundtruth.pb.cc\"\n \"osi_groundtruth.pb.h\"\n \"osi_hostvehicledata.pb.cc\"\n \"osi_hostvehicledata.pb.h\"\n \"osi_trafficsign.pb.cc\"\n \"osi_trafficsign.pb.h\"\n \"osi_trafficlight.pb.cc\"\n \"osi_trafficlight.pb.h\"\n \"osi_roadmarking.pb.cc\"\n \"osi_roadmarking.pb.h\"\n \"osi_lane.pb.cc\"\n \"osi_lane.pb.h\"\n \"osi_featuredata.pb.cc\"\n \"osi_featuredata.pb.h\"\n \"osi_object.pb.cc\"\n \"osi_object.pb.h\"\n \"osi_occupant.pb.cc\"\n \"osi_occupant.pb.h\"\n \"osi_sensordata.pb.cc\"\n \"osi_sensordata.pb.h\"\n \"osi_sensorviewconfiguration.pb.cc\"\n \"osi_sensorviewconfiguration.pb.h\"\n \"osi_sensorspecific.pb.cc\"\n \"osi_sensorspecific.pb.h\"\n \"osi_sensorview.pb.cc\"\n \"osi_sensorview.pb.h\"\n \"CMakeFiles/open_simulation_interface_static.dir/osi_version.pb.cc.o\"\n \"CMakeFiles/open_simulation_interface_static.dir/osi_common.pb.cc.o\"\n \"CMakeFiles/open_simulation_interface_static.dir/osi_datarecording.pb.cc.o\"\n \"CMakeFiles/open_simulation_interface_static.dir/osi_detectedtrafficsign.pb.cc.o\"\n \"CMakeFiles/open_simulation_interface_static.dir/osi_detectedtrafficlight.pb.cc.o\"\n \"CMakeFiles/open_simulation_interface_static.dir/osi_detectedroadmarking.pb.cc.o\"\n \"CMakeFiles/open_simulation_interface_static.dir/osi_detectedlane.pb.cc.o\"\n \"CMakeFiles/open_simulation_interface_static.dir/osi_detectedobject.pb.cc.o\"\n \"CMakeFiles/open_simulation_interface_static.dir/osi_detectedoccupant.pb.cc.o\"\n \"CMakeFiles/open_simulation_interface_static.dir/osi_environment.pb.cc.o\"\n \"CMakeFiles/open_simulation_interface_static.dir/osi_groundtruth.pb.cc.o\"\n \"CMakeFiles/open_simulation_interface_static.dir/osi_hostvehicledata.pb.cc.o\"\n \"CMakeFiles/open_simulation_interface_static.dir/osi_trafficsign.pb.cc.o\"\n \"CMakeFiles/open_simulation_interface_static.dir/osi_trafficlight.pb.cc.o\"\n \"CMakeFiles/open_simulation_interface_static.dir/osi_roadmarking.pb.cc.o\"\n \"CMakeFiles/open_simulation_interface_static.dir/osi_lane.pb.cc.o\"\n \"CMakeFiles/open_simulation_interface_static.dir/osi_featuredata.pb.cc.o\"\n \"CMakeFiles/open_simulation_interface_static.dir/osi_object.pb.cc.o\"\n \"CMakeFiles/open_simulation_interface_static.dir/osi_occupant.pb.cc.o\"\n \"CMakeFiles/open_simulation_interface_static.dir/osi_sensordata.pb.cc.o\"\n \"CMakeFiles/open_simulation_interface_static.dir/osi_sensorviewconfiguration.pb.cc.o\"\n \"CMakeFiles/open_simulation_interface_static.dir/osi_sensorspecific.pb.cc.o\"\n \"CMakeFiles/open_simulation_interface_static.dir/osi_sensorview.pb.cc.o\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/lib/libopen_simulation_interface_static.pdb\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/lib/libopen_simulation_interface_static.a\"\n)\n\n# Per-language clean rules from dependency scanning.\nforeach(lang CXX)\n include(CMakeFiles/open_simulation_interface_static.dir/cmake_clean_${lang}.cmake OPTIONAL)\nendforeach()\n"
},
{
"alpha_fraction": 0.6926714181900024,
"alphanum_fraction": 0.6926714181900024,
"avg_line_length": 83.5999984741211,
"blob_id": "ca4558315349abd855c7fac365835e503ab4bc05",
"content_id": "d51057cae4a26faacfe2a3bcac8aad1ce8abe9b2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 423,
"license_type": "no_license",
"max_line_length": 177,
"num_lines": 5,
"path": "/build/catkin_generated/order_packages.py",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "# generated from catkin/cmake/template/order_packages.context.py.in\nsource_root_dir = '/home/student/Desktop/Redge_Thesis/vil/src'\nwhitelisted_packages = ''.split(';') if '' != '' else []\nblacklisted_packages = ''.split(';') if '' != '' else []\nunderlay_workspaces = '/home/student/Desktop/Redge_Thesis/vil/devel;/opt/ros/melodic'.split(';') if '/home/student/Desktop/Redge_Thesis/vil/devel;/opt/ros/melodic' != '' else []\n"
},
{
"alpha_fraction": 0.5022684931755066,
"alphanum_fraction": 0.5769949555397034,
"avg_line_length": 38.44210433959961,
"blob_id": "d022f8b03dd211b5c96f0b1c97bde89b1a325105",
"content_id": "8b4c39de24d6a37f371e7e80f54f65599e5399fe",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3747,
"license_type": "no_license",
"max_line_length": 140,
"num_lines": 95,
"path": "/src/sensor_model/scripts/ClassKF.py",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "import math\nimport rospy\nimport numpy as np\n\nclass KF:\n\n\tdef __init__(self):\n\t\tself.id = 0\n\t\t### for camera first\n\t\tif rospy.get_param(\"sensortype\") == 1:\n\t\t\tself.sigma2omegax = rospy.get_param(\"posxerr\") # variance noise posx\n\t\t\tself.sigma2omegay = rospy.get_param(\"posyerr\") #variance noise posy\n\t\t\tself.c_m = np.array([[self.sigma2omegax, 0], [0, self.sigma2omegay]])\n\t\t\tself.sigma2etax = rospy.get_param(\"processnoise\") # system noise obj (x-jerk)\n\t\t\tself.sigma2egoetax = rospy.get_param(\"processnoise\") # system noise ego (x-jerk)\n\t\t\tself.sigma2etay = rospy.get_param(\"processnoise\") # system noise obj(y-jerk)\n\t\t\tself.sigma2egoetay = rospy.get_param(\"processnoise\") # system noise obj (y-jerk)\n\t\t\tself.c_s = np.array([[self.sigma2etax, 0, 0, 0],[0, self.sigma2egoetax, 0, 0],[0, 0, self.sigma2egoetay, 0] ,[0, 0, self.sigma2etay, 0]])\n\t\t\tself.c = np.array([[1, 0, 0, 0, 0, 0], [0, 0, 0, 1, 0, 0]])\n\t\t\tself.a = np.zeros((6, 6))\n\t\t\tself.b = np.zeros((6, 4))\n\t\t\tself.xnn = np.array([[0], [0], [0], [0], [0], [0]]) # colomn vector\n\t\t\tself.d = np.array([[ 0, 0, 0, 0], [0, 0, 0, 0]])\n\t\t\tself.xn_nm1 = np.array([[0], [0], [0], [0], [0], [0]]) # column vector\n\n\t\t\tself.pnn = np.zeros((6, 6))\n\t\t\tnp.fill_diagonal(self.pnn, 1)\n\t\t\tself.pn_nm1 = np.zeros((6, 6))\n\t\t\tself.gamma_n = np.zeros((2, 1))\n\t\t\tself.s_n = np.zeros((2, 2))\n\t\t\tself.k_n = np.zeros((6, 2))\n\t\t\tself.yn = np.array([[0], [0]]) # colomn vector\n\t\t\t#self.b = np.array([[0], [0], [0], [0], [0], [0]])\n\t\t\tself.u = np.array([[0], [0],[0],[0]])\n\n\t\t\tself.g = np.array([[0,0,0,0], [0,0,0,0], [0,0,0,0], [0,0,0,0], [0,0,0,0], [0,0,0,0]])\n\n\t\telif rospy.get_param(\"sensortype\") == 0:\n\t\t\tself.sigma2omegax = rospy.get_param(\"rangerr\")\n\t\t\tself.sigma2omegay = rospy.get_param(\"rangerr\") # variance noise posy\n\t\t\tself.sigma2omegavx = rospy.get_param(\"velerr\")\n\t\t\tself.sigma2omegavy = rospy.get_param(\"velerr\")\n\n\t\t\tself.c_m = np.array([[self.sigma2omegax, 0,0,0], [0, self.sigma2omegay,0,0],[0,0,self.sigma2omegavx,0],[0,0,0,self.sigma2omegavy]])\n\t\t\tself.sigma2etax = rospy.get_param(\"processnoise\") # system noise (jerk)\n\t\t\tself.sigma2egoetax = rospy.get_param(\"processnoise\") # system noise (jerk)\n\t\t\tself.sigma2etay = rospy.get_param(\"processnoise\") # system noise (jerk)\n\t\t\tself.sigma2egoetay = rospy.get_param(\"processnoise\") # system noise (jerk)\n\t\t\tself.c_s = np.array([[self.sigma2etax, 0, 0, 0], [0, self.sigma2egoetax, 0, 0],\n\t\t\t\t\t\t\t\t [0, 0, self.sigma2egoetay, 0],[0, 0, self.sigma2etay, 0]])\n\t\t\tself.c = np.array([[1, 0, 0, 0, 0, 0], [0, 1, 0, 0, 0, 0], [0,0,0,1,0,0],[0,0,0,0,1,0]])\n\t\t\tself.d = np.array([[ 0, 0, 0, 0],[ -1, 0, 0, 0],[ 0, 0, 0, 0] ,[0, 0, -1, 0]])\n\t\t\tself.a = np.zeros((6, 6))\n\t\t\tself.b = np.zeros((6, 4))\n\t\t\tself.xnn = np.array([[0], [0], [0], [0], [0], [0]]) # colomn vector\n\n\t\t\tself.xn_nm1 = np.array([[0], [0], [0], [0], [0], [0]]) # column vector\n\n\t\t\tself.pnn = np.zeros((6, 6))\n\t\t\tnp.fill_diagonal(self.pnn, 1)\n\t\t\tself.pn_nm1 = np.zeros((6, 6))\n\t\t\tself.gamma_n = np.zeros((2, 1))\n\t\t\tself.s_n = np.zeros((2, 2))\n\t\t\tself.k_n = np.zeros((6, 2))\n\t\t\tself.yn = np.array([[0], [0],[0],[0]]) # colomn vector\n\t\t\tself.g = np.array([[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]])\n\t\t\t#self.b = np.array([[0], [0], [0], [0], [0], [0]])\n\t\t\tself.u = np.array([[0], 
[0],[0],[0]])\n\n\n\t\tself.newtime= 0\n\t\tself.oldtime= 0\n\t\tself.track= 0\n\t\tself.track2 = 0\n\nclass rotatedata:\n\tdef __init__(self):\n\t\tself.posx = 0\n\t\tself.posy = 0\n\t\tself.velx = 0\n\t\tself.vely = 0\n\t\tself.accx = 0\n\t\tself.accy = 0\n\n\nclass Prob():\n\tdef __init__(self):\n\t\tself.persistance = 1\n\t\tself.existance = 1\n\t\tself.nonexistance = 1\n\t\tself.trust = 1\n\t\tself.previous_persistance = 1\n\t\tself.previousexistance = 1\n\t\tself.previousnonexistance =1\n\t\tself.time_last_update = 0\n"
},
{
"alpha_fraction": 0.6118612885475159,
"alphanum_fraction": 0.6315693259239197,
"avg_line_length": 33.03726577758789,
"blob_id": "4fbe5755572f5f66f4f437690845195b7c5f0bf3",
"content_id": "d43c81da52b7dbe2861ce72814e1ae6c112059a2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 10960,
"license_type": "no_license",
"max_line_length": 213,
"num_lines": 322,
"path": "/src/fusion/src/Debug_markers_obj_list.py",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\nimport roslib; roslib.load_manifest('visualization_marker_tutorials')\nimport rospy\nfrom std_msgs.msg import String\nfrom object_list.msg import ObjectsList\nfrom object_list.msg import ObjectList\n\nfrom geometry_msgs.msg import Quaternion\nfrom visualization_msgs.msg import Marker\nfrom visualization_msgs.msg import MarkerArray\nimport rospy\nimport math\nimport tf\nimport message_filters\n\nOFFSET_CAR_X = -2.3 # distance to front\ncar_ego_x = 0\ncar_ego_y = 0\ndata_alt = 0\ntopic = 'visualization_marker_array'\npublisher = rospy.Publisher(topic, MarkerArray,queue_size=10)\nrospy.init_node('Objekt_Visualization')\nbr = tf.TransformBroadcaster()\n\n#define each color to the specific class, input value ist the name(string) from the classifciation\ndef evaluateColor(Class): \n class_List = {\n\t\"car\": [1,0,0,1],\n\t\"truck\":[0,1,0,1],\n\t\"motorcycle\": [0,0,1,1],\n\t\"bicycle\": [1,1,0,1],\n\t\"pedestrian\": [1,0,1,3],\n\t\"stacionary\": [0,1,1,3],\n\t\"other\":[1,1,1,2] \n }\n return class_List.get(Class)\n \n \ndef evaluateClassification(objectClass):\n \n temp_prop = 0\n result = \"\"\n #tmp includes all Attributes of the message Classification\n tmp = [a for a in dir(objectClass) if not a.startswith('__') and not a.startswith('_') and not callable(getattr(objectClass,a))]\n \n\n for i in range(len(tmp)):\n if(getattr(objectClass, tmp[i]) > temp_prop ):\n temp_prop = getattr(objectClass, tmp[i])\n result = tmp[i]\n return (result) # return value is the name of the class whith the highest probability\n \n \n\n\ndef evaluateObject(objectData):\n marker = Marker()\n #r, g, b, typ = evaluateColor(evaluateClassification(objectData.classification))\n marker.header.frame_id = \"/ego\"\n \n marker.type = 1\n \n marker.action = marker.ADD\n marker.scale.x = objectData.dimension.length\n marker.scale.y = objectData.dimension.width\n \n marker.scale.z = 2.0\n marker.color.a = 1.0\n \n marker.color.r = 1.0\n marker.color.g = 0.0\n marker.color.b = 0.0\n\n marker.pose.orientation = Quaternion(*tf.transformations.quaternion_from_euler(0,0,objectData.geometric.yaw))\n #marker.pose.orientation.w = 1\n #print(marker.pose.orientation)\n marker.pose.position.x = objectData.geometric.x\n marker.pose.position.y = objectData.geometric.y\n marker.pose.position.z = 1.0\n marker.lifetime = rospy.Duration(0.1)\n return marker\n\ndef evaluateObjectID(objectData):\n marker = Marker()\n\n marker.header.frame_id = \"/world\"\n marker.id = i\n marker.type = typ\n\n marker.action = marker.ADD\n marker.scale.x = objectData.dimension.lenght\n marker.scale.y = objectData.dimension.width\n\n marker.scale.z = 2.0\n marker.color.a = 1.0\n\n marker.color.r = 1.0\n marker.color.g = 0.0\n marker.color.b = 0.0\n\n marker.pose.orientation.w = 1.0\n marker.pose.position.x = data.position.x\n marker.pose.position.y = data.position.y\n marker.pose.position.z = 1.0\n marker.lifetime = rospy.Duration(0.1)\n marker.text = \"ID:\" + str(objectData.obj_id)\n return marker\n\n\ndef callback_simulation(data):\n\n global car_ego_x\n global car_ego_y\n target_marker_publisher = rospy.Publisher('/target_marker', Marker, queue_size=10)\n \n\n markerArray = MarkerArray()\n\n #print('marker')\n for i,obj in enumerate(data.obj_list):\n #markerObj = evaluateObject(data.obj_list[i])\n #markerID = evaluateObjectID(data.obj_list[i])\n #markerID.id = i*2+1\n #markerObj.publishCube()\n #markerArray.markers.append(markerObj)\n #markerArray.markers.append(markerID)\n marker = Marker()\n marker.header.frame_id = 
\"/map\"\n marker.id = obj.obj_id\n marker.lifetime = rospy.Duration(3)\n marker.type = marker.CUBE\n marker.action = marker.ADD\n marker.scale.x = obj.dimension.length\n marker.scale.y = obj.dimension.width\n marker.scale.z = 3\n marker.color.a = 1.0\n marker.color.r = 1.0\n marker.color.g = 1.0\n marker.color.b = 0.0\n \"\"\"if color == \"red\":\n marker.color.r = 1.0\n elif color == \"green\":\n marker.color.g = 1.0\n elif color == \"blue\":target_marker_publisher\n marker.color.b = 1.0\n elif color == \"yellow\":\n marker.color.r = 1.0\n marker.color.g = 1.0\n elif color == \"majenta\":\n marker.color.g = 1.0\n marker.color.b = 1.0\n elif color == \"cyan\":\n marker.color.r = 1.0\n marker.color.b = 1.0\n \"\"\"\n marker.pose.orientation.w = 1.0\n #pt = behaviour_planner.projector.forward(\n # GPSPoint(float(rospy.get_param(\"late\")), float(rospy.get_param(\"lone\"))))\n marker.pose.position.x = obj.geometric.x\n marker.pose.position.y = obj.geometric.y\n marker.pose.position.z = 0\n target_marker_publisher.publish(marker)\n #print(marker)\n \n #rospy.loginfo(markerArray)\n #publisher.publishCube(markerArray)\n\n\ndef callback_simulation2(data):\n global car_ego_x\n global car_ego_y\n target_marker_publisher2 = rospy.Publisher('/target_marker2', Marker, queue_size=10)\n\n markerArray = MarkerArray()\n\n # print('marker')\n for i, obj in enumerate(data.obj_list):\n # markerObj = evaluateObject(data.obj_list[i])\n # markerID = evaluateObjectID(data.obj_list[i])\n # markerID.id = i*2+1\n # markerObj.publishCube()\n # markerArray.markers.append(markerObj)\n # markerArray.markers.append(markerID)\n marker = Marker()\n marker.header.frame_id = \"/map\"\n marker.id = obj.obj_id\n marker.lifetime = rospy.Duration(3)\n marker.type = marker.CUBE\n marker.action = marker.ADD\n marker.scale.x = obj.dimension.length\n marker.scale.y = obj.dimension.width\n marker.scale.z = 3\n marker.color.a = 1.0\n marker.color.r = 1.0\n marker.color.g = 0.0\n marker.color.b = 0.0\n \"\"\"if color == \"red\":\n marker.color.r = 1.0\n elif color == \"green\":\n marker.color.g = 1.0\n elif color == \"blue\":target_marker_publisher\n marker.color.b = 1.0\n elif color == \"yellow\":\n marker.color.r = 1.0\n marker.color.g = 1.0\n elif color == \"majenta\":\n marker.color.g = 1.0\n marker.color.b = 1.0\n elif color == \"cyan\":\n marker.color.r = 1.0\n marker.color.b = 1.0\n \"\"\"\n marker.pose.orientation.w = 1.0\n # pt = behaviour_planner.projector.forward(\n # GPSPoint(float(rospy.get_param(\"late\")), float(rospy.get_param(\"lone\"))))\n marker.pose.position.x = obj.geometric.x\n marker.pose.position.y = obj.geometric.y\n marker.pose.position.z = 0\n target_marker_publisher2.publish(marker)\n # print(marker)\n\n # rospy.loginfo(markerArray)\n # publisher.publishCube(markerArray)\ndef callback_simulation3(data):\n global car_ego_x\n global car_ego_y\n target_marker_publisher2 = rospy.Publisher('/target_marker3', Marker, queue_size=10)\n\n markerArray = MarkerArray()\n\n # print('marker')\n for i, obj in enumerate(data.obj_list):\n # markerObj = evaluateObject(data.obj_list[i])\n # markerID = evaluateObjectID(data.obj_list[i])\n # markerID.id = i*2+1\n # markerObj.publishCube()\n # markerArray.markers.append(markerObj)\n # markerArray.markers.append(markerID)\n marker = Marker()\n marker.header.frame_id = \"/map\"\n marker.id = obj.obj_id\n marker.lifetime = rospy.Duration(3)\n marker.type = marker.CUBE\n marker.action = marker.ADD\n marker.scale.x = obj.dimension.length\n marker.scale.y = obj.dimension.width\n 
marker.scale.z = 3\n marker.color.a = 1.0\n marker.color.r = 0.0\n marker.color.g = 0.0\n marker.color.b = 1.0\n \"\"\"if color == \"red\":\n marker.color.r = 1.0\n elif color == \"green\":\n marker.color.g = 1.0\n elif color == \"blue\":target_marker_publisher\n marker.color.b = 1.0\n elif color == \"yellow\":\n marker.color.r = 1.0\n marker.color.g = 1.0\n elif color == \"majenta\":\n marker.color.g = 1.0\n marker.color.b = 1.0\n elif color == \"cyan\":\n marker.color.r = 1.0\n marker.color.b = 1.0\n \"\"\"\n marker.pose.orientation.w = 1.0\n # pt = behaviour_planner.projector.forward(\n # GPSPoint(float(rospy.get_param(\"late\")), float(rospy.get_param(\"lone\"))))\n marker.pose.position.x = obj.geometric.x\n marker.pose.position.y = obj.geometric.y\n marker.pose.position.z = 0\n target_marker_publisher2.publish(marker)\ndef callback_egovehicle(data):\n global car_ego_x\n global car_ego_y\n\n car_ego_x = data.object.position.x\n car_ego_y = data.object.position.y\n\n br.sendTransform((car_ego_x,car_ego_y,0),tf.transformations.quaternion_from_euler(data.object.orientation.roll,data.object.orientation.pitch,data.object.orientation.yaw),rospy.Time.now(),\"chassis\",\"base_link\")\ndef callback3(fused_data, fused_data2, fused_data3):\n print('RUN')\n callback_simulation(fused_data)\n callback_simulation2(fused_data2)\n callback_simulation3(fused_data3)\n\n#def callback32(fused_data,fused_data2,fused_data3):\n\n\n\ndef listener():\n\n # In ROS, nodes are uniquely named. If two nodes with the same\n # name are launched, the previous one is kicked off. The\n # anonymous=True flag means that rospy will choose a unique\n # name for our 'listener' node so that multiple listeners can\n # run simultaneously.\n \n\n #rospy.Subscriber(\"chatter\", String, callback)\n #rospy.Subscriber('/sensor0/afterKF', ObjectsList, callback_simulation)\n #fused_data = rospy.Subscriber('/sensor0/obj_list_egoframe', ObjectsList, callback_simulation)\n #fused_data2 = rospy.Subscriber('/sensor1/obj_list_egoframe', ObjectsList, callback_simulation2)\n #fused_data3 = rospy.Subscriber('/fused_data', ObjectsList, callback_simulation3)\n fused_data = message_filters.Subscriber('/sensor0/obj_list_egoframe', ObjectsList )\n fused_data2 = message_filters.Subscriber('/sensor1/obj_list_egoframe', ObjectsList )\n fused_data3 = message_filters.Subscriber('/fused_data', ObjectsList)\n #fused_data = message_filters.Subscriber('/sensor0/obj_list_egoframe', ObjectsList)\n #fused_data2 = message_filters.Subscriber('/sensor1/obj_list_egoframe', ObjectsList)\n #fused_data3= message_filters.Subscriber('/fused_data', ObjectsList)\n ts = message_filters.ApproximateTimeSynchronizer([fused_data, fused_data2, fused_data3], 10,1)\n ts.registerCallback(callback3)\n\n\n #print(fused_data)\n # spin() simply keeps python from exiting until this node is stopped\n rospy.spin()\n\nif __name__ == '__main__':\n listener()\n"
},
{
"alpha_fraction": 0.6962298154830933,
"alphanum_fraction": 0.7118492126464844,
"avg_line_length": 23.64601707458496,
"blob_id": "097e41a73af71d08615ea910dc427c2f3c336964",
"content_id": "2cdd599331f99e67fe31ef970976235e56a3f743",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 5570,
"license_type": "no_license",
"max_line_length": 140,
"num_lines": 226,
"path": "/devel/include/object_list/Dimension.h",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "// Generated by gencpp from file object_list/Dimension.msg\n// DO NOT EDIT!\n\n\n#ifndef OBJECT_LIST_MESSAGE_DIMENSION_H\n#define OBJECT_LIST_MESSAGE_DIMENSION_H\n\n\n#include <string>\n#include <vector>\n#include <map>\n\n#include <ros/types.h>\n#include <ros/serialization.h>\n#include <ros/builtin_message_traits.h>\n#include <ros/message_operations.h>\n\n\nnamespace object_list\n{\ntemplate <class ContainerAllocator>\nstruct Dimension_\n{\n typedef Dimension_<ContainerAllocator> Type;\n\n Dimension_()\n : length(0.0)\n , width(0.0)\n , length_variance(0.0)\n , width_variance(0.0) {\n }\n Dimension_(const ContainerAllocator& _alloc)\n : length(0.0)\n , width(0.0)\n , length_variance(0.0)\n , width_variance(0.0) {\n (void)_alloc;\n }\n\n\n\n typedef double _length_type;\n _length_type length;\n\n typedef double _width_type;\n _width_type width;\n\n typedef double _length_variance_type;\n _length_variance_type length_variance;\n\n typedef double _width_variance_type;\n _width_variance_type width_variance;\n\n\n\n\n\n typedef boost::shared_ptr< ::object_list::Dimension_<ContainerAllocator> > Ptr;\n typedef boost::shared_ptr< ::object_list::Dimension_<ContainerAllocator> const> ConstPtr;\n\n}; // struct Dimension_\n\ntypedef ::object_list::Dimension_<std::allocator<void> > Dimension;\n\ntypedef boost::shared_ptr< ::object_list::Dimension > DimensionPtr;\ntypedef boost::shared_ptr< ::object_list::Dimension const> DimensionConstPtr;\n\n// constants requiring out of line definition\n\n\n\ntemplate<typename ContainerAllocator>\nstd::ostream& operator<<(std::ostream& s, const ::object_list::Dimension_<ContainerAllocator> & v)\n{\nros::message_operations::Printer< ::object_list::Dimension_<ContainerAllocator> >::stream(s, \"\", v);\nreturn s;\n}\n\n\ntemplate<typename ContainerAllocator1, typename ContainerAllocator2>\nbool operator==(const ::object_list::Dimension_<ContainerAllocator1> & lhs, const ::object_list::Dimension_<ContainerAllocator2> & rhs)\n{\n return lhs.length == rhs.length &&\n lhs.width == rhs.width &&\n lhs.length_variance == rhs.length_variance &&\n lhs.width_variance == rhs.width_variance;\n}\n\ntemplate<typename ContainerAllocator1, typename ContainerAllocator2>\nbool operator!=(const ::object_list::Dimension_<ContainerAllocator1> & lhs, const ::object_list::Dimension_<ContainerAllocator2> & rhs)\n{\n return !(lhs == rhs);\n}\n\n\n} // namespace object_list\n\nnamespace ros\n{\nnamespace message_traits\n{\n\n\n\n\n\ntemplate <class ContainerAllocator>\nstruct IsFixedSize< ::object_list::Dimension_<ContainerAllocator> >\n : TrueType\n { };\n\ntemplate <class ContainerAllocator>\nstruct IsFixedSize< ::object_list::Dimension_<ContainerAllocator> const>\n : TrueType\n { };\n\ntemplate <class ContainerAllocator>\nstruct IsMessage< ::object_list::Dimension_<ContainerAllocator> >\n : TrueType\n { };\n\ntemplate <class ContainerAllocator>\nstruct IsMessage< ::object_list::Dimension_<ContainerAllocator> const>\n : TrueType\n { };\n\ntemplate <class ContainerAllocator>\nstruct HasHeader< ::object_list::Dimension_<ContainerAllocator> >\n : FalseType\n { };\n\ntemplate <class ContainerAllocator>\nstruct HasHeader< ::object_list::Dimension_<ContainerAllocator> const>\n : FalseType\n { };\n\n\ntemplate<class ContainerAllocator>\nstruct MD5Sum< ::object_list::Dimension_<ContainerAllocator> >\n{\n static const char* value()\n {\n return \"01a8c17587dd313244e2f2fd574d1415\";\n }\n\n static const char* value(const ::object_list::Dimension_<ContainerAllocator>&) { return value(); }\n 
static const uint64_t static_value1 = 0x01a8c17587dd3132ULL;\n static const uint64_t static_value2 = 0x44e2f2fd574d1415ULL;\n};\n\ntemplate<class ContainerAllocator>\nstruct DataType< ::object_list::Dimension_<ContainerAllocator> >\n{\n static const char* value()\n {\n return \"object_list/Dimension\";\n }\n\n static const char* value(const ::object_list::Dimension_<ContainerAllocator>&) { return value(); }\n};\n\ntemplate<class ContainerAllocator>\nstruct Definition< ::object_list::Dimension_<ContainerAllocator> >\n{\n static const char* value()\n {\n return \"float64 length\\n\"\n\"float64 width\\n\"\n\"float64 length_variance\\n\"\n\"float64 width_variance\\n\"\n\"\\n\"\n;\n }\n\n static const char* value(const ::object_list::Dimension_<ContainerAllocator>&) { return value(); }\n};\n\n} // namespace message_traits\n} // namespace ros\n\nnamespace ros\n{\nnamespace serialization\n{\n\n template<class ContainerAllocator> struct Serializer< ::object_list::Dimension_<ContainerAllocator> >\n {\n template<typename Stream, typename T> inline static void allInOne(Stream& stream, T m)\n {\n stream.next(m.length);\n stream.next(m.width);\n stream.next(m.length_variance);\n stream.next(m.width_variance);\n }\n\n ROS_DECLARE_ALLINONE_SERIALIZER\n }; // struct Dimension_\n\n} // namespace serialization\n} // namespace ros\n\nnamespace ros\n{\nnamespace message_operations\n{\n\ntemplate<class ContainerAllocator>\nstruct Printer< ::object_list::Dimension_<ContainerAllocator> >\n{\n template<typename Stream> static void stream(Stream& s, const std::string& indent, const ::object_list::Dimension_<ContainerAllocator>& v)\n {\n s << indent << \"length: \";\n Printer<double>::stream(s, indent + \" \", v.length);\n s << indent << \"width: \";\n Printer<double>::stream(s, indent + \" \", v.width);\n s << indent << \"length_variance: \";\n Printer<double>::stream(s, indent + \" \", v.length_variance);\n s << indent << \"width_variance: \";\n Printer<double>::stream(s, indent + \" \", v.width_variance);\n }\n};\n\n} // namespace message_operations\n} // namespace ros\n\n#endif // OBJECT_LIST_MESSAGE_DIMENSION_H\n"
},
{
"alpha_fraction": 0.7337278127670288,
"alphanum_fraction": 0.7534517049789429,
"avg_line_length": 35.21428680419922,
"blob_id": "adba9deb6043e18db234983a144b17a635674150",
"content_id": "05cc4326b3b02218e1bd512ae90843e5e01454ff",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Java",
"length_bytes": 507,
"license_type": "no_license",
"max_line_length": 96,
"num_lines": 14,
"path": "/src/matlab_gen/build/rosjava_build/osi3_bridge/build/generated-src/osi3_bridge/Dimension3d.java",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "package osi3_bridge;\n\npublic interface Dimension3d extends org.ros.internal.message.Message {\n static final java.lang.String _TYPE = \"osi3_bridge/Dimension3d\";\n static final java.lang.String _DEFINITION = \"float64 length\\nfloat64 width\\nfloat64 height\\n\";\n static final boolean _IS_SERVICE = false;\n static final boolean _IS_ACTION = false;\n double getLength();\n void setLength(double value);\n double getWidth();\n void setWidth(double value);\n double getHeight();\n void setHeight(double value);\n}\n"
},
{
"alpha_fraction": 0.5863602757453918,
"alphanum_fraction": 0.6160118579864502,
"avg_line_length": 24.94230842590332,
"blob_id": "0f0a2922bc029e18246815f967d28fdf4c6ca26b",
"content_id": "16c81fc0b1b14e0f3622df265d9a7457cb339bf4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1349,
"license_type": "no_license",
"max_line_length": 181,
"num_lines": 52,
"path": "/src/object_list/scripts/OSI_ROS_dummy.py",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n\nimport rospy #permit to use python \nimport numpy as np\n#from std_msgs.msg import String # permit use the message String from std_msgs\nfrom osi3_bridge.msg import MovingObject\nfrom osi3_bridge.msg import GroundTruthMovingObjects\n\n\ndef OSI_ROS_dummy():\n pub = rospy.Publisher('osi3_moving_obj_dummy', GroundTruthMovingObjects, queue_size=10) #\n rospy.init_node('osi3_dummy',anonymous=False) # Initiate the node camera and anonymous true permitt openig this node a lot of time including number in the end of the node name \n rate=rospy.Rate(100) #100 hz\n\n while not rospy.is_shutdown():\n \n\n\tb=GroundTruthMovingObjects()\n\n a1=MovingObject()\n a1.position.x = 1\n a1.position.y = 1\n a1.id = 1\n \n a2=MovingObject()\n a2.id = 2\n a2.position.x = 2\n a2.position.y = 2\n a2.type=2\n\n a3=MovingObject()\n a3.id = 3\n a3.position.x = 0\n a3.position.y = 0\n a3.type=3\n\n\n b.header.stamp = rospy.Time.now()\n b.header.frame_id = \"sensor_model_dummy\"\n b.objects = np.append(a1,a2)\n b.objects = np.append(b.objects,a3)\n\n #rospy.loginfo(b)\n pub.publish(b)\n rate.sleep()\n\n\nif __name__ == '__main__':\n try:\n OSI_ROS_dummy()\n except rospy.ROSInterruptException:\n pass\n"
},
{
"alpha_fraction": 0.6375212073326111,
"alphanum_fraction": 0.6503961682319641,
"avg_line_length": 40.57646942138672,
"blob_id": "e157ee5e7ccaa52d5b57908baa11daea10e8e813",
"content_id": "545781598c79da19316f85afe77ec1b888a28a9f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 7068,
"license_type": "no_license",
"max_line_length": 210,
"num_lines": 170,
"path": "/src/vehicle_control/scripts/vehicle.py",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\nimport rospy\nimport message_filters\nimport matlab.engine\nimport math\nimport rospkg\nimport numpy as np\nfrom rotate import rotate\n\nglobal old_time\nglobal old_pos\n\n\n# import ROS messages\nfrom vehicle_control.msg import Trajectory\nfrom osi3_bridge.msg import GroundTruthMovingObjects, TrafficUpdateMovingObject\n\ndef vehicle():\n\n global ego_data\n global eng\n global pub\n\n pub = rospy.Publisher('ego_data', TrafficUpdateMovingObject, queue_size=10,latch=True)\n\n #veh_pub = rospy.Publisher('ego_data', TrafficUpdateMovingObject, queue_size=10)\n\n print('Initializing MATLAB')\n eng = matlab.engine.start_matlab()\n #Define Matlab path\n rospack = rospkg.RosPack()\n eng.cd(rospack.get_path('vehicle_control')+'/scripts')\n print('MATLAB Initialized')\n\n #Initiate ROS node\n rospy.init_node('simulated_vehicle', anonymous=False) # Start node\n rate = rospy.Rate(25) # Define the node frequency 100hz\n\n # Receive car position from simulation Osi3 GT\n # Publish car position\n\n print('Calibrating EGO position, Please do not move the EGO nor the Digital Twin')\n #while not subscriber_connected:\n # num_connections = veh_pub.get_num_connections()\n # gt_msg = rospy.wait_for_message(\"/osi3_moving_obj\", GroundTruthMovingObjects, timeout=10)\n # ego_data = calibration(gt_msg)\n\n # if num_connections > 1:\n # subscriber_connected = True\n # print('EGO calibration complete')\n\n gt_msg = rospy.wait_for_message(\"/osi3_moving_obj\", GroundTruthMovingObjects, timeout=10)\n ego_data = calibration(gt_msg)\n ego_data.object.orientation.yaw=0.0 ##########################\n pub.publish(ego_data)\n print('EGO calibration complete')\n# veh_pub.publish(ego_data)\n# rate.sleep()\n\n # Subscribe trajectory and use ego_data as arg --> output new car position\n\n #while not rospy.is_shutdown():\n # rospy.Subscriber(\"/trajectory\", Trajectory, callback)\n # pub = rospy.Publisher('ego_data', TrafficUpdateMovingObject, queue_size=10)\n # pub.publish(ego_data)\n # rate.sleep() #keeps python from exiting until this node is stopped\n #pub.publish(ego_data)\n rospy.Subscriber(\"/trajectory\", Trajectory, callback)\n rospy.spin() #keeps python from exiting until this node is stopped\n\n\n\ndef callback(traj):\n #ego_data=arg[0]\n #eng=arg[1]\n global ego_data\n global pub\n global eng\n global old_time\n global old_pos\n # ego_data has velocity in acceleration in ego_osi frame while ego has velocities and accelerations in Map frame\n\n rXtraj = matlab.double(tuple(traj.x)) # Trajectory x-points (1xm) vector\n rYtraj = matlab.double(tuple(traj.y)) # Trajectory y-points (1xm) vector\n PsiTraj = matlab.double(tuple(traj.yaw)) # Trajectory yaw angle (1xm) vector\n tTraj = matlab.double(tuple(traj.time)) # Trajectory time_stamp (1xm) vector\n vTraj = matlab.double(tuple(traj.v)) # Trajectory velocity (1xm) vector\n\n #m=len(rXtraj)\n #tTraj = matlab.double(tuple(np.linspace(0, 2.0, num=m))) # Trajectory time_stamp ((1xm) vector)\n\n rX = ego_data.object.position.x # current x - point of EGO (1x1) scalar\n rY = ego_data.object.position.y # current y - point of EGO (1x1) scalar\n yaw = ego_data.object.orientation.yaw # current yaw angle of EGO (1x1) scalar\n v = ego_data.object.velocity.x # current velocity of EGO (1x1) scalar\n ax = ego_data.object.acceleration.x # current acceleration in x of EGO (1x1) scalar\n ay = ego_data.object.acceleration.y # current acceleration in y of EGO (1x1) scalar\n\n if v<=0.1:\n v=0.1\n\n\n\n res = eng.Vehicle(rXtraj, rYtraj, vTraj, PsiTraj, tTraj, 
rX, rY, yaw, v, ax, ay, nargout=6) ### run Matlab Funtion\n #res = eng.Vehicle(rXtraj, rYtraj, vTraj, PsiTraj, tTraj, rX, rY, 0.0, v, ax, 0.0, nargout=6) ### run Matlab Funtion\n\n if ego_data.header.seq == 1:\n old_time = ego_data.header.stamp.nsecs\n old_pos = res[0]\n time_step = (ego_data.header.stamp.nsecs-old_time)*1e-9\n print (\"time step \", time_step ,\" calculated velocity\", safe_div((res[0]-old_pos),time_step), \"velocity\", res[2])#, \"posx \", res[0], \" posy \",res[1], \" yaw \",res[5], \" velocity x\",res[2])\n #print (res)\n\n old_time = ego_data.header.stamp.nsecs\n old_pos = res[0]\n\n\n # Update ego_data with the output of the simulated vehicle\n ego_data.object.position.x = res[0] # Updated x - point of EGO m - Map Frame\n ego_data.object.position.y = res[1] # Updated y - point of EGO m - Map Frame\n #ego_data.object.orientation.yaw = res[5] # Updated yaw angle of EGO rad\n ego_data.object.velocity.x = res[2] # Updated velocity x on EGO frame\n ego_data.object.velocity.y = 0.0 # Lateral velocity of the EGO = 0 on EGO frame\n ego_data.object.acceleration.x = res[3] # Longitudinal acceleration on EGO frame\n ego_data.object.acceleration.y = res[4] # Lateral acc in EGO frame\n\n ego_data.header.stamp = rospy.Time.now()\n ego_osi = ego_data\n # Rotate from EGO to Map frame\n [ego_osi.object.velocity.x, ego_osi.object.velocity.y] = rotate(res[2],0,0)#-ego_osi.object.orientation.yaw) # Lateral Velocity = 0 Updated velocity of EGO m/s - Map Frame\n [ego_osi.object.acceleration.x, ego_osi.object.acceleration.y] = rotate(res[3],res[4],0)#-ego_osi.object.orientation.yaw) # Updated accel in y of EGO m/s2 Map Frame\n\n pub.publish(ego_data)\n ego_osi.header.frame_id=\"Map\"\n osipub = rospy.Publisher('osi3_traffic_update', TrafficUpdateMovingObject, queue_size=10)\n osipub.publish(ego_osi)\n\n #print(ego_data)\n\n\ndef calibration(osi_objs):\n global ego_data\n # find the smaller id number inside the list\n ID=osi_objs.objects[0].id\n IDpos=0\n for i in range(len(osi_objs.objects)):\n if osi_objs.objects[i].id < ID: # take into account that the EGO is the first spawn Object\n ID = osi_objs.objects[i].id\n IDpos = i\n\n #Assign the object with smaller ID to EGO\n ego_data = TrafficUpdateMovingObject()\n ego_data.object = osi_objs.objects[IDpos]\n ego_data.header.stamp = osi_objs.header.stamp\n ego_data.header.frame_id = \"EGO\"\n [ego_data.object.velocity.x, ego_data.object.velocity.y] = rotate(ego_data.object.velocity.x, ego_data.object.velocity.y,ego_data.object.orientation.yaw)\n [ego_data.object.acceleration.x, ego_data.object.acceleration.y] = rotate(ego_data.object.acceleration.x, ego_data.object.acceleration.y,ego_data.object.orientation.yaw)\n\n #pub = rospy.Publisher('ego_data', TrafficUpdateMovingObject, queue_size=10,latch=True)\n #veh_pub.publish(ego_data)\n return ego_data\n\ndef safe_div(x,y):\n try:\n return x/y\n except ZeroDivisionError:\n return 0\n\nif __name__ == '__main__':\n vehicle()\n"
},
{
"alpha_fraction": 0.6167147159576416,
"alphanum_fraction": 0.6330451369285583,
"avg_line_length": 27.135135650634766,
"blob_id": "f3bb87e77fe49ff00229fc7b75bef5da7fe8623b",
"content_id": "d22f2f4f360d5830dd1c254b777bbc7dc1521989",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1041,
"license_type": "no_license",
"max_line_length": 181,
"num_lines": 37,
"path": "/src/object_list/scripts/sensor_model_dummy.py",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n# license removed for brevityi\n\nimport rospy #permit to use python \nimport numpy as np\nfrom object_list.msg import ObjectsList\nfrom object_list.msg import ObjectList\n\ndef sensor_model_dummy():\n\n pub = rospy.Publisher('camera_obj', ObjectsList, queue_size=100) #\n rospy.init_node('camera',anonymous=False) # Initiate the node camera and anonymous true permitt openinig this node a lot of times including numbers in the end of the node name \n rate=rospy.Rate(1) #1 hz\n\n while not rospy.is_shutdown():\n\n b=ObjectsList()\n a1=ObjectList()\n a1.geometric.x = 1\n a1.geometric.y = 1\n \n a2=ObjectList()\n a2.geometric.x = 2\n a2.geometric.y = 2\n \n b.header.stamp = rospy.Time.now()\n b.header.frame_id = \"sensor_model_dummy\"\n b.obj_list = np.append(a1,a2)\n\trospy.loginfo(b)\n pub.publish(b)\n rate.sleep()\n\nif __name__ == '__main__':\n try:\n sensor_model_dummy()\n except rospy.ROSInterruptException:\n pass\n"
},
{
"alpha_fraction": 0.6955193281173706,
"alphanum_fraction": 0.6985743641853333,
"avg_line_length": 34.07143020629883,
"blob_id": "4542afbce276fa8e614a2cf903c2d7463414fb80",
"content_id": "c7eacca656af2790e1d8d0380531cf5a9ce1c599",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1964,
"license_type": "no_license",
"max_line_length": 141,
"num_lines": 56,
"path": "/src/sensor_model/scripts/ego_reader.py",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n\nimport rospy\nimport math\n\nfrom osi3_bridge.msg import GroundTruthMovingObjects, TrafficUpdateMovingObject\nfrom rotate import rotate\n\n\ndef vehicle_reader():\n # Node initialization\n rospy.init_node('ego_reader', anonymous=False) # Start node\n rospy.Subscriber(\"/osi3_moving_obj\", GroundTruthMovingObjects, callback)\n rospy.spin() # spin() simply keeps python from exiting until this node is stopped\n\ndef callback(osi_objs):\n ego_data = find_ego(osi_objs)\n header = osi_objs.header\n public_ego(ego_data,header)\n\n\ndef find_ego(osi_objs):\n global ego_dataCOPY\n # find the smaller id number inside the list\n ID = osi_objs.objects[0].id\n IDpos = 0\n for i in range(len(osi_objs.objects)):\n if osi_objs.objects[i].id < ID: # take into account that the EGO is the first spawn Object\n ID = osi_objs.objects[i].id\n IDpos = i\n\n # Assign the object with smaller ID to EGO\n ego = osi_objs.objects[IDpos]\n # Assign all other ID's to the obj_list\n #osi_objs_noego = [x for x in osi_objs.objects if not x.id == ID]\n\n return ego\n\n\ndef public_ego(ego,header):\n global egoyaw #ego orientation in map frame (dont forget redge and maikol)\n\n ego_data = TrafficUpdateMovingObject()\n ego_data.header.stamp = header.stamp\n ego_data.header.frame_id = \"EGO\"\n\n #ego_data to have ego parameters in ego frame (example velocity of ego in x and y directions of ego / longitudinal and lateral velocity)\n ego_data.object = ego\n [ego_data.object.velocity.x, ego_data.object.velocity.y] = rotate(ego.velocity.x,ego.velocity.y,-ego.orientation.yaw)\n [ego_data.object.acceleration.x, ego_data.object.acceleration.y] = rotate(ego.acceleration.x,ego.acceleration.y,-ego.orientation.yaw)\n egoyaw= ego.orientation.yaw\n pub = rospy.Publisher('ego_data', TrafficUpdateMovingObject, queue_size=10)\n pub.publish(ego_data)\n\nif __name__ == '__main__':\n vehicle_reader()\n"
},
{
"alpha_fraction": 0.7392316460609436,
"alphanum_fraction": 0.7485448122024536,
"avg_line_length": 52.75,
"blob_id": "06149626e1423f04330e749feaeb01713cd929e6",
"content_id": "10ca9375d588c0f58f3668d47a2e3914f2072bdc",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 859,
"license_type": "no_license",
"max_line_length": 109,
"num_lines": 16,
"path": "/build/object_list/catkin_generated/package.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "set(_CATKIN_CURRENT_PACKAGE \"object_list\")\nset(object_list_VERSION \"0.0.0\")\nset(object_list_MAINTAINER \"drechsler <[email protected]>\")\nset(object_list_PACKAGE_FORMAT \"2\")\nset(object_list_BUILD_DEPENDS \"message_generation\" \"roscpp\" \"rospy\" \"std_msgs\" \"osi3_bridge\" \"geometry_msgs\")\nset(object_list_BUILD_EXPORT_DEPENDS \"roscpp\" \"rospy\" \"std_msgs\" \"geometry_msgs\" \"osi3_bridge\")\nset(object_list_BUILDTOOL_DEPENDS \"catkin\")\nset(object_list_BUILDTOOL_EXPORT_DEPENDS )\nset(object_list_EXEC_DEPENDS \"roscpp\" \"rospy\" \"std_msgs\" \"geometry_msgs\" \"osi3_bridge\" \"message_runtime\")\nset(object_list_RUN_DEPENDS \"roscpp\" \"rospy\" \"std_msgs\" \"geometry_msgs\" \"osi3_bridge\" \"message_runtime\")\nset(object_list_TEST_DEPENDS )\nset(object_list_DOC_DEPENDS )\nset(object_list_URL_WEBSITE \"\")\nset(object_list_URL_BUGTRACKER \"\")\nset(object_list_URL_REPOSITORY \"\")\nset(object_list_DEPRECATED \"\")"
},
{
"alpha_fraction": 0.7137592434883118,
"alphanum_fraction": 0.7186732292175293,
"avg_line_length": 27.10344886779785,
"blob_id": "b989c0109c863f2694d17bb5bd0a40d2c484069a",
"content_id": "835f8b9179eccd74938b93d9b222f790a5054b58",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 814,
"license_type": "no_license",
"max_line_length": 121,
"num_lines": 29,
"path": "/src/sensor_model/scripts/coordinates_rotation.py",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\nimport rospy\nfrom ClassSens import Sens\n\n# import ROS messages\nfrom object_list.msg import ObjectsList\n\n# import class\nfrom ClassSens import Sens\n\n\ndef coordinates_rotation():\n # Node initialization\n rospy.init_node('coord_rotation', anonymous=False) # Start node\n rate = rospy.Rate(100) # Define the node frequency 1hz\n\n # Subscriber the data in callback function\n rospy.Subscriber(\"obj_list\", ObjectsList, callback)\n\n rospy.spin() # spin() simply keeps python from exiting until this node is stopped\n\n def callback(obj_list):\n\n # Return object list with objects inside the field of view with position origin on the sensor origin and rotation\n\n obj_list_car_origin = rotate_transpose(obj_list,sens)\n\nif __name__ == '__main__':\n coordinates_rotation()"
},
{
"alpha_fraction": 0.7789473533630371,
"alphanum_fraction": 0.7873684167861938,
"avg_line_length": 46.5,
"blob_id": "9fc0281809f398ea5c8d8f8d794d1f5b8d4ca59c",
"content_id": "fbac4efa47ceb0177f6da90a27a04beb50cf02f7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 475,
"license_type": "no_license",
"max_line_length": 111,
"num_lines": 10,
"path": "/build/vehicle_control/CMakeFiles/vehicle_control_generate_messages_py.dir/cmake_clean.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "file(REMOVE_RECURSE\n \"CMakeFiles/vehicle_control_generate_messages_py\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/lib/python2.7/dist-packages/vehicle_control/msg/_Trajectory.py\"\n \"/home/student/Desktop/Redge_Thesis/vil/devel/lib/python2.7/dist-packages/vehicle_control/msg/__init__.py\"\n)\n\n# Per-language clean rules from dependency scanning.\nforeach(lang )\n include(CMakeFiles/vehicle_control_generate_messages_py.dir/cmake_clean_${lang}.cmake OPTIONAL)\nendforeach()\n"
},
{
"alpha_fraction": 0.75314861536026,
"alphanum_fraction": 0.763224184513092,
"avg_line_length": 40.78947448730469,
"blob_id": "fce597f47ff2bdf60f0fe9ad04b2f1a8d453fbca",
"content_id": "001fce4f88c1867586428e9f094e248b3b89fc15",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 2382,
"license_type": "no_license",
"max_line_length": 208,
"num_lines": 57,
"path": "/build/osi3_bridge/CMakeFiles/osi3_bridge_publisher.dir/DependInfo.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "# The set of languages for which implicit dependencies are needed:\nset(CMAKE_DEPENDS_LANGUAGES\n \"C\"\n \"CXX\"\n )\n# The set of files for implicit dependencies of each language:\nset(CMAKE_DEPENDS_CHECK_C\n \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/src/osi_protocol_header.c\" \"/home/student/Desktop/Redge_Thesis/vil/build/osi3_bridge/CMakeFiles/osi3_bridge_publisher.dir/src/osi_protocol_header.c.o\"\n \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/src/udp.c\" \"/home/student/Desktop/Redge_Thesis/vil/build/osi3_bridge/CMakeFiles/osi3_bridge_publisher.dir/src/udp.c.o\"\n )\nset(CMAKE_C_COMPILER_ID \"GNU\")\n\n# Preprocessor definitions for this target.\nset(CMAKE_TARGET_DEFINITIONS_C\n \"ROSCONSOLE_BACKEND_LOG4CXX\"\n \"ROS_BUILD_SHARED_LIBS=1\"\n \"ROS_PACKAGE_NAME=\\\"osi3_bridge\\\"\"\n )\n\n# The include file search paths:\nset(CMAKE_C_TARGET_INCLUDE_PATH\n \"/home/student/Desktop/Redge_Thesis/vil/devel/include\"\n \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/open-simulation-interface\"\n \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/include\"\n \"/opt/ros/melodic/include\"\n \"/opt/ros/melodic/share/xmlrpcpp/cmake/../../../include/xmlrpcpp\"\n \"osi3_bridge/open-simulation-interface\"\n )\nset(CMAKE_DEPENDS_CHECK_CXX\n \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/src/osi3_publisher.cpp\" \"/home/student/Desktop/Redge_Thesis/vil/build/osi3_bridge/CMakeFiles/osi3_bridge_publisher.dir/src/osi3_publisher.cpp.o\"\n )\nset(CMAKE_CXX_COMPILER_ID \"GNU\")\n\n# Preprocessor definitions for this target.\nset(CMAKE_TARGET_DEFINITIONS_CXX\n \"ROSCONSOLE_BACKEND_LOG4CXX\"\n \"ROS_BUILD_SHARED_LIBS=1\"\n \"ROS_PACKAGE_NAME=\\\"osi3_bridge\\\"\"\n )\n\n# The include file search paths:\nset(CMAKE_CXX_TARGET_INCLUDE_PATH\n \"/home/student/Desktop/Redge_Thesis/vil/devel/include\"\n \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/open-simulation-interface\"\n \"/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/include\"\n \"/opt/ros/melodic/include\"\n \"/opt/ros/melodic/share/xmlrpcpp/cmake/../../../include/xmlrpcpp\"\n \"osi3_bridge/open-simulation-interface\"\n )\n\n# Targets to which this target links.\nset(CMAKE_TARGET_LINKED_INFO_FILES\n \"/home/student/Desktop/Redge_Thesis/vil/build/osi3_bridge/open-simulation-interface/CMakeFiles/open_simulation_interface.dir/DependInfo.cmake\"\n )\n\n# Fortran module output directory.\nset(CMAKE_Fortran_TARGET_MODULE_DIR \"\")\n"
},
{
"alpha_fraction": 0.69597989320755,
"alphanum_fraction": 0.715362548828125,
"avg_line_length": 28.648935317993164,
"blob_id": "f9c5f1f3441bd80119e6c172b065a5dbf4ffb62e",
"content_id": "5b96165155c69ff1aa57fd526d8ea5313529e5ad",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2786,
"license_type": "no_license",
"max_line_length": 115,
"num_lines": 94,
"path": "/src/sensor_model/scripts/KFtesting.py",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\nimport numpy as np\nimport rospy\nimport math\nimport message_filters\nimport time\n## Import Funtions\nfrom rotate import rotate\nfrom scipy.spatial import distance\n## Import Objects\nfrom ClassKF import KF , rotatedata\nfrom ClassSens import Sens , Ego\nfrom std_msgs.msg import Float64\n# import all necessary ROS messages\n\nfrom object_list.msg import ObjectList, ObjectsList\nfrom osi3_bridge.msg import GroundTruthMovingObjects, TrafficUpdateMovingObject\n\ndeviationsum = 0\ndeviation = 0\ncount = 0\ndeviationy = 0\ndeviationsumy = 0\ndistanc = 0\ndistsum = 0\ndeviationsumvx =0\ndeviationsumvy =0\n\ndef sensor_rotate():\n # Node initialization\n #print('ran')\n rospy.init_node('KFtesting', anonymous=False) # Start node\n rate = rospy.Rate(rospy.get_param(\"freq\"))\n # subscribe to sensor data and ego data with time synchronization\n objs1 = message_filters.Subscriber('/sensor0/obj_list_egoframe', ObjectsList)\n objsI = message_filters.Subscriber('/sensor5/obj_list_egoframe', ObjectsList)\n\n #ts = message_filters.ApproximateTimeSynchronizer([objs1, objsI],30,0.003)\n ts = message_filters.TimeSynchronizer([objs1, objsI], 10)\n ts.registerCallback(callback)\n\n rospy.spin()\n\ndef callback(objs1,objsI):\n global deviationsum\n global deviation\n global count\n global deviationy\n global deviationsumy\n global oldtime\n global distanc\n global distsum\n global deviationsumvx\n global deviationsumvy\n\n\n if count == 0:\n oldtime = objsI.header.stamp.to_sec()\n\n newtime = objsI.header.stamp.to_sec()\n print(\"length\", len(objsI.obj_list))\n time_elapsed = (float(newtime) - float(oldtime))/1000000000\n\n deviation = objsI.obj_list[0].geometric.x - objs1.obj_list[0].geometric.x\n\n deviationsum += np.square(deviation)\n count += 1\n avg_deviation = np.sqrt(deviationsum/count)\n\n deviationy = objsI.obj_list[0].geometric.y - objs1.obj_list[0].geometric.y\n deviationsumy += np.square(deviationy)\n avg_deviationy = np.sqrt(deviationsumy / count)\n\n deviationvx = objsI.obj_list[0].geometric.vx - objs1.obj_list[0].geometric.vx\n # print('deviation',deviation )\n deviationsumvx += np.square(deviationvx)\n avg_deviationvx = np.sqrt(deviationsumvx / count)\n\n deviationvy = objsI.obj_list[0].geometric.vy - objs1.obj_list[0].geometric.vy\n # print('deviation',deviation )\n deviationsumvy += np.square(deviationvy)\n avg_deviationvy = np.sqrt(deviationsumvy / count)\n\n print('time:',time_elapsed,'x:', avg_deviation,'y:',avg_deviationy,'vx:',avg_deviationvx,'vy:',avg_deviationvy)\n distsum += distanc\n avg_dist = distsum/count\n\n\n\n\n pub = rospy.Publisher('abc', Float64, queue_size=10, latch=True)\n pub.publish(avg_deviation)\nif __name__ == '__main__':\n sensor_rotate()"
},
{
"alpha_fraction": 0.5556358098983765,
"alphanum_fraction": 0.560693621635437,
"avg_line_length": 53.9040412902832,
"blob_id": "b0480afbcb215e7eb4df46b0684e60ec7959bb3b",
"content_id": "d42e2a7bc706e2e020b372fe98d1469be5a40bec",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 22144,
"license_type": "no_license",
"max_line_length": 156,
"num_lines": 396,
"path": "/src/fusion/src/ClassFusion.py",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "\r\n\r\nimport numpy as np\r\nimport rospy\r\nimport math\r\n\r\nimport message_filters\r\nfrom scipy.spatial import distance as di\r\nfrom scipy.stats import chi2\r\nfrom scipy.linalg import sqrtm\r\nfrom object_list.msg import ObjectList, ObjectsList\r\nfrom osi3_bridge.msg import GroundTruthMovingObjects, TrafficUpdateMovingObject\r\n\r\n\r\nimport sys\r\n# import function\r\n\r\nfrom scipy.spatial import distance\r\nfrom Association import *\r\nfrom fusion_function import *\r\nfrom ClassExistance_Objects import * \r\n\r\nglobal now\r\nglobal existance\r\n\r\n\r\nclass fusion:\r\n def __init__(self):\r\n self.egoveh = Ego() #consists of updated ego parameters\r\n\r\n self.globaltrack = ObjectsList() #Objects list of fused sensor data/ Global track\r\n\r\n self.globaltrack_predicted= ObjectsList() #global track predicted to current time\r\n\r\n self.sensorslist = [] # list of sensor objectlists\r\n self.sensorlist_previous = [] # list of sensor objectlists from previous update\r\n self.sensorslist_predicted = [] # list of sensor objectlists predicted to current time\r\n\r\n\r\n self.CostMatrixA = None\r\n self.CostMatrixB = None\r\n self.CostMatrix = None\r\n self.AssociationMatrix = None\r\n self.ThresholdMatrix = None\r\n self.AssignmentMatrix = None\r\n self.AssignmentList = []\r\n self.tolerance = 1 #tolerance for auction algorithm\r\n self.breakr = 0 #param to break from auction algorithm if auction algorithm takes too long taking too long\r\n \r\n def auction_algorithm(self):\r\n \r\n starttime = rospy.get_rostime().to_sec()\r\n\r\n \"\"\"\r\n Method to perform auction algorithm for optimal assignment of sensor level objects and global level objects.\r\n \"\"\"\r\n #now = rospy.Time.now()\r\n cost_matrix = self.CostMatrix\r\n tolerance = self.tolerance\r\n dimension_cost_matrix = cost_matrix.shape\r\n bid_price_list = [0] * dimension_cost_matrix[1]\r\n assignment_matrix_list = [-9999] * dimension_cost_matrix[0]\r\n bid_price = np.array([bid_price_list])\r\n assignment_matrix = np.array([assignment_matrix_list])\r\n\r\n assignment_complete = 0\r\n print('starting Assignment ')\r\n while assignment_complete == 0:\r\n if -9999 in assignment_matrix:\r\n not_assigned_object_index = np.where(assignment_matrix == -9999)\r\n sensor_object_index = not_assigned_object_index[1][0]\r\n\r\n sensor_object_benifit = cost_matrix[sensor_object_index] - bid_price\r\n maximum_value_index = np.argmax(sensor_object_benifit[0])\r\n\r\n if maximum_value_index in assignment_matrix:\r\n already_assigned_object_index = np.where(assignment_matrix == maximum_value_index)[1][0]\r\n assignment_matrix[0][sensor_object_index] = maximum_value_index\r\n assignment_matrix[0][already_assigned_object_index] = -9999\r\n\r\n first_best = np.partition(sensor_object_benifit[0].flatten(), -2)[-1]\r\n second_best = np.partition(sensor_object_benifit[0].flatten(), -2)[-2]\r\n object_new_bidprice = (\r\n float(bid_price[0][sensor_object_index]) + (float(first_best) - float(second_best)) + float((tolerance)))\r\n #print(object_new_bidprice)\r\n try:\r\n bid_price[0][sensor_object_index] = object_new_bidprice\r\n except:\r\n bid_price[0][sensor_object_index] =sys.maxsize\r\n else:\r\n assignment_matrix[0][sensor_object_index] = maximum_value_index\r\n endtime = rospy.get_rostime().to_sec()\r\n time = endtime - starttime\r\n if time > 1.5:\r\n self.breakr = 1\r\n break\r\n\r\n\r\n else:\r\n assignment_complete = 1\r\n return (assignment_matrix)\r\n \r\n def fuse(self):\r\n\r\n self.AssignmentList = []\r\n \r\n global 
now\r\n now = rospy.Time.now()\r\n\r\n\r\n for m,sensor in enumerate(self.sensorslist):\r\n \r\n self.AssociationMatrix = np.zeros((len(sensor.obj_list), len(\r\n self.globaltrack.obj_list))) # intialize Association matrix (M*N) M - objs in sensor track, N - objs in global track\r\n self.CostMatrixA = np.zeros((len(sensor.obj_list), len(\r\n self.globaltrack.obj_list))) # intialize Cost matrixA (M*N) M - objs in sensor track, N - objs in global track\r\n self.ThresholdMatrix = np.zeros((len(sensor.obj_list), len(\r\n self.globaltrack.obj_list))) # intialize Threshold matrixA (M*N) M - objs in sensor track, N - objs in global track\r\n threshold = chi2.ppf(0.95, 6) # select threshold from chi distribution usinf 2 degrees of freedom\r\n\r\n self.CostMatrixB = np.zeros((len(sensor.obj_list), len(sensor.obj_list))) # intialize Cost matrixB (M*M) M - objs in sensor track\r\n np.fill_diagonal(self.CostMatrixB, threshold)\r\n \r\n for c,globalobj in enumerate(self.globaltrack.obj_list):\r\n \r\n for i, sensorobj in enumerate(sensor.obj_list):\r\n\r\n [scenario,globalxf,globalyf,sensorxf,sensoryf,geometric] = feature_select(globalobj, sensorobj)\r\n\r\n global_association_state = np.array([[globalobj.geometric.x],[globalobj.geometric.y]])\r\n sensor_association_state = np.array([[sensorobj.geometric.x], [sensorobj.geometric.y]])\r\n\r\n global_covariance = np.array ([[globalobj.covariance[0],globalobj.covariance[3]],[globalobj.covariance[18],globalobj.covariance[21]]])\r\n sensor_covariance = np.array([[sensorobj.covariance[0], sensorobj.covariance[3]],[sensorobj.covariance[18], sensorobj.covariance[21]]])\r\n\r\n maha_distance,threshold = statistical_distance(sensor_association_state, global_association_state, sensor_covariance, global_covariance)\r\n\r\n # Maha distance - mahalanobis distance\r\n if maha_distance > threshold:\r\n \r\n maha_distance = 9999\r\n\r\n self.AssociationMatrix[i,c] = maha_distance\r\n self.ThresholdMatrix[i,c] = threshold\r\n self.CostMatrixB[i,i] = threshold\r\n \r\n sensorobjs,globalobjs = np.shape(self.AssociationMatrix)\r\n for i in range(sensorobjs):\r\n for j in range(globalobjs):\r\n if self.AssociationMatrix[i,j] == 9999:\r\n self.CostMatrixA[i, j] = 0\r\n else:\r\n self.CostMatrixA[i,j] = 2*self.ThresholdMatrix[i,j] - self.AssociationMatrix[i,j]\r\n\r\n\r\n self.CostMatrix = np.concatenate((self.CostMatrixA, self.CostMatrixB), axis=1)\r\n self.AssignmentMatrix = fusion.auction_algorithm(self)[0]\r\n if self.breakr == 1:\r\n self.breakr = 0\r\n continue\r\n \r\n self.AssignmentList.append(self.AssignmentMatrix)\r\n for l, asign in enumerate(self.AssignmentMatrix):\r\n try:\r\n sensor.obj_list[l].obj_id = self.globaltrack.obj_list[asign].obj_id\r\n\r\n except:\r\n\r\n sensor.obj_list[l].obj_id = self.globaltrack.obj_list[-1].obj_id +1\r\n\r\n self.globaltrack_predicted = temp_alignment(self.globaltrack, self.egoveh)\r\n \r\n\r\n self.sensorslist_predicted = self.sensorslist\r\n \r\n for m, predicted_sensor in enumerate(self.sensorslist_predicted): # iterate every sensor in sensors list\r\n\r\n \r\n if len(predicted_sensor.obj_list) == 0:\r\n continue\r\n global_ids = [i.obj_id for i in self.globaltrack_predicted.obj_list] # list of global object ids\r\n try:\r\n prev_obj_ids = [j.obj_id for j in self.sensorlist_previous[m].obj_list]\r\n except:\r\n prev_obj_ids = []\r\n for n, predict_obj in enumerate(predicted_sensor.obj_list):\r\n sensor_trust = predicted_sensor.sensor_property.trust_existance #get parameter from launch file\r\n property = 
predicted_sensor.sensor_property\r\n\r\n if predict_obj.obj_id in global_ids and predict_obj.obj_id in prev_obj_ids: \r\n #FUSION IF BOTH GLOBAL AND PREVIOUS SENSOR TRACK HAVE OBJECT ID // SENSOR OBJECT ALREADY EXIST IN GLOBAL TRACK AND PREVIOUS SENSOR TRACK\r\n l = global_ids.index(predict_obj.obj_id)\r\n glob_pred_obj = self.globaltrack_predicted.obj_list[global_ids.index(predict_obj.obj_id)]\r\n prev_obj = self.sensorlist_previous[m].obj_list[prev_obj_ids.index(predict_obj.obj_id)]\r\n prev_obj_aligned = temp_alignment_obj(prev_obj, self.egoveh,\r\n self.sensorlist_previous[m].sensor_property,self.sensorlist_previous[m])\r\n\r\n Sensor_obj = SensorObject(predict_obj, property)\r\n Sensor_obj.set_existance_probability_mass_factors()\r\n Sensor_obj.set_classification_mass_factors()\r\n\r\n\r\n Global_obj = GlobalObject(glob_pred_obj)\r\n Global_obj.set_existance_probability_mass_factors(sensor_trust)\r\n Global_obj.existance_mass_prediction(0.01)\r\n\r\n\r\n\r\n existance_fusion = ExistanceFusion(Sensor_obj,Global_obj)\r\n existance_fusion.existance_fusion_object_assosiated()\r\n #fused_existance = existance_fusion.fused_probability_existance\r\n classification_fusion = ClassificationFusion(Sensor_obj,Global_obj)\r\n classification_fusion.classification_fusion()\r\n\r\n\r\n # FUSION IF NOT 1ST UPDATE OF THIS PARTICULAR SENSOR IN GLOBAL TRACK\r\n if self.sensorlist_previous[m].sensor_property.sensor_id in glob_pred_obj.sensors_fused:\r\n\r\n\r\n\r\n\r\n\r\n [global_state, global_covariance] = information_matrix_fusion(glob_pred_obj,\r\n prev_obj_aligned,\r\n predict_obj,\r\n predicted_sensor.sensor_property.sensor_id)\r\n\r\n # FUSION IF 1ST UPDATE OF THIS PARTICULAR SENSOR IN GLOBAL TRACK\r\n else:\r\n [global_state, global_covariance] = cross_covarience_recurssion_fusion(glob_pred_obj, predict_obj)\r\n\r\n\r\n self.globaltrack.obj_list[l].prop_existence = existance_fusion.fused_probability_existance\r\n self.globaltrack.obj_list[l].prop_persistance = predict_obj.prop_persistance\r\n\r\n fused_classification_massfactors_list = [classification_fusion.fused_mass_factor_car,\r\n classification_fusion.fused_mass_factor_truck,\r\n classification_fusion.fused_mass_factor_motorcycle,\r\n classification_fusion.fused_mass_factor_bicycle,\r\n classification_fusion.fused_mass_factor_pedestrian,\r\n classification_fusion.fused_mass_factor_stationary,\r\n classification_fusion.fused_mass_factor_vehicle,\r\n classification_fusion.fused_mass_factor_vru,\r\n classification_fusion.fused_mass_factor_traffic,\r\n classification_fusion.fused_mass_factor_statvehicle,\r\n classification_fusion.fused_mass_factor_statvru,\r\n classification_fusion.fused_mass_factor_ignorance]\r\n\r\n self.globaltrack.obj_list[l].classification_mass = fused_classification_massfactors_list\r\n self.globaltrack.obj_list[l].classification.car = classification_fusion.fused_probability_car\r\n self.globaltrack.obj_list[l].classification.truck = classification_fusion.fused_probability_truck\r\n self.globaltrack.obj_list[l].classification.motorcycle = classification_fusion.fused_probability_motorcycle\r\n self.globaltrack.obj_list[l].classification.bicycle= classification_fusion.fused_probability_bicycle\r\n self.globaltrack.obj_list[l].classification.pedestrian = classification_fusion.fused_probability_pedestrian\r\n self.globaltrack.obj_list[l].classification.stacionary = classification_fusion.fused_probability_stationary\r\n self.globaltrack.obj_list[l].classification.car = classification_fusion.fused_probability_car\r\n 
self.globaltrack.obj_list[l].classification.other = classification_fusion.fused_probability_other\r\n\r\n\r\n self.globaltrack.obj_list[l].geometric.x = float(global_state[0])\r\n self.globaltrack.obj_list[l].geometric.vx = float(global_state[1])\r\n self.globaltrack.obj_list[l].geometric.ax = float(global_state[2])\r\n self.globaltrack.obj_list[l].geometric.y = float(global_state[3])\r\n self.globaltrack.obj_list[l].geometric.vy = float(global_state[4])\r\n self.globaltrack.obj_list[l].geometric.ay = float(global_state[5])\r\n self.globaltrack.obj_list[l].covariance = global_covariance.flatten()\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n try:\r\n self.globaltrack.obj_list[l].time = predicted_sensor.header.stamp.to_sec()\r\n except:\r\n self.globaltrack.obj_list[l].time = (rospy.Time.now()).to_sec()\r\n latesttime = self.globaltrack.obj_list[l].time\r\n \r\n #Update sensor id in global track (to check in next iteration)\r\n if predicted_sensor.sensor_property.sensor_id not in self.globaltrack.obj_list[l].sensors_fused:\r\n self.globaltrack.obj_list[l].sensors_fused.append(predicted_sensor.sensor_property.sensor_id)\r\n \r\n \r\n # FUSION IF OBJECT EXIST IN GLOBAL TRACK BUT NOT IN PREVIOUS SENSOR TRACK if 1st update from sensor\r\n elif predict_obj.obj_id in global_ids and predict_obj.obj_id not in prev_obj_ids: \r\n print(\"Update 1st from sensor\")\r\n\r\n l = global_ids.index(predict_obj.obj_id)\r\n\r\n glob_pred_obj = self.globaltrack_predicted.obj_list[l]\r\n\r\n [global_state, global_covariance] = cross_covarience_recurssion_fusion(glob_pred_obj,\r\n predict_obj)\r\n\r\n Sensor_obj = SensorObject(predict_obj, property)\r\n Sensor_obj.set_existance_probability_mass_factors()\r\n Sensor_obj.set_classification_mass_factors()\r\n\r\n Global_obj = GlobalObject(glob_pred_obj)\r\n Global_obj.set_existance_probability_mass_factors(sensor_trust)\r\n Global_obj.existance_mass_prediction(0.01)\r\n\r\n existance_fusion = ExistanceFusion(Sensor_obj, Global_obj)\r\n existance_fusion.existance_fusion_object_assosiated()\r\n # fused_existance = existance_fusion.fused_probability_existance\r\n classification_fusion = ClassificationFusion(Sensor_obj, Global_obj)\r\n classification_fusion.classification_fusion()\r\n\r\n existance_fusion = ExistanceFusion(Sensor_obj, Global_obj)\r\n existance_fusion.existance_fusion_object_assosiated()\r\n\r\n self.globaltrack.obj_list[l].prop_existence = existance_fusion.fused_probability_existance\r\n self.globaltrack.obj_list[l].prop_persistance = predict_obj.prop_persistance\r\n\r\n fused_classification_massfactors_list = [classification_fusion.fused_mass_factor_car,\r\n classification_fusion.fused_mass_factor_truck,\r\n classification_fusion.fused_mass_factor_motorcycle,\r\n classification_fusion.fused_mass_factor_bicycle,\r\n classification_fusion.fused_mass_factor_pedestrian,\r\n classification_fusion.fused_mass_factor_stationary,\r\n classification_fusion.fused_mass_factor_vehicle,\r\n classification_fusion.fused_mass_factor_vru,\r\n classification_fusion.fused_mass_factor_traffic,\r\n classification_fusion.fused_mass_factor_statvehicle,\r\n classification_fusion.fused_mass_factor_statvru,\r\n classification_fusion.fused_mass_factor_ignorance]\r\n\r\n self.globaltrack.obj_list[l].classification_mass = fused_classification_massfactors_list\r\n self.globaltrack.obj_list[l].classification.car = classification_fusion.fused_probability_car\r\n self.globaltrack.obj_list[l].classification.truck = classification_fusion.fused_probability_truck\r\n self.globaltrack.obj_list[\r\n 
l].classification.motorcycle = classification_fusion.fused_probability_motorcycle\r\n self.globaltrack.obj_list[\r\n l].classification.bicycle = classification_fusion.fused_probability_bicycle\r\n self.globaltrack.obj_list[\r\n l].classification.pedestrian = classification_fusion.fused_probability_pedestrian\r\n self.globaltrack.obj_list[\r\n l].classification.stacionary = classification_fusion.fused_probability_stationary\r\n self.globaltrack.obj_list[l].classification.car = classification_fusion.fused_probability_car\r\n self.globaltrack.obj_list[l].classification.other = classification_fusion.fused_probability_other\r\n\r\n\r\n self.globaltrack.obj_list[l].geometric.x = float(global_state[0])\r\n self.globaltrack.obj_list[l].geometric.vx = float(global_state[1])\r\n self.globaltrack.obj_list[l].geometric.ax = float(global_state[2])\r\n self.globaltrack.obj_list[l].geometric.y = float(global_state[3])\r\n self.globaltrack.obj_list[l].geometric.vy = float(global_state[4])\r\n self.globaltrack.obj_list[l].geometric.ay = float(global_state[5])\r\n self.globaltrack.obj_list[l].covariance = global_covariance.flatten()\r\n try:\r\n self.globaltrack.obj_list[l].time = predicted_sensor.header.stamp.to_sec()\r\n except:\r\n self.globaltrack.obj_list[l].time = (rospy.Time.now()).to_sec()\r\n latesttime = self.globaltrack.obj_list[l].time\r\n if predicted_sensor.sensor_property.sensor_id not in self.globaltrack.obj_list[l].sensors_fused:\r\n self.globaltrack.obj_list[l].sensors_fused.append(predicted_sensor.sensor_property.sensor_id)\r\n #print('latesttime is ', latesttime)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n #FUSION IF NEW OBJECT FROM SENSOR (AS PER ASSOCIATION) \r\n elif predict_obj.obj_id not in global_ids: \r\n print(\"new obj\")\r\n predict_obj.obj_id = self.globaltrack.obj_list[-1].obj_id +1\r\n Sensor_obj = SensorObject(predict_obj, property)\r\n Sensor_obj.set_existance_probability_mass_factors()\r\n Sensor_obj.set_classification_mass_factors()\r\n\r\n\r\n\r\n\r\n self.sensorslist[m].obj_list[n].obj_id = predict_obj.obj_id\r\n self.globaltrack.obj_list.append(predict_obj)\r\n self.globaltrack.obj_list[-1].sensors_fused = []\r\n\r\n self.globaltrack.obj_list[-1].classification_mass = Sensor_obj.list_classification_mass_factor\r\n\r\n if predicted_sensor.sensor_property.sensor_id not in self.globaltrack.obj_list[-1].sensors_fused:\r\n self.globaltrack.obj_list[-1].sensors_fused.append(predicted_sensor.sensor_property.sensor_id)\r\n try:\r\n self.globaltrack.obj_list[-1].time = predicted_sensor.header.stamp.to_sec()\r\n except:\r\n self.globaltrack.obj_list[-1].time =(rospy.Time.now()).to_sec()\r\n\r\n self.globaltrack = evaluate_time(self.globaltrack)\r\n #self.globaltrack.header.stamp = self.sensorslist[0].header.stamp\r\n try:\r\n self.globaltrack.header.stamp = rospy.Time.from_sec(latesttime)\r\n except:\r\n self.globaltrack.header.stamp = rospy.Time.now()\r\n\r\n #print('length is ', len(self.globaltrack.obj_list))\r\n self.sensorlist_previous = self.sensorslist\r\n\r\n"
},
{
"alpha_fraction": 0.7695852518081665,
"alphanum_fraction": 0.774193525314331,
"avg_line_length": 53.25,
"blob_id": "f2a7fe08ffa204aef0200c7166109f55c30e1765",
"content_id": "d3b844a857e4ec14e763a885f07493200e09809c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 217,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 4,
"path": "/build/object_list/catkin_generated/installspace/object_list-msg-paths.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "# generated from genmsg/cmake/pkg-msg-paths.cmake.installspace.in\n\n_prepend_path(\"${object_list_DIR}/..\" \"msg\" object_list_MSG_INCLUDE_DIRS UNIQUE)\nset(object_list_MSG_DEPENDENCIES std_msgs;geometry_msgs;osi3_bridge)\n"
},
{
"alpha_fraction": 0.778194010257721,
"alphanum_fraction": 0.7786471843719482,
"avg_line_length": 54.63193130493164,
"blob_id": "f3e296c39eadcee0ca0fc0c5bea504a10bafd3b9",
"content_id": "3a22497480633137ce691a7ecc46dd1bddc5dfe0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 33101,
"license_type": "no_license",
"max_line_length": 507,
"num_lines": 595,
"path": "/build/object_list/cmake/object_list-genmsg.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "# generated from genmsg/cmake/pkg-genmsg.cmake.em\n\nmessage(STATUS \"object_list: 8 messages, 0 services\")\n\nset(MSG_I_FLAGS \"-Iobject_list:/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg;-Istd_msgs:/opt/ros/melodic/share/std_msgs/cmake/../msg;-Igeometry_msgs:/opt/ros/melodic/share/geometry_msgs/cmake/../msg;-Iosi3_bridge:/home/student/Desktop/Redge_Thesis/vil/src/osi3_bridge/msg\")\n\n# Find all generators\nfind_package(gencpp REQUIRED)\nfind_package(geneus REQUIRED)\nfind_package(genlisp REQUIRED)\nfind_package(gennodejs REQUIRED)\nfind_package(genpy REQUIRED)\n\nadd_custom_target(object_list_generate_messages ALL)\n\n# verify that message/service dependencies have not changed since configure\n\n\n\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Features.msg\" NAME_WE)\nadd_custom_target(_object_list_generate_messages_check_deps_${_filename}\n COMMAND ${CATKIN_ENV} ${PYTHON_EXECUTABLE} ${GENMSG_CHECK_DEPS_SCRIPT} \"object_list\" \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Features.msg\" \"\"\n)\n\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Geometric.msg\" NAME_WE)\nadd_custom_target(_object_list_generate_messages_check_deps_${_filename}\n COMMAND ${CATKIN_ENV} ${PYTHON_EXECUTABLE} ${GENMSG_CHECK_DEPS_SCRIPT} \"object_list\" \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Geometric.msg\" \"\"\n)\n\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/SensorProperty.msg\" NAME_WE)\nadd_custom_target(_object_list_generate_messages_check_deps_${_filename}\n COMMAND ${CATKIN_ENV} ${PYTHON_EXECUTABLE} ${GENMSG_CHECK_DEPS_SCRIPT} \"object_list\" \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/SensorProperty.msg\" \"\"\n)\n\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/ObjectList.msg\" NAME_WE)\nadd_custom_target(_object_list_generate_messages_check_deps_${_filename}\n COMMAND ${CATKIN_ENV} ${PYTHON_EXECUTABLE} ${GENMSG_CHECK_DEPS_SCRIPT} \"object_list\" \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/ObjectList.msg\" \"object_list/Features:object_list/Dimension:object_list/Classification:object_list/Geometric\"\n)\n\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Classification.msg\" NAME_WE)\nadd_custom_target(_object_list_generate_messages_check_deps_${_filename}\n COMMAND ${CATKIN_ENV} ${PYTHON_EXECUTABLE} ${GENMSG_CHECK_DEPS_SCRIPT} \"object_list\" \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Classification.msg\" \"\"\n)\n\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/ObjectsList.msg\" NAME_WE)\nadd_custom_target(_object_list_generate_messages_check_deps_${_filename}\n COMMAND ${CATKIN_ENV} ${PYTHON_EXECUTABLE} ${GENMSG_CHECK_DEPS_SCRIPT} \"object_list\" \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/ObjectsList.msg\" \"object_list/SensorProperty:object_list/Features:object_list/ObjectList:object_list/Classification:std_msgs/Header:object_list/Geometric:object_list/Dimension\"\n)\n\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Dimension.msg\" NAME_WE)\nadd_custom_target(_object_list_generate_messages_check_deps_${_filename}\n COMMAND ${CATKIN_ENV} ${PYTHON_EXECUTABLE} ${GENMSG_CHECK_DEPS_SCRIPT} \"object_list\" 
\"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Dimension.msg\" \"\"\n)\n\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/EgoData.msg\" NAME_WE)\nadd_custom_target(_object_list_generate_messages_check_deps_${_filename}\n COMMAND ${CATKIN_ENV} ${PYTHON_EXECUTABLE} ${GENMSG_CHECK_DEPS_SCRIPT} \"object_list\" \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/EgoData.msg\" \"object_list/Geometric:object_list/Dimension:std_msgs/Header\"\n)\n\n#\n# langs = gencpp;geneus;genlisp;gennodejs;genpy\n#\n\n### Section generating for lang: gencpp\n### Generating Messages\n_generate_msg_cpp(object_list\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Features.msg\"\n \"${MSG_I_FLAGS}\"\n \"\"\n ${CATKIN_DEVEL_PREFIX}/${gencpp_INSTALL_DIR}/object_list\n)\n_generate_msg_cpp(object_list\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Geometric.msg\"\n \"${MSG_I_FLAGS}\"\n \"\"\n ${CATKIN_DEVEL_PREFIX}/${gencpp_INSTALL_DIR}/object_list\n)\n_generate_msg_cpp(object_list\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/SensorProperty.msg\"\n \"${MSG_I_FLAGS}\"\n \"\"\n ${CATKIN_DEVEL_PREFIX}/${gencpp_INSTALL_DIR}/object_list\n)\n_generate_msg_cpp(object_list\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/ObjectList.msg\"\n \"${MSG_I_FLAGS}\"\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Features.msg;/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Dimension.msg;/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Classification.msg;/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Geometric.msg\"\n ${CATKIN_DEVEL_PREFIX}/${gencpp_INSTALL_DIR}/object_list\n)\n_generate_msg_cpp(object_list\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Classification.msg\"\n \"${MSG_I_FLAGS}\"\n \"\"\n ${CATKIN_DEVEL_PREFIX}/${gencpp_INSTALL_DIR}/object_list\n)\n_generate_msg_cpp(object_list\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/ObjectsList.msg\"\n \"${MSG_I_FLAGS}\"\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/SensorProperty.msg;/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Features.msg;/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/ObjectList.msg;/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Classification.msg;/opt/ros/melodic/share/std_msgs/cmake/../msg/Header.msg;/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Geometric.msg;/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Dimension.msg\"\n ${CATKIN_DEVEL_PREFIX}/${gencpp_INSTALL_DIR}/object_list\n)\n_generate_msg_cpp(object_list\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Dimension.msg\"\n \"${MSG_I_FLAGS}\"\n \"\"\n ${CATKIN_DEVEL_PREFIX}/${gencpp_INSTALL_DIR}/object_list\n)\n_generate_msg_cpp(object_list\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/EgoData.msg\"\n \"${MSG_I_FLAGS}\"\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Geometric.msg;/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Dimension.msg;/opt/ros/melodic/share/std_msgs/cmake/../msg/Header.msg\"\n ${CATKIN_DEVEL_PREFIX}/${gencpp_INSTALL_DIR}/object_list\n)\n\n### Generating Services\n\n### Generating Module File\n_generate_module_cpp(object_list\n ${CATKIN_DEVEL_PREFIX}/${gencpp_INSTALL_DIR}/object_list\n \"${ALL_GEN_OUTPUT_FILES_cpp}\"\n)\n\nadd_custom_target(object_list_generate_messages_cpp\n DEPENDS 
${ALL_GEN_OUTPUT_FILES_cpp}\n)\nadd_dependencies(object_list_generate_messages object_list_generate_messages_cpp)\n\n# add dependencies to all check dependencies targets\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Features.msg\" NAME_WE)\nadd_dependencies(object_list_generate_messages_cpp _object_list_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Geometric.msg\" NAME_WE)\nadd_dependencies(object_list_generate_messages_cpp _object_list_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/SensorProperty.msg\" NAME_WE)\nadd_dependencies(object_list_generate_messages_cpp _object_list_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/ObjectList.msg\" NAME_WE)\nadd_dependencies(object_list_generate_messages_cpp _object_list_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Classification.msg\" NAME_WE)\nadd_dependencies(object_list_generate_messages_cpp _object_list_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/ObjectsList.msg\" NAME_WE)\nadd_dependencies(object_list_generate_messages_cpp _object_list_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Dimension.msg\" NAME_WE)\nadd_dependencies(object_list_generate_messages_cpp _object_list_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/EgoData.msg\" NAME_WE)\nadd_dependencies(object_list_generate_messages_cpp _object_list_generate_messages_check_deps_${_filename})\n\n# target for backward compatibility\nadd_custom_target(object_list_gencpp)\nadd_dependencies(object_list_gencpp object_list_generate_messages_cpp)\n\n# register target for catkin_package(EXPORTED_TARGETS)\nlist(APPEND ${PROJECT_NAME}_EXPORTED_TARGETS object_list_generate_messages_cpp)\n\n### Section generating for lang: geneus\n### Generating Messages\n_generate_msg_eus(object_list\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Features.msg\"\n \"${MSG_I_FLAGS}\"\n \"\"\n ${CATKIN_DEVEL_PREFIX}/${geneus_INSTALL_DIR}/object_list\n)\n_generate_msg_eus(object_list\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Geometric.msg\"\n \"${MSG_I_FLAGS}\"\n \"\"\n ${CATKIN_DEVEL_PREFIX}/${geneus_INSTALL_DIR}/object_list\n)\n_generate_msg_eus(object_list\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/SensorProperty.msg\"\n \"${MSG_I_FLAGS}\"\n \"\"\n ${CATKIN_DEVEL_PREFIX}/${geneus_INSTALL_DIR}/object_list\n)\n_generate_msg_eus(object_list\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/ObjectList.msg\"\n \"${MSG_I_FLAGS}\"\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Features.msg;/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Dimension.msg;/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Classification.msg;/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Geometric.msg\"\n ${CATKIN_DEVEL_PREFIX}/${geneus_INSTALL_DIR}/object_list\n)\n_generate_msg_eus(object_list\n 
\"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Classification.msg\"\n \"${MSG_I_FLAGS}\"\n \"\"\n ${CATKIN_DEVEL_PREFIX}/${geneus_INSTALL_DIR}/object_list\n)\n_generate_msg_eus(object_list\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/ObjectsList.msg\"\n \"${MSG_I_FLAGS}\"\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/SensorProperty.msg;/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Features.msg;/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/ObjectList.msg;/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Classification.msg;/opt/ros/melodic/share/std_msgs/cmake/../msg/Header.msg;/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Geometric.msg;/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Dimension.msg\"\n ${CATKIN_DEVEL_PREFIX}/${geneus_INSTALL_DIR}/object_list\n)\n_generate_msg_eus(object_list\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Dimension.msg\"\n \"${MSG_I_FLAGS}\"\n \"\"\n ${CATKIN_DEVEL_PREFIX}/${geneus_INSTALL_DIR}/object_list\n)\n_generate_msg_eus(object_list\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/EgoData.msg\"\n \"${MSG_I_FLAGS}\"\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Geometric.msg;/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Dimension.msg;/opt/ros/melodic/share/std_msgs/cmake/../msg/Header.msg\"\n ${CATKIN_DEVEL_PREFIX}/${geneus_INSTALL_DIR}/object_list\n)\n\n### Generating Services\n\n### Generating Module File\n_generate_module_eus(object_list\n ${CATKIN_DEVEL_PREFIX}/${geneus_INSTALL_DIR}/object_list\n \"${ALL_GEN_OUTPUT_FILES_eus}\"\n)\n\nadd_custom_target(object_list_generate_messages_eus\n DEPENDS ${ALL_GEN_OUTPUT_FILES_eus}\n)\nadd_dependencies(object_list_generate_messages object_list_generate_messages_eus)\n\n# add dependencies to all check dependencies targets\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Features.msg\" NAME_WE)\nadd_dependencies(object_list_generate_messages_eus _object_list_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Geometric.msg\" NAME_WE)\nadd_dependencies(object_list_generate_messages_eus _object_list_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/SensorProperty.msg\" NAME_WE)\nadd_dependencies(object_list_generate_messages_eus _object_list_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/ObjectList.msg\" NAME_WE)\nadd_dependencies(object_list_generate_messages_eus _object_list_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Classification.msg\" NAME_WE)\nadd_dependencies(object_list_generate_messages_eus _object_list_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/ObjectsList.msg\" NAME_WE)\nadd_dependencies(object_list_generate_messages_eus _object_list_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Dimension.msg\" NAME_WE)\nadd_dependencies(object_list_generate_messages_eus _object_list_generate_messages_check_deps_${_filename})\nget_filename_component(_filename 
\"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/EgoData.msg\" NAME_WE)\nadd_dependencies(object_list_generate_messages_eus _object_list_generate_messages_check_deps_${_filename})\n\n# target for backward compatibility\nadd_custom_target(object_list_geneus)\nadd_dependencies(object_list_geneus object_list_generate_messages_eus)\n\n# register target for catkin_package(EXPORTED_TARGETS)\nlist(APPEND ${PROJECT_NAME}_EXPORTED_TARGETS object_list_generate_messages_eus)\n\n### Section generating for lang: genlisp\n### Generating Messages\n_generate_msg_lisp(object_list\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Features.msg\"\n \"${MSG_I_FLAGS}\"\n \"\"\n ${CATKIN_DEVEL_PREFIX}/${genlisp_INSTALL_DIR}/object_list\n)\n_generate_msg_lisp(object_list\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Geometric.msg\"\n \"${MSG_I_FLAGS}\"\n \"\"\n ${CATKIN_DEVEL_PREFIX}/${genlisp_INSTALL_DIR}/object_list\n)\n_generate_msg_lisp(object_list\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/SensorProperty.msg\"\n \"${MSG_I_FLAGS}\"\n \"\"\n ${CATKIN_DEVEL_PREFIX}/${genlisp_INSTALL_DIR}/object_list\n)\n_generate_msg_lisp(object_list\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/ObjectList.msg\"\n \"${MSG_I_FLAGS}\"\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Features.msg;/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Dimension.msg;/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Classification.msg;/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Geometric.msg\"\n ${CATKIN_DEVEL_PREFIX}/${genlisp_INSTALL_DIR}/object_list\n)\n_generate_msg_lisp(object_list\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Classification.msg\"\n \"${MSG_I_FLAGS}\"\n \"\"\n ${CATKIN_DEVEL_PREFIX}/${genlisp_INSTALL_DIR}/object_list\n)\n_generate_msg_lisp(object_list\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/ObjectsList.msg\"\n \"${MSG_I_FLAGS}\"\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/SensorProperty.msg;/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Features.msg;/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/ObjectList.msg;/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Classification.msg;/opt/ros/melodic/share/std_msgs/cmake/../msg/Header.msg;/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Geometric.msg;/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Dimension.msg\"\n ${CATKIN_DEVEL_PREFIX}/${genlisp_INSTALL_DIR}/object_list\n)\n_generate_msg_lisp(object_list\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Dimension.msg\"\n \"${MSG_I_FLAGS}\"\n \"\"\n ${CATKIN_DEVEL_PREFIX}/${genlisp_INSTALL_DIR}/object_list\n)\n_generate_msg_lisp(object_list\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/EgoData.msg\"\n \"${MSG_I_FLAGS}\"\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Geometric.msg;/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Dimension.msg;/opt/ros/melodic/share/std_msgs/cmake/../msg/Header.msg\"\n ${CATKIN_DEVEL_PREFIX}/${genlisp_INSTALL_DIR}/object_list\n)\n\n### Generating Services\n\n### Generating Module File\n_generate_module_lisp(object_list\n ${CATKIN_DEVEL_PREFIX}/${genlisp_INSTALL_DIR}/object_list\n \"${ALL_GEN_OUTPUT_FILES_lisp}\"\n)\n\nadd_custom_target(object_list_generate_messages_lisp\n DEPENDS ${ALL_GEN_OUTPUT_FILES_lisp}\n)\nadd_dependencies(object_list_generate_messages object_list_generate_messages_lisp)\n\n# 
add dependencies to all check dependencies targets\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Features.msg\" NAME_WE)\nadd_dependencies(object_list_generate_messages_lisp _object_list_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Geometric.msg\" NAME_WE)\nadd_dependencies(object_list_generate_messages_lisp _object_list_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/SensorProperty.msg\" NAME_WE)\nadd_dependencies(object_list_generate_messages_lisp _object_list_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/ObjectList.msg\" NAME_WE)\nadd_dependencies(object_list_generate_messages_lisp _object_list_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Classification.msg\" NAME_WE)\nadd_dependencies(object_list_generate_messages_lisp _object_list_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/ObjectsList.msg\" NAME_WE)\nadd_dependencies(object_list_generate_messages_lisp _object_list_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Dimension.msg\" NAME_WE)\nadd_dependencies(object_list_generate_messages_lisp _object_list_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/EgoData.msg\" NAME_WE)\nadd_dependencies(object_list_generate_messages_lisp _object_list_generate_messages_check_deps_${_filename})\n\n# target for backward compatibility\nadd_custom_target(object_list_genlisp)\nadd_dependencies(object_list_genlisp object_list_generate_messages_lisp)\n\n# register target for catkin_package(EXPORTED_TARGETS)\nlist(APPEND ${PROJECT_NAME}_EXPORTED_TARGETS object_list_generate_messages_lisp)\n\n### Section generating for lang: gennodejs\n### Generating Messages\n_generate_msg_nodejs(object_list\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Features.msg\"\n \"${MSG_I_FLAGS}\"\n \"\"\n ${CATKIN_DEVEL_PREFIX}/${gennodejs_INSTALL_DIR}/object_list\n)\n_generate_msg_nodejs(object_list\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Geometric.msg\"\n \"${MSG_I_FLAGS}\"\n \"\"\n ${CATKIN_DEVEL_PREFIX}/${gennodejs_INSTALL_DIR}/object_list\n)\n_generate_msg_nodejs(object_list\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/SensorProperty.msg\"\n \"${MSG_I_FLAGS}\"\n \"\"\n ${CATKIN_DEVEL_PREFIX}/${gennodejs_INSTALL_DIR}/object_list\n)\n_generate_msg_nodejs(object_list\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/ObjectList.msg\"\n \"${MSG_I_FLAGS}\"\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Features.msg;/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Dimension.msg;/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Classification.msg;/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Geometric.msg\"\n ${CATKIN_DEVEL_PREFIX}/${gennodejs_INSTALL_DIR}/object_list\n)\n_generate_msg_nodejs(object_list\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Classification.msg\"\n \"${MSG_I_FLAGS}\"\n \"\"\n 
${CATKIN_DEVEL_PREFIX}/${gennodejs_INSTALL_DIR}/object_list\n)\n_generate_msg_nodejs(object_list\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/ObjectsList.msg\"\n \"${MSG_I_FLAGS}\"\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/SensorProperty.msg;/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Features.msg;/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/ObjectList.msg;/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Classification.msg;/opt/ros/melodic/share/std_msgs/cmake/../msg/Header.msg;/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Geometric.msg;/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Dimension.msg\"\n ${CATKIN_DEVEL_PREFIX}/${gennodejs_INSTALL_DIR}/object_list\n)\n_generate_msg_nodejs(object_list\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Dimension.msg\"\n \"${MSG_I_FLAGS}\"\n \"\"\n ${CATKIN_DEVEL_PREFIX}/${gennodejs_INSTALL_DIR}/object_list\n)\n_generate_msg_nodejs(object_list\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/EgoData.msg\"\n \"${MSG_I_FLAGS}\"\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Geometric.msg;/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Dimension.msg;/opt/ros/melodic/share/std_msgs/cmake/../msg/Header.msg\"\n ${CATKIN_DEVEL_PREFIX}/${gennodejs_INSTALL_DIR}/object_list\n)\n\n### Generating Services\n\n### Generating Module File\n_generate_module_nodejs(object_list\n ${CATKIN_DEVEL_PREFIX}/${gennodejs_INSTALL_DIR}/object_list\n \"${ALL_GEN_OUTPUT_FILES_nodejs}\"\n)\n\nadd_custom_target(object_list_generate_messages_nodejs\n DEPENDS ${ALL_GEN_OUTPUT_FILES_nodejs}\n)\nadd_dependencies(object_list_generate_messages object_list_generate_messages_nodejs)\n\n# add dependencies to all check dependencies targets\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Features.msg\" NAME_WE)\nadd_dependencies(object_list_generate_messages_nodejs _object_list_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Geometric.msg\" NAME_WE)\nadd_dependencies(object_list_generate_messages_nodejs _object_list_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/SensorProperty.msg\" NAME_WE)\nadd_dependencies(object_list_generate_messages_nodejs _object_list_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/ObjectList.msg\" NAME_WE)\nadd_dependencies(object_list_generate_messages_nodejs _object_list_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Classification.msg\" NAME_WE)\nadd_dependencies(object_list_generate_messages_nodejs _object_list_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/ObjectsList.msg\" NAME_WE)\nadd_dependencies(object_list_generate_messages_nodejs _object_list_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Dimension.msg\" NAME_WE)\nadd_dependencies(object_list_generate_messages_nodejs _object_list_generate_messages_check_deps_${_filename})\nget_filename_component(_filename 
\"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/EgoData.msg\" NAME_WE)\nadd_dependencies(object_list_generate_messages_nodejs _object_list_generate_messages_check_deps_${_filename})\n\n# target for backward compatibility\nadd_custom_target(object_list_gennodejs)\nadd_dependencies(object_list_gennodejs object_list_generate_messages_nodejs)\n\n# register target for catkin_package(EXPORTED_TARGETS)\nlist(APPEND ${PROJECT_NAME}_EXPORTED_TARGETS object_list_generate_messages_nodejs)\n\n### Section generating for lang: genpy\n### Generating Messages\n_generate_msg_py(object_list\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Features.msg\"\n \"${MSG_I_FLAGS}\"\n \"\"\n ${CATKIN_DEVEL_PREFIX}/${genpy_INSTALL_DIR}/object_list\n)\n_generate_msg_py(object_list\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Geometric.msg\"\n \"${MSG_I_FLAGS}\"\n \"\"\n ${CATKIN_DEVEL_PREFIX}/${genpy_INSTALL_DIR}/object_list\n)\n_generate_msg_py(object_list\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/SensorProperty.msg\"\n \"${MSG_I_FLAGS}\"\n \"\"\n ${CATKIN_DEVEL_PREFIX}/${genpy_INSTALL_DIR}/object_list\n)\n_generate_msg_py(object_list\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/ObjectList.msg\"\n \"${MSG_I_FLAGS}\"\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Features.msg;/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Dimension.msg;/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Classification.msg;/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Geometric.msg\"\n ${CATKIN_DEVEL_PREFIX}/${genpy_INSTALL_DIR}/object_list\n)\n_generate_msg_py(object_list\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Classification.msg\"\n \"${MSG_I_FLAGS}\"\n \"\"\n ${CATKIN_DEVEL_PREFIX}/${genpy_INSTALL_DIR}/object_list\n)\n_generate_msg_py(object_list\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/ObjectsList.msg\"\n \"${MSG_I_FLAGS}\"\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/SensorProperty.msg;/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Features.msg;/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/ObjectList.msg;/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Classification.msg;/opt/ros/melodic/share/std_msgs/cmake/../msg/Header.msg;/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Geometric.msg;/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Dimension.msg\"\n ${CATKIN_DEVEL_PREFIX}/${genpy_INSTALL_DIR}/object_list\n)\n_generate_msg_py(object_list\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Dimension.msg\"\n \"${MSG_I_FLAGS}\"\n \"\"\n ${CATKIN_DEVEL_PREFIX}/${genpy_INSTALL_DIR}/object_list\n)\n_generate_msg_py(object_list\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/EgoData.msg\"\n \"${MSG_I_FLAGS}\"\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Geometric.msg;/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Dimension.msg;/opt/ros/melodic/share/std_msgs/cmake/../msg/Header.msg\"\n ${CATKIN_DEVEL_PREFIX}/${genpy_INSTALL_DIR}/object_list\n)\n\n### Generating Services\n\n### Generating Module File\n_generate_module_py(object_list\n ${CATKIN_DEVEL_PREFIX}/${genpy_INSTALL_DIR}/object_list\n \"${ALL_GEN_OUTPUT_FILES_py}\"\n)\n\nadd_custom_target(object_list_generate_messages_py\n DEPENDS ${ALL_GEN_OUTPUT_FILES_py}\n)\nadd_dependencies(object_list_generate_messages object_list_generate_messages_py)\n\n# add dependencies to all check 
dependencies targets\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Features.msg\" NAME_WE)\nadd_dependencies(object_list_generate_messages_py _object_list_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Geometric.msg\" NAME_WE)\nadd_dependencies(object_list_generate_messages_py _object_list_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/SensorProperty.msg\" NAME_WE)\nadd_dependencies(object_list_generate_messages_py _object_list_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/ObjectList.msg\" NAME_WE)\nadd_dependencies(object_list_generate_messages_py _object_list_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Classification.msg\" NAME_WE)\nadd_dependencies(object_list_generate_messages_py _object_list_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/ObjectsList.msg\" NAME_WE)\nadd_dependencies(object_list_generate_messages_py _object_list_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Dimension.msg\" NAME_WE)\nadd_dependencies(object_list_generate_messages_py _object_list_generate_messages_check_deps_${_filename})\nget_filename_component(_filename \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/EgoData.msg\" NAME_WE)\nadd_dependencies(object_list_generate_messages_py _object_list_generate_messages_check_deps_${_filename})\n\n# target for backward compatibility\nadd_custom_target(object_list_genpy)\nadd_dependencies(object_list_genpy object_list_generate_messages_py)\n\n# register target for catkin_package(EXPORTED_TARGETS)\nlist(APPEND ${PROJECT_NAME}_EXPORTED_TARGETS object_list_generate_messages_py)\n\n\n\nif(gencpp_INSTALL_DIR AND EXISTS ${CATKIN_DEVEL_PREFIX}/${gencpp_INSTALL_DIR}/object_list)\n # install generated code\n install(\n DIRECTORY ${CATKIN_DEVEL_PREFIX}/${gencpp_INSTALL_DIR}/object_list\n DESTINATION ${gencpp_INSTALL_DIR}\n )\nendif()\nif(TARGET std_msgs_generate_messages_cpp)\n add_dependencies(object_list_generate_messages_cpp std_msgs_generate_messages_cpp)\nendif()\nif(TARGET geometry_msgs_generate_messages_cpp)\n add_dependencies(object_list_generate_messages_cpp geometry_msgs_generate_messages_cpp)\nendif()\nif(TARGET osi3_bridge_generate_messages_cpp)\n add_dependencies(object_list_generate_messages_cpp osi3_bridge_generate_messages_cpp)\nendif()\n\nif(geneus_INSTALL_DIR AND EXISTS ${CATKIN_DEVEL_PREFIX}/${geneus_INSTALL_DIR}/object_list)\n # install generated code\n install(\n DIRECTORY ${CATKIN_DEVEL_PREFIX}/${geneus_INSTALL_DIR}/object_list\n DESTINATION ${geneus_INSTALL_DIR}\n )\nendif()\nif(TARGET std_msgs_generate_messages_eus)\n add_dependencies(object_list_generate_messages_eus std_msgs_generate_messages_eus)\nendif()\nif(TARGET geometry_msgs_generate_messages_eus)\n add_dependencies(object_list_generate_messages_eus geometry_msgs_generate_messages_eus)\nendif()\nif(TARGET osi3_bridge_generate_messages_eus)\n add_dependencies(object_list_generate_messages_eus osi3_bridge_generate_messages_eus)\nendif()\n\nif(genlisp_INSTALL_DIR AND EXISTS 
${CATKIN_DEVEL_PREFIX}/${genlisp_INSTALL_DIR}/object_list)\n # install generated code\n install(\n DIRECTORY ${CATKIN_DEVEL_PREFIX}/${genlisp_INSTALL_DIR}/object_list\n DESTINATION ${genlisp_INSTALL_DIR}\n )\nendif()\nif(TARGET std_msgs_generate_messages_lisp)\n add_dependencies(object_list_generate_messages_lisp std_msgs_generate_messages_lisp)\nendif()\nif(TARGET geometry_msgs_generate_messages_lisp)\n add_dependencies(object_list_generate_messages_lisp geometry_msgs_generate_messages_lisp)\nendif()\nif(TARGET osi3_bridge_generate_messages_lisp)\n add_dependencies(object_list_generate_messages_lisp osi3_bridge_generate_messages_lisp)\nendif()\n\nif(gennodejs_INSTALL_DIR AND EXISTS ${CATKIN_DEVEL_PREFIX}/${gennodejs_INSTALL_DIR}/object_list)\n # install generated code\n install(\n DIRECTORY ${CATKIN_DEVEL_PREFIX}/${gennodejs_INSTALL_DIR}/object_list\n DESTINATION ${gennodejs_INSTALL_DIR}\n )\nendif()\nif(TARGET std_msgs_generate_messages_nodejs)\n add_dependencies(object_list_generate_messages_nodejs std_msgs_generate_messages_nodejs)\nendif()\nif(TARGET geometry_msgs_generate_messages_nodejs)\n add_dependencies(object_list_generate_messages_nodejs geometry_msgs_generate_messages_nodejs)\nendif()\nif(TARGET osi3_bridge_generate_messages_nodejs)\n add_dependencies(object_list_generate_messages_nodejs osi3_bridge_generate_messages_nodejs)\nendif()\n\nif(genpy_INSTALL_DIR AND EXISTS ${CATKIN_DEVEL_PREFIX}/${genpy_INSTALL_DIR}/object_list)\n install(CODE \"execute_process(COMMAND \\\"/usr/bin/python2\\\" -m compileall \\\"${CATKIN_DEVEL_PREFIX}/${genpy_INSTALL_DIR}/object_list\\\")\")\n # install generated code\n install(\n DIRECTORY ${CATKIN_DEVEL_PREFIX}/${genpy_INSTALL_DIR}/object_list\n DESTINATION ${genpy_INSTALL_DIR}\n )\nendif()\nif(TARGET std_msgs_generate_messages_py)\n add_dependencies(object_list_generate_messages_py std_msgs_generate_messages_py)\nendif()\nif(TARGET geometry_msgs_generate_messages_py)\n add_dependencies(object_list_generate_messages_py geometry_msgs_generate_messages_py)\nendif()\nif(TARGET osi3_bridge_generate_messages_py)\n add_dependencies(object_list_generate_messages_py osi3_bridge_generate_messages_py)\nendif()\n"
},
{
"alpha_fraction": 0.5311427116394043,
"alphanum_fraction": 0.5610593557357788,
"avg_line_length": 37.39622497558594,
"blob_id": "544c475770bff07eff72ff1a849002e5be3af373",
"content_id": "b00386d6b69eb348c93f5840a5df4cf13d2bf29f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2039,
"license_type": "no_license",
"max_line_length": 134,
"num_lines": 53,
"path": "/src/aeb/script/ClassAeb.py",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "import rospy\nimport numpy as np\n\n## Inicialize the Aeb class\nclass Acc:\n def __init__(self):\n self.fw= abs(rospy.get_param(\"fw_acc\")) #2.5 ## Deceleration provided by a normal driver [m/s^2]\n self.stage1 = abs(rospy.get_param(\"stg1_acc\")) #2.0 ## Deceleration provided by the first severity stage [m/s^2]\n self.stage2 = abs(rospy.get_param(\"stg2_acc\")) #3.0 ## Deceleration provided by the second severity stage [m/s^2]\n self.stage3 = abs(rospy.get_param(\"stg3_acc\")) #5.0 ## Deceleration provided by the third severity stage [m/s^2]\n\nclass React:\n def __init__(self):\n self.driver = rospy.get_param(\"driver_react\") #1.2 ## Reaction time of a normal driver [s]\n self.system = rospy.get_param(\"aeb_react\") #0.5 ## Reaction time of the aeb system [s] ######## Ask Varun\n\nclass Stoptime:\n def __init__(self):\n self.fw = 0.0\n self.stage1 = 0.0\n self.stage2 = 0.0\n self.stage3 = 0.0\n\nclass Aeb:\n def __init__(self):\n self.acc = Acc()\n self.react = React()\n self.offset = rospy.get_param(\"offsetx\") #3.7 ## Offset kept as safety between the follower and the leader [m]\n self.ttc = 0.0 ## start as null\n self.stoptime = Stoptime()\n self.des_vel = rospy.get_param(\"des_vel\")/3.6\n self.final_time = rospy.get_param(\"max_traj_time\")\n self.time_step = 1 / rospy.get_param(\"freq\")\n self.amount_data = int(self.final_time / self.time_step) ## Calculate the amount of data to the trajectory\n self.timesteparray = np.linspace(self.time_step, self.final_time, num=self.amount_data) ##\n self.x = 0\n self.y = 0\n self.yaw = 0\n self.status = 0\n self.last_status = 0\n\n\nclass Features:\n def __init__(self):\n\n self.FL = 0.0\n self.FM = 0.0\n self.FR = 0.0\n self.MR = 0.0\n self.RR = 0.0\n self.RM = 0.0\n self.RL = 0.0\n self.ML = 0.0\n\n\n\n\n"
},
{
"alpha_fraction": 0.7657382488250732,
"alphanum_fraction": 0.7676459550857544,
"avg_line_length": 52.479591369628906,
"blob_id": "ced4f6aebb11c8795abe6a5e9e1e539d3a96036b",
"content_id": "614a3b75a6842cf909a5a52ad3cf9a6d0bab1027",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 5242,
"license_type": "no_license",
"max_line_length": 211,
"num_lines": 98,
"path": "/build/object_list/cmake_install.cmake",
"repo_name": "RedgeCastelino/Master_thesis_shared",
"src_encoding": "UTF-8",
"text": "# Install script for directory: /home/student/Desktop/Redge_Thesis/vil/src/object_list\n\n# Set the install prefix\nif(NOT DEFINED CMAKE_INSTALL_PREFIX)\n set(CMAKE_INSTALL_PREFIX \"/home/student/Desktop/Redge_Thesis/vil/install\")\nendif()\nstring(REGEX REPLACE \"/$\" \"\" CMAKE_INSTALL_PREFIX \"${CMAKE_INSTALL_PREFIX}\")\n\n# Set the install configuration name.\nif(NOT DEFINED CMAKE_INSTALL_CONFIG_NAME)\n if(BUILD_TYPE)\n string(REGEX REPLACE \"^[^A-Za-z0-9_]+\" \"\"\n CMAKE_INSTALL_CONFIG_NAME \"${BUILD_TYPE}\")\n else()\n set(CMAKE_INSTALL_CONFIG_NAME \"Release\")\n endif()\n message(STATUS \"Install configuration: \\\"${CMAKE_INSTALL_CONFIG_NAME}\\\"\")\nendif()\n\n# Set the component getting installed.\nif(NOT CMAKE_INSTALL_COMPONENT)\n if(COMPONENT)\n message(STATUS \"Install component: \\\"${COMPONENT}\\\"\")\n set(CMAKE_INSTALL_COMPONENT \"${COMPONENT}\")\n else()\n set(CMAKE_INSTALL_COMPONENT)\n endif()\nendif()\n\n# Install shared libraries without execute permission?\nif(NOT DEFINED CMAKE_INSTALL_SO_NO_EXE)\n set(CMAKE_INSTALL_SO_NO_EXE \"1\")\nendif()\n\n# Is this installation the result of a crosscompile?\nif(NOT DEFINED CMAKE_CROSSCOMPILING)\n set(CMAKE_CROSSCOMPILING \"FALSE\")\nendif()\n\nif(\"x${CMAKE_INSTALL_COMPONENT}x\" STREQUAL \"xUnspecifiedx\" OR NOT CMAKE_INSTALL_COMPONENT)\n file(INSTALL DESTINATION \"${CMAKE_INSTALL_PREFIX}/share/object_list/msg\" TYPE FILE FILES\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Classification.msg\"\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Dimension.msg\"\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Features.msg\"\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/Geometric.msg\"\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/ObjectList.msg\"\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/ObjectsList.msg\"\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/EgoData.msg\"\n \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/msg/SensorProperty.msg\"\n )\nendif()\n\nif(\"x${CMAKE_INSTALL_COMPONENT}x\" STREQUAL \"xUnspecifiedx\" OR NOT CMAKE_INSTALL_COMPONENT)\n file(INSTALL DESTINATION \"${CMAKE_INSTALL_PREFIX}/share/object_list/cmake\" TYPE FILE FILES \"/home/student/Desktop/Redge_Thesis/vil/build/object_list/catkin_generated/installspace/object_list-msg-paths.cmake\")\nendif()\n\nif(\"x${CMAKE_INSTALL_COMPONENT}x\" STREQUAL \"xUnspecifiedx\" OR NOT CMAKE_INSTALL_COMPONENT)\n file(INSTALL DESTINATION \"${CMAKE_INSTALL_PREFIX}/include\" TYPE DIRECTORY FILES \"/home/student/Desktop/Redge_Thesis/vil/devel/include/object_list\")\nendif()\n\nif(\"x${CMAKE_INSTALL_COMPONENT}x\" STREQUAL \"xUnspecifiedx\" OR NOT CMAKE_INSTALL_COMPONENT)\n file(INSTALL DESTINATION \"${CMAKE_INSTALL_PREFIX}/share/roseus/ros\" TYPE DIRECTORY FILES \"/home/student/Desktop/Redge_Thesis/vil/devel/share/roseus/ros/object_list\")\nendif()\n\nif(\"x${CMAKE_INSTALL_COMPONENT}x\" STREQUAL \"xUnspecifiedx\" OR NOT CMAKE_INSTALL_COMPONENT)\n file(INSTALL DESTINATION \"${CMAKE_INSTALL_PREFIX}/share/common-lisp/ros\" TYPE DIRECTORY FILES \"/home/student/Desktop/Redge_Thesis/vil/devel/share/common-lisp/ros/object_list\")\nendif()\n\nif(\"x${CMAKE_INSTALL_COMPONENT}x\" STREQUAL \"xUnspecifiedx\" OR NOT CMAKE_INSTALL_COMPONENT)\n file(INSTALL DESTINATION \"${CMAKE_INSTALL_PREFIX}/share/gennodejs/ros\" TYPE DIRECTORY FILES \"/home/student/Desktop/Redge_Thesis/vil/devel/share/gennodejs/ros/object_list\")\nendif()\n\nif(\"x${CMAKE_INSTALL_COMPONENT}x\" 
STREQUAL \"xUnspecifiedx\" OR NOT CMAKE_INSTALL_COMPONENT)\n execute_process(COMMAND \"/usr/bin/python2\" -m compileall \"/home/student/Desktop/Redge_Thesis/vil/devel/lib/python2.7/dist-packages/object_list\")\nendif()\n\nif(\"x${CMAKE_INSTALL_COMPONENT}x\" STREQUAL \"xUnspecifiedx\" OR NOT CMAKE_INSTALL_COMPONENT)\n file(INSTALL DESTINATION \"${CMAKE_INSTALL_PREFIX}/lib/python2.7/dist-packages\" TYPE DIRECTORY FILES \"/home/student/Desktop/Redge_Thesis/vil/devel/lib/python2.7/dist-packages/object_list\")\nendif()\n\nif(\"x${CMAKE_INSTALL_COMPONENT}x\" STREQUAL \"xUnspecifiedx\" OR NOT CMAKE_INSTALL_COMPONENT)\n file(INSTALL DESTINATION \"${CMAKE_INSTALL_PREFIX}/lib/pkgconfig\" TYPE FILE FILES \"/home/student/Desktop/Redge_Thesis/vil/build/object_list/catkin_generated/installspace/object_list.pc\")\nendif()\n\nif(\"x${CMAKE_INSTALL_COMPONENT}x\" STREQUAL \"xUnspecifiedx\" OR NOT CMAKE_INSTALL_COMPONENT)\n file(INSTALL DESTINATION \"${CMAKE_INSTALL_PREFIX}/share/object_list/cmake\" TYPE FILE FILES \"/home/student/Desktop/Redge_Thesis/vil/build/object_list/catkin_generated/installspace/object_list-msg-extras.cmake\")\nendif()\n\nif(\"x${CMAKE_INSTALL_COMPONENT}x\" STREQUAL \"xUnspecifiedx\" OR NOT CMAKE_INSTALL_COMPONENT)\n file(INSTALL DESTINATION \"${CMAKE_INSTALL_PREFIX}/share/object_list/cmake\" TYPE FILE FILES\n \"/home/student/Desktop/Redge_Thesis/vil/build/object_list/catkin_generated/installspace/object_listConfig.cmake\"\n \"/home/student/Desktop/Redge_Thesis/vil/build/object_list/catkin_generated/installspace/object_listConfig-version.cmake\"\n )\nendif()\n\nif(\"x${CMAKE_INSTALL_COMPONENT}x\" STREQUAL \"xUnspecifiedx\" OR NOT CMAKE_INSTALL_COMPONENT)\n file(INSTALL DESTINATION \"${CMAKE_INSTALL_PREFIX}/share/object_list\" TYPE FILE FILES \"/home/student/Desktop/Redge_Thesis/vil/src/object_list/package.xml\")\nendif()\n\n"
}
] | 111 |
JamesT-W/demoinfo | https://github.com/JamesT-W/demoinfo | cde12914e4945d189aa3e60d304347aa96c25a2a | 09ff32d5e67035307a824c201e787a527d8e1918 | 61c400e940f4ce694bf442478c36b2a709aabaca | refs/heads/master | 2020-06-17T23:11:01.743576 | 2020-04-02T17:37:51 | 2020-04-02T17:37:51 | 196,094,593 | 0 | 1 | MIT | 2019-07-09T22:51:24 | 2019-07-07T15:30:48 | 2019-03-18T12:56:30 | null | [
{
"alpha_fraction": 0.7230769395828247,
"alphanum_fraction": 0.7538461685180664,
"avg_line_length": 31.5,
"blob_id": "6e819db04efea2956a98ad159924219137e0e5e3",
"content_id": "01e752535a43a62e7772e8b3eebf9b532e07434c",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 260,
"license_type": "permissive",
"max_line_length": 125,
"num_lines": 8,
"path": "/ci/profile.sh",
"repo_name": "JamesT-W/demoinfo",
"src_encoding": "UTF-8",
"text": "set -eu\n\ndemofile=\"$1\"\noutput_pipe=\"$2\"\n\n# Release, with profiling\nmono --optimize=all \"--profile=log:alloc,calls,heapshot=10000ms,time=fast,maxframes=3,output=|./ci/analyze.sh $output_pipe\" \\\n DevNullPlayer/bin/Release/DevNullPlayer.exe \"testdemos/$demofile\"\n"
},
{
"alpha_fraction": 0.5801093578338623,
"alphanum_fraction": 0.5870479345321655,
"avg_line_length": 35.030303955078125,
"blob_id": "092e8acddb5250ac19be33def7fb45f6edad8469",
"content_id": "8aff219669085858c612caa2d2852c7559099eaa",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4756,
"license_type": "permissive",
"max_line_length": 131,
"num_lines": 132,
"path": "/ci/brofiler.py",
"repo_name": "JamesT-W/demoinfo",
"src_encoding": "UTF-8",
"text": "from __future__ import nested_scopes, generators, division, absolute_import, with_statement, print_function, unicode_literals\nimport json, base64, os, subprocess, select, requests, sys\n\nGH_TOKEN = os.environ['STATUS_APIKEY']\nCOMMIT = os.environ['TRAVIS_COMMIT']\nTESTDATA = 'testdemos'\n\ndef create_gist(text):\n res = requests.post('https://api.github.com/gists', data=json.dumps({\n \"description\": \"Autogenerated by brofiler.py\",\n \"public\": True,\n \"files\": {\n \"report.txt\": {\n \"content\": text\n }\n }\n }).encode('utf-8'))\n return json.loads(res.text)['files']['report.txt']['raw_url']\n\ndef set_status(sha, state, desc, ctx, url=None):\n request = {\n 'state': state,\n 'description': desc,\n 'context': 'profiling/' + ctx\n }\n\n if url:\n request['target_url'] = url\n\n res = requests.post('https://api.github.com/repos/moritzuehling/demoinfo-public/statuses/' + sha,\n headers={'Authorization': 'token ' + GH_TOKEN}, data=json.dumps(request).encode('utf-8'))\n return res.text\n\ndef invoke(script, dem, report_progress=False):\n pipe_rfd, pipe_wfd = os.pipe()\n p = subprocess.Popen(\n ['/bin/bash', script, dem, str(pipe_wfd)],\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE,\n shell=False,\n close_fds=False,\n preexec_fn = lambda: os.close(pipe_rfd),\n )\n\n pipe_chunks = []\n stdout_chunks = []\n stderr_chunks = []\n stdout_rfd, stderr_rfd = p.stdout.fileno(), p.stderr.fileno()\n pending = [pipe_rfd, stdout_rfd, stderr_rfd]\n while len(pending) > 1:\n rready, _, _ = select.select(pending, [], [])\n fd = rready[0]\n chunk = os.read(fd, 4096)\n if report_progress and fd is pipe_rfd:\n print('\\r%03d%%' % (int(chunk),), end='')\n sys.stdout.flush()\n elif len(chunk) == 0:\n # end of stream\n pending.remove(fd)\n else:\n (pipe_chunks if fd is pipe_rfd else stdout_chunks if fd is stdout_rfd else stderr_chunks).append(chunk.decode('utf-8'))\n retval = p.wait()\n print('\\r%s return value %d' % (dem, retval))\n err_text = ''.join(stderr_chunks)\n out_text = ''.join(stdout_chunks)\n pipe_text = ''.join(pipe_chunks)\n return retval, out_text, err_text, pipe_text\n\nSEP_LEN = 25\nSEP_CHAR = '-'\ndef maybe_append(buf, piece, title):\n if len(piece) > 0:\n req_sep = SEP_LEN - (len(title) + 2)\n buf += '\\n' + (SEP_CHAR * (req_sep // 2)) + ' ' + title + ' ' + (SEP_CHAR * ((req_sep + 1) // 2)) + '\\n'\n buf += piece\n buf += '\\n' + (SEP_CHAR * SEP_LEN) + '\\n'\n return buf\n\ndemos = [dem for dem in os.listdir(TESTDATA) if dem.endswith('.dem')]\n\n\nif sys.argv[1] == 'cleanup':\n for dem in demos:\n set_status(COMMIT, 'error', '???', dem)\n sys.exit(0)\nelif sys.argv[1] == 'verify':\n how_many_failures = 0\n # start by setting all of them to Preparing\n for dem in demos:\n set_status(COMMIT, 'pending', 'Preparing', dem)\n\n # now run verification\n for dem in demos:\n retval, out_text, err_text, _ = invoke('ci/verify.sh', dem, True) # pipe not in use\n if retval == 0 and not out_text and not err_text:\n set_status(COMMIT, 'pending', 'Verify success', dem)\n else:\n how_many_failures += 1\n gistlink = create_gist(maybe_append(maybe_append(\n 'return code ' + str(retval), err_text, 'stderr'), out_text, 'stdout'))\n set_status(COMMIT, 'failure', 'Verification failed', dem, gistlink)\n print('Failure info posted to: ' + gistlink)\n os.unlink(TESTDATA + '/' + dem)\n\n sys.exit(how_many_failures)\nelif sys.argv[1] != 'run':\n raise ValueError('Illegal parameter')\n\n\n\nfailure_count = 0\n# now actually run profiling\nfor dem in demos:\n set_status(COMMIT, 'pending', 'Running', 
dem)\n retval, out_text, err_text, pipe_text = invoke('ci/profile.sh', dem)\n os.unlink(TESTDATA + '/' + dem)\n\n gist_text = '' if retval == 0 else 'return code %d' % (retval,)\n pipe_text = '\\n'.join([x for x in pipe_text.split('\\n') if not x.startswith('unmatched leave at stack pos')])\n gist_text = maybe_append(gist_text, err_text, 'stderr')\n gist_text = maybe_append(gist_text, out_text, 'stdout')\n if gist_text:\n gist_text = maybe_append(gist_text, pipe_text, 'results')\n else:\n gist_text = pipe_text # no need for bars if it's the only output\n gistlink = create_gist(gist_text)\n print('Profiling results posted to: ' + gistlink)\n set_status(COMMIT, 'success' if retval is 0 else 'failure', 'Completed', dem, gistlink)\n if retval != 0:\n failure_count += 1\n\nsys.exit(failure_count)\n"
},
{
"alpha_fraction": 0.6769230961799622,
"alphanum_fraction": 0.6769230961799622,
"avg_line_length": 15.25,
"blob_id": "9a1d6c742c2f026a90ddb0d41fd90629b89a9b41",
"content_id": "47cd1c21458c11f18c519a16dbbf862754a72bcc",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 65,
"license_type": "permissive",
"max_line_length": 43,
"num_lines": 4,
"path": "/ci/nuget_set_api_key.sh",
"repo_name": "JamesT-W/demoinfo",
"src_encoding": "UTF-8",
"text": "#!/bin/bash\nset -eu\n\nnuget setApiKey \"$NUGET_APIKEY\" > /dev/null\n"
},
{
"alpha_fraction": 0.671999990940094,
"alphanum_fraction": 0.7400000095367432,
"avg_line_length": 49,
"blob_id": "d7fd7ec46d67e1901666662e4b4bec3a9a2072d8",
"content_id": "6bd6785c587589609302c8e39c0cb1a562147bd3",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 250,
"license_type": "permissive",
"max_line_length": 109,
"num_lines": 5,
"path": "/ci/analyze.sh",
"repo_name": "JamesT-W/demoinfo",
"src_encoding": "UTF-8",
"text": "#!/bin/bash\n# adapted from http://stackoverflow.com/a/18686955/1590632\nout_fd=\"$1\"\nmprof-report --method-sort=self --traces --maxframes=3 \\\n --reports=header,jit,gc,thread,monitor,metadata,exception,sample,call,alloc,heapshot,counters - >&\"$out_fd\"\n"
},
{
"alpha_fraction": 0.6139944195747375,
"alphanum_fraction": 0.6232622861862183,
"avg_line_length": 27.773332595825195,
"blob_id": "a5533ab87baa2f8512e295e365b7e6cd96674160",
"content_id": "33d9b79b5ce871c03166530cc7f2fb00099c3daa",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C#",
"length_bytes": 2160,
"license_type": "permissive",
"max_line_length": 112,
"num_lines": 75,
"path": "/DevNullPlayer/Program.cs",
"repo_name": "JamesT-W/demoinfo",
"src_encoding": "UTF-8",
"text": "using System;\nusing System.IO;\nusing DemoInfo;\nusing System.Diagnostics;\nusing System.Collections.Generic;\n\nnamespace DevNullPlayer\n{\n\tclass MainClass\n\t{\n\t\tpublic static void Main(string[] args)\n\t\t{\n\n\t\t\tusing (var input = File.OpenRead(args[0])) {\n\t\t\t\tvar parser = new DemoParser(input);\n\t\t\t\t\n\t\t\t\tparser.ParseHeader ();\n\n\t\t\t\t#if DEBUG\n\t\t\t\tDictionary<Player, int> failures = new Dictionary<Player, int>();\n\t\t\t\tparser.TickDone += (sender, e) => {\n\t\t\t\t\t//Problem: The HP coming from CCSPlayerEvent are sent 1-4 ticks later\n\t\t\t\t\t//I guess this is because the think()-method of the CCSPlayerResource isn't called\n\t\t\t\t\t//that often. Haven't checked though.\n\t\t\t\t\tforeach(var p in parser.PlayingParticipants)\n\t\t\t\t\t{\n\t\t\t\t\t\t//Make sure the array is never empty ;)\n\t\t\t\t\t\tfailures[p] = failures.ContainsKey(p) ? failures[p] : 0;\n\n\t\t\t\t\t\tif(p.HP == p.AdditionaInformations.ScoreboardHP)\n\t\t\t\t\t\t\tfailures[p] = 0;\n\t\t\t\t\t\telse\n\t\t\t\t\t\t\tfailures[p]++; //omg this is hacky. \n\n\t\t\t\t\t\t//Okay, if it's wrong 2 seconds in a row, something's off\n\t\t\t\t\t\t//Since there should be a tick where it's right, right?\n\t\t\t\t\t\t//And if there's something off (e.g. two players are swapped)\n\t\t\t\t\t\t//there will be 2 seconds of ticks where it's wrong\n\t\t\t\t\t\t//So no problem here :)\n\t\t\t\t\t\tDebug.Assert(\n\t\t\t\t\t\t\tfailures[p] < parser.TickRate * 2, \n\t\t\t\t\t\t\tstring.Format(\n\t\t\t\t\t\t\t\t\"The player-HP({0}) of {2} (Clan: {3}) and it's Scoreboard HP ({1}) didn't match for {4} ticks. \", \n\t\t\t\t\t\t\t\tp.HP, p.AdditionaInformations.ScoreboardHP, p.Name, p.AdditionaInformations.Clantag, parser.TickRate * 2\n\t\t\t\t\t\t\t)\n\t\t\t\t\t\t);\n\n\t\t\t\t\t}\n\t\t\t\t};\n\n\t\t\t\t\n\n\t\t\t\tif (args.Length >= 2) {\n\t\t\t\t\t// progress reporting requested\n\t\t\t\t\tusing (var progressFile = File.OpenWrite(args[1]))\n\t\t\t\t\tusing (var progressWriter = new StreamWriter(progressFile) { AutoFlush = false }) {\n\t\t\t\t\t\tint lastPercentage = -1;\n\t\t\t\t\t\twhile (parser.ParseNextTick()) {\n\t\t\t\t\t\t\tvar newProgress = (int)(parser.ParsingProgess * 100);\n\t\t\t\t\t\t\tif (newProgress != lastPercentage) {\n\t\t\t\t\t\t\t\tprogressWriter.Write(lastPercentage = newProgress);\n\t\t\t\t\t\t\t\tprogressWriter.Flush();\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t\t#endif\n\t\t\t\t\n\t\t\t\tparser.ParseToEnd();\n\t\t\t}\n\t\t}\n\t}\n}\n"
},
{
"alpha_fraction": 0.6711041331291199,
"alphanum_fraction": 0.6773688197135925,
"avg_line_length": 21.38596534729004,
"blob_id": "1a01ce2917a414e65e877050527f7cac056c38d5",
"content_id": "f3fad2c9d442bbe7d836eaf6a84861ee1cd0adc1",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C#",
"length_bytes": 1279,
"license_type": "permissive",
"max_line_length": 88,
"num_lines": 57,
"path": "/Testing/AwkwardStream.cs",
"repo_name": "JamesT-W/demoinfo",
"src_encoding": "UTF-8",
"text": "using System;\nusing System.IO;\n\nnamespace Testing\n{\n\tpublic class AwkwardStream : Stream\n\t{\n\t\tprivate readonly Stream Underlying;\n\t\tprivate readonly Random Rng;\n\n\t\tpublic AwkwardStream(Stream underlying, Random rng)\n\t\t{\n\t\t\tUnderlying = underlying;\n\t\t\tRng = rng;\n\t\t}\n\n\t\tpublic override int Read(byte[] buffer, int offset, int count)\n\t\t{\n\t\t\t// 50% of all reads will return 1-4 bytes.\n\t\t\treturn Underlying.Read(buffer, offset, Rng.Next((Rng.Next(1) == 0) ? 4 : count) + 1);\n\t\t}\n\n\t\t#region Unsupported stuff\n\t\tpublic override void Flush()\n\t\t{\n\t\t}\n\t\tpublic override long Seek(long offset, SeekOrigin origin)\n\t\t{\n\t\t\tthrow new NotSupportedException();\n\t\t}\n\t\tpublic override void SetLength(long value)\n\t\t{\n\t\t\tthrow new NotSupportedException();\n\t\t}\n\t\tpublic override void Write(byte[] buffer, int offset, int count)\n\t\t{\n\t\t\tthrow new NotSupportedException();\n\t\t}\n\t\tpublic override bool CanRead { get { return true; } }\n\t\tpublic override bool CanSeek { get { return false; } }\n\t\tpublic override bool CanWrite { get { return false; } }\n\t\tpublic override long Length {\n\t\t\tget {\n\t\t\t\tthrow new NotSupportedException();\n\t\t\t}\n\t\t}\n\t\tpublic override long Position {\n\t\t\tget {\n\t\t\t\tthrow new NotSupportedException();\n\t\t\t}\n\t\t\tset {\n\t\t\t\tthrow new NotSupportedException();\n\t\t\t}\n\t\t}\n\t\t#endregion\n\t}\n}\n\n"
},
{
"alpha_fraction": 0.7351351380348206,
"alphanum_fraction": 0.745945930480957,
"avg_line_length": 25.428571701049805,
"blob_id": "68adadb53cb24100b325f56fa920b1b1b81289b7",
"content_id": "8add89a64e5a5848b8132cc7ca585cd6c9f92abe",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 185,
"license_type": "permissive",
"max_line_length": 106,
"num_lines": 7,
"path": "/ci/verify.sh",
"repo_name": "JamesT-W/demoinfo",
"src_encoding": "UTF-8",
"text": "set -eu\n\ndemofile=\"$1\"\noutput_pipe=\"$2\"\n\n# Bitstream-Debugging, no profiling.\nmono --optimize=all DevNullPlayer/bin/Debug/DevNullPlayer.exe \"testdemos/$demofile\" \"/dev/fd/$output_pipe\"\n"
},
{
"alpha_fraction": 0.6430816054344177,
"alphanum_fraction": 0.6719037890434265,
"avg_line_length": 24.076086044311523,
"blob_id": "41207346d6185522784a56bc03eb02028bc72e81",
"content_id": "be883d1da1c4677fb4236b40a8ace365743df565",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C#",
"length_bytes": 9231,
"license_type": "permissive",
"max_line_length": 135,
"num_lines": 368,
"path": "/Testing/TestBitstreams.cs",
"repo_name": "JamesT-W/demoinfo",
"src_encoding": "UTF-8",
"text": "using NUnit.Framework;\nusing System;\nusing System.IO;\nusing System.Linq;\n\nusing DemoInfo;\nusing DemoInfo.BitStreamImpl;\nusing System.Collections.Generic;\n\nnamespace Testing\n{\n\t[TestFixture]\n\tpublic class TestBitstreams\n\t{\n\t\tprivate Random rng;\n\t\tprivate byte[] data;\n\t\tprivate IBitStream dbgAll;\n\n\t\tprivate IBitStream CreateBS(byte[] data)\n\t\t{\n\t\t\tIBitStream managed = new ManagedBitStream(), @unsafe = new UnsafeBitStream();\n\t\t\tmanaged.Initialize(new AwkwardStream(new MemoryStream(data), rng));\n\t\t\[email protected](new AwkwardStream(new MemoryStream(data), rng));\n\t\t\treturn new DebugBitStream(new BitArrayStream(data), new DebugBitStream(managed, @unsafe));\n\t\t}\n\n\t\t[SetUp]\n\t\tpublic void Init()\n\t\t{\n\t\t\trng = new Random(1337);\n\t\t\tdata = new byte[128 * 1024]; // 128K\n\t\t\trng.NextBytes(data);\n\n\t\t\tdbgAll = CreateBS(data);\n\t\t}\n\n\t\t[TearDown]\n\t\tpublic void Dispose()\n\t\t{\n\t\t\trng = null;\n\t\t\tdata = null;\n\t\t\tdbgAll.Dispose();\n\t\t}\n\n\t\t[Test]\n\t\tpublic void TestReadInt()\n\t\t{\n\t\t\tint bitOffset = 0;\n\t\t\tint totalBits = data.Length * 8;\n\n\t\t\twhile (bitOffset < totalBits) {\n\t\t\t\tint thisTime = Math.Min(rng.Next(32) + 1, totalBits - bitOffset);\n\t\t\t\tdbgAll.ReadInt(thisTime);\n\t\t\t\tbitOffset += thisTime;\n\t\t\t}\n\t\t}\n\n\t\t[Test]\n\t\tpublic void TestReadSignedInt()\n\t\t{\n\t\t\tint bitOffset = 0;\n\t\t\tint totalBits = data.Length * 8;\n\n\t\t\twhile (bitOffset < totalBits) {\n\t\t\t\tint thisTime = Math.Min(rng.Next(32) + 1, totalBits - bitOffset);\n\t\t\t\tdbgAll.ReadSignedInt(thisTime);\n\t\t\t\tbitOffset += thisTime;\n\t\t\t}\n\t\t}\n\n\t\t[Test]\n\t\tpublic void TestReadByte()\n\t\t{\n\t\t\tint bitOffset = 0;\n\t\t\tint totalBits = data.Length * 8;\n\n\t\t\twhile (bitOffset < totalBits) {\n\t\t\t\tint thisTime = Math.Min(rng.Next(8) + 1, totalBits - bitOffset);\n\t\t\t\tdbgAll.ReadByte(thisTime);\n\t\t\t\tbitOffset += thisTime;\n\t\t\t}\n\t\t}\n\n\t\t[Test]\n\t\tpublic void TestReadBytes()\n\t\t{\n\t\t\tint offset = 0;\n\t\t\twhile (offset < data.Length) {\n\t\t\t\tint thisTime = rng.Next(data.Length - offset) + 1;\n\t\t\t\tdbgAll.ReadBytes(thisTime);\n\t\t\t\toffset += thisTime;\n\t\t\t}\n\t\t}\n\n\t\t[Test]\n\t\tpublic void TestReadBits()\n\t\t{\n\t\t\tint bitOffset = 0;\n\t\t\tint totalBits = data.Length * 8;\n\n\t\t\twhile (bitOffset < totalBits) {\n\t\t\t\tint thisTime = Math.Min(rng.Next(512) + 1, totalBits - bitOffset);\n\t\t\t\tdbgAll.ReadBits(thisTime);\n\t\t\t\tbitOffset += thisTime;\n\t\t\t}\n\t\t}\n\n\t\t[Test]\n\t\tpublic void TestVarintDecodingPositive()\n\t\t{\n\t\t\tAssert.AreEqual(0, CreateBS(new byte[] { 0 }).ReadProtobufVarInt());\n\t\t\tAssert.AreEqual(1, CreateBS(new byte[] { 1 }).ReadProtobufVarInt());\n\t\t\tAssert.AreEqual(150, CreateBS(new byte[] { 0x96, 0x01 }).ReadProtobufVarInt());\n\t\t\tAssert.AreEqual(300, CreateBS(new byte[] { 172, 2 }).ReadProtobufVarInt());\n\t\t\tAssert.AreEqual(200000000, CreateBS(new byte[] { 0x80, 0x84, 0xaf, 0x5f }).ReadProtobufVarInt());\n\t\t\tAssert.AreEqual(2000000000, CreateBS(new byte[] { 0x80, 0xa8, 0xd6, 0xb9, 7 }).ReadProtobufVarInt());\n\t\t}\n\n\t\t[Test]\n\t\tpublic void TestVarintDecodingNegative()\n\t\t{\n\t\t\tAssert.AreEqual(-1, CreateBS(new byte[] { 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 1 }).ReadProtobufVarInt());\n\t\t\tAssert.AreEqual(-200000000, CreateBS(new byte[] { 0x80, 0xfc, 0xd0, 0xa0, 0xff, 0xff, 0xff, 0xff, 0xff, 1 
}).ReadProtobufVarInt());\n\t\t\tAssert.AreEqual(-2000000000, CreateBS(new byte[] { 0x80, 0xd8, 0xa9, 0xc6, 0xf8, 0xff, 0xff, 0xff, 0xff, 1 }).ReadProtobufVarInt());\n\t\t}\n\n\t\t[Test]\n\t\tpublic void TestBasicChunking()\n\t\t{\n\t\t\tAssert.AreEqual(data.First(), dbgAll.ReadByte());\n\t\t\tdbgAll.BeginChunk((128 * 1024 - 2) * 8);\n\t\t\tAssert.IsFalse(dbgAll.ChunkFinished);\n\t\t\tdbgAll.ReadBytes(128 * 1024 - 2);\n\t\t\tAssert.IsTrue(dbgAll.ChunkFinished);\n\t\t\tdbgAll.EndChunk();\n\t\t\tAssert.AreEqual(data.Last(), dbgAll.ReadByte());\n\t\t}\n\n\t\t[Test]\n\t\tpublic void TestChunkSkippingSmall()\n\t\t{\n\t\t\tdbgAll.BeginChunk(1);\n\t\t\tAssert.IsFalse(dbgAll.ChunkFinished);\n\t\t\tdbgAll.EndChunk();\n\t\t\tdbgAll.ReadByte();\n\t\t}\n\n\t\t[Test]\n\t\tpublic void TestChunkSkippingPartial()\n\t\t{\n\t\t\tdbgAll.BeginChunk(2);\n\t\t\tAssert.IsFalse(dbgAll.ChunkFinished);\n\t\t\tdbgAll.ReadByte(1);\n\t\t\tAssert.IsFalse(dbgAll.ChunkFinished);\n\t\t\tdbgAll.EndChunk();\n\t\t\tdbgAll.ReadByte();\n\t\t}\n\n\t\t[Test]\n\t\tpublic void TestChunkSkippingLarge()\n\t\t{\n\t\t\tdbgAll.BeginChunk(4097 * 8);\n\t\t\tAssert.IsFalse(dbgAll.ChunkFinished);\n\t\t\tdbgAll.EndChunk();\n\t\t\tdbgAll.ReadByte();\n\n\t\t\tdbgAll.BeginChunk(4096 * 8);\n\t\t\tdbgAll.ReadByte();\n\t\t\tAssert.IsFalse(dbgAll.ChunkFinished);\n\t\t\tdbgAll.EndChunk();\n\t\t\tdbgAll.ReadByte();\n\t\t}\n\n\t\t[Test]\n\t\tpublic void TestChunkSkippingLargePartial()\n\t\t{\n\t\t\tdbgAll.BeginChunk(8193 * 8);\n\t\t\tAssert.IsFalse(dbgAll.ChunkFinished);\n\t\t\tdbgAll.ReadBytes(4097);\n\t\t\tdbgAll.EndChunk();\n\t\t\tdbgAll.ReadByte();\n\t\t}\n\n\t\t[Test]\n\t\tpublic void TestChunkSkippingRandom()\n\t\t{\n\t\t\tint bitOffset = 0;\n\t\t\tint totalBits = data.Length * 8;\n\n\t\t\twhile (bitOffset < totalBits - 16) {\n\t\t\t\tint thisTime = Math.Min(rng.Next(4096) + 16, totalBits - bitOffset - 8);\n\t\t\t\tdbgAll.BeginChunk(thisTime);\n\t\t\t\tdbgAll.ReadByte();\n\t\t\t\tdbgAll.EndChunk();\n\t\t\t\tdbgAll.ReadByte();\n\t\t\t\tbitOffset += thisTime + 8;\n\t\t\t}\n\t\t}\n\n\t\t[Test]\n\t\tpublic void TestChunkSkippingRandomExhaustive()\n\t\t{\n\t\t\ttry {\n\t\t\t\tTestChunkSkippingRandom();\n\t\t\t} catch (Exception) {\n\t\t\t\tAssert.Inconclusive(\"Go fix TestChunkSkippingRandom()! 
I'll wait here for you!\");\n\t\t\t}\n\n\t\t\ttry { dbgAll.ReadBit(); }\n\t\t\tcatch (Exception) {\n\t\t\t\t// everything fine\n\t\t\t\treturn;\n\t\t\t}\n\t\t\tAssert.Fail(\"Should have thrown\");\n\t\t}\n\n\t\t[Test]\n\t\tpublic void TestChunkNesting()\n\t\t{\n\t\t\tdbgAll.BeginChunk(3);\n\t\t\tAssert.IsFalse(dbgAll.ChunkFinished);\n\t\t\tdbgAll.ReadBit();\n\t\t\tAssert.IsFalse(dbgAll.ChunkFinished);\n\t\t\tdbgAll.BeginChunk(1);\n\t\t\tAssert.IsFalse(dbgAll.ChunkFinished);\n\t\t\tdbgAll.EndChunk();\n\t\t\tAssert.IsFalse(dbgAll.ChunkFinished);\n\t\t\tdbgAll.ReadBit();\n\t\t\tAssert.IsTrue(dbgAll.ChunkFinished);\n\t\t\tdbgAll.EndChunk();\n\t\t}\n\n\t\t[Test]\n\t\tpublic void TestChunkNestingComplex()\n\t\t{\n\t\t\tdbgAll.BeginChunk(5);\n\t\t\tAssert.IsFalse(dbgAll.ChunkFinished);\n\t\t\tdbgAll.BeginChunk(1);\n\t\t\tAssert.IsFalse(dbgAll.ChunkFinished);\n\t\t\tdbgAll.EndChunk();\n\t\t\tAssert.IsFalse(dbgAll.ChunkFinished);\n\t\t\tdbgAll.BeginChunk(4);\n\t\t\tAssert.IsFalse(dbgAll.ChunkFinished);\n\t\t\tdbgAll.BeginChunk(1);\n\t\t\tAssert.IsFalse(dbgAll.ChunkFinished);\n\t\t\tdbgAll.EndChunk();\n\t\t\tAssert.IsFalse(dbgAll.ChunkFinished);\n\t\t\tdbgAll.BeginChunk(3);\n\t\t\tAssert.IsFalse(dbgAll.ChunkFinished);\n\t\t\tdbgAll.BeginChunk(1);\n\t\t\tAssert.IsFalse(dbgAll.ChunkFinished);\n\t\t\tdbgAll.EndChunk();\n\t\t\tAssert.IsFalse(dbgAll.ChunkFinished);\n\t\t\tdbgAll.BeginChunk(2);\n\t\t\tAssert.IsFalse(dbgAll.ChunkFinished);\n\t\t\tdbgAll.BeginChunk(1);\n\t\t\tAssert.IsFalse(dbgAll.ChunkFinished);\n\t\t\tdbgAll.EndChunk();\n\t\t\tAssert.IsFalse(dbgAll.ChunkFinished);\n\t\t\tdbgAll.BeginChunk(1);\n\t\t\tAssert.IsFalse(dbgAll.ChunkFinished);\n\t\t\tdbgAll.BeginChunk(1);\n\t\t\tAssert.IsFalse(dbgAll.ChunkFinished);\n\t\t\tdbgAll.EndChunk();\n\n\t\t\tAssert.IsTrue(dbgAll.ChunkFinished);\n\t\t\tdbgAll.EndChunk();\n\t\t\tAssert.IsTrue(dbgAll.ChunkFinished);\n\t\t\tdbgAll.EndChunk();\n\t\t\tAssert.IsTrue(dbgAll.ChunkFinished);\n\t\t\tdbgAll.EndChunk();\n\t\t\tAssert.IsTrue(dbgAll.ChunkFinished);\n\t\t\tdbgAll.EndChunk();\n\t\t\tAssert.IsTrue(dbgAll.ChunkFinished);\n\t\t\tdbgAll.EndChunk();\n\t\t}\n\n\t\t[Test]\n\t\tpublic void TestChunkNestingCompletely()\n\t\t{\n\t\t\tdbgAll.BeginChunk(8192 * 8);\n\t\t\tdbgAll.BeginChunk(4096 * 8);\n\t\t\tdbgAll.BeginChunk(4096 * 8);\n\t\t\tAssert.IsFalse(dbgAll.ChunkFinished);\n\t\t\tdbgAll.EndChunk();\n\t\t\tAssert.IsTrue(dbgAll.ChunkFinished);\n\t\t\tdbgAll.EndChunk();\n\t\t\tdbgAll.ReadBytes(4096);\n\t\t\tdbgAll.EndChunk();\n\t\t\tdbgAll.ReadBytes(4096);\n\t\t}\n\n\t\t[Test]\n\t\tpublic void TestChunkNestingRandom()\n\t\t{\n\t\t\tStack<int> remainingStack = new Stack<int>();\n\t\t\tint depth = 0, remaining = data.Length * 8 - 1;\n\t\t\tdbgAll.BeginChunk(remaining);\n\n\t\t\twhile ((remainingStack.Count > 0) || (remaining > 0)) {\n\t\t\t\tswitch (rng.Next(2 + ((remainingStack.Count > 0) ? 
1 : 0))) {\n\t\t\t\tcase 0: // begin new chunk\n\t\t\t\t\tint chunksize = Math.Min(rng.Next(5000 * 8), remaining);\n\t\t\t\t\tdbgAll.BeginChunk(chunksize);\n\t\t\t\t\tremainingStack.Push(remaining - chunksize);\n\t\t\t\t\tremaining = chunksize;\n\t\t\t\t\tbreak;\n\t\t\t\tcase 1: // read stuff\n\t\t\t\t\tint blocksize = Math.Min(rng.Next(5000 * 8), remaining);\n\t\t\t\t\tdbgAll.ReadBits(blocksize);\n\t\t\t\t\tremaining -= blocksize;\n\t\t\t\t\tbreak;\n\t\t\t\tcase 2: // end current chunk\n\t\t\t\t\tdbgAll.EndChunk();\n\t\t\t\t\tremaining = remainingStack.Pop();\n\t\t\t\t\tbreak;\n\t\t\t\tdefault:\n\t\t\t\t\tthrow new NotImplementedException();\n\t\t\t\t}\n\t\t\t}\n\t\t\t// tear down current depth\n\t\t\tfor (int i = 0; i < depth; i++)\n\t\t\t\tdbgAll.EndChunk();\n\n\t\t\tdbgAll.EndChunk();\n\t\t\tdbgAll.ReadBit();\n\t\t}\n\n\t\t[Test]\n\t\tpublic void TestChunkNestingRandomExhaustive()\n\t\t{\n\t\t\ttry {\n\t\t\t\tTestChunkNestingRandom();\n\t\t\t} catch (Exception) {\n\t\t\t\tAssert.Inconclusive(\"Go fix TestChunkNestingRandom()! I'll wait here for you!\");\n\t\t\t}\n\n\t\t\ttry { dbgAll.ReadBit(); }\n\t\t\tcatch (Exception) {\n\t\t\t\t// everything all right\n\t\t\t\treturn;\n\t\t\t}\n\t\t\tAssert.Fail(\"Should have thrown\");\n\t\t}\n\n\t\t[Test]\n\t\tpublic void TestStreamEndChunkBoundary()\n\t\t{\n\t\t\tdbgAll.BeginChunk(data.Length * 8);\n\t\t\tAssert.IsFalse(dbgAll.ChunkFinished);\n\t\t\tdbgAll.BeginChunk(data.Length * 8);\n\t\t\tAssert.IsFalse(dbgAll.ChunkFinished);\n\t\t\tdbgAll.EndChunk();\n\t\t\tAssert.IsTrue(dbgAll.ChunkFinished);\n\t\t\tdbgAll.EndChunk();\n\t\t}\n\n\t\t[Test]\n\t\tpublic void TestStreamEndChunkBoundaryData()\n\t\t{\n\t\t\tdbgAll.BeginChunk(data.Length * 8);\n\t\t\tAssert.IsFalse(dbgAll.ChunkFinished);\n\t\t\tdbgAll.ReadBytes(data.Length);\n\t\t\tAssert.IsTrue(dbgAll.ChunkFinished);\n\t\t\tdbgAll.EndChunk();\n\t\t}\n\t}\n}\n\n"
}
] | 8 |
PinkytheDev/MagicNoobBot | https://github.com/PinkytheDev/MagicNoobBot | ec016039e9e23d038fe6117047b14cee325cf5d0 | acfef73af06df6994e5eb3e02bc3d6c45a9f6e98 | 961750bf774214cd1711bc48bc1c92da9c100760 | refs/heads/master | 2020-04-22T14:24:47.462607 | 2019-05-22T22:45:44 | 2019-05-22T22:45:44 | 170,442,617 | 2 | 4 | null | null | null | null | null | [
{
"alpha_fraction": 0.828125,
"alphanum_fraction": 0.828125,
"avg_line_length": 9.666666984558105,
"blob_id": "addb0fc3f60292d075df79038f759ee499d07a9f",
"content_id": "02ec31a1ee7cba0912bc5ff974ea3390f5e2bba9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 64,
"license_type": "no_license",
"max_line_length": 17,
"num_lines": 6,
"path": "/requirements.txt",
"repo_name": "PinkytheDev/MagicNoobBot",
"src_encoding": "UTF-8",
"text": "discord.py\ndiscord.py[voice]\nyoutube_dl\nrequests\nasyncio\nPyNaCl\n"
},
{
"alpha_fraction": 0.6833255290985107,
"alphanum_fraction": 0.6855441331863403,
"avg_line_length": 30.71851921081543,
"blob_id": "d3af6b2eee84b2c2f1c1863ec755ad0a5242fa40",
"content_id": "31543d5fb1fa64c81ff0c24469e99f570b001121",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 8564,
"license_type": "no_license",
"max_line_length": 140,
"num_lines": 270,
"path": "/prefixbot.py",
"repo_name": "PinkytheDev/MagicNoobBot",
"src_encoding": "UTF-8",
"text": "import discord\nimport youtube_dl\nimport requests\nimport random\nimport asyncio\nimport time\nimport aiohttp\nimport json\nimport os\nfrom discord import Game\nfrom discord.ext import commands\n\nclient = commands.Bot(command_prefix='.m')\nclient.remove_command('help')\n\nplayers = {}\nqueues = {}\n\ndef check_queue(id):\n if queues[id] != []:\n player = queues[id].pop(0)\n players[id] = player\n player.start()\n\[email protected]\nasync def on_ready():\n print('The bot is ready!')\n print('Logged in as')\n print(client.user.name)\n servers = client.servers\n await client.change_presence(game=discord.Game(name='over {} servers | .mhelp'.format(len(servers)),type = 3))\n\[email protected]\nasync def on_message(message):\n author = message.author\n content = message.content\n print('{}: {}'.format(author, content))\n await client.process_commands(message)\n\n\[email protected]\nasync def on_member_join(member):\n roles = discord.utils.get(member.server.roles, name='Members')\n await client.add_roles(member, role)\n await client.say(\n \"Hey! Server Owner. But if you don't have **Members** role. Then, I prefer you to add it to make this function work.\")\n await client.process_commands(message)\n \n\t\t\[email protected]\nasync def on_message_delete(message):\n author = message.author\n content = message.content\n channel = message.channel\n print('Deleted Message > {}: {}'.format(author, content))\n await client.process_commands(message)\n\n\[email protected]\nasync def on_reaction_add(reaction, user):\n channel = reaction.message.channel\n await client.send_message(channel, '{} has added {} to the message: {}'.format(user.name, reaction.emoji ,reaction.message.content))\n await client.process_commands(message)\n\n\[email protected]\nasync def on_reaction_remove(reaction, user):\n channel = reaction.message.channel\n await client.send_message(channel, '{} has removed {} from the message: {}'.format(user.name, reaction.emoji, reaction.message.content))\n await client.process_commands(message)\n\[email protected](pass_context=True)\nasync def ping():\n await client.send_message(f\"Pong, {round(client.latency * 1000)}ms\")\n\[email protected](pass_context=True)\nasync def join(ctx):\n channel = ctx.message.author.voice.voice_channel\n await client.join_voice_channel(channel)\n\[email protected](pass_context=True)\nasync def leave(ctx):\n server = ctx.message.server\n voice_client = client.voice_client_in(server)\n await voice_client.disconnect()\n\[email protected](pass_context=True)\nasync def play(ctx, url):\n server = ctx.message.server\n voice_client = client.voice_client_in(server)\n player = await voice_client.create_ytdl_player(url, after=lambda: check_queue(server.id))\n players[server.id] = player\n player.start()\n\[email protected](pass_context=True)\nasync def pause(ctx):\n id = ctx.message.server.id\n players[id].pause()\n\[email protected](pass_context=True)\nasync def stop(ctx):\n id = ctx.message.server.id\n players[id].stop()\n\[email protected](pass_context=True)\nasync def resume(ctx):\n id = ctx.message.server.id\n players[id].resume()\n\[email protected](pass_context=True)\nasync def queue(ctx, url):\n server = ctx.message.server\n voice_client = client.voice_client_in(server)\n player = await voice_client.create_ytdl_player(url, after=lambda: check_queue(server.id))\n if server.id in queues:\n queues[server.id].append(player)\n else:\n queues[server.id] = [player]\n await client.say('Video queued.')\n\[email protected]()\nasync def infobot():\n await 
client.say('Name: MagicNoob')\n await client.say('Role: Supreme')\n await client.say('Owner: Join My Minecraft Server or Mr Noob Pink')\n await client.say('Main Server: MagicNoob')\n\n\[email protected]()\[email protected]_permissions(administrator=True)\nasync def say(*args):\n output = ''\n for word in args:\n output += word\n output += ' '\n await client.say(output)\n\n\[email protected](pass_context=True)\[email protected]_permissions(manage_messages=True)\nasync def clear(ctx, amount=100):\n channel = ctx.message.channel\n messages = []\n async for message in client.logs_from(channel, limit=int(amount) + 1):\n messages.append(message)\n await client.delete_messages(messages)\n await client.say('Deleted Message(s)')\n \[email protected](pass_context=True)\[email protected]_permissions(ban_members=True)\nasync def ban(ctx, userName: discord.User):\n\ttry:\n\t\tawait client.ban(userName)\n\t\tawait client.say(\"Successfully, Banned user!\")\n\texcept:\n\t\tawait client.say(\"Failed to ban user!\")\n\t\t\[email protected](pass_context=True)\[email protected]_permissions(ban_members=True)\nasync def unban(ctx, userName: discord.User):\n\ttry:\n\t\tawait client.unban(userName)\n\t\tawait client.say(\"Successfully, Unbanned user!\")\n\texcept:\n\t\tawait client.say(\"Failed to unban user!\")\n\[email protected](pass_context=True)\[email protected]_permissions(kick_members=True)\nasync def kick(ctx, userName: discord.User):\n\ttry:\n\t\tawait client.kick(userName)\n\t\tawait client.say(\"Successfully, Kicked user!\")\n\texcept:\n\t\tawait client.say(\"Failed to kick user!\")\n\n\[email protected](name='8ball',\n description=\"Answers yes/no\",\n brief=\"Answers from the beyond. Do .m8ball for more info\",\n aliases=['eight_ball', 'eightball', '8-ball'],\n pass_context=True)\nasync def eight_ball(context):\n possible_responses = [\n 'That is a resounding no',\n 'It is not looking likely',\n 'Too hard to tell',\n 'It is quite possible',\n 'Definitely',\n 'No',\n 'This question is shit',\n 'Fuck U',\n 'Yes',\n 'Hmmm???',\n 'I dont think so!',\n 'You Know What BYE!',\n 'I dont know',\n ]\n await client.say(random.choice(possible_responses) + \", \" + context.message.author.mention)\n\n\n# Math Addition #\n\[email protected]()\nasync def add(left: int, right: int):\n \"\"\"Adds Two Numbers\"\"\"\n await client.say(left + right)\n\n\n# Math Subtraction #\n\[email protected]()\nasync def subtract(left: int, right: int):\n \"\"\"Subtracts Two Numbers.\"\"\"\n await client.say(left - right)\n\n\n# Math Multiplication #\n\[email protected]()\nasync def multiply(left: int, right: int):\n \"\"\"Multiplies Numbers.\"\"\"\n await client.say(left * right)\n\n\n# Math Division #\n\[email protected]()\nasync def divide(left: int, right: int):\n \"\"\"Divides Numbers!\"\"\"\n try:\n await client.say(left // right)\n except ZeroDivisionError:\n await client.say(\"Numbers can't be divided by ZERO\")\n\n\[email protected](pass_context=True)\nasync def help(ctx):\n author = ctx.message.author\n channel = ctx.message.channel\n\n embed = discord.Embed(\n colour=discord.Colour.green()\n )\n embed.set_author(name='Help')\n embed.add_field(name='infobot', value='Gives bot information', inline=False)\n embed.add_field(name='8ball', value='Gives 8ball Messages', inline=False)\n embed.add_field(name='join', value='Makes the bot join the voice channel where you are in', inline=False)\n embed.add_field(name='leave', value='Makes the bot leave the voice channel', inline=False)\n embed.add_field(name='play', value='Plays an 
Audio. Usage: .mplay (Youtube Video Url)', inline=False)\n embed.add_field(name='pause', value='Pauses the Audio', inline=False)\n embed.add_field(name='stop', value='Stops the Audio', inline=False)\n embed.add_field(name='resume', value='Resumes the Audio', inline=False)\n embed.add_field(name='queue', value='Queues an Audio. Usage: .mqueue (Youtube Url)', inline=False)\n embed.add_field(name='add', value='Adds 2 numbers', inline=False)\n embed.add_field(name='subtract', value='Subtracts 2 numbers', inline=False)\n embed.add_field(name='multiply', value='Multiplies 2 numbers', inline=False)\n embed.add_field(name='divide', value='Divides 2 numbers', inline=False)\n embed.add_field(name='ping', value='Gives you latency in milliseconds', inline=False)\n embed.add_field(name='say', value='Repeats what you have said after .msay', inline=False)\n embed.add_field(name='clear', value='Clears Messages', inline=False)\n embed.add_field(name='ban', value='Bans a user', inline=False)\n embed.add_field(name='unban', value='Unbans a user', inline=False)\n embed.add_field(name='kick', value='Kicks a user', inline=False)\n embed.add_field(name='Prefix', value='= .m', inline=False)\n\n await client.send_message(author, embed=embed)\n await client.send_message(channel, embed=embed)\n\n\nclient.run(str(os.environ.get('BOT_TOKEN')))\n"
}
] | 2 |
Kardev07/Self-Driving-Car-using-Python | https://github.com/Kardev07/Self-Driving-Car-using-Python | b3ffa6341937979a33c2f56647b18c351715319d | 3d0fadf390908651ddb248442bbcfe4e17724507 | 3faaadf782596af8f4b9ac13618399f9fb582c08 | refs/heads/main | 2023-06-05T02:12:48.641136 | 2021-06-22T04:23:23 | 2021-06-22T04:23:23 | 379,140,123 | 1 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6216505765914917,
"alphanum_fraction": 0.6463022232055664,
"avg_line_length": 20.261905670166016,
"blob_id": "2d7035cf9c49b841de1e9057bc2748338d3c821a",
"content_id": "d081a53c17052214b331280e12e58d584fa4eefa",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 933,
"license_type": "no_license",
"max_line_length": 66,
"num_lines": 42,
"path": "/Self Driving Car in Python/Car_and_Pedestrian_Tracking.py",
"repo_name": "Kardev07/Self-Driving-Car-using-Python",
"src_encoding": "UTF-8",
"text": "#import opencv(cv2)\r\nimport cv2\r\n\r\n#image\r\nimg_file = 'hello.jpg'\r\nvideo = cv2.VideoCapture('Tesla Dashcam Accident.mp4')\r\n\r\n#getting the algo\r\ncar_tracker_file = 'cars.xml'\r\n\r\n#create car classifier\r\ncar_tracker = cv2.CascadeClassifier(car_tracker_file)\r\n\r\nwhile True:\r\n (read_successful, frame) = video.read()\r\n\r\n if read_successful:\r\n grayscaled_frame = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)\r\n\r\n else:\r\n break\r\n\r\n #detecting cars\r\n cars = car_tracker.detectMultiScale(grayscaled_frame)\r\n\r\n for (x,y,w,h) in cars:\r\n cv2.rectangle(frame, (x,y), (x+w, y+h), (0,0,255), 2)\r\n\r\n #displaying the image\r\n cv2.imshow('Self Driving Car in Python', frame)\r\n\r\n #method for the file to not close automatically\r\n key = cv2.waitKey(1)\r\n\r\n if key==81 or key==113:\r\n break\r\n\r\n#release the videocapture object\r\nvideo.release()\r\n\r\n#code completed\r\nprint(\"Here is your self driving car\")"
},
{
"alpha_fraction": 0.8064516186714172,
"alphanum_fraction": 0.8064516186714172,
"avg_line_length": 31,
"blob_id": "a9c04ab1af13372626a5d4996b7546bc28e8af28",
"content_id": "2d34ff4d4851450f14cde7cb9d6ec025f4610820",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 31,
"license_type": "no_license",
"max_line_length": 31,
"num_lines": 1,
"path": "/README.md",
"repo_name": "Kardev07/Self-Driving-Car-using-Python",
"src_encoding": "UTF-8",
"text": "# Self-Driving-Car-using-Python"
}
] | 2 |
Renok/kitchen | https://github.com/Renok/kitchen | ef080b435711ea0b3f7678d4f05b7fe2204e2d70 | 53eedbd3d8c0c23a452eb50f50113400c1d72142 | ebd7985a39526fab5c84cc419fba6bd90e2b4f43 | refs/heads/master | 2017-12-05T16:24:30.343451 | 2017-07-17T00:11:23 | 2017-07-17T00:11:23 | 80,233,534 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.38549938797950745,
"alphanum_fraction": 0.39114081859588623,
"avg_line_length": 37.849998474121094,
"blob_id": "69ca9c60f05f0f8723b07d70d3d824fb4a85bd8f",
"content_id": "bbe525435d8f758a61f20dd98be051e93867b50f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 4817,
"license_type": "no_license",
"max_line_length": 108,
"num_lines": 120,
"path": "/kitchen/gallery/static/gallery/js/custom.js",
"repo_name": "Renok/kitchen",
"src_encoding": "UTF-8",
"text": "\r\n\r\n/*=============================================================\r\n Authour URI: www.binarytheme.com\r\n License: Commons Attribution 3.0\r\n\r\n http://creativecommons.org/licenses/by/3.0/\r\n\r\n 100% To use For Personal And Commercial Use.\r\n IN EXCHANGE JUST GIVE US CREDITS AND TELL YOUR FRIENDS ABOUT US\r\n\r\n ======================================================== */\r\n\r\n(function ($) {\r\n \"use strict\";\r\n var mainApp = {\r\n\r\n main_fun: function () {\r\n /*====================================\r\n CUSTOM LINKS SCROLLING FUNCTION\r\n ======================================*/\r\n\r\n $('header a[href*=#]').click(function () {\r\n if (location.pathname.replace(/^\\//, '') == this.pathname.replace(/^\\//, '')\r\n && location.hostname == this.hostname) {\r\n var $target = $(this.hash);\r\n $target = $target.length && $target\r\n || $('[name=' + this.hash.slice(1) + ']');\r\n if ($target.length) {\r\n var targetOffset = $target.offset().top;\r\n $('html,body')\r\n .animate({ scrollTop: targetOffset }, 800); //set scroll speed here\r\n return false;\r\n }\r\n }\r\n });\r\n\r\n\r\n /*====================================\r\n WRITE YOUR SCRIPTS BELOW\r\n ======================================*/\r\n\r\n $('.album').masonry({\r\n columnWidth: '.kitchen-box',\r\n itemSelector: '.kitchen-box',\r\n transitionDuration: '0.5s'\r\n });\r\n\r\n $('.gallery-filters a').on('click', function(e){\r\n e.preventDefault();\r\n if(!$(this).hasClass('active')) {\r\n $('.gallery-filters a').removeClass('active');\r\n var clicked_filter = $(this).attr('class').replace('filter-', '');\r\n $(this).addClass('active');\r\n if(clicked_filter != 'all') {\r\n $('.kitchen-box:not(.' + clicked_filter + ')').css('display', 'none');\r\n $('.kitchen-box:not(.' + clicked_filter + ')').removeClass('kitchen-box');\r\n $('.' + clicked_filter).addClass('kitchen-box');\r\n $('.' 
+ clicked_filter).css('display', 'block');\r\n $('.album').masonry();\r\n }\r\n else {\r\n $('.album > div').addClass('kitchen-box');\r\n $('.album > div').css('display', 'block');\r\n $('.album').masonry();\r\n }\r\n }\r\n });\r\n\r\n $('#top-content').backstretch('../../../static/gallery/img/backgrounds/head.jpg');\r\n $('.counters').backstretch('../../../static/gallery/img/backgrounds/wall.jpg');\r\n\r\n $(window).on('resize', function(){ $('.album').masonry(); });\r\n\r\n $('.kitchen').magnificPopup({\r\n type: 'image',\r\n gallery: {\r\n enabled: true,\r\n navigateByImgClick: true,\r\n preload: [0,1] // Will preload 0 - before current, and 1 after the current image\r\n },\r\n image: {\r\n tError: 'Изображение не может быть загружено.',\r\n titleSrc: function(item) {\r\n return item.el.find('.kitchen-description').find('p').text();\r\n }\r\n },\r\n callbacks: {\r\n elementParse: function(item) {\r\n item.src = item.el.find('img').attr('src');\r\n }\r\n }\r\n });\r\n\r\n $('.testimonial-active').html('<p>' + $('.testimonial-single:first p').html() + '</p>');\r\n $('.testimonial-single:first .testimonial-single-image img').css('opacity', '1');\r\n\r\n $('.testimonial-single-image img').on('click', function() {\r\n $('.testimonial-single-image img').css('opacity', '0.5');\r\n $(this).css('opacity', '1');\r\n var new_testimonial_text = $(this).parent('.testimonial-single-image').siblings('p').html();\r\n $('.testimonial-active p').fadeOut(300, function() {\r\n $(this).html(new_testimonial_text);\r\n $(this).fadeIn(400);\r\n });\r\n });\r\n\r\n },\r\n\r\n initialization: function () {\r\n mainApp.main_fun();\r\n\r\n }\r\n\r\n };\r\n // Initializing ///\r\n\r\n $(document).ready(function () {\r\n mainApp.main_fun();\r\n });\r\n\r\n}(jQuery));\r\n"
},
{
"alpha_fraction": 0.635814905166626,
"alphanum_fraction": 0.6428571343421936,
"avg_line_length": 33.27586364746094,
"blob_id": "5c8f9e71e068e5c05b91d20ccc8fe03c68238647",
"content_id": "f46b9f47ca6e6b71787bf812d2e6a26027d4e049",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1028,
"license_type": "no_license",
"max_line_length": 117,
"num_lines": 29,
"path": "/kitchen/gallery/forms.py",
"repo_name": "Renok/kitchen",
"src_encoding": "UTF-8",
"text": "from django import forms\nfrom django.core.mail import send_mail\n\nfrom django.conf import settings\n\nfrom .models import Kitchen\n\n\nclass ContactForm(forms.Form):\n name = forms.CharField(max_length=20, label=\"\", widget=forms.TextInput(attrs={\"placeholder\": \"Ваше имя\"}))\n phone = forms.CharField(max_length=20, label=\"\", widget=forms.TextInput(attrs={\"placeholder\": \"Номер телефона\"}))\n message = forms.CharField(max_length=300, label=\"\", required=False,\n widget=forms.Textarea(attrs={\"placeholder\": \"Сообщение\"}))\n\n def create_message(self):\n message = \"\"\n for field in self.cleaned_data:\n message += field + \": \" + self.cleaned_data[field] + \"\\n\"\n return message\n\n def send_mail(self):\n message = self.create_message()\n send_mail(\"Заказ\", message, settings.EMAIL_HOST_USER, [\"[email protected]\"])\n\n\nclass SearchForm(forms.ModelForm):\n class Meta:\n model = Kitchen\n fields = [\"styles\", \"materials\"]\n"
},
{
"alpha_fraction": 0.5683975219726562,
"alphanum_fraction": 0.5845896005630493,
"avg_line_length": 44.92307662963867,
"blob_id": "90f5ee70671ca284ecf974351f1750372bd5985d",
"content_id": "d5f7c62462f71564a00414ceaf7dfc80ba449149",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1881,
"license_type": "no_license",
"max_line_length": 271,
"num_lines": 39,
"path": "/kitchen/gallery/migrations/0001_initial.py",
"repo_name": "Renok/kitchen",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.10.5 on 2017-07-15 23:58\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\nimport multiselectfield.db.fields\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n ]\n\n operations = [\n migrations.CreateModel(\n name='Kitchen',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('name', models.CharField(max_length=20)),\n ('description', models.TextField(blank=True, max_length=1000)),\n ('styles', multiselectfield.db.fields.MultiSelectField(choices=[('classic', 'Классика'), ('modern', 'Модерн'), ('provence', 'Прованс'), ('country', 'Кантри'), ('england', 'Английский'), ('italian', 'Итальянский'), ('neo', 'Неоклассика')], max_length=51)),\n ('materials', multiselectfield.db.fields.MultiSelectField(choices=[('block', 'Массив'), ('enamel', 'Эмаль'), ('veneer', 'Шпон'), ('mdf', 'МДФ'), ('plastic', 'Пластик'), ('membrane', 'Пленка')], max_length=40)),\n ('price', models.IntegerField(blank=True, null=True)),\n ],\n ),\n migrations.CreateModel(\n name='KitchenImage',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('name', models.CharField(max_length=30)),\n ('image', models.ImageField(blank=True, upload_to='')),\n ('promo', models.BooleanField()),\n ('kitchen', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='gallery.Kitchen')),\n ],\n ),\n ]\n"
},
{
"alpha_fraction": 0.6438508629798889,
"alphanum_fraction": 0.644407331943512,
"avg_line_length": 29.982759475708008,
"blob_id": "dab4c573b6c1bc52dbf1e909b17f327529f9f79d",
"content_id": "10938f08f919769e4872828d1d27edb42fee2a30",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1797,
"license_type": "no_license",
"max_line_length": 69,
"num_lines": 58,
"path": "/kitchen/gallery/views.py",
"repo_name": "Renok/kitchen",
"src_encoding": "UTF-8",
"text": "from django.views.generic import FormView, ListView, TemplateView\nfrom django.db.models import Q\n\nfrom .models import Kitchen\nfrom .forms import ContactForm, SearchForm\n\n\nclass ContactView(FormView):\n form_class = ContactForm\n success_url = \"/thanks/\"\n\n def form_valid(self, form):\n form.send_mail()\n return super(ContactView, self).form_valid(form)\n\n\nclass GalleryView(ContactView):\n template_name = \"gallery/index.html\"\n\n def get_context_data(self, **kwargs):\n context = super(GalleryView, self).get_context_data(**kwargs)\n context[\"kitchens\"] = Kitchen.objects.all()\n return context\n\n\nclass CatalogView(ListView, ContactView):\n template_name = \"gallery/catalog.html\"\n context_object_name = \"kitchens\"\n paginate_by = 2\n\n kitchens = Kitchen.objects.all()\n search_form = SearchForm()\n\n def get_context_data(self, **kwargs):\n context = super(CatalogView, self).get_context_data(**kwargs)\n context[\"search_form\"] = self.search_form\n return context\n\n def get_queryset(self):\n if \"styles\" in self.request.GET:\n styles = self.request.GET.getlist(\"styles\")\n q_objects = Q()\n for style in styles:\n q_objects |= Q(styles__contains=style)\n self.kitchens = Kitchen.objects.filter(q_objects)\n if \"materials\" in self.request.GET:\n materials = self.request.GET.getlist(\"materials\")\n q_objects = Q()\n for material in materials:\n q_objects |= Q(materials__contains=material)\n self.kitchens = self.kitchens.filter(q_objects)\n\n # self.search_form = SearchForm(self.request.GET)\n return self.kitchens\n\n\nclass ThanksView(TemplateView):\n template_name = \"gallery/thanks.html\"\n"
},
{
"alpha_fraction": 0.5976591110229492,
"alphanum_fraction": 0.6079005002975464,
"avg_line_length": 28.085105895996094,
"blob_id": "9269e3e412df65437c1e7fe03107013d4d764403",
"content_id": "a2bd5bcaab4cad82cef4c52d4093aed6287f7cf2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1457,
"license_type": "no_license",
"max_line_length": 91,
"num_lines": 47,
"path": "/kitchen/gallery/models.py",
"repo_name": "Renok/kitchen",
"src_encoding": "UTF-8",
"text": "from django.db import models\nfrom django.utils.html import mark_safe\n\nfrom multiselectfield import MultiSelectField\n\n\nclass Kitchen(models.Model):\n STYLE_CHOICES = (\n (\"classic\", \"Классика\"),\n (\"modern\", \"Модерн\"),\n (\"provence\", \"Прованс\"),\n (\"country\", \"Кантри\"),\n (\"england\", \"Английский\"),\n (\"italian\", \"Итальянский\"),\n (\"neo\", \"Неоклассика\"),\n )\n MATERIAL_CHOICES = (\n (\"block\", \"Массив\"),\n (\"enamel\", \"Эмаль\"),\n (\"veneer\", \"Шпон\"),\n (\"mdf\", \"МДФ\"),\n (\"plastic\", \"Пластик\"),\n (\"membrane\", \"Пленка\"),\n )\n name = models.CharField(max_length=20)\n description = models.TextField(max_length=1000, blank=True)\n styles = MultiSelectField(choices=STYLE_CHOICES)\n materials = MultiSelectField(choices=MATERIAL_CHOICES)\n price = models.IntegerField(blank=True, null=True)\n\n def __str__(self):\n return self.name\n\n\nclass KitchenImage(models.Model):\n name = models.CharField(max_length=30)\n image = models.ImageField(blank=True)\n promo = models.BooleanField()\n kitchen = models.ForeignKey(\"Kitchen\", on_delete=models.CASCADE)\n\n def image_tag(self):\n return mark_safe('<img src=\"/media/%s\" width=\"250\" height=\"150\" />' % (self.image))\n\n image_tag.short_description = 'Image'\n\n def __str__(self):\n return self.name\n"
},
{
"alpha_fraction": 0.7444444298744202,
"alphanum_fraction": 0.7444444298744202,
"avg_line_length": 35,
"blob_id": "adba459d210cf8f33305b1f9039d7f6b58daf619",
"content_id": "e24b46588a7e607222ff7289ac42d32c0db2c26a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 540,
"license_type": "no_license",
"max_line_length": 76,
"num_lines": 15,
"path": "/kitchen/gallery/urls.py",
"repo_name": "Renok/kitchen",
"src_encoding": "UTF-8",
"text": "from django.conf.urls import url\nfrom django.conf.urls.static import static\nfrom django.contrib.staticfiles.urls import staticfiles_urlpatterns\n\nfrom . import views\nfrom kitchen import settings\n\napp_name = \"gallery\"\nurlpatterns = [\n url(r'^$', views.GalleryView.as_view(), name=\"main\"),\n url(r'^catalog', views.CatalogView.as_view(), name=\"catalog\"),\n url(r'^thanks', views.ThanksView.as_view(), name=\"thanks\")\n]\nurlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)\nurlpatterns += staticfiles_urlpatterns()\n"
},
{
"alpha_fraction": 0.4694656431674957,
"alphanum_fraction": 0.4770992398262024,
"avg_line_length": 28.11111068725586,
"blob_id": "97333bf35d2cc57faf34243a6a50bf20f97ecdf3",
"content_id": "343e2390fd1aa8abbf81951e7361bc6e7f93f871",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 555,
"license_type": "no_license",
"max_line_length": 92,
"num_lines": 18,
"path": "/kitchen/gallery/static/gallery/js/catalog.js",
"repo_name": "Renok/kitchen",
"src_encoding": "UTF-8",
"text": "$(document).ready(function () {\n $('.kitchen').magnificPopup({\n type: 'image',\n gallery: {\n enabled: true,\n navigateByImgClick: true,\n preload: [0,1] // Will preload 0 - before current, and 1 after the current image\n },\n image: {\n tError: 'Изображение не может быть загружено.'\n },\n callbacks: {\n elementParse: function(item) {\n item.src = item.el.find('img').attr('src');\n }\n }\n });\n});\n"
},
{
"alpha_fraction": 0.7637130618095398,
"alphanum_fraction": 0.7637130618095398,
"avg_line_length": 20.545454025268555,
"blob_id": "5b82de896ef9e3440bf48214bb2862517a288af3",
"content_id": "f99c0b8a13a57c8ea51aa34d586debbe05f5a278",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 237,
"license_type": "no_license",
"max_line_length": 42,
"num_lines": 11,
"path": "/kitchen/gallery/admin.py",
"repo_name": "Renok/kitchen",
"src_encoding": "UTF-8",
"text": "from django.contrib import admin\n\nfrom .models import Kitchen, KitchenImage\n\n\[email protected](KitchenImage)\nclass KitchenImageAdmin(admin.ModelAdmin):\n save_as = True\n readonly_fields = ('image_tag',)\n\nadmin.site.register(Kitchen)\n"
}
] | 8 |
rfblue2/ProjectEulerSols | https://github.com/rfblue2/ProjectEulerSols | 4ee6b804003f9ea26cf29293dd352c4dca66f6ba | cdca6d7085f19cd0f25f2e9637fab869820d14f1 | 7e51f3e40bc0ef1a495594a906d255b494bae6f1 | refs/heads/master | 2020-04-19T00:03:43.862064 | 2019-12-26T01:33:09 | 2019-12-26T01:33:09 | 67,249,650 | 2 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.4183535873889923,
"alphanum_fraction": 0.450742244720459,
"avg_line_length": 12.01754379272461,
"blob_id": "3a590455e371a547d4243fd8ccb76272e33c681d",
"content_id": "058235974c1b0b239a5399b070f0cdc05c4d387a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 741,
"license_type": "no_license",
"max_line_length": 50,
"num_lines": 57,
"path": "/7/7.cpp",
"repo_name": "rfblue2/ProjectEulerSols",
"src_encoding": "UTF-8",
"text": "/*\nRoland Fong\n2/17/12\nWhat is the 10 001st prime number?\n*/\n\n#include <iostream>\n#include <cmath>\nusing namespace std;\n\nint main()\n{\n\tint r;\n\tint n;\n\tdouble root;\n\tint a = 4;\n\tint b = 2;\n\tint prime;\n\n\tcin >> n;\n\n\twhile(true)\n\t{\n\t\troot = pow(a,0.5);\n\t\t//cout << \"The sqrt is \" << root << endl;\n\t\twhile (b <= root)\n\t\t{\n\t\t\tr = a % b;\n\t\t\t//cout << \"r = \" << r << endl;\n\t\t\tif (r == 0)\n\t\t\t{\n\t\t\t\tn++;\n\t\t\t\t//cout << \"Not Prime!\" << endl;\n\t\t\t\tbreak;\n\t\t\t}\n\t\t\tb++;\n\t\t\t//cout << \"b = \" << b << endl;\n\t\t}\n\t\tif(r != 0)\n\t\t{\n\t\t\t//cout << \"Prime!\" << endl;\n\t\t\tprime = a;\n\t\t}\n\t\tn--;\n\t\ta++;\n\t\tb = 2;\n\t\tcout << n << endl; // n only\n\t\t//cout << a << endl;\n\t\tif (n == 2)\n\t\t\tbreak;\n\t}\n\n\tcout << \"The 10001st prime is \" << prime << endl;\n\n\tsystem(\"pause\");\n\treturn 0;\n}"
},
{
"alpha_fraction": 0.5080100893974304,
"alphanum_fraction": 0.5539628863334656,
"avg_line_length": 15.594405174255371,
"blob_id": "af3d1c9df1a62fea5a012e92521ff70195c030da",
"content_id": "231eaa44e263acc7d34cbc4b72befb374936644a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2372,
"license_type": "no_license",
"max_line_length": 76,
"num_lines": 143,
"path": "/3/3.cpp",
"repo_name": "rfblue2/ProjectEulerSols",
"src_encoding": "UTF-8",
"text": "/*\nRoland Fong\n2/11/12\nStructure should be improved.\nThe prime factors of 13195 are 5, 7, 13 and 29.\n\nWhat is the largest prime factor of the number 600851475143 ?\n*/\n#include <iostream>\n#include <cstdint>\n#include <algorithm>\nusing namespace std;\n\n#define Show\n#define Code1\n\n#ifdef Code1\nint main()\n{\n\tint64_t num1;\n\tint64_t n = 2;\n\tint64_t factor = 1;\n\tint64_t pfactor = 1;\n\tint64_t r;\n\tint64_t root;\n\n\tcin >> num1;\n\troot = pow(num1,0.5);\n\twhile (n < root)\n\t{\n\t\tr = num1 % n; //to use to check if num1 is even or not\n#ifdef Show\n\t\tcout << \"r = \" << r << endl;\n#endif\n\t\tif (r == 0) // it is even\n\t\t{\n\t\t\tif (n == 2 || n == 3) // if it is 2 or 3 it is prime\n\t\t\t{\n\t\t\t\tpfactor = n;\n#ifdef Show\n\t\t\t\tcout << \"First factor is \" << pfactor << endl;\n#endif\n\t\t\t}\n\t\t\telse\n\t\t\t{\n\t\t\t\tint64_t num2 = 2;\n\t\t\t\tint64_t remain = 1;\n\t\t\t\twhile(num2 < n)\n\t\t\t\t{\n\n\t\t\t\t\tremain = n % num2;\n\t\t\t\t\tif (remain == 0)\n\t\t\t\t\t{\n\t\t\t\t\t\tn = n;\n\t\t\t\t\t\tbreak;\n\t\t\t\t\t}\n\t\t\t\t\tnum2 = num2 + 1;\n\t\t\t\t}\n\t\t\t\tif (remain != 0)\n\t\t\t\t{\n\t\t\t\t\tpfactor = n;\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\telse //don't worry if it isn't even\n\t\t\tn = n;\n\t\tif (pfactor > factor) // is it larger than the existing largest factor?\n\t\t{\n\t\t\tfactor = pfactor;\n#ifdef Show\n\t\t\tcout << \"New factor is \" << factor << endl;\n#endif\n\t\t}\n\t\tn = n + 1;\n#ifdef Show\n\t\tcout << \"n is now \" << n << endl;\n#endif\n\t}\n\tcout << \"The largest prime factor of \" << num1 << \" is \" << factor << endl;\n\tsystem (\"pause\");\n\treturn 0;\n}\n#endif\n\n#ifndef Code1\nbool prime;\nclass PrimeCheck\n{\npublic:\nvoid ifprime(int q)\n{\n\tint64_t count1 = 2;\n\tint64_t remainder = 1;\n\t//bool prime;\n\twhile (count1 < q)\n\t{\n\t\tremainder = q % count1;\n\t\tif (remainder == 0)\n\t\t\tprime = false;\n\t\t\tcout << \"Not prime!\" << endl;\n\t\tcount1 = count1 + 1;\n\t}\n\tif (remainder != 0)\n\t\tprime = true;\n\t\tcout << \"Prime!\" << endl;\n\treturn;\n}\n};\n\nint main()\n{\n\tint64_t in;\n\tint64_t count = 2;\n\tint64_t pfactor = 1;\n\tint64_t factor = 1;\n\tint64_t r;\n\tint64_t root;\n\t\n\tcin >> in;\n\tPrimeCheck isitprime;\n\troot = pow(in,0.5);\n\twhile (count < root)\n\t{\n\t\tisitprime.ifprime(count);\n\t\tif (prime == true)\n\t\t{\n\t\t\tr = in % count;\n\t\t\tcout << \"r is \" << r << endl;\n\t\t\tif (r == 0)\n\t\t\t{\n\t\t\t\tpfactor = count;\n\t\t\t\tfactor = max(factor, pfactor);\n\t\t\t\tcout << \"New factor \" << factor << endl;\n\t\t\t}\n\t\t}\n\t\tcount = count + 1;\n\t\tcout << \"Count is \" << count << endl;\n\t}\n\tcout << \"The largest prime factor is \" << factor << endl;\n\tsystem(\"pause\");\n\treturn 0;\n}\n#endif"
},
{
"alpha_fraction": 0.4751552939414978,
"alphanum_fraction": 0.5,
"avg_line_length": 18.5625,
"blob_id": "e550b5893bfbec7d1eb9f1106ffbfaa3e76fcb63",
"content_id": "186dfe0f6682f4eedbf4f4589ec3255598bb2779",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 322,
"license_type": "no_license",
"max_line_length": 60,
"num_lines": 16,
"path": "/22/22.py",
"repo_name": "rfblue2/ProjectEulerSols",
"src_encoding": "UTF-8",
"text": "names = []\n\ndef score(name):\n s = 0 \n for c in name:\n s += ord(c) - 64\n return s\n\nwith open(\"names.txt\") as f:\n names = [x[1:len(x)-1] for x in f.readline().split(',')]\n list.sort(names)\n total = 0\n for i in range(0,len(names)):\n total += score(names[i]) * (i+1) \n\nprint(total)\n \n"
},
{
"alpha_fraction": 0.45370370149612427,
"alphanum_fraction": 0.5370370149612427,
"avg_line_length": 23.846153259277344,
"blob_id": "652ca57aeab6d14d218216937d40eb0eb93cf18b",
"content_id": "46b02757063217cbdceab31b9c33e91458c8e214",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 324,
"license_type": "no_license",
"max_line_length": 103,
"num_lines": 13,
"path": "/31/31.py",
"repo_name": "rfblue2/ProjectEulerSols",
"src_encoding": "UTF-8",
"text": "# Roland Fong\n# 7/23/18\n# PE 31\n\ncoin_vals = [1, 2, 5, 10, 20, 50, 100, 200]\n \ndef pathsToSum(n, vals):\n if n <= 0:\n return [[]]\n valid = [i for i in vals if n-i >= 0]\n return [x for v in valid for x in [[v] + p for p in pathsToSum(n-v, [y for y in valid if y >= v])]]\n\nprint(len(pathsToSum(200, coin_vals)))\n\n"
},
{
"alpha_fraction": 0.4208333194255829,
"alphanum_fraction": 0.4625000059604645,
"avg_line_length": 14,
"blob_id": "f2732fa4243965ef7286908e354ab412193dfa62",
"content_id": "154ebf2b1d45fd73b00e1ece02aba8beed11acac",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 240,
"license_type": "no_license",
"max_line_length": 38,
"num_lines": 16,
"path": "/21/21.py",
"repo_name": "rfblue2/ProjectEulerSols",
"src_encoding": "UTF-8",
"text": "import math\n\ndef d(n):\n s = 0\n for i in range(1,n):\n if n % i == 0:\n s += i\n return s\n\ntotal = 0\nfor i in range(1,10000):\n divsum = d(i)\n if i == d(divsum) and i != divsum:\n total += i \n\nprint(total)\n"
},
{
"alpha_fraction": 0.2935323417186737,
"alphanum_fraction": 0.4427860677242279,
"avg_line_length": 15.708333015441895,
"blob_id": "e55dfd35d5f44e9dd104041f2439f94e1ba73ea4",
"content_id": "7170fa3802c9227981728c69809f46365a4b2212",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 402,
"license_type": "no_license",
"max_line_length": 45,
"num_lines": 24,
"path": "/34/34.py",
"repo_name": "rfblue2/ProjectEulerSols",
"src_encoding": "UTF-8",
"text": "# Roland Fong\n# 7/26/18\n# PE 34\n\nfacts = {\n 0: 1, 1: 1, \n 2: 2, 3: 6,\n 4: 24, 5: 120,\n 6: 720, 7: 5040,\n 8:40320, 9: 362880,\n}\n\ndef curious(n):\n i = 0\n digsum = 0\n while 10 ** i <= n:\n dig = int(n / (10**i)) % 10\n digsum += facts[dig]\n if digsum > n:\n return False\n i += 1\n return n == digsum\n\nprint(sum(filter(curious, range(3, 100000))))\n\n"
},
{
"alpha_fraction": 0.3317756950855255,
"alphanum_fraction": 0.4532710313796997,
"avg_line_length": 27.53333282470703,
"blob_id": "74d53ec9ce9f97d018510d92c2d4fa5b6c0f11f1",
"content_id": "7ac15ebd447ad82ebb340c1589bfc93c91a3ba56",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 428,
"license_type": "no_license",
"max_line_length": 54,
"num_lines": 15,
"path": "/19/19.py",
"repo_name": "rfblue2/ProjectEulerSols",
"src_encoding": "UTF-8",
"text": "def isleap(y):\n return y % 4 == 0 and y % 100 != 0 or y % 400 == 0\n\ncount = 0\ndow = 1 # 1 = Mon, 7 = Sun\nfor y in range(1900, 2000+1):\n for m in range(1, 12+1):\n days = 30 if m in [4,6,9,11] else 31\n if m == 2:\n days = 29 if isleap(y) else 28\n for d in range(1,days+1):\n if d == 1 and dow == 7 and y != 1900:\n count += 1\n dow = dow % 7 + 1\nprint(count)\n"
},
{
"alpha_fraction": 0.4270152449607849,
"alphanum_fraction": 0.4793028235435486,
"avg_line_length": 18.16666603088379,
"blob_id": "0ffe01a04f0355e79677b38160ba86beaf02158c",
"content_id": "8bd419c3bda5b57d46410729fddd16b7f3ead316",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 459,
"license_type": "no_license",
"max_line_length": 54,
"num_lines": 24,
"path": "/14/14.py",
"repo_name": "rfblue2/ProjectEulerSols",
"src_encoding": "UTF-8",
"text": "#11/20/15\n__author__ = 'rfblue2'\n\ndef collatz(seed):\n return op(seed, 0)\n\ndef op(num, count):\n count += 1\n if num == 1:\n return count\n else:\n if num % 2 == 0:\n return op(num / 2, count)\n else:\n return op(3 * num + 1, count)\n\nlargest = 0\nindex = 0\nfor i in range(1,999999):\n c = collatz(i)\n if c > largest:\n largest = c\n index = i\n print('new largest ' + str(i) + ': ' + str(c))"
},
{
"alpha_fraction": 0.31012657284736633,
"alphanum_fraction": 0.4303797483444214,
"avg_line_length": 11.15384578704834,
"blob_id": "d49b8d95eafe2f3a22383c7cfaca8617bde6988b",
"content_id": "c4e05913762868a674e6ec9e8b1db8c4e25dbf48",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 158,
"license_type": "no_license",
"max_line_length": 24,
"num_lines": 13,
"path": "/28/28.py",
"repo_name": "rfblue2/ProjectEulerSols",
"src_encoding": "UTF-8",
"text": "# Roland Fong\n# 7/22/18\n# PE 28\n\ni = 1\ns = 1\ncount = 1\nwhile i <= 1001 - 2:\n for j in range(0,4):\n count += i + 1\n s += count\n i += 2\nprint(s)\n"
},
{
"alpha_fraction": 0.5782312750816345,
"alphanum_fraction": 0.6088435649871826,
"avg_line_length": 28.350000381469727,
"blob_id": "cd8a199db80ee255b817ca26b8be7bc2dbb835ed",
"content_id": "0f4a0e02165f0651ed4da87d57b32ca3d82040ab",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 588,
"license_type": "no_license",
"max_line_length": 71,
"num_lines": 20,
"path": "/24/24.py",
"repo_name": "rfblue2/ProjectEulerSols",
"src_encoding": "UTF-8",
"text": "digits = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9']\n\n# smaller is a list of smaller permutations of size n-1\n# returns a list of permutations with a single digit (d) added to front\ndef generateTops(d, smaller):\n if len(smaller) == 0:\n return [d]\n return [d+x for x in smaller]\n\ndef generate(digs):\n perms = []\n for i in range(0, len(digs)):\n digit = digs[i]\n rest = digs[0:i]+digs[i+1:]\n perms += generateTops(digit, generate(rest))\n return perms\n\npermutations = generate(digits)\nlist.sort(permutations)\nprint(permutations[int(1e6)-1])\n\n"
},
{
"alpha_fraction": 0.4314159154891968,
"alphanum_fraction": 0.46792036294937134,
"avg_line_length": 15.759259223937988,
"blob_id": "17abec07600bdbb88873481ee321da481485856f",
"content_id": "49c3381bbaf39efab2c204f7499cde375ef56d2f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 904,
"license_type": "no_license",
"max_line_length": 72,
"num_lines": 54,
"path": "/9/9.cpp",
"repo_name": "rfblue2/ProjectEulerSols",
"src_encoding": "UTF-8",
"text": "/*\nRoland Fong\n6/14/12\nProblem 9: \nThere exists exactly one Pythagorean triplet for which a + b + c = 1000.\nFind the product abc.\n*/\n\n#include <iostream>\n#include <cmath>\nusing namespace std;\n\nbool checkInt(double num);\n\nint main()\n{\n\tdouble a, b, c;\n\tfor(int i = 1; i < 10000; i++)\n\t{\n\t\ta = i;\n\t\t//cout << \"a = \" << a << \";\";\n\t\tfor(int j = 1; j < 10000; j++)\n\t\t{\n\t\t\tb = j;\n\t\t\tc = pow(a * a + b * b, 0.5);\n\t\t\t//cout << \"b = \" << b << endl;\n\t\t\t//cout << \"c = \" << c << endl;\n\t\t\tif(checkInt(c) == true)\n\t\t\t{\n\t\t\t\tif(a + b + c == 1000)\n\t\t\t\t{\n\t\t\t\t\tcout << \"a = \" << a << endl;\n\t\t\t\t\tcout << \"b = \" << b << endl;\n\t\t\t\t\tcout << \"c = \" << c << endl;\n\t\t\t\t\tcout << \"SUM = \" << a + b + c << endl;\n\t\t\t\t\tcout << \"PRODUCT = \" << a * b * c << endl;\n\t\t\t\t\tbreak;\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\tsystem(\"pause\");\n\treturn 0;\n}\n\nbool checkInt(double num)\n{\n\tint num2 = num;\n\tdouble num3 = num2;\n\n\tif(num3 == num)\n\t\treturn true;\n\treturn false;\n}"
},
{
"alpha_fraction": 0.4572649598121643,
"alphanum_fraction": 0.5683760643005371,
"avg_line_length": 16.14634132385254,
"blob_id": "cdd43adbea46b4d34c08ce7ddc692fe68f25b345",
"content_id": "85f6ad70504b09cf3adde1f8ef572129f4ffe87d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 702,
"license_type": "no_license",
"max_line_length": 106,
"num_lines": 41,
"path": "/5/5.cpp",
"repo_name": "rfblue2/ProjectEulerSols",
"src_encoding": "UTF-8",
"text": "/*\nRoland Fong\n2/17/12\n2520 is the smallest number that can be divided by each of the numbers from 1 to 10 without any remainder.\n\nWhat is the smallest positive number that is evenly divisible by all of the numbers from 1 to 20?\n*/\n#include <iostream>\nusing namespace std;\n\nint main()\n{\n\tint num = 11;\n\tint r5;\n\tint r7;\n\tint r9;\n\tint r11;\n\tint r13;\n\tint r16;\n\tint r17;\n\tint r19;\n\n\twhile(true)\n\t{\n\t\tr5 = num % 5;\n\t\tr7 = num % 7;\n\t\tr9 = num % 9;\n\t\tr11 = num % 11;\n\t\tr13 = num % 13;\n\t\tr16 = num % 16;\n\t\tr17 = num % 17;\n\t\tr19 = num % 19;\n\t\tif(r5 == 0 && r7 == 0 && r9 == 0 && r11 == 0 && r13 == 0 && r16 == 0 && r17 == 0 && r19 == 0)\n\t\t\tbreak;\n\t\tnum++;\n\t\t\n\t}\n\tcout << num << endl;\n\tsystem(\"pause\");\n\treturn 0;\n}"
},
{
"alpha_fraction": 0.4942648708820343,
"alphanum_fraction": 0.5406673550605774,
"avg_line_length": 17.80392074584961,
"blob_id": "60d1b7831483653b575a9ec52fd13d59b4499d5e",
"content_id": "2e97d22ef32f211e83f414e4c25cff0d7b790c3b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1918,
"license_type": "no_license",
"max_line_length": 99,
"num_lines": 102,
"path": "/11/11.cpp",
"repo_name": "rfblue2/ProjectEulerSols",
"src_encoding": "UTF-8",
"text": "/*\nRoland Fong\n7/29/12\nGiven 20x20 grid, find greatest product of four adjacent numbers, vertical, horizontal, or diagonal\n*/\n\n#include <iostream>\n#include <fstream>\n\nusing namespace std;\n\n#define uint unsigned int\n\nvoid input(ifstream &grid, uint nums[][20]);//inputs grid nums into array\nuint rowSum(uint nums[][20]);//find largest rowsum\nuint colSum(uint nums[][20]);//find largest colsum\nuint diagSum(uint nums[][20]);//find largest diagsum\n\nint main()\n{\n\tifstream grid;\n\tgrid.open(\"Grid.txt\");\n\n\tuint nums[20][20];\n\t\n\tinput(grid, nums);\n\n\tgrid.close();\n\t\n\tcout << \"ROWSUM: \" << rowSum(nums) << endl;\n\tcout << \"COLSUM: \" << colSum(nums) << endl;\n\tcout << \"DIAGSUM: \" << diagSum(nums) << endl;\n\n\tsystem(\"pause\");\n\treturn 0;\n}\n\nvoid input(ifstream &grid, uint nums[][20])\n{\n\tfor(int r = 0; r < 20; r++)\n\t\tfor(int c = 0; c < 20; c++)\n\t\t\tgrid >> nums[r][c];\n}\nuint rowSum(uint nums[][20])\n{\n\tuint final = 0;\n\tuint prod = 0;\n\n\tfor(int r = 0; r < 20; r++)\n\t{\n\t\tfor(int c = 0; c < 20 - 3; c++)\n\t\t{\n\t\t\tprod = nums[r][c] * nums[r][c + 1] * nums[r][c + 2] * nums[r][c + 3];\n\t\t\tif(prod > final)\n\t\t\t\tfinal = prod;\n\t\t}\n\t}\n\treturn final;\n}\nuint colSum(uint nums[][20])\n{\n\tuint final = 0;\n\tuint prod = 0;\n\n\tfor(int c = 0; c < 20; c++)\n\t{\n\t\tfor(int r = 0; r < 20 - 3; r++)\n\t\t{\n\t\t\tprod = nums[r][c] * nums[r + 1][c] * nums[r + 2][c] * nums[r + 3][c];\n\t\t\tif(prod > final)\n\t\t\t\tfinal = prod;\n\t\t}\n\t}\n\treturn final;\n}\nuint diagSum(uint nums[][20])\n{\n\tuint final = 0;\n\tuint prod = 0;\n\n\t//from upper left to lower right\n\tfor(int r = 0; r < 20 - 3; r++)\n\t{\n\t\tfor(int c = 0; c < 20 - 3; c++)\n\t\t{\n\t\t\tprod = nums[r][c] * nums[r + 1][c + 1] * nums[r + 2][c + 2] * nums[r + 3][c + 3];\n\t\t\tif(prod > final)\n\t\t\t\tfinal = prod;\n\t\t}\n\t}\n\t\n\tfor(int r = 0; r < 20 - 3; r++)\n\t{\n\t\tfor(int c = 3; c < 20; c++)\n\t\t{\n\t\t\tprod = nums[r][c] * nums[r + 1][c - 1] * nums[r + 2][c - 2] * nums[r + 3][c - 3];\n\t\t\tif(prod > final)\n\t\t\t\tfinal = prod;\n\t\t}\n\t}\n\treturn final;\n}\n"
},
{
"alpha_fraction": 0.4307692348957062,
"alphanum_fraction": 0.5115384459495544,
"avg_line_length": 13.941176414489746,
"blob_id": "d9d5e4f75338ec8e1e381294ed2b025b80721c31",
"content_id": "3c6c0987453071062873ae6c89375231696157ce",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 260,
"license_type": "no_license",
"max_line_length": 46,
"num_lines": 17,
"path": "/25/25.py",
"repo_name": "rfblue2/ProjectEulerSols",
"src_encoding": "UTF-8",
"text": "import math\n\ndef countDig(n):\n return int(math.log(n) / math.log(10)) + 1\n\nfound = False\nf1 = 1\nf2 = 1\nidx = 3\nwhile not found:\n f3 = f1 + f2\n if countDig(f3) >= 1000:\n found = True\n print(idx)\n f1 = f2\n f2 = f3\n idx += 1\n \n"
},
{
"alpha_fraction": 0.4616297483444214,
"alphanum_fraction": 0.5102847814559937,
"avg_line_length": 19.395160675048828,
"blob_id": "c0abe52a5ca3b4258f6e993046cf8594c004aac3",
"content_id": "338c2f4f5413d132f547e10989eb0f7d1dec29a9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2528,
"license_type": "no_license",
"max_line_length": 131,
"num_lines": 124,
"path": "/4/4.cpp",
"repo_name": "rfblue2/ProjectEulerSols",
"src_encoding": "UTF-8",
"text": "/*\nRoland Fong\n2/13/12\nA palindromic number reads the same both ways. The largest palindrome made from the product of two 2-digit numbers is 9009 = 91 99.\n\nFind the largest palindrome made from the product of two 3-digit numbers.\n*/\n#include <iostream>\n#include <cmath>\nusing namespace std;\n//#define SHOW\n\nint main()\n{\n\t\n\tint in;\n\tint d[6]; // 0-4 is 5 digits, 0-5 is 6 digits; why is it d[6] even though there's 0??? to not corrupt the stack\n\tfloat divide;\n\tbool digits5;\n\tbool palindrome;\n\n\t//MAIN portion: Multiply three digit numbers\n\tint n = 500;\n\tint m = 500;\n\tint largest = 0;\n\n\tfor (n < 999; n++;)\n\t{\n\t\tfor (m < 999; m++;)\n\t\t{\n\t\t\t//Part 1: Input a number and count the digits\n\t\t\tin = n * m;\n\t\t\tcout << n << \" * \" << m << \" = \" << n * m << endl;\n\t\t\tdivide = in / 100000.0;\n\t\t\tif (divide < 1)\n\t\t\t{\n\t\t\t\tdigits5 = true;\n#ifdef SHOW\n\t\t\t\tcout << \"5 Digits\" << endl;\n#endif\n\t\t\t}\n\t\t\telse if (divide >=1)\n\t\t\t{\n\t\t\t\tdigits5 = false;\n#ifdef SHOW\n\t\t\t\tcout << \"6 Digits\" << endl;\n#endif\n\t\t\t}\n\n\t\t\t//Part 2: Find the digits\n\t\t\tif (digits5 == true) // for five digit numbers\n\t\t\t{\n\t\t\t\td[0] = in % 10;\n#ifdef SHOW\n\t\t\t\tcout << d[0] << endl;\n#endif\n\t\t\t\tfor(int a = 1; a <= 4; ++a)\n\t\t\t\t{\n\t\t\t\t\tdouble power = pow (10.0, a+1);\n\t\t\t\t\tint power2 = (double) power;\n\t\t\t\t\td[a] = (in % power2);\n\t\t\t\t\tfor(int b = 1; b <= a; b++)\n\t\t\t\t\t{\n\t\t\t\t\t\td[a] = d[a] - (d[b-1] * (pow (10.0 , b-1)));\n\t\t\t\t\t}\n\t\t\t\t\td[a] = d[a] / (power2 / 10);\n#ifdef SHOW\n\t\t\t\t\tcout << d[a] << endl;\n#endif\n\t\t\t\t}\n\t\t\t}\n\n\t\t\telse if (digits5 == false) // for six digit numbers\n\t\t\t{\n\t\t\t\td[0] = in % 10;\n#ifdef SHOW\n\t\t\t\tcout << d[0] << endl;\n#endif\n\t\t\t\tfor(int a = 1; a <= 5; ++a)\n\t\t\t\t{\n\t\t\t\t\tdouble power = pow (10.0, a+1);\n\t\t\t\t\tint power2 = (double) power;\n\t\t\t\t\td[a] = (in % power2);\n\t\t\t\t\tfor(int b = 1; b <= a; b++)\n\t\t\t\t\t{\n\t\t\t\t\t\td[a] = d[a] - (d[b-1] * (pow (10.0 , b-1)));\n\t\t\t\t\t}\n\t\t\t\t\td[a] = d[a] / (power2 / 10);\n#ifdef SHOW\n\t\t\t\t\tcout << d[a] << endl;\n#endif\n\t\t\t\t}\n\t\t\t}\n\t\t\t//Part 3: Check if the number is a palindrome\n\t\t\tif (digits5 == true && d[0] == d[4] && d[1] == d[3])\n\t\t\t{\n\t\t\t\tpalindrome = true;\n\t\t\t}\n\t\t\telse if (digits5 == false && d[0] == d[5] && d[1] == d[4] && d[2] == d[3])\n\t\t\t\tpalindrome = true;\n\t\t\telse\n\t\t\t\tpalindrome = false;\n\t\t\tif (palindrome == true)\n\t\t\t{\n\t\t\t\tcout << \"Palindrome!\" << endl;\n\t\t\t\tif ( in > largest)\n\t\t\t\t\tlargest = in;\n\t\t\t}\n\t\t\tif (m > 998)\n\t\t\t{\n\t\t\t\tm = 100;\n\t\t\t\tbreak;\n\t\t\t}\n\t\t}\n\t\tif (n > 998)\n\t\t{\n\t\t\tn = 100;\n\t\t\tbreak;\n\t\t}\n\t}\n\tcout << \"The largest palindrome that is a product of two three digit numbers is \" << largest << endl;\n\tsystem(\"pause\");\n\treturn 0;\n}"
},
{
"alpha_fraction": 0.47295209765434265,
"alphanum_fraction": 0.5162287354469299,
"avg_line_length": 15.175000190734863,
"blob_id": "dc020e3c3514c76f656f8997551df96ada55c166",
"content_id": "b027dd27a673265b3f313a7c88128e7131d0b586",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 647,
"license_type": "no_license",
"max_line_length": 49,
"num_lines": 40,
"path": "/37/37.py",
"repo_name": "rfblue2/ProjectEulerSols",
"src_encoding": "UTF-8",
"text": "# Roland Fong\n# 8/2/18\n# PE 37\nimport math\n\ndef isPrime(p):\n if p <= 1:\n return False\n for i in range(2,int(math.sqrt(p)+1)):\n if p % i == 0:\n return False\n return True\n\ndef ltrunc(p):\n if not isPrime(p):\n return False\n if p < 10:\n return True\n return ltrunc(p % (10 ** int(math.log10(p))))\n\ndef rtrunc(p):\n if not isPrime(p):\n return False\n if p < 10:\n return True\n return rtrunc(int(p/10))\n\ndef truncatable(n):\n return ltrunc(n) and rtrunc(n)\n \ncount = 0\ns = 0\ni = 13\nwhile count < 11:\n if truncatable(i):\n s += i\n count += 1\n i += 1\n\nprint(s)\n"
},
{
"alpha_fraction": 0.46107783913612366,
"alphanum_fraction": 0.4910179674625397,
"avg_line_length": 22.785715103149414,
"blob_id": "cf64c73e7ee4293d7ca94809613d2e30025f7644",
"content_id": "8167637f70be3e1a35bcde8e25addeecdaa3064d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 668,
"license_type": "no_license",
"max_line_length": 83,
"num_lines": 28,
"path": "/32/32.py",
"repo_name": "rfblue2/ProjectEulerSols",
"src_encoding": "UTF-8",
"text": "# Roland Fong\n# 7/24/18\n# PE 32\n\ndigits = ['1', '2', '3', '4', '5', '6', '7', '8', '9']\nlength = len(digits)\npandigitals = set()\n\ndef flatMap(f, l):\n return [y for i,x in enumerate(l) for y in f(i,x)]\n\ndef perms(digs):\n if digs == []:\n return [[]]\n return flatMap(lambda i,x: [[x] + y for y in perms(digs[:i]+digs[i+1:])], digs)\n\nallPerms = perms(digits)\n\nfor digs in allPerms:\n for i in range(1, length - 1):\n for j in range(i+1, length):\n a = int(''.join(digs[:i]))\n b = int(''.join(digs[i:j]))\n c = int(''.join(digs[j:]))\n if a * b == c:\n pandigitals.add(c)\n\nprint(sum(pandigitals))\n\n\n"
},
{
"alpha_fraction": 0.540145993232727,
"alphanum_fraction": 0.5985401272773743,
"avg_line_length": 11.545454978942871,
"blob_id": "d4db4a13a050bdf480d2c6d70067875ff24c0df4",
"content_id": "be17d53281b8f481ac751365a125e2356d635262",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 137,
"license_type": "no_license",
"max_line_length": 22,
"num_lines": 11,
"path": "/13/13.py",
"repo_name": "rfblue2/ProjectEulerSols",
"src_encoding": "UTF-8",
"text": "#11/20/15\n__author__ = 'rfblue2'\n\ndat = open('data.txt')\nnums = dat.readlines()\n\nsum = 0\nfor num in nums:\n sum += int(num)\n\nprint(sum)"
},
{
"alpha_fraction": 0.4655172526836395,
"alphanum_fraction": 0.5129310488700867,
"avg_line_length": 16.185184478759766,
"blob_id": "0d33b6ed6ed5eeddcdd44787443dae542134ebee",
"content_id": "c23e6d12f4554acff29cd347298b0c39d1b4f489",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 464,
"license_type": "no_license",
"max_line_length": 43,
"num_lines": 27,
"path": "/36/36.py",
"repo_name": "rfblue2/ProjectEulerSols",
"src_encoding": "UTF-8",
"text": "import math\n\ndef numToList(n):\n l = []\n for i in range(int(math.log10(n)) + 1):\n l.append(n % 10)\n n = int(n / 10)\n return l\n\ndef isSymm(l):\n return l == list(reversed(l))\n\n# base 10 to base 2 list\ndef tenToTwo(n):\n l = []\n while n > 0:\n l.append(n % 2)\n n = int(n/2)\n return l\n\ntotal = 0\nfor i in range(1, 1000000):\n if isSymm(numToList(i)):\n if isSymm(tenToTwo(i)):\n total += i\n\nprint(total)\n"
},
{
"alpha_fraction": 0.5540069937705994,
"alphanum_fraction": 0.5783972144126892,
"avg_line_length": 17.532258987426758,
"blob_id": "1ad25c3402281bfdb46e213f81e454819663e76f",
"content_id": "a5e9d69fa30580338ba89d964665625716f6fc6b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1148,
"license_type": "no_license",
"max_line_length": 82,
"num_lines": 62,
"path": "/12/12.cpp",
"repo_name": "rfblue2/ProjectEulerSols",
"src_encoding": "UTF-8",
"text": "/*\nRoland Fong\n2/13/14\nWhat is the value of the first triangle number to have over five hundred divisors?\n*/\n\n#include <iostream>\n#include <vector>\n\nusing namespace std;\n\n#define lint long long unsigned int\n\nint main()\n{\n\tlint tri = 1;\n\tint fact = 0;//factors\n\tvector<lint> primes;\n\n\tprimes.push_back(2);//first prime is 2\n\tprimes.push_back(3);//second prime is 3\n\n\tfor(lint i = 2; fact < 500; i++)\n\t{\n\t\ttri += i;//next triangular number\n\t\tfact = 0;\n\n\t\t//algorithm 1 - cycle through finding factors\n\t\t/*for(int i = 1; i <= tri / 2 + 1; i++)\n\t\t\tif(tri % i == 0)//factor!\n\t\t\t\tfact++;\n\n\t\tfact++;//include # itself*/\n\n\t\t//algorithm 2 - use prime factorization (faster)\n\t\tlint temp = tri;\n\t\tlint count;\n\t\tlint product = 1;\n\t\tfor(lint j = 0; temp != 1; j++)\n\t\t{\n\t\t\tcount = 0;\n\t\t\tif(j >= primes.size())\n\t\t\t{\n\t\t\t\tprimes.push_back(temp);//temp is the new prime\n\t\t\t\tcout << \"NEW PRIME: \" << temp << endl;\n\t\t\t}\n\n\t\t\twhile(temp % primes.at(j) == 0)//while you can divide out that prime\n\t\t\t{\n\t\t\t\ttemp /= primes.at(j);\n\t\t\t\tcount++;\n\t\t\t}\n\t\t\tproduct *= (count + 1);\n\t\t}\n\n\t\tfact = product;\n\n\t\tcout << tri << \": \" << fact << endl;\n\t}\n\n\tcout << \"ANSWER: \" << tri << endl;\n}"
},
{
"alpha_fraction": 0.4674796760082245,
"alphanum_fraction": 0.5379403829574585,
"avg_line_length": 13.779999732971191,
"blob_id": "c15ddf7b89131109e19da54d0e1abf1a6a797a13",
"content_id": "df394bddbbf929efcda39de8896280ce981d7bff",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 738,
"license_type": "no_license",
"max_line_length": 129,
"num_lines": 50,
"path": "/1/1.cpp",
"repo_name": "rfblue2/ProjectEulerSols",
"src_encoding": "UTF-8",
"text": "/*\nRoland Fong\n2/11/12\nIf we list all the natural numbers below 10 that are multiples of 3 or 5, we get 3, 5, 6 and 9. The sum of these multiples is 23.\n\nFind the sum of all the multiples of 3 or 5 below 1000.\n*/\n\n#include <iostream>\nusing namespace std;\n\nint main()\n{\n\tint sum = 0;\n\tint five = 5;\n\tint three = 3;\n\tint fifteen = 15;\n\tint r1;\n\tint r2;\n\tint r3;\n\tfor(int c = 1; c < 1000; c++ )\n\t{\n\t\tr1 = c % 3;\n\t\tr2 = c % 5;\n\t\tr3 = c % 15;\n\t\tif (r1 == 0)\n\t\t{\n\t\t\tsum = sum + three;\n\t\t\tthree = three + 3;\n\t\t}\n\t\telse\n\t\t\tsum = sum;\n\t\tif (r2 == 0)\n\t\t{\n\t\t\tsum = sum + five;\n\t\t\tfive = five + 5;\n\t\t}\n\t\telse\n\t\t\tsum = sum;\n\t\tif (r3 == 0)\n\t\t{\n\t\t\tsum = sum - fifteen;\n\t\t\tfifteen = fifteen + 15;\n\t\t}\n\t\t\n\t}\n\tcout << sum << endl;\n\tsystem(\"pause\");\n\treturn 0;\n}"
},
{
"alpha_fraction": 0.5228136777877808,
"alphanum_fraction": 0.5484790802001953,
"avg_line_length": 21.382978439331055,
"blob_id": "f20ce67fdb58bc64a6a814a308cdbcb9f6b5091a",
"content_id": "90a8147b0fb993c6a3afff2a3449eeb500cf228b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1052,
"license_type": "no_license",
"max_line_length": 68,
"num_lines": 47,
"path": "/35/35.py",
"repo_name": "rfblue2/ProjectEulerSols",
"src_encoding": "UTF-8",
"text": "# Roland Fong\n# 7/28/18\n# PE 35\n\nfrom functools import reduce\n\ndef isPrime(n):\n if n <= 1:\n print()\n return False\n for i in range(2, int(n**0.5)+1):\n if n % i == 0:\n return False\n return True\n\n# create set of n digits chosen from digs.\ndef genSets(digs, n):\n if not n:\n return [[]]\n allSets = []\n for i, d in enumerate(digs):\n sets = genSets(digs, n-1)\n allSets += [[d] + x for x in sets]\n return allSets\n\n# from a list generate the cycles\ndef genPerms(l):\n if not l:\n return [[]]\n perms = []\n for i in range(len(l)):\n perms.append(l[i:]+l[:i])\n return perms\n\ndef listToNum(l):\n return sum(d * (10**i) for i,d in enumerate(l[::-1]))\n\ndigs = [1, 3, 7, 9]\ncirculars = set([2, 3, 5, 7])\nfor numDig in range(2,7):\n sets = genSets(digs, numDig)\n for s in sets:\n nums = set(listToNum(x) for x in genPerms(s))\n if reduce(lambda x, y: x and y, [isPrime(x) for x in nums]):\n circulars = circulars.union(nums)\n\nprint(len(circulars))\n"
},
{
"alpha_fraction": 0.7765362858772278,
"alphanum_fraction": 0.7765362858772278,
"avg_line_length": 118.33333587646484,
"blob_id": "74dc3c7b0696243e9ab597784338a52c26c64221",
"content_id": "bf021453acc5dc95073c2c5e90f672159183338f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 358,
"license_type": "no_license",
"max_line_length": 298,
"num_lines": 3,
"path": "/README.md",
"repo_name": "rfblue2/ProjectEulerSols",
"src_encoding": "UTF-8",
"text": "# ProjectEulerSols\nMy solutions to Project Euler problems.\nDisclaimer: Some of these solutions are very old (my freshman year of high school old... they should all be dated). I'm planning on writing cleaner, more efficient code, (maybe even return and rewrite the old ones?) but for now, pretend that what you see is some kind of minimum viable concept. ;)\n"
},
{
"alpha_fraction": 0.5789473652839661,
"alphanum_fraction": 0.6507176756858826,
"avg_line_length": 33.66666793823242,
"blob_id": "6644e538ea7c7934cc4da201d5e223971b7df7b6",
"content_id": "702a0f47c770148f21e403d5c0c9af71ff7efbed",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 209,
"license_type": "no_license",
"max_line_length": 89,
"num_lines": 6,
"path": "/29/29.py",
"repo_name": "rfblue2/ProjectEulerSols",
"src_encoding": "UTF-8",
"text": "# Roland Fong\n# 7/22/18\n# PE 29\n\n# Brute force using list comprehension to mimic a flatmap. Fun, but never code this way.\nprint(len(set([y for x in range(2, 101) for y in map(lambda a: x ** a, range(2, 101))])))\n\n"
},
{
"alpha_fraction": 0.4153846204280853,
"alphanum_fraction": 0.4692307710647583,
"avg_line_length": 12,
"blob_id": "9a63d606fdd1dd87882df454a7091e7397cd1c5f",
"content_id": "3ec294927644f4232cfbec2c9e17306d99f4cf82",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 130,
"license_type": "no_license",
"max_line_length": 24,
"num_lines": 10,
"path": "/20/20.py",
"repo_name": "rfblue2/ProjectEulerSols",
"src_encoding": "UTF-8",
"text": "def fact(i):\n if i <= 1:\n return 1\n return fact(i-1)*i\n\ns = 0\nfor c in str(fact(100)):\n s += int(c)\n \nprint(s)\n"
},
{
"alpha_fraction": 0.5157728791236877,
"alphanum_fraction": 0.5488958954811096,
"avg_line_length": 11.959183692932129,
"blob_id": "b680e96dd25be65145ede76a82d8d152bb55ea16",
"content_id": "62243740657d7640e8ac8c097e19257d9a9c1826",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 634,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 49,
"path": "/10/10.cpp",
"repo_name": "rfblue2/ProjectEulerSols",
"src_encoding": "UTF-8",
"text": "/*\nRoland Fong\n7/28/12\nFind the sum of all the primes under two million\n*/\n\n#include <iostream>\n#include <cmath>\n\nusing namespace std;\n\n#define lint long long unsigned int\n\nbool isPrime(lint num);\n\nint main()\n{\n\tlint sum = 0;\n\n\tfor(lint i = 0; i < 2000000; i++)\n\t{\n\t\tif(isPrime(i))\n\t\t{\n\t\t\tsum += i;\n\t\t\t//cout << \"i = \" << i << endl;\n\t\t\t//cout << \"sum = \" << sum << endl;\n\t\t}\n\t}\n\tcout << \"SUM: \" << sum << endl;\n\tsystem(\"pause\");\n\treturn 0;\n}\n\nbool isPrime(lint num)\n{\n\tif(num < 2)\n\t\treturn false;\n\n\telse if(num == 2)\n\t\treturn true;\n\n\tfor(lint i = 2; i <= (lint)pow(num, 0.5); i++)\n\t{\n\t\tif(num % i == 0)\n\t\t\treturn false;\n\t}\n\n\treturn true;\n}"
},
{
"alpha_fraction": 0.5417475700378418,
"alphanum_fraction": 0.5883495211601257,
"avg_line_length": 13.742856979370117,
"blob_id": "e93a778222d1944da931434f9fda5a27afd9b545",
"content_id": "84536b181b0ff289a6d534f501beff1fb9252f1d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 515,
"license_type": "no_license",
"max_line_length": 118,
"num_lines": 35,
"path": "/6/6.cpp",
"repo_name": "rfblue2/ProjectEulerSols",
"src_encoding": "UTF-8",
"text": "/*\nRoland Fong\n2/17/12\n\nFind the difference between the sum of the squares of the first one hundred natural numbers and the square of the sum.\n*/\n#include <iostream>\n#include <cmath>\nusing namespace std;\n\nint main()\n{\n\tint in;\n\tint square;\n\tint sum1 = 0;\n\tint sum2 = 0;\n\n\tcin >> in;\n\t//sum of squares\n\tfor (int n = 1; n < in + 1; n++)\n\t{\n\t\tsquare = pow(n,2.0);\n\t\tsum1 += square;\n\t}\n\tfor (int m = 1; m < in + 1; m++)\n\t{\n\t\tsum2 += m;\n\t}\n\tsum2 = pow(sum2,2.0);\n\n\tcout << sum1 - sum2 << endl;\n\n\tsystem(\"pause\");\n\treturn 0;\n}"
},
{
"alpha_fraction": 0.3390151560306549,
"alphanum_fraction": 0.38825756311416626,
"avg_line_length": 17.85714340209961,
"blob_id": "bd476df3a59b995ad04a938d5578ce080096569b",
"content_id": "1e1414c0b2a7f8f4e964d9e6ce6b4c679cb5c304",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 528,
"license_type": "no_license",
"max_line_length": 38,
"num_lines": 28,
"path": "/27/27.py",
"repo_name": "rfblue2/ProjectEulerSols",
"src_encoding": "UTF-8",
"text": "# Roland Fong\n# 9/18/18\n# PE 27\ndef isPrime(x):\n if x <= 1:\n return False\n for i in range(2,x):\n if x % i == 0:\n return False\n return True\n\nA = 999\nB = 1000\nbest_n = 0\nbest = (0,0)\nfor a in range(-A,A+1):\n for b in range(-B,B+1):\n n = 0\n while True:\n if isPrime(n*n + a*n + b):\n n += 1\n elif n > best_n:\n best_n = n\n best = (a,b)\n break\n else:\n break\nprint(best[0]*best[1])\n"
},
{
"alpha_fraction": 0.4099099040031433,
"alphanum_fraction": 0.5045045018196106,
"avg_line_length": 12.875,
"blob_id": "81ffcaacfc5607839cd5bd6036c213742e4b644a",
"content_id": "33384516a951c0e3bd5dd08fdfd7788d31bdf5be",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 222,
"license_type": "no_license",
"max_line_length": 33,
"num_lines": 16,
"path": "/16/16.py",
"repo_name": "rfblue2/ProjectEulerSols",
"src_encoding": "UTF-8",
"text": "#11/20/15\n__author__ = 'rfblue2'\n\nnum = pow(2,1000)\n\nsum = 0\ni = 0\nwhile num > 0:\n temp = num % pow(10, i)\n num = num - temp\n digit = temp / pow(10, i - 1)\n print(digit)\n sum += digit\n i += 1\n\nprint(sum)\n"
},
{
"alpha_fraction": 0.5364372730255127,
"alphanum_fraction": 0.5597165822982788,
"avg_line_length": 26.38888931274414,
"blob_id": "088b0304532cd03177da546c5dfc227c78c111bb",
"content_id": "09af8c45839290a896256eb1244289f90ef63c9b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Java",
"length_bytes": 988,
"license_type": "no_license",
"max_line_length": 63,
"num_lines": 36,
"path": "/20/Twenty.java",
"repo_name": "rfblue2/ProjectEulerSols",
"src_encoding": "UTF-8",
"text": "import java.util.*;\n/**\n * Alternate solution to Project Euler #20\n * @author Roland\n */\npublic class Twenty {\n\n public static void main(String args[]) {\n if (args.length < 1) {\n System.out.println(\"Usage: java Twenty [N]\");\n return;\n }\n int N = Integer.parseInt(args[0]);\n List<Integer> digits = new ArrayList<>();\n digits.add(1); // start multiplication from 1\n for (int i = 1; i <= N; i++) multiply(i, digits);\n\n int sum = digits.stream().reduce(0, (a, b) -> a + b);\n System.out.println(sum);\n }\n\n public static void multiply(int mult, List<Integer> digits) {\n int carry = 0;\n for (int i = 0; i < digits.size(); i++) {\n int prod = mult * digits.get(i) + carry;\n int newDig = prod;\n newDig = prod % 10;\n carry = ((prod - newDig) / 10);\n digits.set(i, newDig);\n }\n if (carry > 10) {\n digits.add(carry % 10);\n digits.add((carry - (carry%10)) / 10);\n } else if (carry != 0) digits.add(carry);\n }\n}\n\n\n"
},
{
"alpha_fraction": 0.4909909963607788,
"alphanum_fraction": 0.5315315127372742,
"avg_line_length": 21.200000762939453,
"blob_id": "ff4770f761e77221ab5990411668f7d175af51da",
"content_id": "dae52cac6487cb4a74781bbb75219f457661ae6b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Java",
"length_bytes": 222,
"license_type": "no_license",
"max_line_length": 46,
"num_lines": 10,
"path": "/1/One.java",
"repo_name": "rfblue2/ProjectEulerSols",
"src_encoding": "UTF-8",
"text": "import java.util.stream.IntStream;\nclass One {\n public static void main(String[] args) {\n System.out.println(IntStream\n .range(1, 1000)\n .filter(x -> x % 3 == 0 || x % 5 == 0)\n .sum()\n );\n }\n}\n"
},
{
"alpha_fraction": 0.4851751923561096,
"alphanum_fraction": 0.5377358198165894,
"avg_line_length": 22.15625,
"blob_id": "9a3267b40662db283eb36e6903377e8288daab73",
"content_id": "44850fa366a03e434b9e91c445313d1f2b388409",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 742,
"license_type": "no_license",
"max_line_length": 71,
"num_lines": 32,
"path": "/26/26.py",
"repo_name": "rfblue2/ProjectEulerSols",
"src_encoding": "UTF-8",
"text": "from decimal import *\n\nMAX_DIG = 10000\ngetcontext().prec = MAX_DIG\n\n# takes n as string\ndef cycleLength(n):\n n = n[2:] # remove '0.'\n if len(n) < MAX_DIG:\n return 0\n ptr1 = 0\n ptr2 = 50 # assumption that longest match will be greater than this\n while ptr2 < MAX_DIG - ptr1 + 1:\n ptr1 = 0\n ptr2 += 1\n for i in range(ptr2):\n if n[ptr1+i:ptr2] == n[ptr2:ptr2*2-i]:\n return ptr2 - ptr1 + 1\n return -1\n\nlargest_i = 0\nlargest = 0\nfor i in range(2, 1000):\n l = cycleLength((Decimal(1)/Decimal(i)).to_eng_string())\n if l == -1:\n print(\"Reached MAX_DIG on d=\" + str(i))\n exit()\n if l > largest:\n largest = l\n largest_i = i\n\nprint(largest_i) \n"
},
{
"alpha_fraction": 0.5401974320411682,
"alphanum_fraction": 0.598025381565094,
"avg_line_length": 15.904762268066406,
"blob_id": "77f79e8a040965d9f4a7772fd42d4d858bbd4678",
"content_id": "5485f44688bf3a137bca9cde60c1f17b64fce5b2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 709,
"license_type": "no_license",
"max_line_length": 140,
"num_lines": 42,
"path": "/2/2.cpp",
"repo_name": "rfblue2/ProjectEulerSols",
"src_encoding": "UTF-8",
"text": "/*\nRoland Fong\n2/11/12\nEach new term in the Fibonacci sequence is generated by adding the previous two terms. By starting with 1 and 2, the first 10 terms will be:\n\n1, 2, 3, 5, 8, 13, 21, 34, 55, 89, ...\n\nBy considering the terms in the Fibonacci sequence whose values do not exceed four million, find the sum of the even-valued terms.\n*/\n\n#include <iostream>\nusing namespace std;\n\nint main()\n{\n\tint fib = 1;\n\tint pfib = 1;\n\tint ppfib = 1;\n\tint sum = 0;\n\tint n = 1;\n\tint r = 1;\n\n\twhile ( fib <= 4000000)\n\t{\n\t\tif (r == 0)\n\t\t{\n\t\t\tsum = sum + fib;\n\t\t}\n\t\telse\n\t\t{\n\t\t\tsum = sum;\n\t\t}\n\t\tfib = pfib + ppfib;\n\t\tppfib = pfib;\n\t\tpfib = fib;\n\t\tn = n + 1;\n\t\tr = fib % 2;\n\t}\n\tcout << sum << endl;\n\tsystem (\"pause\");\n\treturn 0;\n}"
},
{
"alpha_fraction": 0.49677419662475586,
"alphanum_fraction": 0.5204300880432129,
"avg_line_length": 18.33333396911621,
"blob_id": "7c12708771220de473238cecfedf18e14971b348",
"content_id": "fab521116275b066c0337681c08ebd539250a116",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 465,
"license_type": "no_license",
"max_line_length": 45,
"num_lines": 24,
"path": "/23/23.py",
"repo_name": "rfblue2/ProjectEulerSols",
"src_encoding": "UTF-8",
"text": "def isAbundant(n):\n s = 0\n for i in range(1,n):\n if n % i == 0:\n s += i\n return s > n\n\nMAX = 28123\n\nabundants = [None] * MAX\n\ntotal = 0\nfor i in range(1,MAX):\n abundants[i] = isAbundant(i)\n isSummable = False \n for j in range(1,i):\n # if i is sum of two abundant numbers\n if abundants[j] and abundants[i - j]:\n isSummable = True\n break\n if not isSummable:\n total += i\n\nprint(total)\n\n"
},
{
"alpha_fraction": 0.5231788158416748,
"alphanum_fraction": 0.6026490330696106,
"avg_line_length": 24,
"blob_id": "1d994044b53816fc47238f3e019fb2cb44fcf9b3",
"content_id": "38e4bf4ec860deb4a5c59328be30c7672629acb5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 151,
"license_type": "no_license",
"max_line_length": 85,
"num_lines": 6,
"path": "/30/30.py",
"repo_name": "rfblue2/ProjectEulerSols",
"src_encoding": "UTF-8",
"text": "# Roland Fong\n# 7/22/18\n# PE 30\n\n# Super inefficient silly one-liner\nprint(sum(filter(lambda i: i == sum(int(x)**5 for x in str(i)), range(2, 9**5 * 6))))\n\n"
},
{
"alpha_fraction": 0.47079646587371826,
"alphanum_fraction": 0.5451327562332153,
"avg_line_length": 24.636363983154297,
"blob_id": "684c16ed6e1b9edb147871c24a7cb9be440017bb",
"content_id": "dc0063eafafa7000b051baa77c1b7feb39afc709",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 565,
"license_type": "no_license",
"max_line_length": 78,
"num_lines": 22,
"path": "/33/33.py",
"repo_name": "rfblue2/ProjectEulerSols",
"src_encoding": "UTF-8",
"text": "# Roland Fong\n# 7/25/18\n# PE 33\n\nfracs = []\ndef areEqual(num, denom, newNum, newDenom):\n if newDenom == 0:\n return False\n return abs(float(num)/float(denom) - float(newNum)/float(newDenom)) < 1e-6\n\nfor num in range(10, 99):\n for denom in range(num+1, 100):\n n1 = int(num/10)\n n2 = int(num%10)\n d1 = int(denom/10)\n d2 = int(denom%10)\n if n1 == d2 and areEqual(num, denom, n2, d1):\n fracs.append((n2, d1))\n if n2 == d1 and areEqual(num, denom, n1, d2):\n fracs.append((n1, d2))\n\nprint(fracs)\n\n"
}
] | 36 |
TeodorG7/Give-me-a-challenge-appspotr | https://github.com/TeodorG7/Give-me-a-challenge-appspotr | 5b1ca100f102fa9aaa87a215ff049aabce106a35 | 643841c43468abe427d2247c8d11de30c9cf294d | 2b774a63d4fe88ca785444ce2e26437bdf76469b | refs/heads/master | 2018-01-08T09:25:00.266055 | 2015-09-23T09:28:11 | 2015-09-23T09:28:11 | 42,991,406 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.743534505367279,
"alphanum_fraction": 0.7532327771186829,
"avg_line_length": 27.9375,
"blob_id": "118254a160a4c49dec584a19c5dbe7c86d12a114",
"content_id": "17ce1162e4c6672426655e72dfc744d41418db96",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 928,
"license_type": "no_license",
"max_line_length": 108,
"num_lines": 32,
"path": "/givemearealchallenge.py",
"repo_name": "TeodorG7/Give-me-a-challenge-appspotr",
"src_encoding": "UTF-8",
"text": "import urllib2\nimport re\n\n#Getting the request as a string\nlongString = urllib2.urlopen(\"https://api2.appspotr.com/givemeachallenge\").read()\n\n#splitting string into a word list\nwordList = re.sub(\"[^\\w]\", \" \", longString).split()\n\n#removing everything infront of quiz:\nshavedWordList = wordList[32:len(wordList)]\n\ncounter = 0\nwordsToRemoveList = []\n#If there is a duplicate of a word add to itemsToRemoveList\nfor word in shavedWordList:\n\tcounter = 0\n\tfor duplicateWord in shavedWordList:\n\t\tif word == duplicateWord:\n\t\t\tcounter +=1\n\t\t\t#if duplicate\n\t\t\tif counter == 2:\n\t\t\t\twordsToRemoveList.append(word)\n\n\n#Remove all words that is in itemstoRemoveList from ShavedWordList. Remaining item will be the non duplicate\nfor word in wordsToRemoveList:\n while word in shavedWordList:\n shavedWordList.remove(word)\n\n#At this point we have the only word left that isnt a duplicate. It seems to be Abortion\nprint shavedWordList\n\n\n"
}
] | 1 |
majorr90/Luther | https://github.com/majorr90/Luther | d75f9fec0a9015e24f575392c2a12b7b8e850bd3 | 59623f53593a274f000ca8654a1b09072fe6854b | ede0143e747a0c011881d12d9207092b97b81f0e | refs/heads/master | 2017-09-12T13:39:02.941362 | 2017-07-07T23:31:40 | 2017-07-07T23:31:40 | 96,566,119 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.5783255100250244,
"alphanum_fraction": 0.588714599609375,
"avg_line_length": 67.18055725097656,
"blob_id": "d07955987c38d1176038699d7a1fc77279b562fe",
"content_id": "02bc0061f1717302198539a80b0d9475ddd2c823",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4909,
"license_type": "no_license",
"max_line_length": 219,
"num_lines": 72,
"path": "/Webscraping/TopMovies/TopMovies/spiders/movies.py",
"repo_name": "majorr90/Luther",
"src_encoding": "UTF-8",
"text": "import scrapy\n\nclass FestivalSpider(scrapy.Spider):\n\n name = 'top_movies'\n\n custom_settings = {\n \"DOWNLOAD_DELAY\": 3,\n \"CONCURRENT_REQUESTS_PER_DOMAIN\": 3,\n \"HTTPCACHE_ENABLED\": True\n }\n\n start_urls = [\n 'https://www.rottentomatoes.com/top/'\n ]\n\n def parse(self, response):\n\n for href in response.xpath(\n '//*[@id=\"main_container\"]/div[3]/div[2]/section/div/ul/li/a/@href'\n ).extract():\n\n yield scrapy.Request(\n url='https://www.rottentomatoes.com'+href,\n callback=self.parse_page,\n )\n\n def parse_page(self, response):\n\n for href in response.xpath(\n '//*[@id=\"top_movies_main\"]/div/table/tr/td[3]/a/@href'\n ).extract():\n\n yield scrapy.Request(\n url='https://www.rottentomatoes.com'+href,\n callback=self.parse_movies,\n meta={'url': 'https://www.rottentomatoes.com'+href}\n )\n\n def parse_movies(self, response):\n\n yield {\n 'url': response.request.meta['url'],\n 'name': response.xpath('//*[@id=\"movie-title\"]/text()').extract_first(),\n 'year': response.xpath('//*[@id=\"movie-title\"]/span/text()').extract_first(),\n 'tomatometer_all': response.xpath('//*[@id=\"tomato_meter_link\"]/span[2]/span/text()').extract_first(),\n 'crit_average_all': response.xpath('//*[@id=\"scoreStats\"]/div[1]/text()').extract()[1],\n 'number_reviews_all': response.xpath('//*[@id=\"scoreStats\"]/div[2]/span[2]/text()').extract_first(),\n 'fresh_all': response.xpath('//*[@id=\"scoreStats\"]/div[3]/span[2]').extract_first(),\n 'rotten_all': response.xpath('//*[@id=\"scoreStats\"]/div[4]/span[2]').extract_first(),\n 'tomatometer_top': response.xpath('//*[@id=\"tomato_meter_link\"]/span[2]/span/text()').extract()[1],\n 'crit_average_top': response.xpath('//*[@id=\"scoreStats\"]/div[1]/text()').extract()[3],\n 'number_reviews_top': response.xpath('//*[@id=\"scoreStats\"]/div[2]/span[2]/text()').extract()[1],\n 'fresh_top': response.xpath('//*[@id=\"scoreStats\"]/div[3]/span[2]').extract()[1],\n 'rotten_top': response.xpath('//*[@id=\"scoreStats\"]/div[4]/span[2]').extract()[1],\n 'audience_score': response.xpath('//*[@id=\"scorePanel\"]/div[2]/div[1]/a/div/div[2]/div[1]/span/text()').extract_first(),\n 'avg_user_rating': response.xpath('//*[@id=\"scorePanel\"]/div[2]/div[2]/div[1]/text()').extract()[1],\n 'number_user_ratings': response.xpath('//*[@id=\"scorePanel\"]/div[2]/div[2]/div[2]/text()').extract()[1],\n 'rating': response.xpath('//*[@id=\"mainColumn\"]/section[@class=\"panel panel-rt panel-box movie_info media\"]/div/div/ul/li[contains(.,\"Rating:\")]/div[2]/text()').extract_first(),\n 'genre': response.xpath('//*[@id=\"mainColumn\"]/section[@class=\"panel panel-rt panel-box movie_info media\"]/div/div/ul/li[contains(.,\"Genre:\")]/div[2]/a/text()').extract(),\n 'directors': response.xpath('//*[@id=\"mainColumn\"]/section[@class=\"panel panel-rt panel-box movie_info media\"]/div/div/ul/li[contains(.,\"Directed By:\")]/div[2]/a/text()').extract(),\n 'writers': response.xpath('//*[@id=\"mainColumn\"]/section[@class=\"panel panel-rt panel-box movie_info media\"]/div/div/ul/li[contains(.,\"Written By:\")]/div[2]/a/text()').extract(),\n 'theatrical_release': response.xpath('//*[@id=\"mainColumn\"]/section[@class=\"panel panel-rt panel-box movie_info media\"]/div/div/ul/li[contains(.,\"In Theaters:\")]/div[2]/time/@datetime').extract_first(),\n 'release_type': response.xpath('//*[@id=\"mainColumn\"]/section[@class=\"panel panel-rt panel-box movie_info media\"]/div/div/ul/li[contains(.,\"In Theaters:\")]/div[2]/span/text()').extract_first(),\n 'on_disc_streaming': 
response.xpath('//*[@id=\"mainColumn\"]/section[@class=\"panel panel-rt panel-box movie_info media\"]/div/div/ul/li[contains(.,\"On Disc/Streaming:\")]/div[2]/time/@datetime').extract_first(),\n 'box_office': response.xpath('//*[@id=\"mainColumn\"]/section[@class=\"panel panel-rt panel-box movie_info media\"]/div/div/ul/li[contains(.,\"Box Office:\")]/div[2]/text()').extract_first(),\n 'runtime': response.xpath('//*[@id=\"mainColumn\"]/section[@class=\"panel panel-rt panel-box movie_info media\"]/div/div/ul/li[contains(.,\"Runtime:\")]/div[2]/time/@datetime').extract_first(),\n 'studio': response.xpath('//*[@id=\"mainColumn\"]/section[@class=\"panel panel-rt panel-box movie_info media\"]/div/div/ul/li[contains(.,\"Studio:\")]/div[2]/a/text()').extract_first(),\n 'number_videos': response.xpath('//*[@id=\"movie-videos-panel\"]/div/div[2]/a/div/text()').extract_first(),\n 'number_photos': response.xpath('//*[@id=\"movie-photos-panel\"]/div/div[2]/a/div/text()').extract_first(),\n\n }\n"
}
] | 1 |
OsamaAlOlabi/black-jack-card-game | https://github.com/OsamaAlOlabi/black-jack-card-game | 6ec1c69c69a2983b71237ee73df3632cb30b1c04 | 8312fd48541d0f57c7c03239ba802576e003033b | 49154a06ee2c314b5bff17cab62266a8a438f987 | refs/heads/master | 2023-01-22T12:00:31.065403 | 2020-12-10T09:49:20 | 2020-12-10T09:49:20 | 320,227,329 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.7559523582458496,
"alphanum_fraction": 0.7559523582458496,
"avg_line_length": 17.66666603088379,
"blob_id": "13574a6e6c1d8fd865fb2b50adfe005ab6b218eb",
"content_id": "aae4102c2e161dfe23b642ca4c2e7d14c3d87cf4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 168,
"license_type": "no_license",
"max_line_length": 32,
"num_lines": 9,
"path": "/main.py",
"repo_name": "OsamaAlOlabi/black-jack-card-game",
"src_encoding": "UTF-8",
"text": "import art\nimport game_logic\n\nprint(art.logo)\n\ngame_logic.random_card_for_me()\ngame_logic.random_card_for_me()\ngame_logic.random_card_for_bot()\ngame_logic.black_jack()\n"
},
{
"alpha_fraction": 0.5008090734481812,
"alphanum_fraction": 0.5186083912849426,
"avg_line_length": 27.744186401367188,
"blob_id": "4e8bddf092f0fc7aa89e041734cf747a63e8a366",
"content_id": "a3e810f9ab1a061bad52bdc00553eab9d523c3c5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3708,
"license_type": "no_license",
"max_line_length": 85,
"num_lines": 129,
"path": "/game_logic.py",
"repo_name": "OsamaAlOlabi/black-jack-card-game",
"src_encoding": "UTF-8",
"text": "import art\nimport random\n\n\ncards = [11, 2, 3, 4, 5, 6, 7, 8, 9, 10, 10, 10, 10]\nmy_cards = []\nbot_cards = []\nmy_score = 0\nbot_score = 0\n\n\n# Choose a random card for the player\ndef random_card_for_me():\n global my_score\n\n my_card_num_index = random.randrange(len(cards))\n my_card_num_value = cards[my_card_num_index]\n my_cards.append(my_card_num_value)\n my_score = sum(my_cards)\n\n # If random card is 11, check if the sum of cards > 21 then change 11 to 1\n while True:\n if 11 in my_cards and my_score > 21:\n change = my_cards.index(11)\n my_cards[change] = 1\n else:\n break\n\n my_score = sum(my_cards)\n\n\n# Choose a random card for the dealer\ndef random_card_for_bot():\n global bot_score\n\n bot_card_num_index = random.randrange(len(cards))\n bot_card_num_value = cards[bot_card_num_index]\n bot_cards.append(bot_card_num_value)\n bot_score = sum(bot_cards)\n\n # If random card is 11, check if the sum of cards > 21 then change 11 to 1\n while True:\n if 11 in bot_cards and bot_score > 21:\n change = bot_cards.index(11)\n bot_cards[change] = 1\n else:\n break\n\n bot_score = sum(bot_cards)\n\n\n# How the game functions\ndef black_jack():\n\n\n print(f\"Your cards are {my_cards}, your current score: {my_score}\")\n print(f\"Dealer starting card is: {bot_cards}\")\n\n # How the dealer functions\n def bot_logic():\n while bot_score <= my_score:\n random_card_for_bot()\n if bot_score == 21 and my_score == 21:\n print(f\"Your final hand is: {my_cards}, final score: {my_score}\")\n print(f\"Dealer final hand is: {bot_cards}, final score: {bot_score}\")\n print(\"It's a draw\")\n play_again()\n break\n elif bot_score > 21:\n print(f\"Your final hand is: {my_cards}, final score: {my_score}\")\n print(f\"Dealer final hand is: {bot_cards}, final score: {bot_score}\")\n print(f\"You win, the dealer {bot_score} is above 21\")\n play_again()\n break\n\n print(f\"Your final hand is: {my_cards}, final score: {my_score}\")\n print(f\"Dealer final hand is: {bot_cards}, final score: {bot_score}\")\n print(f\"You lose, the dealer has {bot_score} and you have {my_score}\\n\")\n play_again()\n\n if my_score == 21:\n bot_logic()\n\n elif my_score > 21:\n print(f\"\\nYou're score is {my_score} which is above 21.\")\n print(\"You lose\\n\")\n play_again()\n elif my_score < 21:\n while True:\n another_card = input(\"Would you like another card? 'Y' or 'N' \").lower()\n print(\"\")\n if another_card == \"y\":\n random_card_for_me()\n black_jack()\n break\n elif another_card == \"n\":\n bot_logic()\n break\n else:\n print(\"Please type 'Y' or 'N'\")\n\n\n# Play again function\ndef play_again():\n global my_score\n global bot_score\n global my_cards\n global bot_cards\n\n while True:\n repeat = input(\"Would you like to play again? 'Y' or 'N' \")\n if repeat == \"y\":\n my_cards = []\n bot_cards = []\n my_score = 0\n bot_score = 0\n print(\"*************************************************************\")\n print(art.logo)\n\n random_card_for_me()\n random_card_for_me()\n random_card_for_bot()\n\n black_jack()\n break\n elif repeat == \"n\":\n exit()\n else:\n print(\"Please type 'Y' or 'N'\")\n"
}
] | 2 |
jorgezafra94/AirBnB_clone_v3 | https://github.com/jorgezafra94/AirBnB_clone_v3 | bd9c1b0c082b3c667c382b1fe2ffb21ec6a21bba | acb9c336649e26cf08eb4801fa5e474135eaa102 | ed34d226173e4188c6ab52bf86caa8af6f4e6a62 | refs/heads/master | 2020-12-21T17:38:19.929675 | 2020-01-31T02:47:35 | 2020-01-31T02:47:35 | 236,505,878 | 0 | 2 | null | 2020-01-27T14:15:10 | 2020-01-31T02:01:40 | 2020-01-31T02:47:36 | Python | [
{
"alpha_fraction": 0.5135859251022339,
"alphanum_fraction": 0.5246842503547668,
"avg_line_length": 27.714284896850586,
"blob_id": "a2741c45c4f717d322f9f0ca01178023bfd3b121",
"content_id": "6909438371a50cc1531ddb06c7bfe829bfadb153",
"detected_licenses": [
"LicenseRef-scancode-public-domain"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2613,
"license_type": "permissive",
"max_line_length": 73,
"num_lines": 91,
"path": "/api/v1/views/cities.py",
"repo_name": "jorgezafra94/AirBnB_clone_v3",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python3\n\"\"\" City APIRest\n careful by default it uses get method\n\"\"\"\n\nfrom models import storage\nfrom models.state import State\nfrom models.city import City\nfrom api.v1.views import app_views\nfrom flask import jsonify, abort, request\n\n\n@app_views.route('/states/<state_id>/cities', methods=['GET'])\ndef list_cities(state_id):\n \"\"\" list all cities from a specified state\n \"\"\"\n lista = []\n dic = storage.all('State')\n for key in dic:\n if state_id == dic[key].id:\n cities = dic[key].cities\n for elem in cities:\n lista.append(elem.to_dict())\n return (jsonify(lista))\n abort(404)\n\n\n@app_views.route('/cities/<city_id>', methods=['GET'])\ndef city_id(city_id):\n \"\"\" return the city\n \"\"\"\n dic = storage.all('City')\n for key in dic:\n if city_id == dic[key].id:\n return (jsonify(dic[key].to_dict()))\n abort(404)\n\n\n@app_views.route('/cities/<city_id>', methods=['DELETE'])\ndef city_delete(city_id):\n \"\"\" delete the delete\n \"\"\"\n dic = storage.all('City')\n for key in dic:\n if city_id == dic[key].id:\n dic[key].delete()\n storage.save()\n return (jsonify({}))\n abort(404)\n\n\n@app_views.route('/states/<state_id>/cities', methods=['POST'])\ndef add_city(state_id):\n \"\"\" create a city of a specified state\n \"\"\"\n lista = []\n dic = storage.all('State')\n for key in dic:\n if state_id == dic[key].id:\n content = request.get_json()\n if not request.json:\n return (jsonify(\"Not a JSON\"), 400)\n else:\n if \"name\" not in content.keys():\n return (jsonify(\"Missing name\"), 400)\n else:\n content[\"state_id\"] = state_id\n new_city = City(**content)\n new_city.save()\n return jsonify(new_city.to_dict()), 201\n abort(404)\n\n\n@app_views.route('/cities/<city_id>', methods=['PUT'])\ndef update_city(city_id):\n \"\"\" update specified city\n \"\"\"\n dic = storage.all('City')\n for key in dic:\n if city_id == dic[key].id:\n if not request.json:\n return (jsonify(\"Not a JSON\"), 400)\n else:\n forbidden = [\"id\", \"update_at\", \"created_at\", \"state_id\"]\n content = request.get_json()\n for k in content:\n if k not in forbidden:\n setattr(dic[key], k, content[k])\n dic[key].save()\n return jsonify(dic[key].to_dict())\n abort(404)\n"
},
{
"alpha_fraction": 0.6244411468505859,
"alphanum_fraction": 0.6274217367172241,
"avg_line_length": 22.964284896850586,
"blob_id": "fe7aa802023c7cfb7b2b8a95a85d870af00028ed",
"content_id": "cd76e3c10d24e77115848713591029ef63deeaf2",
"detected_licenses": [
"LicenseRef-scancode-public-domain"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 671,
"license_type": "permissive",
"max_line_length": 47,
"num_lines": 28,
"path": "/api/v1/views/index.py",
"repo_name": "jorgezafra94/AirBnB_clone_v3",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python3\n\"\"\"Creationg route for Blueprint\n\"\"\"\nfrom api.v1.views import app_views\nfrom models import storage\nfrom flask import jsonify\n\n\n@app_views.route('/status')\ndef response():\n \"\"\" get status ok\n \"\"\"\n dic = {\"status\": \"OK\"}\n return jsonify(dic)\n\n\n@app_views.route('/stats')\ndef class_counter():\n \"\"\" get a dictionary from count method\n \"\"\"\n dic = {}\n dic[\"amenities\"] = storage.count(\"Amenity\")\n dic[\"cities\"] = storage.count(\"City\")\n dic[\"places\"] = storage.count(\"Place\")\n dic[\"reviews\"] = storage.count(\"Review\")\n dic[\"states\"] = storage.count(\"State\")\n dic[\"users\"] = storage.count(\"User\")\n return jsonify(dic)\n"
},
{
"alpha_fraction": 0.5147842764854431,
"alphanum_fraction": 0.5244789123535156,
"avg_line_length": 26.87837791442871,
"blob_id": "34d9118a6d312fc237d0d45c8b8ead1215148a97",
"content_id": "5276548b51fc3ab34f01c53a6a89bbb2607ab139",
"detected_licenses": [
"LicenseRef-scancode-public-domain"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2063,
"license_type": "permissive",
"max_line_length": 65,
"num_lines": 74,
"path": "/api/v1/views/states.py",
"repo_name": "jorgezafra94/AirBnB_clone_v3",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python3\n\"\"\" State APIRest\n\"\"\"\n\nfrom models import storage\nfrom models.state import State\nfrom api.v1.views import app_views\nfrom flask import jsonify, abort, request\n\n\n@app_views.route('/states', methods=['GET'])\ndef list_dict():\n \"\"\" list of an objetc in a dict form\n \"\"\"\n lista = []\n dic = storage.all('State')\n for elem in dic:\n lista.append(dic[elem].to_dict())\n return (jsonify(lista))\n\n\n@app_views.route('/states/<state_id>', methods=['GET', 'DELETE'])\ndef state_id(state_id):\n \"\"\" realize the specific action depending on method\n \"\"\"\n lista = []\n dic = storage.all('State')\n for elem in dic:\n var = dic[elem].to_dict()\n if var[\"id\"] == state_id:\n if request.method == 'GET':\n return (jsonify(var))\n elif request.method == 'DELETE':\n aux = {}\n dic[elem].delete()\n storage.save()\n return (jsonify(aux))\n abort(404)\n\n\n@app_views.route('/states', methods=['POST'])\ndef add_item():\n \"\"\" add a new item\n \"\"\"\n if not request.json:\n return jsonify(\"Not a JSON\"), 400\n else:\n content = request.get_json()\n if \"name\" not in content.keys():\n return jsonify(\"Missing name\"), 400\n else:\n new_state = State(**content)\n new_state.save()\n return (jsonify(new_state.to_dict()), 201)\n\n\n@app_views.route('/states/<state_id>', methods=['PUT'])\ndef update_item(state_id):\n \"\"\" update item\n \"\"\"\n dic = storage.all(\"State\")\n for key in dic:\n if dic[key].id == state_id:\n if not request.json:\n return jsonify(\"Not a JSON\"), 400\n else:\n forbidden = [\"id\", \"update_at\", \"created_at\"]\n content = request.get_json()\n for k in content:\n if k not in forbidden:\n setattr(dic[key], k, content[k])\n dic[key].save()\n return(jsonify(dic[key].to_dict()))\n abort(404)\n"
},
{
"alpha_fraction": 0.5278564691543579,
"alphanum_fraction": 0.5372993350028992,
"avg_line_length": 27.62162208557129,
"blob_id": "ee76ef185c149853900de2d66e31165832c8f27e",
"content_id": "655d9ca02fb8754fcd517ecffd9c0f2667064682",
"detected_licenses": [
"LicenseRef-scancode-public-domain"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2118,
"license_type": "permissive",
"max_line_length": 70,
"num_lines": 74,
"path": "/api/v1/views/amenities.py",
"repo_name": "jorgezafra94/AirBnB_clone_v3",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python3\n\"\"\" Amenities APIRest\n\"\"\"\n\nfrom models import storage\nfrom models.amenity import Amenity\nfrom api.v1.views import app_views\nfrom flask import jsonify, abort, request\n\n\n@app_views.route('/amenities', methods=['GET'])\ndef amenity_list():\n \"\"\" list of objetc in dict form\n \"\"\"\n lista = []\n dic = storage.all('Amenity')\n for elem in dic:\n lista.append(dic[elem].to_dict())\n return (jsonify(lista))\n\n\n@app_views.route('/amenities/<amenity_id>', methods=['GET', 'DELETE'])\ndef amenity_id(amenity_id):\n \"\"\" realize the specific action depending on a method\n \"\"\"\n lista = []\n dic = storage.all('Amenity')\n for elem in dic:\n var = dic[elem].to_dict()\n if var[\"id\"] == amenity_id:\n if request.method == 'GET':\n return (jsonify(var))\n elif request.method == 'DELETE':\n aux = {}\n dic[elem].delete()\n storage.save()\n return (jsonify(aux))\n abort(404)\n\n\n@app_views.route('/amenities', methods=['POST'])\ndef amenity_item():\n \"\"\" add a new item\n \"\"\"\n if not request.json:\n return jsonify(\"Not a JSON\"), 400\n else:\n content = request.get_json()\n if \"name\" not in content.keys():\n return jsonify(\"Missing name\"), 400\n else:\n new_amenity = Amenity(**content)\n new_amenity.save()\n return (jsonify(new_amenity.to_dict()), 201)\n\n\n@app_views.route('/amenities/<amenity_id>', methods=['PUT'])\ndef update_amenity(amenity_id):\n \"\"\" update item\n \"\"\"\n dic = storage.all(\"Amenity\")\n for key in dic:\n if dic[key].id == amenity_id:\n if not request.json:\n return jsonify(\"Not a JSON\"), 400\n else:\n forbidden = [\"id\", \"update_at\", \"created_at\"]\n content = request.get_json()\n for k in content:\n if k not in forbidden:\n setattr(dic[key], k, content[k])\n dic[key].save()\n return(jsonify(dic[key].to_dict()))\n abort(404)\n"
},
{
"alpha_fraction": 0.5314327478408813,
"alphanum_fraction": 0.5449561476707458,
"avg_line_length": 27.5,
"blob_id": "c9d52183dd590ee7c9d827a49aaafb3df1cc8ee9",
"content_id": "9a973b6c3e707f89214b0dd1f044e88e1dcdc33e",
"detected_licenses": [
"LicenseRef-scancode-public-domain"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2736,
"license_type": "permissive",
"max_line_length": 64,
"num_lines": 96,
"path": "/api/v1/views/places_reviews.py",
"repo_name": "jorgezafra94/AirBnB_clone_v3",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python3\n\"\"\" Place_reviews APIRest\n\"\"\"\n\nfrom models import storage\nfrom models.place import Place\nfrom models.user import User\nfrom models.review import Review\nfrom api.v1.views import app_views\nfrom flask import jsonify, abort, request\n\n\n@app_views.route('/places/<place_id>/reviews', methods=['GET'])\ndef rev_list(place_id):\n \"\"\" list of objetc in dict form\n \"\"\"\n lista = []\n dic = storage.all('Place')\n for elem in dic:\n if dic[elem].id == place_id:\n var = dic[elem].reviews\n for i in var:\n lista.append(i.to_dict())\n return (jsonify(lista))\n abort(404)\n\n\n@app_views.route('/reviews/<review_id>', methods=['GET'])\ndef review(review_id):\n \"\"\" list of objetc in dict form\n \"\"\"\n dic = storage.all('Review')\n for elem in dic:\n if dic[elem].id == review_id:\n return (jsonify(dic[elem].to_dict()))\n abort(404)\n\n\n@app_views.route('/reviews/<review_id>', methods=['DELETE'])\ndef rev_delete(review_id):\n \"\"\" delete the delete\n \"\"\"\n dic = storage.all('Review')\n for key in dic:\n if review_id == dic[key].id:\n dic[key].delete()\n storage.save()\n return (jsonify({}))\n abort(404)\n\n\n@app_views.route('/places/<place_id>/reviews', methods=['POST'])\ndef add_rev(place_id):\n \"\"\" create a review of a specified city\n \"\"\"\n lista = []\n obj = storage.get(\"Place\", place_id)\n content = request.get_json()\n if not obj:\n abort(404)\n if not request.json:\n return (jsonify(\"Not a JSON\"), 400)\n else:\n if \"user_id\" not in content.keys():\n return (jsonify(\"Missing user_id\"), 400)\n obj2 = storage.get(\"User\", content[\"user_id\"])\n if not obj2:\n abort(404)\n if \"text\" not in content.keys():\n return (jsonify(\"Missing text\"), 400)\n\n content[\"place_id\"] = place_id\n new_place = Review(**content)\n new_place.save()\n return jsonify(new_place.to_dict()), 201\n\n\n@app_views.route('/reviews/<review_id>', methods=['PUT'])\ndef update_rev(review_id):\n \"\"\" update a specified place\n \"\"\"\n dic = storage.all('Review')\n for key in dic:\n if review_id == dic[key].id:\n if not request.json:\n return (jsonify(\"Not a JSON\"), 400)\n else:\n forbidden = [\"id\", \"update_at\", \"created_at\",\n \"place_id\", \"user_id\"]\n content = request.get_json()\n for k in content:\n if k not in forbidden:\n setattr(dic[key], k, content[k])\n dic[key].save()\n return jsonify(dic[key].to_dict())\n abort(404)\n"
},
{
"alpha_fraction": 0.5227081775665283,
"alphanum_fraction": 0.5336416959762573,
"avg_line_length": 28.540372848510742,
"blob_id": "a2a342f725ce739b36413ab126e41410926bc583",
"content_id": "40f2e4a0287fd4c319908d8f6ed76ff567ec9ce3",
"detected_licenses": [
"LicenseRef-scancode-public-domain"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4756,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 161,
"path": "/api/v1/views/places.py",
"repo_name": "jorgezafra94/AirBnB_clone_v3",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python3\n\"\"\" Place APIRest\n\"\"\"\n\nfrom models import storage\nfrom models.place import Place\nfrom models.city import City\nfrom models.user import User\nfrom api.v1.views import app_views\nfrom flask import jsonify, abort, request\n\n\n@app_views.route('/cities/<city_id>/places', methods=['GET'])\ndef places_list(city_id):\n \"\"\" list of an objetc in a dict form\n \"\"\"\n lista = []\n dic = storage.all('City')\n for elem in dic:\n if dic[elem].id == city_id:\n var = dic[elem].places\n for i in var:\n lista.append(i.to_dict())\n return (jsonify(lista))\n abort(404)\n\n\n@app_views.route('/places/<place_id>', methods=['GET'])\ndef place(place_id):\n \"\"\" list of objetc in dict form\n \"\"\"\n dic = storage.all('Place')\n for elem in dic:\n if dic[elem].id == place_id:\n return (jsonify(dic[elem].to_dict()))\n abort(404)\n\n\n@app_views.route('/places/<place_id>', methods=['DELETE'])\ndef place_delete(place_id):\n \"\"\" delete the delete\n \"\"\"\n dic = storage.all('Place')\n for key in dic:\n if place_id == dic[key].id:\n dic[key].delete()\n storage.save()\n return (jsonify({}))\n abort(404)\n\n\n@app_views.route('/cities/<city_id>/places', methods=['POST'])\ndef add_place(city_id):\n \"\"\" create a place of a specified city\n \"\"\"\n lista = []\n obj = storage.get(\"City\", city_id)\n content = request.get_json()\n if not obj:\n abort(404)\n if not request.json:\n return (jsonify(\"Not a JSON\"), 400)\n else:\n if \"user_id\" not in content.keys():\n return (jsonify(\"Missing user_id\"), 400)\n obj2 = storage.get(\"User\", content[\"user_id\"])\n if not obj2:\n abort(404)\n if \"name\" not in content.keys():\n return (jsonify(\"Missing name\"), 400)\n\n content[\"city_id\"] = city_id\n new_place = Place(**content)\n new_place.save()\n return jsonify(new_place.to_dict()), 201\n\n\n@app_views.route('/places/<place_id>', methods=['PUT'])\ndef update_place(place_id):\n \"\"\" update specified place\n \"\"\"\n dic = storage.all('Place')\n for key in dic:\n if place_id == dic[key].id:\n if not request.json:\n return (jsonify(\"Not a JSON\"), 400)\n else:\n forbidden = [\"id\", \"update_at\", \"created_at\",\n \"city_id\", \"user_id\"]\n content = request.get_json()\n for k in content:\n if k not in forbidden:\n setattr(dic[key], k, content[k])\n dic[key].save()\n return jsonify(dic[key].to_dict())\n abort(404)\n\n# ----------------------advanced task ---------------------------------------\n\n\n@app_views.route('/places_search', methods=['POST'])\ndef advanced():\n \"\"\" return all places per city, state or amenities\n return all places that has all amenities\n return all places that belong to city or state\n permited keys states, cities, amenities\n \"\"\"\n # rule 0\n content = request.get_json(force=True, silent=True)\n if content is None:\n return jsonify('Not a JSON'), 400\n # rule 1\n result, places = [], []\n if len(content) == 0:\n places = storage.all(\"Place\").values()\n for elem in places:\n result.append(elem.to_dict())\n return jsonify(result)\n\n flag = 0\n for key in content:\n if len(content[key]) > 0:\n flag = 1\n break\n if flag == 0:\n places = storage.all(\"Place\").values()\n for elem in places:\n result.append(elem.to_dict())\n return jsonify(result)\n # rule 2\n if \"states\" in content.keys() and len(content[\"states\"]) > 0:\n states = content[\"states\"]\n for id in states:\n st = storage.get(\"State\", id)\n if st:\n for city in st.cities:\n for pl in city.places:\n places.append(pl)\n # rule 3\n if \"cities\" in content.keys() and 
len(content[\"cities\"]) > 0:\n cities = content[\"cities\"]\n for id in cities:\n ct = storage.get(\"City\", id)\n if ct:\n for pl in ct.places:\n places.append(pl)\n\n places = list(set(places))\n\n if \"amenities\" in content.keys() and len(content[\"amenities\"]) > 0:\n ame = []\n for id in content[\"amenities\"]:\n ame.append(storage.get(\"Amenity\", id))\n places = [pl for pl in places if all([a in pl.amenities for a in ame])]\n\n for elem in places:\n var = elem.to_dict()\n if \"amenities\" in var.keys():\n del var[\"amenities\"]\n result.append(var)\n return jsonify(result)\n"
},
{
"alpha_fraction": 0.6493184566497803,
"alphanum_fraction": 0.6679058074951172,
"avg_line_length": 20.810810089111328,
"blob_id": "b52073b9b0ac8be4a8fcc783f1e6fa2115f9e20e",
"content_id": "0b7d683380ce3ce990e500307bedb6df2da80878",
"detected_licenses": [
"LicenseRef-scancode-public-domain"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 807,
"license_type": "permissive",
"max_line_length": 52,
"num_lines": 37,
"path": "/api/v1/app.py",
"repo_name": "jorgezafra94/AirBnB_clone_v3",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python3\n\"\"\"\nA python script that starts a Flask web application\n\"\"\"\nfrom flask import Flask, Blueprint, abort, jsonify\nfrom models import storage\nfrom api.v1.views import app_views\nfrom os import getenv\nfrom flask_cors import CORS\n\n\napp = Flask(__name__)\napp.url_map.strict_slashes = False\napp.register_blueprint(app_views)\nCORS(app, resources={r\"/*\": {\"origins\": \"0.0.0.0\"}})\n\n\[email protected](404)\ndef page_not_found(error):\n \"\"\"\n Function that shows a 404 error\n \"\"\"\n return (jsonify(error=\"Not found\"), 404)\n\n\[email protected]_appcontext\ndef teardown(exception=None):\n \"\"\"\n Function closes the current session\n \"\"\"\n storage.close()\n\n\nif __name__ == '__main__':\n h = getenv('HBNB_API_HOST')\n p = getenv('HBNB_API_PORT')\n app.run(host=h, port=p, threaded=True)\n"
},
{
"alpha_fraction": 0.5129990577697754,
"alphanum_fraction": 0.523676872253418,
"avg_line_length": 27.342105865478516,
"blob_id": "03e7e2e6be28d26266ccac0777077c982a3af4bc",
"content_id": "3b2e28148d7aa2f1817e32fb034a1dcdeeb3fb3e",
"detected_licenses": [
"LicenseRef-scancode-public-domain"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2154,
"license_type": "permissive",
"max_line_length": 70,
"num_lines": 76,
"path": "/api/v1/views/users.py",
"repo_name": "jorgezafra94/AirBnB_clone_v3",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python3\n\"\"\" User APIRest\n\"\"\"\n\nfrom models import storage\nfrom models.user import User\nfrom api.v1.views import app_views\nfrom flask import jsonify, abort, request\n\n\n@app_views.route('/users', methods=['GET'])\ndef user_list():\n \"\"\" list of an objetc in a dict form\n \"\"\"\n lista = []\n dic = storage.all('User')\n for elem in dic:\n lista.append(dic[elem].to_dict())\n return (jsonify(lista))\n\n\n@app_views.route('/users/<user_id>', methods=['GET', 'DELETE'])\ndef user_id(user_id):\n \"\"\" realize the specific action depending on method\n \"\"\"\n lista = []\n dic = storage.all('User')\n for elem in dic:\n var = dic[elem].to_dict()\n if var[\"id\"] == user_id:\n if request.method == 'GET':\n return (jsonify(var))\n elif request.method == 'DELETE':\n aux = {}\n dic[elem].delete()\n storage.save()\n return (jsonify(aux))\n abort(404)\n\n\n@app_views.route('/users', methods=['POST'])\ndef user_item():\n \"\"\" add a new item\n \"\"\"\n if not request.json:\n return jsonify(\"Not a JSON\"), 400\n else:\n content = request.get_json()\n if \"email\" not in content.keys():\n return jsonify(\"Missing email\"), 400\n if \"password\" not in content.keys():\n return jsonify(\"Missing password\"), 400\n else:\n new_user = User(**content)\n new_user.save()\n return (jsonify(new_user.to_dict()), 201)\n\n\n@app_views.route('/users/<user_id>', methods=['PUT'])\ndef update_user(user_id):\n \"\"\" update an item\n \"\"\"\n dic = storage.all(\"User\")\n for key in dic:\n if dic[key].id == user_id:\n if not request.json:\n return jsonify(\"Not a JSON\"), 400\n else:\n forbidden = [\"id\", \"email\", \"update_at\", \"created_at\"]\n content = request.get_json()\n for k in content:\n if k not in forbidden:\n setattr(dic[key], k, content[k])\n dic[key].save()\n return(jsonify(dic[key].to_dict()))\n abort(404)\n"
},
{
"alpha_fraction": 0.5625,
"alphanum_fraction": 0.5713541507720947,
"avg_line_length": 25.30137062072754,
"blob_id": "21713c7160a313760a7d94ed15e36d6e4458826d",
"content_id": "e7b45d549140c5693e4fa868a4ca91fb3aa7a427",
"detected_licenses": [
"LicenseRef-scancode-public-domain"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1920,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 73,
"path": "/api/v1/views/places_amenities.py",
"repo_name": "jorgezafra94/AirBnB_clone_v3",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python3\n\"\"\" Place_amenities Restful API\n\"\"\"\n\nfrom models import storage\nfrom models.place import Place\nfrom models.user import User\nfrom models.amenity import Amenity\nfrom models.review import Review\nfrom api.v1.views import app_views\nfrom flask import jsonify, abort, request\nfrom os import getenv\n\ntype = getenv('HBNB_TYPE_STORAGE')\n\n\n@app_views.route('/places/<place_id>/amenities', methods=['GET'])\ndef am_list(place_id):\n \"\"\" list of an objetc in dict form\n \"\"\"\n lista = []\n dic = storage.all('Place')\n for elem in dic:\n if dic[elem].id == place_id:\n var = dic[elem].amenities\n for i in var:\n lista.append(i.to_dict())\n return (jsonify(lista))\n abort(404)\n\n\n@app_views.route('/places/<place_id>/amenities/<amenity_id>',\n methods=['DELETE'])\ndef am_delete(place_id, amenity_id):\n \"\"\" delete the object\n \"\"\"\n p = storage.get(\"Place\", place_id)\n a = storage.get(\"Amenity\", amenity_id)\n if not a or not p:\n abort(404)\n for elem in p.amenities:\n if elem.id == a.id:\n if type == 'db':\n p.amenities.remove(a)\n else:\n p.amenity_ids.remove(a)\n p.save()\n return jsonify({})\n abort(404)\n\n\n@app_views.route('/places/<place_id>/amenities/<amenity_id>', methods=['POST'])\ndef add_am(place_id, amenity_id):\n \"\"\" create an amenity of a city\n \"\"\"\n lista = []\n obj = storage.get(\"Place\", place_id)\n p = storage.get(\"Place\", place_id)\n a = storage.get(\"Amenity\", amenity_id)\n print(a)\n print(\"+++++++++++\")\n print(p)\n if not a or not p:\n abort(404)\n for elem in p.amenities:\n if elem.id == a.id:\n return jsonify(a.to_dict())\n if type == 'db':\n p.amenities.append(a)\n else:\n p.amenity_id.append(a)\n p.save()\n return jsonify(a.to_dict()), 201\n"
}
] | 9 |
aleksandr-rakov/workdays_calendar | https://github.com/aleksandr-rakov/workdays_calendar | eeb2d66dc1bfd5ce10b6206b1cb932a280bba93b | 00aa27caee8b1659bc988fe5092bfe64c3cf0c51 | 8d59ba918601fbb260a675c97a8e39821e0e01b9 | refs/heads/master | 2020-02-26T15:01:55.423475 | 2018-10-24T11:34:16 | 2018-10-24T11:34:16 | 83,237,799 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.660612940788269,
"alphanum_fraction": 0.6662883162498474,
"avg_line_length": 21.024999618530273,
"blob_id": "3d64c0b7d0b2a953a50dad6d350ca0ae2b6563cb",
"content_id": "4ac5ab94b5af6452cb6b8b305bdc81f8d2f0c72d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 881,
"license_type": "no_license",
"max_line_length": 63,
"num_lines": 40,
"path": "/workdays_calendar/scripts/init_db.py",
"repo_name": "aleksandr-rakov/workdays_calendar",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf8 -*-\nimport sys\nimport os\nfrom pyramid.paster import bootstrap\nfrom pymongo import MongoClient\nimport workdays_calendar.users\nimport workdays_calendar.tags\nimport workdays_calendar.days_calendar\n\ndb=None\n\ndef setup(settings):\n global db\n db=MongoClient(settings['mongo.uri'])[settings['mongo.db']]\n\ndef usage(argv):\n cmd = os.path.basename(argv[0])\n print('usage: %s <config_uri>\\n'\n '(example: \"%s development.ini\")' % (cmd, cmd))\n sys.exit(1)\n\ndef run(settings):\n\n workdays_calendar.users.init_db(db,settings)\n \n workdays_calendar.tags.init_db(db,settings)\n\n workdays_calendar.days_calendar.init_db(db,settings)\n \ndef main():\n if 2!=len(sys.argv):\n usage(sys.argv)\n env = bootstrap(sys.argv[1])\n settings=env['registry'].settings\n setup(settings)\n\n run(settings)\n\nif __name__=='__main__':\n main()\n"
},
{
"alpha_fraction": 0.5311004519462585,
"alphanum_fraction": 0.5330143570899963,
"avg_line_length": 26.5,
"blob_id": "18361cf860bb4c19a8daa0a1d02a26d56ec58560",
"content_id": "a1ec44f8b71682bf23dd9751ed24f1505c1c1ae2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2131,
"license_type": "no_license",
"max_line_length": 82,
"num_lines": 76,
"path": "/workdays_calendar/tags.py",
"repo_name": "aleksandr-rakov/workdays_calendar",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf8 -*-\nimport workdays_calendar.api as api\nimport colander\nfrom bson import ObjectId\nfrom workdays_calendar.collection_names import TAGS_COLLECTION\n\nHOLIDAY_TAG='holiday'\n\ndef init_db(db,settings):\n required_tags=[HOLIDAY_TAG]\n for tag in required_tags:\n if db[TAGS_COLLECTION].find_one({'name':tag}) is None:\n db[TAGS_COLLECTION].insert({\n 'name': tag,\n 'color': 'red'\n })\n\[email protected]\ndef name_validator(node,kw):\n db=kw['db']\n tagid=kw['tagid']\n def validator(form, value):\n colander.Length(max=50)(form, value)\n if db[TAGS_COLLECTION].find_one({'name':value,'_id':{'$ne':tagid}}):\n raise colander.Invalid(\n form, \n u'Тег с таким именем уже есть'\n )\n return validator\n\nclass TagsSchema(colander.Schema):\n name = colander.SchemaNode(\n colander.String(),\n validator=name_validator,\n )\n color = colander.SchemaNode(\n colander.String(),\n )\n\nclass TagsViews(api.BaseViews):\n\n @api.view(path='tags', method='GET')\n def view_tags(self):\n result=list(self.db[TAGS_COLLECTION].find({},{'password':0}).sort('name'))\n \n return result\n\n @api.view(path='tags', method='PUT')\n def view_tag_create(self):\n schema=TagsSchema().bind(\n db=self.db,\n tagid=None\n )\n data=self.validated_data(schema)\n self.db[TAGS_COLLECTION].insert(\n data\n )\n return {\n 'message': u'Тег создан'\n }\n\n @api.view(path='tags/{tag_id}', method='POST')\n def view_tag_update(self):\n tagid=ObjectId(self.params['tag_id'])\n schema=TagsSchema().bind(\n db=self.db,\n tagid=tagid\n )\n data=self.validated_data(schema)\n self.db[TAGS_COLLECTION].update(\n {'_id': tagid},\n {'$set': data}\n )\n return {\n 'message': u'Тег изменен'\n }\n"
},
{
"alpha_fraction": 0.6218035221099854,
"alphanum_fraction": 0.6352624297142029,
"avg_line_length": 20.852941513061523,
"blob_id": "d8e9024684f76c1eecf0950c6aa489c8f9325368",
"content_id": "1d7927df85999a1b48c9942b9dcdf1cd6d78fa4f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 743,
"license_type": "no_license",
"max_line_length": 63,
"num_lines": 34,
"path": "/workdays_calendar/scripts/gen_year.py",
"repo_name": "aleksandr-rakov/workdays_calendar",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf8 -*-\nimport sys\nimport os\nfrom pyramid.paster import bootstrap\nfrom pymongo import MongoClient\nfrom workdays_calendar.days_calendar import gen_year\n\ndb=None\n\ndef setup(settings):\n global db\n db=MongoClient(settings['mongo.uri'])[settings['mongo.db']]\n\ndef usage(argv):\n cmd = os.path.basename(argv[0])\n print('usage: %s <year> <config_uri>\\n'\n '(example: \"%s 2017 development.ini\")' % (cmd, cmd))\n sys.exit(1)\n\ndef run(settings,year):\n gen_year(db,year)\n \ndef main():\n if 3!=len(sys.argv):\n usage(sys.argv)\n env = bootstrap(sys.argv[2])\n settings=env['registry'].settings\n setup(settings)\n\n year=int(sys.argv[1])\n run(settings,year)\n\nif __name__=='__main__':\n main()\n"
},
{
"alpha_fraction": 0.5814778804779053,
"alphanum_fraction": 0.5876089334487915,
"avg_line_length": 30.948453903198242,
"blob_id": "afb08ef5a7e977827bdb265f4dd45f0016e71f4d",
"content_id": "bd6785560b4e4596634cb1c46aa4ba75a5f224ce",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3099,
"license_type": "no_license",
"max_line_length": 116,
"num_lines": 97,
"path": "/workdays_calendar/api.py",
"repo_name": "aleksandr-rakov/workdays_calendar",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf8 -*-\nimport colander\nfrom pyramid.security import NO_PERMISSION_REQUIRED\nfrom pyramid.view import view_config\nfrom pyramid.decorator import reify\nfrom pyramid.i18n import get_localizer\n\ndef translate(request):\n def translator(msg):\n if msg==\"\":\n return \"\"\n elif hasattr(msg, 'interpolate'):\n return get_localizer(request).translate(msg)\n else:\n return msg\n return translator\n\nAPI_VERSION='1'\nAPI_PREFIX='api/v%s/'%API_VERSION\n\nclass view(view_config):\n\n def __call__(self, wrapped):\n settings = self.__dict__.copy()\n depth = settings.pop('_depth', 0)\n\n def callback(context, name, ob):\n config = context.config.with_package(info.module)\n \n # ===========our part start============\n route_name=\"%s_%s\"%(name,wrapped.__name__)\n view_keys={\n 'permission':'permission',\n }\n view_settings=dict((key2,settings[key1]) for key1,key2 in view_keys.items() if key1 in settings)\n if config.registry.settings.get('check_xsrf')=='true':\n view_settings['check_xsrf']=settings.get('check_xsrf',True)\n\n route_keys={\n 'method': 'request_method'\n }\n route_settings=dict((key2,settings[key1]) for key1,key2 in route_keys.items() if key1 in settings)\n route_settings['pattern']=\"%s%s\"%(API_PREFIX,settings['path'])\n\n config.add_route(route_name, **route_settings)\n config.add_view(view=ob, attr=wrapped.__name__, route_name=route_name, renderer='json', **view_settings)\n # ===========our part end==============\n\n info = self.venusian.attach(wrapped, callback, category='pyramid',\n depth=depth + 1)\n\n if info.scope == 'class':\n if settings.get('attr') is None:\n settings['attr'] = wrapped.__name__\n\n settings['_info'] = info.codeinfo # fbo \"action_method\"\n return wrapped\n\nclass BaseViews(object):\n def __init__(self, context, request):\n self.context = context\n self.request = request\n self.db=request.db\n self.initialize()\n \n def initialize(self):\n pass\n\n @reify\n def modifers(self):\n return self.request.GET\n\n @reify\n def params(self):\n return self.request.matchdict\n\n @reify\n def data(self):\n return self.request.body and self.request.json_body or {}\n\n def validate(self,schema,data):\n try:\n return schema.deserialize(data)\n except colander.Invalid as e:\n raise ValidationFailure(e.asdict(translate(self.request)))\n \n def validated_data(self,schema):\n return self.validate(schema,self.data)\n \nclass ValidationFailure(Exception):\n def __init__(self, data):\n self.data = data\n\n@view_config(context=ValidationFailure, renderer='json', permission=NO_PERMISSION_REQUIRED)\ndef failed_validation(exc, request):\n request.response.status_int = 422\n return {'errors': exc.data}\n"
},
{
"alpha_fraction": 0.5157013535499573,
"alphanum_fraction": 0.517096996307373,
"avg_line_length": 25.537036895751953,
"blob_id": "e063bd988fbf722bc4677fc643b1613a110ff13b",
"content_id": "81dc772fd479372870a65014f2d90c04bf03ef4d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 1433,
"license_type": "no_license",
"max_line_length": 64,
"num_lines": 54,
"path": "/src/auth.js",
"repo_name": "aleksandr-rakov/workdays_calendar",
"src_encoding": "UTF-8",
"text": "import router from './router.js'\nimport store from './store'\nimport axios from 'axios'\n\nexport default {\n // authentication status\n authenticated: false,\n profile: null,\n checked: false,\n\n // Send a request to the login URL and save the returned JWT\n login(context, creds, redirect) {\n return axios.post('/api/v1/login', creds)\n .then(\n (response) => {\n const message=response.data.message;\n\n if (!message){\n this.authenticated = true\n localStorage.setItem('user',response.data.token)\n }else{\n context.errors=message;\n }\n // Redirect to a specified route\n if (redirect) {\n router.push(redirect)\n }\n },\n (errors) => {\n context.errors = errors;\n })\n },\n get_profile() {\n return axios.get('/api/v1/profile')\n .then(\n (response) => {\n this.profile=response.data;\n this.authenticated=true;\n this.checked=true;\n store.commit('set_profile',response.data)\n },\n (errors) => {\n })\n },\n // To log out\n logout: function() {\n this.authenticated = false;\n this.profile=null;\n this.checked=false;\n localStorage.removeItem('user')\n store.commit('set_profile',null)\n router.push('/login')\n }\n}\n"
},
{
"alpha_fraction": 0.5930232405662537,
"alphanum_fraction": 0.5930232405662537,
"avg_line_length": 13.25,
"blob_id": "cbddd99880f3b29afb34db8b5a06d38e7efd05a7",
"content_id": "c83da9dbe318ee4e030ebfec56915d69adb35bb7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 172,
"license_type": "no_license",
"max_line_length": 31,
"num_lines": 12,
"path": "/src/modules/auth.js",
"repo_name": "aleksandr-rakov/workdays_calendar",
"src_encoding": "UTF-8",
"text": "\nconst auth_store = {\n state: {\n profile: null,\n },\n mutations: {\n set_profile(state,profile){\n state.profile=profile;\n }\n }\n}\n\nexport default auth_store\n"
},
{
"alpha_fraction": 0.6387832760810852,
"alphanum_fraction": 0.644866943359375,
"avg_line_length": 25.836734771728516,
"blob_id": "d98efa2746faba12f21d267cfe81be8c21215a1b",
"content_id": "8c0984fd25e90fd56f2b00b8ceb8ce78b5452d0b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1315,
"license_type": "no_license",
"max_line_length": 82,
"num_lines": 49,
"path": "/workdays_calendar/app_api.py",
"repo_name": "aleksandr-rakov/workdays_calendar",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf8 -*-\nfrom workdays_calendar.collection_names import CALENDAR_COLLECTION,TAGS_COLLECTION\nfrom workdays_calendar.days_calendar import get_day_int\nfrom workdays_calendar.tags import HOLIDAY_TAG\nimport datetime\n\ndef get_tag(db,tag_name):\n return db[TAGS_COLLECTION].find_one({'name': tag_name})\n\ndef day_has_tag(db,day,tag):\n if not tag:\n return False\n\n day_int=get_day_int(day)\n stored_day=db[CALENDAR_COLLECTION].find_one({'day_int': day_int})\n if stored_day:\n return str(tag['_id']) in stored_day['tags']\n return False\n\ndef is_holiday_today(db):\n holiday_tag=get_tag(db,HOLIDAY_TAG)\n return day_has_tag(db,datetime.date.today(),holiday_tag)\n\ndef get_workdays_interval(db,start,num_days):\n holiday_tag=get_tag(db,HOLIDAY_TAG)\n\n holidays_used=False\n total_days=0\n\n while day_has_tag(db,start,holiday_tag):\n start+=datetime.timedelta(days=1)\n holidays_used=True\n total_days+=1\n\n day=start\n drive_days=num_days\n while drive_days>0:\n day+=datetime.timedelta(days=1)\n total_days+=1\n if day_has_tag(db,day,holiday_tag):\n holidays_used=True\n else:\n drive_days-=1\n\n return {\n 'end': day,\n 'total_days': total_days,\n 'holidays_used': holidays_used\n }\n"
},
{
"alpha_fraction": 0.6545661091804504,
"alphanum_fraction": 0.6585366129875183,
"avg_line_length": 27.370967864990234,
"blob_id": "d9d6e31bfa0255ec0398fe90539058c92899efaa",
"content_id": "af4afd02dc957357ab817e9bc6343018c75a8416",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1787,
"license_type": "no_license",
"max_line_length": 127,
"num_lines": 62,
"path": "/workdays_calendar/auth.py",
"repo_name": "aleksandr-rakov/workdays_calendar",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf8 -*-\nfrom pyramid.authorization import ACLAuthorizationPolicy\nfrom pyramid.security import Allow\nimport hashlib\nfrom bson import ObjectId\n\nLOGGED_IN_PERMISSION='admin'\n\nclass Hasher:\n #работа с подсолеными хешами\n @classmethod\n def salt(cls):\n return unicode(ObjectId())\n @classmethod\n def generate(cls,pw,salt=None):\n if salt==None:\n salt=ObjectId()\n return unicode(salt).encode('utf-8')+hashlib.md5(unicode(salt).encode('utf-8')+unicode(pw).encode('utf-8')).hexdigest()\n @classmethod\n def check(cls,pw_with_salt,pw):\n salt=pw_with_salt[:-32]\n return pw_with_salt==cls.generate(pw,salt)\n\ndef add_role_principals(userid, request):\n return ['role:%s'%LOGGED_IN_PERMISSION]\n\nclass RootFactory(object):\n def __init__(self, request):\n pass\n __acl__ = [\n (Allow, 'role:%s'%LOGGED_IN_PERMISSION, LOGGED_IN_PERMISSION),\n ]\n\ndef userid(request):\n try:\n return request.authenticated_userid\n except:\n return None\n\ndef has_perm(request):\n def has_perm(perm,context=None):\n if context is None:\n return request.has_permission(perm, request.context)\n else:\n return request.has_permission(perm, context)\n return has_perm\n\ndef includeme(config):\n\n config.set_root_factory(RootFactory)\n \n config.set_authorization_policy(ACLAuthorizationPolicy())\n config.include('pyramid_jwt')\n config.set_jwt_authentication_policy(\n 'secret',\n http_header='X-Token',\n callback=add_role_principals\n )\n config.set_default_permission(LOGGED_IN_PERMISSION)\n\n config.add_request_method(userid,'userid', True, True)\n config.add_request_method(has_perm,'has_perm', True, True)\n "
},
{
"alpha_fraction": 0.7461538314819336,
"alphanum_fraction": 0.7538461685180664,
"avg_line_length": 25,
"blob_id": "ef91817b941aabb311f8d616ef1aea6097cbb2bd",
"content_id": "ce6c83ec72a5fc866d2c341c4bf1a20f7dbdd21e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 130,
"license_type": "no_license",
"max_line_length": 39,
"num_lines": 5,
"path": "/workdays_calendar/collection_names.py",
"repo_name": "aleksandr-rakov/workdays_calendar",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf8 -*-\n\nCALENDAR_COLLECTION='workdays_calendar'\nTAGS_COLLECTION='workdays_tags'\nUSERS_COLLECTION='workdays_users'\n"
},
{
"alpha_fraction": 0.4871309697628021,
"alphanum_fraction": 0.5049205422401428,
"avg_line_length": 28.685392379760742,
"blob_id": "3eb4ba48e84a1546d176a51c37efbd58d81df240",
"content_id": "9b183b03775cd07cd8f9343ad7daec9cb3a547d4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2642,
"license_type": "no_license",
"max_line_length": 82,
"num_lines": 89,
"path": "/workdays_calendar/days_calendar.py",
"repo_name": "aleksandr-rakov/workdays_calendar",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf8 -*-\nimport workdays_calendar.api as api\nimport calendar\nimport colander\nimport datetime\nfrom workdays_calendar.collection_names import CALENDAR_COLLECTION,TAGS_COLLECTION\n\n\ndef init_db(db,settings):\n gen_year(db,datetime.datetime.now().year)\n\ndef chunkify(lst,n):\n return [ lst[i*n:i*n+n] for i in xrange(len(lst)/n) ]\n\ndef get_day_int(date):\n return date.year*10000+date.month*100+date.day\n\ndef gen_year(db,year):\n c=calendar.Calendar(0)\n\n holiday_tag=db[TAGS_COLLECTION].find_one({'name':'holiday'})\n\n if holiday_tag is None:\n print \"Tag not found\"\n return\n\n for m in xrange(1,13):\n for d,wd in c.itermonthdays2(year,m):\n if d and wd>=5:\n day_int=year*10000+m*100+d\n\n old=db[CALENDAR_COLLECTION].find_one({'day_int': day_int})\n if not old:\n db[CALENDAR_COLLECTION].insert({\n 'day_int': day_int,\n 'tags': [str(holiday_tag['_id'])],\n })\n\nclass StringList(colander.SequenceSchema):\n items = colander.SchemaNode(\n colander.String()\n )\n\nclass dayUpdataSchema(colander.Schema):\n tags = StringList()\n\nclass CalendarViews(api.BaseViews):\n\n @api.view(path='calendar/{year}', method='GET')\n def view_calendar(self):\n\n c=calendar.Calendar(0)\n year=int(self.params['year'])\n result=[]\n\n days=self.db[CALENDAR_COLLECTION].find({'$and':[\n {'day_int': {'$gte':year*10000}},\n {'day_int': {'$lte':(year+1)*10000}}\n ]})\n days=dict((x['day_int'],x.get('tags',[])) for x in days)\n for m in xrange(1,13):\n month=[]\n for d in c.itermonthdays(year,m):\n day_int=year*10000+m*100+d\n month.append({\n 'day': d,\n 'day_int': day_int,\n 'tags': days.get(day_int,[])\n })\n weeks=chunkify(month,7)\n result.append({\n 'month': m,\n 'weeks': weeks\n })\n return result\n\n @api.view(path='calendar/day/{day_int}', method='POST')\n def view_day_change(self):\n schema=dayUpdataSchema()\n data=self.validated_data(schema)\n day_int=int(self.params['day_int'])\n self.db[CALENDAR_COLLECTION].update(\n {'day_int': day_int},\n {'$set': {\n 'tags': data['tags'],\n 'manual': True\n }},\n upsert=True\n )\n"
},
{
"alpha_fraction": 0.5706899762153625,
"alphanum_fraction": 0.5724796056747437,
"avg_line_length": 29.113773345947266,
"blob_id": "27f0f7e50c8e2b64a36a1c220afc10d6b678d09b",
"content_id": "2c782b46e2918dc673d861490baabfde0fd7f995",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5233,
"license_type": "no_license",
"max_line_length": 90,
"num_lines": 167,
"path": "/workdays_calendar/users.py",
"repo_name": "aleksandr-rakov/workdays_calendar",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf8 -*-\nimport workdays_calendar.api as api\nimport colander\nfrom pyramid.httpexceptions import HTTPNotFound,HTTPForbidden\nfrom bson import ObjectId\nfrom time import sleep\nfrom workdays_calendar.auth import Hasher\nfrom pyramid.security import NO_PERMISSION_REQUIRED\nfrom workdays_calendar.collection_names import USERS_COLLECTION\n\n\ndef init_db(db,settings):\n if db[USERS_COLLECTION].find_one() is None:\n new_user={\n 'name': 'admin',\n 'login': 'admin',\n 'disabled': False,\n 'password': Hasher.generate('admin')\n }\n print \"Creating new user admin with passord admin\"\n db[USERS_COLLECTION].insert(new_user)\n\ndef authenticate(request,login,password):\n userid=None\n message=''\n if request.userid:\n message='Already logged in'\n if login and password:\n user=request.db[USERS_COLLECTION].find_one({'login':login})\n if user and Hasher.check(user['password'],password):\n if user['disabled']:\n message = u'Аккаунт заблокирован'\n else:\n userid=str(user['_id'])\n message=''\n else:\n message = u'Ошибка! Проверьте правильнось ввода логина и пароля'\n sleep(1) #затрудним перебор пароля\n else:\n message=u'Введите логин и пароль'\n\n return userid,message\n\nclass updatePasswordSchema(colander.Schema):\n password = colander.SchemaNode(\n colander.String(),\n validator=colander.Length(min=5)\n )\n\[email protected]\ndef login_validator(node,kw):\n db=kw['db']\n userid=kw['userid']\n def validator(form, value):\n colander.Length(max=50)(form, value)\n \"\"\"Проверяем не занят ли логин\"\"\"\n if db[USERS_COLLECTION].find_one({'login':value,'_id':{'$ne':userid}}):\n raise colander.Invalid(\n form, \n u'Этот логин уже зарегистрирован'\n )\n return validator\n\nclass updateUserSchema(colander.Schema):\n name = colander.SchemaNode(\n colander.String(),\n )\n login = colander.SchemaNode(\n colander.String(),\n validator=login_validator,\n )\n disabled = colander.SchemaNode(\n colander.Bool(),\n )\n\nclass createUserSchema(updateUserSchema,updatePasswordSchema):\n pass\n\nclass UsersViews(api.BaseViews):\n\n @api.view(path='login', method='POST',permission=NO_PERMISSION_REQUIRED)\n def login(self):\n login = self.data['login']\n password = self.data['password']\n message=''\n\n user_id,message = authenticate(self.request, login, password)\n if user_id:\n return {\n 'token': self.request.create_jwt_token(user_id)\n }\n if message:\n self.request.response.status=401\n return {\n 'message': message\n }\n\n @api.view(path='profile', method='GET')\n def profile(self):\n userid=self.request.userid\n user=self.db[USERS_COLLECTION].find_one({'_id':ObjectId(userid)})\n if user is None:\n raise HTTPForbidden()\n return {\n 'userid': userid,\n 'name': user['name']\n }\n\n @api.view(path='users', method='GET')\n def view_users(self):\n\n result=list(self.db[USERS_COLLECTION].find({},{'password':0}).sort('name'))\n \n return result\n\n @api.view(path='users/{user_id}', method='GET')\n def view_user(self):\n user=self.db[USERS_COLLECTION].find_one({'_id': ObjectId(self.params['user_id'])})\n user['password']=''\n if user is None:\n raise HTTPNotFound()\n return user\n\n @api.view(path='users', method='PUT')\n def view_user_create(self):\n schema=createUserSchema().bind(\n db=self.db,\n userid=None\n )\n data=self.validated_data(schema)\n data['password']=Hasher.generate(data['password'])\n self.db[USERS_COLLECTION].insert(\n data\n )\n return {\n 'message': u'Пользователь создан'\n }\n\n @api.view(path='users/{user_id}', method='POST')\n def view_user_update(self):\n 
userid=ObjectId(self.params['user_id'])\n schema=updateUserSchema().bind(\n db=self.db,\n userid=userid\n )\n data=self.validated_data(schema)\n self.db[USERS_COLLECTION].update(\n {'_id': userid},\n {'$set': data}\n )\n return {\n 'message': u'Пользователь изменен'\n }\n\n @api.view(path='users/{user_id}/change_password', method='POST')\n def view_user_change_pasword(self):\n userid=ObjectId(self.params['user_id'])\n schema=updatePasswordSchema()\n data=self.validated_data(schema)\n data['password']=Hasher.generate(data['password'])\n self.db[USERS_COLLECTION].update(\n {'_id': userid},\n {'$set': data}\n )\n return {\n 'message': u'Пароль изменен'\n }\n"
},
{
"alpha_fraction": 0.5206971764564514,
"alphanum_fraction": 0.5220043659210205,
"avg_line_length": 20.448598861694336,
"blob_id": "cd9816e44a699c1a963b2b3555566d44d4c8f13d",
"content_id": "1a835bea1f48329d37aa5672afe3fda3a3906318",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 2385,
"license_type": "no_license",
"max_line_length": 68,
"num_lines": 107,
"path": "/src/router.js",
"repo_name": "aleksandr-rakov/workdays_calendar",
"src_encoding": "UTF-8",
"text": "import Vue from 'vue'\nimport Router from 'vue-router'\nimport home from './components/home'\nimport users from './components/users'\nimport user from './components/user'\nimport calendar from './components/calendar'\nimport tags from './components/tags'\nimport login from './components/login'\nimport notfound from './components/notfound'\nimport auth from './auth'\n\nVue.use(Router)\n\n// application routes\nconst routes = [\n { \n path: '/',\n component: home\n },\n { path: '/login',\n component: login\n },\n {\n path: '/users/',\n component: users,\n meta: { auth: true },\n },\n {\n path: '/users/add',\n component: user,\n meta: { auth: true },\n },\n {\n path: '/users/:id',\n component: user,\n meta: { auth: true },\n },\n {\n path: '/tags',\n component: tags,\n meta: { auth: true },\n },\n { \n path: '/calendar', \n redirect: to => { \n // в функцию в качестве аргумента передаётся путь\n // возвращаемым значением должна быть строка или объект пути\n return '/calendar/'+(new Date()).getFullYear()\n }\n },\n {\n path: '/calendar/:year',\n name: 'calendar',\n component: calendar,\n meta: { auth: true },\n },\n {\n path: '*',\n name: 'notfound',\n component: notfound\n }\n]\n\nconst router=new Router({\n mode: 'history',\n routes,\n linkActiveClass: 'is-active'\n})\n\nrouter.beforeEach((to, from, next) => {\n if (to.meta.auth && !auth.profile) {\n auth.get_profile().then( ( ) => {\n if(!auth.authenticated)\n next({ path: '/login' })\n else\n next()\n },()=>{\n next({ path: '/login' })\n })\n } else {\n if(!auth.checked){\n auth.get_profile()\n }\n next()\n }\n})\n\n// document.addEventListener('click', function(e){\n// console.log(e)\n// var found=false;\n// for(let i=0;i<e.path.length;i++){\n// if(e.path[i].attributes['route-reload']){\n// found=e.path[i].href;\n// break;\n// }\n// }\n// console.log(found)\n// if(found){\n// if(location.href==found){\n// console.log(11)\n// router.push(location.pathname+'?ddd')\n// }\n// }\n// });\n\n// export router instance\nexport default router;\n"
},
{
"alpha_fraction": 0.7230246663093567,
"alphanum_fraction": 0.7247238755226135,
"avg_line_length": 30.810810089111328,
"blob_id": "813dd55a9efeec1135ba389f1f7709bbfad1d806",
"content_id": "20bf96af3526bd52e050ba2cfbaf0537260a0e29",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1224,
"license_type": "no_license",
"max_line_length": 69,
"num_lines": 37,
"path": "/workdays_calendar/__init__.py",
"repo_name": "aleksandr-rakov/workdays_calendar",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf8 -*-\nfrom pyramid.config import Configurator\nfrom bson.objectid import ObjectId\nimport datetime\nfrom pyramid.renderers import JSON\nfrom pyramid.events import NewRequest\nfrom pymongo import MongoClient\n\ndef add_request_properties(event):\n \"\"\"Сделаем некоторые параметры конфигурации атрибутами request\"\"\"\n reg = event.request.registry\n event.request.db=reg.db\n\ndef main(global_config, **settings):\n \"\"\" This function returns a Pyramid WSGI application.\n \"\"\"\n config = Configurator(settings=settings)\n\n mongo_conn = MongoClient(settings['mongo.uri'])\n mongo_db=mongo_conn[settings['mongo.db']]\n config.registry.db=mongo_db\n config.add_subscriber(add_request_properties,NewRequest)\n\n def datetime_adapter(obj, request):\n return obj.isoformat()+'Z'\n def objectid_adapter(obj, request):\n return str(obj)\n\n renderer = JSON(ensure_ascii=False,indent=4)\n renderer.add_adapter(datetime.datetime, datetime_adapter)\n renderer.add_adapter(ObjectId, objectid_adapter)\n config.add_renderer('json', renderer)\n\n config.include('workdays_calendar.auth')\n config.scan()\n \n return config.make_wsgi_app()\n"
}
] | 13 |
systemetric/pacbot-v2 | https://github.com/systemetric/pacbot-v2 | 112e1a59fd4f4d3b12c10b61284c27082111b574 | 2cd28d9d150b016a55926b4eb469ac401c5492af | a4c4b6aca234d334ebbc93e9410da020537c18a1 | refs/heads/master | 2020-03-07T11:30:32.148698 | 2018-03-30T18:04:09 | 2018-03-30T18:04:09 | null | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.48261821269989014,
"alphanum_fraction": 0.5211399793624878,
"avg_line_length": 28.841121673583984,
"blob_id": "28ac207331e1791bdac07ec7db7ab83e1b5db01b",
"content_id": "055dc515c9d2a8e0526a222b8d41c4b967ec9efc",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3193,
"license_type": "no_license",
"max_line_length": 118,
"num_lines": 107,
"path": "/ArmControl.py",
"repo_name": "systemetric/pacbot-v2",
"src_encoding": "UTF-8",
"text": "import smbus\n\n\nclass RobotArm(object):\n def __init__(self):\n bus = smbus.SMBus(1)\n self.gpio = BlackJackBoardGPIO(bus)\n self.servos = BlackJackBoardPWM(bus)\n gpio.pin_mode(1, OUTPUT)\n servos.setitem(1,0)\n\n def set(self, pos):\n servos.setitem(1, pos)\n\n def suck(self, state):\n gpio.digital_write(1, state)\n\n\nclass BlackJackBoardPWM(object):\n def __init__(self, bus):\n self._bus = bus\n self._pwm_pin_map = {\n 1: 3,\n 2: 1,\n 3: 2,\n 4: 4\n }\n\n def getitem(self, key):\n if key < 0 or key > 3:\n raise IndexError(\"PWM index must be between 0 and 3\")\n key = self._pwm_pin_map[key + 1] - 1\n command = (2 * key) + 1\n\n value = self._bus.read_byte_data(B_I2C_ADR, command) + (self._bus.read_byte_data(B_I2C_ADR, command + 1) << 7)\n return (value - B_PWM_OFFSET) * 100.0 / B_PWM_RANGE\n\n def setitem(self, key, percent):\n if key < 0 or key > 3:\n raise IndexError(\"PWM index must be between 0 and 3\")\n key = self._pwm_pin_map[key + 1] - 1\n command = (2 * key) + 1\n\n value = int((percent / 100.0) * B_PWM_RANGE) + B_PWM_OFFSET\n\n # high = (value & 0b1111111000) >> 3\n # low = value & 0b0000000111\n\n high = value >> 7\n low = value & 0x7F\n\n # print\n # print value\n # print \"H:\", bin(high)\n # print \"L:\", bin(low)\n # print \"Setting Servo\", key + 1, \"to\", percent, \"% [ PWM:\", value, \"]\"\n\n self._bus.write_byte_data(B_I2C_ADR, command, low)\n self._bus.write_byte_data(B_I2C_ADR, command + 1, high)\n\n\nclass BlackJackBoardGPIO(object):\n def __init__(self, bus):\n self._bus = bus\n self._pin_map = {\n 4: 1,\n 3: 2,\n 2: 4,\n 1: 3,\n }\n\n def pin_mode(self, pin, mode):\n pin = self._pin_map[pin]\n if pin == 2 and mode == INPUT_ANALOG:\n raise IndexError(\"Pin 3 is NOT an ANALOG input! Use something else!\")\n\n data = 0b000\n\n if mode == INPUT:\n data = 0b001\n if mode == INPUT_PULLUP:\n data = 0b101\n if mode == INPUT_ANALOG:\n data = 0b011\n\n self._bus.write_byte_data(B_I2C_ADR, B_I2C_GPIO_CONTROL_START + pin - 1, data)\n\n def digital_read(self, pin):\n pin = self._pin_map[pin]\n return bool(self._bus.read_byte_data(B_I2C_ADR, B_I2C_GPIO_START + pin - 1))\n\n def analog_read(self, pin):\n pin = self._pin_map[pin]\n if pin == 2:\n raise IndexError(\"Pin 3 is NOT an ANALOG input! Use something else!\")\n\n # command = B_I2C_GPIO_ANALOG_START_L + (2 * (pin - 1))\n command = B_I2C_GPIO_ANALOG_START_L\n if pin == 3:\n command = B_I2C_GPIO_ANALOG_START_L + 2\n if pin == 4:\n command = B_I2C_GPIO_ANALOG_START_L + 4\n return self._bus.read_byte_data(B_I2C_ADR, command) + (self._bus.read_byte_data(B_I2C_ADR, command + 1) << 7)\n\n def digital_write(self, pin, data):\n pin = self._pin_map[pin]\n self._bus.write_byte_data(B_I2C_ADR, B_I2C_GPIO_START + pin - 1, int(data))\n"
},
{
"alpha_fraction": 0.5732064247131348,
"alphanum_fraction": 0.6156661510467529,
"avg_line_length": 21.032258987426758,
"blob_id": "2df50633d43aeb7ffae72e9ab728607c6aaba476",
"content_id": "512b85fb65a9e9c6c17a81c58c40324d0382b6eb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1366,
"license_type": "no_license",
"max_line_length": 82,
"num_lines": 62,
"path": "/gamepad.py",
"repo_name": "systemetric/pacbot-v2",
"src_encoding": "UTF-8",
"text": "\"\"\"\nMostly taken from http://yameb.blogspot.co.uk/2013/01/gamepad-input-in-python.html\n\"\"\"\n\nimport pygame\n\npygame.init()\nj = pygame.joystick.Joystick(0)\nj.init()\nprint 'Initialized Joystick : %s' % j.get_name()\n\n# Input mapping for the cheap Xbox 360 knockoff controller used for testing\n# Each const corresponds to the relevant index in the array returned by get()\nLTHUMBSTICK_X = 0\nLTHUMBSTICK_Y = 1\nLTRIGGER = 2\nRTHUMBSTICK_X = 3\nRTHUMBSTICK_Y = 4\nRTRIGGER = 5\nBUTTON_A = 6\nBUTTON_B = 7\nBUTTON_X = 8\nBUTTON_Y = 9\nLBUMPER = 10\nRBUMPER = 11\nBUTTON_SELECT = 12\nBUTTON_START = 13\nBUTTON_HOME = 14\nLTHUMBSTICK_PRESS = 15\nRTHUMBSTICK_PRESS = 16\n\n\ndef get():\n out = [0, 0, -1, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]\n it = 0 # iterator\n pygame.event.pump()\n\n # Read input from the two joysticks\n for i in range(0, j.get_numaxes()):\n out[it] = j.get_axis(i)\n it += 1\n # Read input from buttons\n for i in range(0, j.get_numbuttons()):\n out[it] = j.get_button(i)\n it += 1\n return out\n # Read input from the D-Pad - currently broken\n # for i in range(0, j.get_hatsnum()):\n # out[it] = j.get_hat(i)\n # it += 1\n\n\ndef test():\n while True:\n print get()\n\n\ndef test_tank():\n while True:\n input_raw = get()\n print input_raw[LTHUMBSTICK_Y]\n print input_raw[RTHUMBSTICK_Y]\n"
},
{
"alpha_fraction": 0.5026766657829285,
"alphanum_fraction": 0.533725917339325,
"avg_line_length": 27.738462448120117,
"blob_id": "05c534287428d1b69619f890ad7b1b0cab9a3723",
"content_id": "669bfb83bf28fea84ae8b457e5ac45ceeb331a49",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3736,
"license_type": "no_license",
"max_line_length": 82,
"num_lines": 130,
"path": "/controlservergamepadpacbot.py",
"repo_name": "systemetric/pacbot-v2",
"src_encoding": "UTF-8",
"text": "import socket\nimport json\nimport threading\nimport time\nimport RPi.GPIO as GPIO\n\nAN2 = 13\t\t\t\t# set pwm2 pin on MD10-Hat\nAN1 = 12\t\t\t\t# set pwm1 pin on MD10-hat\nDIG2 = 24\t\t\t\t# set dir2 pin on MD10-Hat\nDIG1 = 26\t\t\t\t# set dir1 pin on MD10-Hat)\n\n\nclass CytronBoard(object):\n def __init__(self):\n\n GPIO.setmode(GPIO.BCM) # GPIO numbering\n GPIO.setwarnings(False) # enable warning from GPIO\n GPIO.setup(AN2, GPIO.OUT) # set pin as output\n GPIO.setup(AN1, GPIO.OUT) # set pin as output\n GPIO.setup(DIG2, GPIO.OUT) # set pin as output\n GPIO.setup(DIG1, GPIO.OUT) # set pin as output\n\n time.sleep(1) # delay for 1 seconds\n\n self.p1 = GPIO.PWM(AN1, 100) # set pwm for M1\n self.p2 = GPIO.PWM(AN2, 100) # set pwm for M2\n\n def m1(self, speed):\n if speed <= 0:\n GPIO.output(DIG1, GPIO.LOW) # set DIG1 as LOW, to control direction\n speed = -speed\n else:\n GPIO.output(DIG1, GPIO.HIGH) # set DIG1 as HIGH, to control direction\n if speed > 100:\n speed = 100 # make sure we dont over do it!\n self.p1.start(speed)\n\n def m2(self, speed):\n if speed <= 0:\n GPIO.output(DIG2, GPIO.LOW) # set DIG2 as LOW, to control direction\n speed = -speed\n else:\n GPIO.output(DIG2, GPIO.HIGH) # set DIG2 as HIGH, to control direction\n if speed > 100:\n speed = 100 # make sure we dont over do it!\n self.p2.start(speed)\n\n\nCB = CytronBoard()\narm = RobotArm()\n\nhost = \"\"\nport = 4096\n\ns = socket.socket()\ns.bind((host, port))\n\nmotor_power = [0, 0]\n\nleft_thumbstick_y = 0\nleft_thumbstick_x = 0\nleft_bumper = 0\nright_bumper = 0\nbutton_a = 0\n\n\ndef control_motor():\n while True:\n CB.m1(int(left_thumbstick_y * 50 + left_thumbstick_x * 35)) # left\n CB.m2(int(left_thumbstick_y * 50 - left_thumbstick_x * 35)) # right\n\n # -- Theoretical stuff, if you get the other bits working on PacBot --\n\n # if left_bumper == 1 and right_bumper == 0:\n # arm.set(up)\n # elif right_bumper == 0 and right_bumper == 1:\n # arm.set(down)\n # if button_a == 1:\n # arm.suck(on)\n # elif button_a == 0:\n # arm.suck(off)\n\n print \"Thumb at {0}\".format(int(left_thumbstick_y))\n print \"Motors at {0}\".format(int(-left_thumbstick_y * 100))\n\n time.sleep(0.1)\n\n\nwhile True:\n try:\n motor_control_thread = threading.Thread(target=control_motor)\n motor_control_thread.start()\n\n while True:\n print \"Listening for connection...\"\n s.listen(1) # 1 connection at a time\n\n conn, addr = s.accept()\n\n print \"Connection from \" + addr[0] + \":\" + str(addr[1]) + \".\"\n\n while True:\n try:\n data = conn.recv(1024)\n\n if not data: # connection closed\n break\n\n json_data = str(data)\n print \"Data is as follows: {0} :Data end\".format(json_data)\n data = json.loads(json_data)\n\n left_thumbstick_y = data[\"LThumbstick_Y\"]\n left_thumbstick_x = data[\"LThumbstick_X\"]\n left_bumper = data[\"LBumper\"]\n right_bumper = data[\"RBumper\"]\n button_a = data[\"Button_A\"]\n\n print data\n except socket.error, msg:\n print \"Socket Error: \" + msg[1]\n break\n\n except: # it hurts me to do this\n pass\n\n finally: # kill the connection, and stop PacBot going haywire\n conn.close()\n CB.m1(0)\n CB.m2(0)\n"
},
{
"alpha_fraction": 0.6277145743370056,
"alphanum_fraction": 0.6421923637390137,
"avg_line_length": 22.0238094329834,
"blob_id": "31ff8a2c41cf358ba935f4c4883346ab85def9f0",
"content_id": "bf9366e7fba686ec26cce4be11fd201a6a21d0b9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 967,
"license_type": "no_license",
"max_line_length": 81,
"num_lines": 42,
"path": "/controlgamepadpacbot.py",
"repo_name": "systemetric/pacbot-v2",
"src_encoding": "UTF-8",
"text": "import socket\nimport io\nfrom urllib2 import urlopen\nimport threading\nimport json\nimport time\nimport gamepad # make sure gamepad.py is in the same folder as this\n\nrandom.seed()\n\nhost = \"robot.sr\" # 127.0.0.1\nport = 4096\ns = socket.socket()\n\nwhile True:\n try:\n s.connect((host, port))\n break\n except socket.error, msg:\n print \"Socket Error: \" + msg[1]\n host = raw_input(\"Enter address of control server (probably robot.sr): \")\n\n\ndef update():\n send_gamepad_input()\n print \"Sending data... {}\".format(gamepad.LTHUMBSTICK_Y)\n time.sleep(0.1)\n\n\ndef send_gamepad_input():\n input_vals = gamepad.get()\n s.send(json.dumps({\n \"LThumbstick_Y\": input_vals[gamepad.LTHUMBSTICK_Y],\n \"LThumbstick_X\": input_vals[gamepad.LTHUMBSTICK_X],\n \"Button_A\": input_vals[gamepad.BUTTON_A],\n \"LBumper\": input_vals[gamepad.LBUMPER],\n \"RBumper\": input_vals[gamepad.RBUMPER],\n }))\n\n\nwhile True:\n update()\n"
}
] | 4 |
mbijou/car-repair-shop-backend | https://github.com/mbijou/car-repair-shop-backend | 660d40c61be0661629bb3373b34077c8222ef550 | f386cb69921c3e5f4e1bbbd798a9ce6705b0c91f | e75333a529492473ebf97176b1261a184cc067c8 | refs/heads/master | 2023-01-05T06:44:00.172074 | 2020-10-26T22:23:40 | 2020-10-26T22:23:40 | 306,421,151 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6577438116073608,
"alphanum_fraction": 0.6577438116073608,
"avg_line_length": 25.149999618530273,
"blob_id": "920f9d769071210251daa1e47cc323d7e32b67d7",
"content_id": "9a37507c54b5824dd4709f53d9dab561b104c9d0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 523,
"license_type": "no_license",
"max_line_length": 53,
"num_lines": 20,
"path": "/company/serializers/__init__.py",
"repo_name": "mbijou/car-repair-shop-backend",
"src_encoding": "UTF-8",
"text": "from rest_framework import serializers\nfrom company.models import Company\nfrom django.db.transaction import atomic\n\n\nclass CompanySerializer(serializers.ModelSerializer):\n class Meta:\n model = Company\n fields = (\"name\",)\n\n def save(self, **kwargs):\n Company.save_company()\n return super().save(**kwargs)\n\n @atomic\n def create(self, validated_data):\n company = Company.get_company()\n company.__dict__.update(validated_data)\n company.save()\n return company\n"
},
{
"alpha_fraction": 0.7365728616714478,
"alphanum_fraction": 0.7365728616714478,
"avg_line_length": 34.54545593261719,
"blob_id": "4a14e51619b1b9633046c8215d50f41ddfb5512f",
"content_id": "656457e63dc94cb5c3dcf7a467e19492e4a96380",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 782,
"license_type": "no_license",
"max_line_length": 64,
"num_lines": 22,
"path": "/company/viewsets/__init__.py",
"repo_name": "mbijou/car-repair-shop-backend",
"src_encoding": "UTF-8",
"text": "from rest_framework import views\nfrom rest_framework.mixins import ListModelMixin\nfrom rest_framework.viewsets import ModelViewSet\nfrom rest_framework.response import Response\nfrom company.models import Company\nfrom company.serializers import CompanySerializer\nfrom django.db.transaction import atomic\n\n\nclass CompanyViewSet(ModelViewSet):\n serializer_class = CompanySerializer\n queryset = Company.objects.all()\n\n # TODO Allow only one company to be created\n\n def list(self, request, *args, **kwargs):\n company_instance = Company.get_company()\n if company_instance is None:\n return super().list(request, *args, **kwargs)\n else:\n serializer = self.serializer_class(company_instance)\n return Response(serializer.data)\n"
},
{
"alpha_fraction": 0.829383909702301,
"alphanum_fraction": 0.829383909702301,
"avg_line_length": 29.14285659790039,
"blob_id": "1c34e19b8d5a09ba517c008668380d75d78e9be8",
"content_id": "0057f421a9245c0fec3adb55423a5313c4dec857",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 211,
"license_type": "no_license",
"max_line_length": 43,
"num_lines": 7,
"path": "/company/urls.py",
"repo_name": "mbijou/car-repair-shop-backend",
"src_encoding": "UTF-8",
"text": "from django.urls import path\nfrom company.viewsets import CompanyViewSet\nfrom rest_framework import routers\n\nrouter = routers.SimpleRouter()\nrouter.register(r'company', CompanyViewSet)\nurlpatterns = router.urls\n"
},
{
"alpha_fraction": 0.8130081295967102,
"alphanum_fraction": 0.8130081295967102,
"avg_line_length": 19.5,
"blob_id": "f605217745937c4f99bedfd123a2578e1dff2766",
"content_id": "66d5a9f764f7f68bc8fb8004837443e227ab1111",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 123,
"license_type": "no_license",
"max_line_length": 34,
"num_lines": 6,
"path": "/company/admin.py",
"repo_name": "mbijou/car-repair-shop-backend",
"src_encoding": "UTF-8",
"text": "from django.contrib import admin\n\n# Register your models here.\nfrom company.models import Company\n\nadmin.register(Company)\n"
},
{
"alpha_fraction": 0.6006944179534912,
"alphanum_fraction": 0.6111111044883728,
"avg_line_length": 23,
"blob_id": "50c7a3ec6594902983ef54c03f14f73a3702e771",
"content_id": "4ce09662352a095bca6dc9990b55eb6cc7f021f7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 576,
"license_type": "no_license",
"max_line_length": 54,
"num_lines": 24,
"path": "/company/models.py",
"repo_name": "mbijou/car-repair-shop-backend",
"src_encoding": "UTF-8",
"text": "from django.db import models\nfrom django.core.exceptions import ValidationError\n\n\n# Create your models here.\nclass Company(models.Model):\n name = models.CharField(max_length=200)\n\n def save(self, *args, **kwargs):\n self.pk = 1\n return super().save(*args, **kwargs)\n\n @classmethod\n def save_company(cls):\n obj, created = cls.objects.get_or_create(pk=1)\n return obj\n\n @classmethod\n def get_company(cls):\n try:\n obj = cls.objects.get(pk=1)\n return obj\n except cls.DoesNotExist:\n return\n"
}
] | 5 |
rfalias/needs-restart | https://github.com/rfalias/needs-restart | cf0191a96ccde95b706941dc97872d3170fd53a7 | 07bf2de24aa8cb552a5f95323017868cebba28a4 | e937d105fc392d5ea0dc4ead75c6a2fa21aabb44 | refs/heads/master | 2022-11-20T20:00:00.412796 | 2020-07-22T14:34:28 | 2020-07-22T14:34:28 | 279,611,215 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.5773488283157349,
"alphanum_fraction": 0.585296630859375,
"avg_line_length": 35.31958770751953,
"blob_id": "45a95f0a7f0121ca72dd3f2e7c50177dc1274663",
"content_id": "5f68aa853b0ccc50aad5f38a1da0e37c89a7d0d3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3523,
"license_type": "no_license",
"max_line_length": 119,
"num_lines": 97,
"path": "/needs-restart.py",
"repo_name": "rfalias/needs-restart",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n# title :needs_restart.py\n# description :Show services that need restarting\n# author :jstevenson\n# date :07/14/2020\n# version :0.1\n# usage :python needs_restarting.py\n# notes :Uses lsof to look for pending deleted .so files, and maps the pid to service\n# python_version :> 2.x\n#==============================================================================\nimport os\nimport argparse\n\n# Setup Parser\nparser = argparse.ArgumentParser(description='Show or fix services that need restarting after updates')\nparser.add_argument('-s','--show', action=\"store_true\", default=False, help=\"Just show which services need restarting\")\nparser.add_argument('-f','--fix', action=\"store_true\", default=False, help=\"Restart services that need it\")\nparser.add_argument('-e','--exclude', nargs='+', help=\"Optional services to exclude from restarting\")\n\n\n# Show items needing restart. Maps output from lsof to systemctl\ndef show_needs_restart():\n stream = os.popen('lsof')\n output = stream.readlines()\n restart_list = list()\n for x in output:\n sp = x.split()\n op = sp[4]\n if \"DEL\" in op:\n fi = sp[8]\n if \".so\" in fi:\n svc = os.popen('systemctl status ' + sp[2])\n svc_out = svc.readlines()\n if \".service\" in svc_out[0]:\n unit = svc_out[0].split()[1]\n if unit not in restart_list:\n restart_list.append(unit)\n\n return restart_list\n\n\n# Take the list from show_needs_restart and issue the systemctl restart command\n# auditd must use service, its a known issue with RHEL.\ndef do_restart(exclude):\n restart_list = show_needs_restart()\n tmp_list = list()\n for service in restart_list:\n if (service in exclude):\n print(\"Skipping %s, in exclude list\" % service)\n continue\n if \"auditd.service\" in service:\n print(\"Restarting auditd using 'service' command. See RHEL Solution 2664811\")\n os.system(\"/sbin/service auditd stop\")\n os.system(\"/bin/systemctl start auditd\")\n else:\n\n print(\"Restarting \" + service)\n os.system(\"/bin/systemctl restart \" + service)\n tmp_list.append(service)\n if len(restart_list) > 0:\n os.system(\"/bin/systemctl restart systemd-*\")\n for x in tmp_list:\n restart_list.remove(x)\n if len(restart_list) > 0:\n print(\"Some services were not be restarted (Exclusion or failure): \")\n for y in restart_list:\n print(y)\n else:\n print(\"All services restarted successfully\")\n for x in show_needs_restart():\n print(x)\n\n\n# Check the args and setup defaults as needed\ndef check_args(args):\n if not (args.show or args.fix):\n print(\"Action required, --show or --fix\")\n exit(-1)\n if args.show:\n for x in show_needs_restart():\n print(x)\n if args.fix:\n exclude_list = list()\n if args.exclude:\n exclude_list = args.exclude\n do_restart(exclude_list)\n\n\nif __name__ == \"__main__\":\n if os.geteuid() != 0:\n exit(\"You need to have root privileges to run this script.\")\n if not (os.path.isfile('/sbin/service')\n and os.path.isfile('/bin/systemctl')\n and os.path.isfile('/sbin/lsof')):\n exit(\"Missing dependencies, check for lsof, service and systemctl\")\n args = parser.parse_args()\n check_args(args)\n"
},
{
"alpha_fraction": 0.7694805264472961,
"alphanum_fraction": 0.7711039185523987,
"avg_line_length": 31.421052932739258,
"blob_id": "b1a21c42e09e8ab10823420acad6e919ac723122",
"content_id": "49f022741f50459c9537ae996df2d891c7a0c938",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 616,
"license_type": "no_license",
"max_line_length": 110,
"num_lines": 19,
"path": "/README.md",
"repo_name": "rfalias/needs-restart",
"src_encoding": "UTF-8",
"text": "# needs-restart\nScript to check which RHEL based distro services need to be restarted after a yum update.\nRequires: lsof, systemd and service commands on RedHat 7+ based distros (CentOS, Oracle Linux, RHEL, etc...)\n\nOptionally fix them.\n```\npython needs-restart.py --show\ndbus.service\npolkit.service\n```\n\nRun with --fix to restart all services. Optional --exclude takes a list of services to exclude from restarting\n```\npython needs-restart.py --fix --exclude dbus.service\nSkipping dbus.service, in exclude list\nRestarting polkit.service\nSome services were not be restarted (Exclusion or failure):\ndbus.service\n````\n"
}
] | 2 |
lglezcas/Practica3 | https://github.com/lglezcas/Practica3 | 053dc345dc28e3fd682ea39ea77100499fb68f29 | 7a10b90de0a50495451b1ba1e4c193e4a67d0fa8 | 0bf6059d49552e788771a1616b12c1e1848df2d8 | refs/heads/master | 2023-04-12T13:23:25.503280 | 2021-05-07T07:43:46 | 2021-05-07T07:43:46 | 364,996,269 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.7199327349662781,
"alphanum_fraction": 0.7317073345184326,
"avg_line_length": 36.1875,
"blob_id": "665337f8c86d0482b03c4c9d24f644ae89fcf590",
"content_id": "982ea8cd99487476ff4a599e46b3b18d624104e0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 1189,
"license_type": "no_license",
"max_line_length": 110,
"num_lines": 32,
"path": "/MyProject_Thread/boards/frdmk64f_frdmcr20a/wireless_examples/thread/router_eligible_device/readme.txt",
"repo_name": "lglezcas/Practica3",
"src_encoding": "UTF-8",
"text": "Overview\n========\nThis application represents the \"Thread router eligible device\" of the Kinetis Thread Stack Demo Applications.\nA Router Eligible Device is a node which initially joins the network as an End Device, but can adaptively \nbecome a mesh Router. Such a device may also have capabilities to initialize, create, and bootstrap \na new Thread Network for the user or a management entity.\nFor more information please refer to the \"Kinetis Thread Stack Demo Applications User's Guide.pdf\" document.\n\nToolchain supported\n===================\n- IAR embedded Workbench 7.70.1\n- Kinetis Development Studio IDE 3.2.0\n\nHardware requirements\n=====================\n- Mini/micro USB cable\n- FRDM-K64F_FRDM-CR20A board\n- Personal Computer\n\nBoard settings\n==============\nNo special board setting.\n\nPrepare the Demo\n================\n1. Connect a mini/micro USB cable between the PC host and the OpenSDA USB port on the board.\n2. Download the program to the target board.\n3. Press the reset button, then any other switch button on your board to begin running the demo.\n\nRunning the demo\n================\nFollow the instructions from the \"Kinetis Thread Stack Demo Applications User's Guide.pdf\"."
},
{
"alpha_fraction": 0.49494948983192444,
"alphanum_fraction": 0.7777777910232544,
"avg_line_length": 32,
"blob_id": "b4d4ee51452d467cf198ab3d7c7cfee29db37026",
"content_id": "fa6096d94a6892c5f83cf7eb4bdfbf00c7b825cc",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "INI",
"length_bytes": 99,
"license_type": "no_license",
"max_line_length": 41,
"num_lines": 3,
"path": "/.metadata/version.ini",
"repo_name": "lglezcas/Practica3",
"src_encoding": "UTF-8",
"text": "#Thu May 06 12:52:27 CDT 2021\norg.eclipse.core.runtime=2\norg.eclipse.platform=4.4.2.v20150204-1700\n"
},
{
"alpha_fraction": 0.5097635984420776,
"alphanum_fraction": 0.5264645218849182,
"avg_line_length": 37.47524642944336,
"blob_id": "3fee5bb8b6c136935e51dc511cf5aa9c10c0c5f6",
"content_id": "ded0ef909840d689975fe2bd2edcbc490969d989",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3892,
"license_type": "no_license",
"max_line_length": 106,
"num_lines": 101,
"path": "/python script/config_sprinkler.py",
"repo_name": "lglezcas/Practica3",
"src_encoding": "UTF-8",
"text": "#Tested with Python 3.6.6 and CoAPThon3\nfrom coapthon.client.helperclient import HelperClient\n\n#Change this IP address with the one listed as Interface 0: eth, Link local address LL64\n# or the one listed as Interface 1: 6LoWPAN, Unique local address\n#host = \"fe80::260:37ff:fe00:fa5d\"\nhostBR = \"fe80::260:37ff:fe00:fa5d\"\nhostED = \"fd01::3ead:2402:95c2:3d3f:bf20\"\nport = 5683\npath =\"led\"\n\nwhile True:\n username = input(\"Which device do you want to send data? BR (Sprinkler control) or ED (Sensor Hub): \")\n print(\"You selected: \" + username)\n\n if username == \"BR\":\n host = hostBR\n print(\"\"\"Select one command:\n 1: Change or check sensor reading period\n 2: Change operation mode\"\"\")\n command_selected = input()\n if command_selected == \"1\":\n path = \"configspr\"\n print(\"\"\"Sensor reading period menu:\n 1: Get current period\n 2: Change period\"\"\")\n inst_selected = input()\n if inst_selected == \"1\":\n client = HelperClient(server=(host, port))\n response = client.get(path)\n print(response.pretty_print())\n client.stop()\n elif inst_selected == \"2\":\n new_addres = input(\"Write new period\")\n client = HelperClient(server=(host, port))\n response = client.post(path,new_addres)\n print(response.pretty_print())\n client.stop()\n else:\n print(\"Command not valid... Please try again\")\n continue\n elif command_selected == \"2\":\n path = \"opmode\"\n op_mode = input(\"Select operation mode (auto or manual):\")\n print(\"Mode Selected is \" + op_mode)\n if op_mode == \"auto\":\n data_send = \"auto\"\n elif op_mode == \"manual\":\n data_send = \"manual\"\n else:\n data_send = op_mode\n print(\"Wrong Mode Selected - Try again\")\n client = HelperClient(server=(host, port))\n response = client.post(path,data_send)\n print(response.pretty_print())\n client.stop()\n elif username == \"ED\":\n host = hostED\n print(\"\"\"Select one command:\n 1: Change or check sampling rate\n 2: Read temperature\n 3: Read humidity\"\"\")\n command_selected = input()\n if command_selected == \"1\":\n path = \"SRconfig\"\n print(\"\"\"Sample Rate Config menu:,\n 1: Get current Sample Rate,\n 2: Change sample rate\"\"\")\n inst_selected = input()\n if inst_selected == \"1\":\n client = HelperClient(server=(host, port))\n response = client.get(path)\n print(response.pretty_print())\n client.stop()\n elif inst_selected == \"2\":\n new_SR = input(\"Write new sample rate:\")\n client = HelperClient(server=(host, port))\n response = client.post(path,new_SR)\n print(response.pretty_print())\n client.stop()\n else:\n print(\"Command not valid... Please try again\")\n continue\n elif command_selected == \"2\":\n path = \"temp\"\n client = HelperClient(server=(host, port))\n response = client.get(path)\n print(response.pretty_print())\n client.stop()\n elif command_selected == \"3\":\n path = \"humidity\"\n client = HelperClient(server=(host, port))\n response = client.get(path)\n print(response.pretty_print())\n client.stop()\n else:\n print(\"Command not valid... Please try again\")\n continue\n else:\n print(\"Command not valid... Please try again\")\n continue\n \n\n"
}
] | 3 |
zhangdavids/offer_py | https://github.com/zhangdavids/offer_py | 0f6e003a61b19bfa6f3061375765514e9032041c | 2864e56bbb82cd2f60d58c19681e382ecea4cb6d | 15f0fd8879c386dff09e4a393e44bd7b171507b4 | refs/heads/master | 2021-01-07T08:20:50.706145 | 2020-02-19T15:44:34 | 2020-02-19T15:44:34 | 241,632,456 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.4613003134727478,
"alphanum_fraction": 0.4736842215061188,
"avg_line_length": 31.299999237060547,
"blob_id": "8219b2e1ce1f3488383af91abc0936d876776901",
"content_id": "3d33f5c315b18320d41bd517bafa780d18eac70d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 678,
"license_type": "no_license",
"max_line_length": 55,
"num_lines": 20,
"path": "/029.py",
"repo_name": "zhangdavids/offer_py",
"src_encoding": "UTF-8",
"text": "# -*- coding:utf-8 -*-\nclass Solution:\n def minNumberInRotateArray(self, rotateArray):\n # write code here\n if rotateArray == []:\n return 0\n _len = len(rotateArray)\n left = 0\n right = _len - 1\n while left <= right:\n mid = int((left + right) >> 1)\n if rotateArray[mid] < rotateArray[mid - 1]:\n return rotateArray[mid]\n if rotateArray[mid] >= rotateArray[right]:\n # 说明在【mid,right】之间\n left = mid + 1\n else:\n # 说明在【left,mid】之间\n right = mid - 1\n return rotateArray[mid]\n"
},
{
"alpha_fraction": 0.3815789520740509,
"alphanum_fraction": 0.4111842215061188,
"avg_line_length": 24.41666603088379,
"blob_id": "c36afb671e5ce4a89825ef53d7bcc9a54bc7c38d",
"content_id": "63e09c0a2e5fb897150635ec19210305656e32b6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 304,
"license_type": "no_license",
"max_line_length": 37,
"num_lines": 12,
"path": "/040.py",
"repo_name": "zhangdavids/offer_py",
"src_encoding": "UTF-8",
"text": "class Solution:\n def multiply(self, A):\n # write code here\n size = len(A)\n B = [1]*size\n for i in range(1,size):\n B[i] = B[i-1]*A[i-1]\n tmp = 1\n for i in range(size-2,-1,-1):\n tmp = tmp*A[i+1]\n B[i] = B[i]*tmp\n return B"
},
{
"alpha_fraction": 0.5335570573806763,
"alphanum_fraction": 0.5436241626739502,
"avg_line_length": 22,
"blob_id": "3b1db1ea85ca2cff33d41e988c8b67828e11512a",
"content_id": "ecd6fc6e1aec25002395b0592684a506a6bc8871",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 348,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 13,
"path": "/004.py",
"repo_name": "zhangdavids/offer_py",
"src_encoding": "UTF-8",
"text": "# 从尾到头打印链表\n\nclass Solution:\n # 返回从尾部到头部的列表值序列,例如[1,2,3]\n def printListFromTailToHead(self, listNode):\n # write code here\n ret = []\n head = listNode\n while(head):\n ret.append(head.val)\n head = head.next\n ret.reverse()\n return ret"
},
{
"alpha_fraction": 0.49593496322631836,
"alphanum_fraction": 0.49593496322631836,
"avg_line_length": 35.17647171020508,
"blob_id": "3fd49a6142018b02b1b2840586d29a740c3d8f06",
"content_id": "393e6c6a490d24eceb1a763c0dd00c694bdac3ce",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 627,
"license_type": "no_license",
"max_line_length": 121,
"num_lines": 17,
"path": "/020.py",
"repo_name": "zhangdavids/offer_py",
"src_encoding": "UTF-8",
"text": "# 对称的二叉树\n\nclass Solution:\n def Symmetrical(self, Lnode, Rnode):\n if Lnode == None and Rnode == None:\n return True\n if Lnode and Rnode:\n return Lnode.val == Rnode.val and self.Symmetrical(Lnode.right, Rnode.left) and self.Symmetrical(Lnode.left,\n Rnode.right)\n else:\n return False\n\n def isSymmetrical(self, pRoot):\n # write code here\n if pRoot == None:\n return True\n return self.Symmetrical(pRoot.left, pRoot.right)\n"
},
{
"alpha_fraction": 0.4149377644062042,
"alphanum_fraction": 0.42323651909828186,
"avg_line_length": 25.77777862548828,
"blob_id": "546bba9247f0b835f4c4310765ba8f7ac4cdf63e",
"content_id": "52e00e35f4cf29e1a6adad2aa7bc847943b179ca",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 496,
"license_type": "no_license",
"max_line_length": 66,
"num_lines": 18,
"path": "/017.py",
"repo_name": "zhangdavids/offer_py",
"src_encoding": "UTF-8",
"text": "# -*- coding:utf-8 -*-\n\n# 和为S的两个数字\n\nclass Solution:\n def FindNumbersWithSum(self, array, tsum):\n # write code here\n memorys = {}\n ret = []\n for num in array:\n if tsum - num in memorys:\n if ret is []:\n ret = [tsum - num, num]\n elif ret and ret[0] * ret[1] > (tsum - num) * num:\n ret = [tsum - num, num]\n else:\n memorys[num] = 1\n return ret\n"
},
{
"alpha_fraction": 0.4214046895503998,
"alphanum_fraction": 0.49163880944252014,
"avg_line_length": 24,
"blob_id": "00d417edaeea43d5c45318a1faa3c567e9be75eb",
"content_id": "49c700f3393cfd2063ce1cb2926267763fdfcac5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 299,
"license_type": "no_license",
"max_line_length": 41,
"num_lines": 12,
"path": "/033.py",
"repo_name": "zhangdavids/offer_py",
"src_encoding": "UTF-8",
"text": "import bisect\nclass Solution:\n def InversePairs(self, data):\n data.reverse()\n L = []\n ret = 0\n for d in data:\n pos = bisect.bisect_left(L,d)\n L.insert(pos,d)\n ret+= pos\n ret = ret % 1000000007\n return ret % 1000000007"
},
{
"alpha_fraction": 0.43795621395111084,
"alphanum_fraction": 0.46715328097343445,
"avg_line_length": 20.076923370361328,
"blob_id": "ebb1207b3224b68dc6079d907e85727264cf5132",
"content_id": "a90c1577855d8dae37197822240f7e0dd71244af",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 282,
"license_type": "no_license",
"max_line_length": 38,
"num_lines": 13,
"path": "/008.py",
"repo_name": "zhangdavids/offer_py",
"src_encoding": "UTF-8",
"text": "# -*- coding:utf-8 -*-\n\n# 矩阵覆盖\nclass Solution:\n def rectCover(self, number):\n # write code here\n\n if number <= 2:\n return number\n dp = [1, 2]\n for i in range(number - 2):\n dp.append(dp[-1] + dp[-2])\n return dp[-1]\n"
},
{
"alpha_fraction": 0.41016948223114014,
"alphanum_fraction": 0.505084753036499,
"avg_line_length": 37.4782600402832,
"blob_id": "dea522ee16089449a91057fac32c7034fa90ce35",
"content_id": "4641acc9c9411e12f3cda0a0a5b73b5dd5d66fb5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1157,
"license_type": "no_license",
"max_line_length": 264,
"num_lines": 23,
"path": "/027.py",
"repo_name": "zhangdavids/offer_py",
"src_encoding": "UTF-8",
"text": "# 滑动窗口的最大值\n#\n# 给定一个数组和滑动窗口的大小,找出所有滑动窗口里数值的最大值。例如,如果输入数组{2,3,4,2,6,2,5,1}及滑动窗口的大小3,那么一共存在6个滑动窗口,他们的最大值分别为{4,4,6,6,6,5}; 针对数组{2,3,4,2,6,2,5,1}的滑动窗口有以下6个: {[2,3,4],2,6,2,5,1}, {2,[3,4,2],6,2,5,1}, {2,3,[4,2,6],2,5,1}, {2,3,4,[2,6,2],5,1}, {2,3,4,2,[6,2,5],1}, {2,3,4,2,6,[2,5,1]}。\n# 思考:假设当前窗口起始位置为start,结束位置为end,我们要构造一个stack, 使得stack[0]为区间[start,end]的最大值。\n\n\n# -*- coding:utf-8 -*-\nclass Solution:\n def maxInWindows(self, num, size):\n # write code here\n if size == 0:\n return []\n ret = []\n stack = []\n for pos in range(len(num)):\n while (stack and stack[-1][0] < num[pos]):\n stack.pop()\n stack.append((num[pos], pos))\n if pos >= size - 1:\n while (stack and stack[0][1] <= pos - size):\n stack.pop(0)\n ret.append(stack[0][0])\n return ret\n"
},
{
"alpha_fraction": 0.350649356842041,
"alphanum_fraction": 0.3974025845527649,
"avg_line_length": 21.647058486938477,
"blob_id": "82a6873c0c6b3757ee9e4fd631e1df68f7803cc2",
"content_id": "0b655a723e8df887b24989524085ac84e0d982b7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 391,
"license_type": "no_license",
"max_line_length": 38,
"num_lines": 17,
"path": "/006.py",
"repo_name": "zhangdavids/offer_py",
"src_encoding": "UTF-8",
"text": "# -*- coding:utf-8 -*-\n\n# 跳台阶\nclass Solution:\n def jumpFloor(self, number):\n # write code here\n \"\"\"\n n = 1 : 1\n n = 2 : 1+1 = 2\n n = 3 : dp[n-2]+dp[n-1]\n \"\"\"\n if number == 1 or number == 2:\n return number\n dp = [1, 2]\n for _ in range(number - 2):\n dp.append(dp[-1] + dp[-2])\n return dp[-1]\n"
},
{
"alpha_fraction": 0.5218659043312073,
"alphanum_fraction": 0.5451894998550415,
"avg_line_length": 30.272727966308594,
"blob_id": "748c7c51bad12b1cfa62f5d284a9c70d9a1384c9",
"content_id": "52d1cdd09d692f5d7d5e2c54b191186bbaf48eb4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 343,
"license_type": "no_license",
"max_line_length": 50,
"num_lines": 11,
"path": "/031.py",
"repo_name": "zhangdavids/offer_py",
"src_encoding": "UTF-8",
"text": "class Solution:\n def FindFirstCommonNode(self, pHead1, pHead2):\n # write code here\n if pHead1== None or pHead2 == None:\n return None\n pa = pHead1\n pb = pHead2\n while(pa!=pb):\n pa = pHead2 if pa is None else pa.next\n pb = pHead1 if pb is None else pb.next\n return pa"
},
{
"alpha_fraction": 0.4892857074737549,
"alphanum_fraction": 0.5,
"avg_line_length": 27,
"blob_id": "83918496d119e3d66643244f6d8eb87ffd33bcac",
"content_id": "6661a422034e5ee82fcd788ff77b22853e37b803",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1140,
"license_type": "no_license",
"max_line_length": 47,
"num_lines": 40,
"path": "/026.py",
"repo_name": "zhangdavids/offer_py",
"src_encoding": "UTF-8",
"text": "class Solution(object):\n # 中序和先序\n def buildTree(self, pre, tin):\n \"\"\"\n :type preorder: List[int]\n :type inorder: List[int]\n :rtype: TreeNode\n \"\"\"\n if pre == []:\n return None\n val = pre[0]\n idx = tin.index(val)\n ltin = tin[0:idx]\n rtin = tin[idx + 1:]\n lpre = pre[1:1 + len(ltin)]\n rpre = pre[1 + len(ltin):]\n root = TreeNode(val)\n root.left = self.buildTree(lpre, ltin)\n root.right = self.buildTree(rpre, rtin)\n return root\n\n # 中序和后序\n def buildTree2(self, inorder, postorder):\n \"\"\"\n :type inorder: List[int]\n :type postorder: List[int]\n :rtype: TreeNode\n \"\"\"\n if postorder == []:\n return None\n val = postorder[-1]\n idx = inorder.index(val)\n lin = inorder[0:idx]\n rin = inorder[idx + 1:]\n lpos = postorder[0:len(lin)]\n rpos = postorder[len(lin):-1]\n root = TreeNode(val)\n root.left = self.buildTree(lin, lpos)\n root.right = self.buildTree(rin, rpos)\n return root\n"
},
{
"alpha_fraction": 0.3049141466617584,
"alphanum_fraction": 0.3274126648902893,
"avg_line_length": 27.627119064331055,
"blob_id": "fc736c6bdff3d8e2f7dd8e1c96ba2df1c3cf9eb4",
"content_id": "984763bd31ce9dfc05d462988857e9f08d707817",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1731,
"license_type": "no_license",
"max_line_length": 57,
"num_lines": 59,
"path": "/018.py",
"repo_name": "zhangdavids/offer_py",
"src_encoding": "UTF-8",
"text": "# -*- coding:utf-8 -*-\n\n# 顺时针打印矩阵\nclass Solution:\n # matrix类型为二维列表,需要返回列表\n def printMatrix(self, matrix):\n # write code here\n m = len(matrix)\n ans = []\n if m == 0:\n return ans\n n = len(matrix[0])\n # ans = [[0 for i in range(n)] for j in range(n)]\n # print ans\n upper_i = 0\n lower_i = m - 1\n left_j = 0\n right_j = n - 1\n num = 1\n i = 0\n j = 0\n right_pointer = 1\n down_pointer = 0\n while (num <= m * n):\n ans.append(matrix[i][j])\n if right_pointer == 1:\n if j < right_j:\n j = j + 1\n else:\n right_pointer = 0\n down_pointer = 1\n upper_i = upper_i + 1\n i = i + 1\n elif down_pointer == 1:\n if i < lower_i:\n i = i + 1\n else:\n right_pointer = -1\n down_pointer = 0\n right_j = right_j - 1\n j = j - 1\n elif right_pointer == -1:\n if j > left_j:\n j = j - 1\n else:\n right_pointer = 0\n down_pointer = -1\n lower_i = lower_i - 1\n i = i - 1\n elif down_pointer == -1:\n if i > upper_i:\n i = i - 1\n else:\n right_pointer = 1\n down_pointer = 0\n left_j = left_j + 1\n j = j + 1\n num = num + 1\n return ans\n"
},
{
"alpha_fraction": 0.4332810044288635,
"alphanum_fraction": 0.43485087156295776,
"avg_line_length": 26.69565200805664,
"blob_id": "3fb0c55a03b1f60a6835365437bd278f125a6f85",
"content_id": "6ac4cd100363dd18f8f4e3073273a81283ff63aa",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 657,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 23,
"path": "/003.py",
"repo_name": "zhangdavids/offer_py",
"src_encoding": "UTF-8",
"text": "# 删除链表中重复的节点\n\nclass Solution:\n def deleteDuplication(self, pHead):\n # write code here\n pos = pHead\n ret = ListNode(-1)\n tmp = ret\n flag = False\n while (pos and pos.next):\n if pos.val == pos.next.val:\n flag = True\n pos.next = pos.next.next\n else:\n if flag:\n flag = False\n else:\n tmp.next = ListNode(pos.val)\n tmp = tmp.next\n pos = pos.next\n if pos and flag == False:\n tmp.next = ListNode(pos.val)\n return ret.next\n"
},
{
"alpha_fraction": 0.41747573018074036,
"alphanum_fraction": 0.4563106894493103,
"avg_line_length": 22.769229888916016,
"blob_id": "ffb3530346e6593e95454a7f5530a17dc5cd4552",
"content_id": "e40540723aa29da5744a139e9c8ec93b42b2af98",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 321,
"license_type": "no_license",
"max_line_length": 56,
"num_lines": 13,
"path": "/005.py",
"repo_name": "zhangdavids/offer_py",
"src_encoding": "UTF-8",
"text": "# -*- coding:utf-8 -*-\n\n# 斐波那契数列\nclass Solution:\n def Fibonacci(self, n):\n if n == 0:\n return 0\n if n == 1 or n == 2:\n return 1\n memories = [1, 1]\n for _ in range(n - 2):\n memories.append(memories[-1] + memories[-2])\n return memories[-1]\n"
},
{
"alpha_fraction": 0.508152186870575,
"alphanum_fraction": 0.5163043737411499,
"avg_line_length": 27.384614944458008,
"blob_id": "6986b0250702802644b8de024ad9b2c410332f7d",
"content_id": "15c7257101b1cb7045f966b45b990a6f1c4f4a1f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 418,
"license_type": "no_license",
"max_line_length": 46,
"num_lines": 13,
"path": "/039.py",
"repo_name": "zhangdavids/offer_py",
"src_encoding": "UTF-8",
"text": "# -*- coding:utf-8 -*-\nclass Solution:\n # 这里要特别注意~找到任意重复的一个值并赋值到duplication[0]\n # 函数返回True/False\n def duplicate(self, numbers, duplication):\n # write code here\n dup = dict()\n for num in numbers:\n if num not in dup:\n dup[num] = True\n else:\n duplication[0]=num\n return True"
},
{
"alpha_fraction": 0.4535714387893677,
"alphanum_fraction": 0.4714285731315613,
"avg_line_length": 22.33333396911621,
"blob_id": "fd1c3933d9a25463cd92c36996521a2401398fea",
"content_id": "593e40266d32f5d38a09587bfac7cba3d0cd0888",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 290,
"license_type": "no_license",
"max_line_length": 38,
"num_lines": 12,
"path": "/007.py",
"repo_name": "zhangdavids/offer_py",
"src_encoding": "UTF-8",
"text": "# 变态跳台阶\n\nclass Solution:\n def jumpFloorII(self, number):\n # write code here\n if number == 1 or number == 2:\n return number\n ret = sum_ = 3\n for i in range(number - 2):\n ret = sum_ + 1\n sum_ += ret\n return ret\n"
},
{
"alpha_fraction": 0.3719165027141571,
"alphanum_fraction": 0.40227705240249634,
"avg_line_length": 24.095237731933594,
"blob_id": "2086aa047f300f33971e21d4ef9cd2294a2daba8",
"content_id": "e31a431e898f0f2d0c3d5c2ed52f9b2eb2f7f2c6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 577,
"license_type": "no_license",
"max_line_length": 40,
"num_lines": 21,
"path": "/014.py",
"repo_name": "zhangdavids/offer_py",
"src_encoding": "UTF-8",
"text": "# 数组中只出现一次的数字\n\nclass Solution:\n # 返回[a,b] 其中ab是出现一次的两个数字\n def FindNumsAppearOnce(self, array):\n # write code here\n ans, a1, a2, flag = 0, 0, 0, 1\n for num in array:\n ans = ans ^ num\n while (ans):\n if ans % 2 == 0:\n ans = ans >> 1\n flag = flag << 1\n else:\n break\n for num in array:\n if num & flag:\n a1 = a1 ^ num\n else:\n a2 = a2 ^ num\n return a1, a2\n"
},
{
"alpha_fraction": 0.5705521702766418,
"alphanum_fraction": 0.5787321329116821,
"avg_line_length": 27.823530197143555,
"blob_id": "3634d71fb59d10b5967a8b632446b94a2c059ef3",
"content_id": "98e37a8ff16e01a3782a66d39ec678d4fb2ff1c7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 499,
"license_type": "no_license",
"max_line_length": 79,
"num_lines": 17,
"path": "/010.py",
"repo_name": "zhangdavids/offer_py",
"src_encoding": "UTF-8",
"text": "# 平衡二叉树\n\nclass Solution:\n def Treeheight(self,pRoot):\n if pRoot == None:\n return 0\n if pRoot.left == None and pRoot.right == None:\n return 1\n lh = self.Treeheight(pRoot.left)\n rh = self.Treeheight(pRoot.right)\n return max(rh,lh)+1\n\n def IsBalanced_Solution(self, pRoot):\n # write code here\n if pRoot == None:\n return True\n return abs(self.Treeheight(pRoot.left)-self.Treeheight(pRoot.right))<=1"
},
{
"alpha_fraction": 0.5059347152709961,
"alphanum_fraction": 0.5222551822662354,
"avg_line_length": 24.923076629638672,
"blob_id": "fa56f277365a56e5b13178c705934f5ad6ed5a4e",
"content_id": "5f3e3faa76cac1c52b6a236cb1d742b0fcc077e5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 674,
"license_type": "no_license",
"max_line_length": 45,
"num_lines": 26,
"path": "/030.py",
"repo_name": "zhangdavids/offer_py",
"src_encoding": "UTF-8",
"text": "# -*- coding:utf-8 -*-\nimport heapq\n\n\nclass Solution:\n def GetUglyNumber_Solution(self, index):\n # write code here\n if index < 1:\n return 0\n heaps = []\n heapq.heappush(heaps, 1)\n lastnum = None\n idx = 1\n while (idx <= index):\n curnum = heapq.heappop(heaps)\n while (curnum == lastnum):\n curnum = heapq.heappop(heaps)\n lastnum = curnum\n idx += 1\n heapq.heappush(heaps, curnum * 2)\n heapq.heappush(heaps, curnum * 3)\n heapq.heappush(heaps, curnum * 5)\n return lastnum\n\n\nprint(Solution().GetUglyNumber_Solution(20))\n"
},
{
"alpha_fraction": 0.5484949946403503,
"alphanum_fraction": 0.5618728995323181,
"avg_line_length": 36.375,
"blob_id": "82e8ffaf5bc3b91ab20385ce68eb7b332cb713cf",
"content_id": "6a905c594a220e00985b1f58d9f15f05702f0d94",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 299,
"license_type": "no_license",
"max_line_length": 61,
"num_lines": 8,
"path": "/038.py",
"repo_name": "zhangdavids/offer_py",
"src_encoding": "UTF-8",
"text": "# -*- coding:utf-8 -*-\nclass Solution:\n def PrintMinNumber(self, numbers):\n # write code here\n if not numbers: return \"\"\n numbers = list(map(str, numbers))\n numbers.sort(cmp=lambda x, y: cmp(x + y, y + x))\n return '0' if numbers[0] == '0' else ''.join(numbers)\n"
},
{
"alpha_fraction": 0.45518869161605835,
"alphanum_fraction": 0.45518869161605835,
"avg_line_length": 22.55555534362793,
"blob_id": "422d5e117917b57a27b225b31e51b47086939134",
"content_id": "3ff4d1c17c2f701c7e211f3d018c65f87d259c55",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 442,
"license_type": "no_license",
"max_line_length": 39,
"num_lines": 18,
"path": "/019.py",
"repo_name": "zhangdavids/offer_py",
"src_encoding": "UTF-8",
"text": "# 二叉树的下一个节点\n\nclass Solution:\n def GetNext(self, pNode):\n # write code here\n # left root right\n if pNode == None:\n return None\n if pNode.right:\n tmp = pNode.right\n while (tmp.left):\n tmp = tmp.left\n return tmp\n p = pNode.next\n while (p and p.right == pNode):\n pNode = p\n p = p.next\n return p\n"
},
{
"alpha_fraction": 0.44999998807907104,
"alphanum_fraction": 0.4555555582046509,
"avg_line_length": 23.545454025268555,
"blob_id": "2c4062b95489717c16fd396ba85987ec04762ec1",
"content_id": "98c799601d69a7632c4bdce73fe3565b41989c1c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 572,
"license_type": "no_license",
"max_line_length": 35,
"num_lines": 22,
"path": "/025.py",
"repo_name": "zhangdavids/offer_py",
"src_encoding": "UTF-8",
"text": "# 二叉平衡树中的第k小数\n\nclass Solution:\n # 返回对应节点TreeNode\n def KthNode(self, pRoot, k):\n # write code here\n stack = []\n node = pRoot\n while node:\n stack.append(node)\n node = node.left\n cnt = 1\n while (stack and cnt <= k):\n node = stack.pop()\n right = node.right\n while right:\n stack.append(right)\n right = right.left\n cnt += 1\n if node and k == cnt - 1:\n return node\n return None\n"
},
{
"alpha_fraction": 0.4386792480945587,
"alphanum_fraction": 0.4599056541919708,
"avg_line_length": 27.33333396911621,
"blob_id": "a8e9fb6a12920fdc749bed07be930fce15c24515",
"content_id": "a9ba234966cf6f2a8657a5eb427d164306b46132",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 424,
"license_type": "no_license",
"max_line_length": 62,
"num_lines": 15,
"path": "/036.py",
"repo_name": "zhangdavids/offer_py",
"src_encoding": "UTF-8",
"text": "# -*- coding:utf-8 -*-\nclass Solution:\n def MoreThanHalfNum_Solution(self, numbers):\n # write code here\n if numbers == []:\n return 0\n val,cnt = None,0\n for num in numbers:\n if cnt==0:\n val,cnt = num,1\n elif val == num:\n cnt+=1\n else:\n cnt-=1\n return val if numbers.count(val)*2>len(numbers) else 0"
},
{
"alpha_fraction": 0.3700440526008606,
"alphanum_fraction": 0.4118942618370056,
"avg_line_length": 24.27777862548828,
"blob_id": "8854bb8cb7c1aca0131e48b270d43f3960dcd23d",
"content_id": "013c3e3972f211d4aa4b478c615d7f71fec6b276",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 454,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 18,
"path": "/037.py",
"repo_name": "zhangdavids/offer_py",
"src_encoding": "UTF-8",
"text": "# -*- coding:utf-8 -*-\nclass Solution:\n def NumberOf1Between1AndN_Solution(self, n):\n # write code here\n if n<1: return 0\n if n==1: return 1\n last,ans,pos = 0,0,1\n while(n):\n num = n%10\n n = n/10\n ans += pos*n\n if num>1:\n ans+=pos\n elif num==1:\n ans+=(last+1)\n last = last+num*pos\n pos*=10\n return ans"
},
{
"alpha_fraction": 0.5261628031730652,
"alphanum_fraction": 0.5261628031730652,
"avg_line_length": 25.538461685180664,
"blob_id": "4b71dd4bde60a1d9dd0ab729cf34a2993b531fa4",
"content_id": "d7ca75e8c9d9d436d34d58a9d40f0a1d844f8c89",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 344,
"license_type": "no_license",
"max_line_length": 50,
"num_lines": 13,
"path": "/035.py",
"repo_name": "zhangdavids/offer_py",
"src_encoding": "UTF-8",
"text": "import heapq\nclass Solution:\n def GetLeastNumbers_Solution(self, tinput, k):\n # write code here\n heaps = []\n ret = []\n for num in tinput:\n heapq.heappush(heaps,num)\n if k>len(heaps):\n return []\n for i in range(k):\n ret.append(heapq.heappop(heaps))\n return ret"
},
{
"alpha_fraction": 0.8383838534355164,
"alphanum_fraction": 0.868686854839325,
"avg_line_length": 32.33333206176758,
"blob_id": "591701ce15c03e90c15003b575ad0330099b0ff1",
"content_id": "c544e81a82e5765cd8389d53f8e92ba8fb4f4f7d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 275,
"license_type": "no_license",
"max_line_length": 39,
"num_lines": 3,
"path": "/009.py",
"repo_name": "zhangdavids/offer_py",
"src_encoding": "UTF-8",
"text": "# 将一个字符串转换成一个整数,要求不能使用字符串转换整数的库函数。\n# 数值为0或者字符串不是一个合法的数值则返回0\n# 思考:如果有正负号,需要在数字之前,出现其他字符或者字符串为空都非法返回0"
},
{
"alpha_fraction": 0.41565218567848206,
"alphanum_fraction": 0.42260870337486267,
"avg_line_length": 24,
"blob_id": "2ca90de492602a51ee5cea07986fed7692010068",
"content_id": "9593b7c2670f66c252521e3ce89cd628f8f4dd76",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 605,
"license_type": "no_license",
"max_line_length": 47,
"num_lines": 23,
"path": "/021.py",
"repo_name": "zhangdavids/offer_py",
"src_encoding": "UTF-8",
"text": "# 把二叉树打印成多行\n\nclass Solution:\n # 返回二维列表[[1,2],[4,5]]\n def Print(self, pRoot):\n # write code here\n if pRoot == None:\n return []\n stack = [pRoot]\n ret = []\n\n while (stack):\n tmpstack = []\n tmp = []\n for node in stack:\n tmp.append(node.val)\n if node.left:\n tmpstack.append(node.left)\n if node.right:\n tmpstack.append(node.right)\n ret.append(tmp[:])\n stack = tmpstack[:]\n return ret\n"
},
{
"alpha_fraction": 0.5560166239738464,
"alphanum_fraction": 0.5601660013198853,
"avg_line_length": 16.214284896850586,
"blob_id": "ae7a22fcd5f08f669f5087618c0ac4c770d56071",
"content_id": "87d23f2f428414dcb3ee09deda29c71faa026667",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 255,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 14,
"path": "/015.py",
"repo_name": "zhangdavids/offer_py",
"src_encoding": "UTF-8",
"text": "# -*- coding:utf-8 -*-\n\n# 翻转单词顺序列\n\n\nclass Solution:\n def ReverseSentence(self, s):\n # write code here\n ret = s.split(\" \")\n ret.reverse()\n return ' '.join(ret)\n\n\nprint(Solution().ReverseSentence(\"i love lily\"))\n"
},
{
"alpha_fraction": 0.3421787619590759,
"alphanum_fraction": 0.37011173367500305,
"avg_line_length": 28.875,
"blob_id": "0e3a1dffb949ca20c2b16f968faabc3082336c3f",
"content_id": "e7a62f2dcb2e073760d325ec4135973c7cbaf5fb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 734,
"license_type": "no_license",
"max_line_length": 56,
"num_lines": 24,
"path": "/011.py",
"repo_name": "zhangdavids/offer_py",
"src_encoding": "UTF-8",
"text": "# 和为S的连续正数序列\n\nclass Solution:\n def FindContinuousSequence(self, tsum):\n # write code here\n k = 2\n ret = []\n for k in range(2,tsum):\n if k%2==1 and tsum%k==0:\n tmp = []\n mid = tsum/k\n if mid-k/2>0:\n for i in range(mid-k/2,mid+k/2+1):\n tmp.append(i)\n ret.append(tmp[:])\n elif k%2==0 and (tsum%k)*2==k:\n mid = tsum/k\n tmp = []\n if mid-k/2+1>0:\n for i in range(mid-k/2+1,mid+k/2+1):\n tmp.append(i)\n ret.append(tmp[:])\n ret.sort()\n return ret"
},
{
"alpha_fraction": 0.3273727595806122,
"alphanum_fraction": 0.3466299772262573,
"avg_line_length": 26.961538314819336,
"blob_id": "0404dd8cfd48dedc5df728932bc13ec3c66a4fb5",
"content_id": "ad4b304e6aefbe7ba7019c1eb12a06aa6780f39d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 753,
"license_type": "no_license",
"max_line_length": 59,
"num_lines": 26,
"path": "/013.py",
"repo_name": "zhangdavids/offer_py",
"src_encoding": "UTF-8",
"text": "# -*- coding:utf-8 -*-\n\n# 数字在排序数组中出现的次数\nclass Solution:\n def GetNumberOfK(self, data, k):\n # write code here\n start = 0\n end = len(data) - 1\n while (start <= end):\n mid = (start + end) / 2\n if data[mid] == k:\n cnt = 0\n tmp = mid\n while (tmp >= 0 and data[tmp] == k):\n cnt += 1\n tmp -= 1\n tmp = mid + 1\n while (tmp < len(data) and data[tmp] == k):\n cnt += 1\n tmp += 1\n return cnt\n elif data[mid] > k:\n end = mid - 1\n else:\n start = mid + 1\n return 0\n"
},
{
"alpha_fraction": 0.4876404404640198,
"alphanum_fraction": 0.5056179761886597,
"avg_line_length": 30.85714340209961,
"blob_id": "9d990aba11881917dab46af3692043e3548b6a74",
"content_id": "c50fc7d14e2ebab0358fab0f675aa6d000d0b199",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 445,
"license_type": "no_license",
"max_line_length": 53,
"num_lines": 14,
"path": "/032.py",
"repo_name": "zhangdavids/offer_py",
"src_encoding": "UTF-8",
"text": "# -*- coding:utf-8 -*-\nclass Solution:\n def FirstNotRepeatingChar(self, s):\n # write code here\n queue = []\n memories = dict()\n for idx,char in enumerate(s):\n if char not in memories:\n queue.append(idx)\n memories[char]=0\n memories[char]+=1\n while(queue and memories[s[queue[0]]]>1):\n queue.pop(0)\n return queue[0] if queue else -1"
},
{
"alpha_fraction": 0.39346811175346375,
"alphanum_fraction": 0.3996889591217041,
"avg_line_length": 24.719999313354492,
"blob_id": "aac917ffed053aa9c15fdbe80c409afb1290d288",
"content_id": "3477d55103e51dbbed0dc5e1c7c34e28a2aa6f75",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 665,
"license_type": "no_license",
"max_line_length": 47,
"num_lines": 25,
"path": "/022.py",
"repo_name": "zhangdavids/offer_py",
"src_encoding": "UTF-8",
"text": "# 按之字形顺序打印二叉树\n\nclass Solution:\n def Print(self, pRoot):\n # write code here\n if pRoot == None:\n return []\n stack = [pRoot]\n step = 1\n ret = []\n while (stack):\n tmpstack = []\n tmp = []\n for node in stack:\n tmp += [node.val]\n if node.left:\n tmpstack.append(node.left)\n if node.right:\n tmpstack.append(node.right)\n if step % 2 == 0:\n tmp.reverse()\n ret.append(tmp)\n step += 1\n stack = tmpstack[:]\n return ret\n"
},
{
"alpha_fraction": 0.5114753842353821,
"alphanum_fraction": 0.5245901346206665,
"avg_line_length": 29.600000381469727,
"blob_id": "a05a19e910205dcbb35963f3ef6d1d9fb2cb745a",
"content_id": "4d2c4bc1c27deb241d6aca7ea9ad1071b0d1acd8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 305,
"license_type": "no_license",
"max_line_length": 47,
"num_lines": 10,
"path": "/034.py",
"repo_name": "zhangdavids/offer_py",
"src_encoding": "UTF-8",
"text": "class Solution:\n def FindGreatestSumOfSubArray(self, array):\n # write code here\n if len(array)==1:\n return array[0]\n cur = pos = array[0]\n for i in range(1,len(array)):\n pos = max(pos+array[i],array[i])\n cur = max(cur,pos)\n return cur"
}
] | 33 |
Eve-AI/greetings-package | https://github.com/Eve-AI/greetings-package | e07f79a960cb22d17a1e4095d8fbf66db6e57ffc | 6d637d7ef1427649265a59219710372a6c8682d9 | 8eecd3799050dc49f295f151f20410f5361a7821 | refs/heads/master | 2021-03-02T13:15:10.102256 | 2020-03-08T19:01:38 | 2020-03-08T19:01:38 | 245,871,072 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6752136945724487,
"alphanum_fraction": 0.6794871687889099,
"avg_line_length": 25,
"blob_id": "89b990b1c211ed7eb42e2e91d124fb5b553efc43",
"content_id": "5d20fec6538d8be5ee487f4c1724fd0a00a9aff0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 234,
"license_type": "no_license",
"max_line_length": 90,
"num_lines": 9,
"path": "/nice_to_talk_to_you.py",
"repo_name": "Eve-AI/greetings-package",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n# -*- coding:utf-8 -*-\n\nimport utils\n\ndef run(string, entities):\n\t\"\"\"nice_to_talk_to_you module of greetings package\"\"\"\n\n\treturn utils.output('end', 'nice_to_talk_to_you', utils.translate('nice_to_talk_to_you'))\n"
},
{
"alpha_fraction": 0.6666666865348816,
"alphanum_fraction": 0.671875,
"avg_line_length": 20.33333396911621,
"blob_id": "1d7b1b3e42190ea776276cfa1589b9ed6b50c3ff",
"content_id": "08a6f4acd838894b22ca14906f3c062716a3646d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 192,
"license_type": "no_license",
"max_line_length": 62,
"num_lines": 9,
"path": "/hello.py",
"repo_name": "Eve-AI/greetings-package",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n# -*- coding:utf-8 -*-\n\nimport utils\n\ndef run(string, entities):\n\t\"\"\"hello module of greetings package\"\"\"\n\n\treturn utils.output('end', 'hello', utils.translate('hello'))\n"
},
{
"alpha_fraction": 0.800000011920929,
"alphanum_fraction": 0.800000011920929,
"avg_line_length": 19,
"blob_id": "1d43a3c03a3a65bf663861ddc89a1cf66f5f74e0",
"content_id": "6130c9516dadb2efd06704fcb35bf234510d55ee",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 20,
"license_type": "no_license",
"max_line_length": 19,
"num_lines": 1,
"path": "/README.md",
"repo_name": "Eve-AI/greetings-package",
"src_encoding": "UTF-8",
"text": "# greetings-package\n"
},
{
"alpha_fraction": 0.6952381134033203,
"alphanum_fraction": 0.699999988079071,
"avg_line_length": 22.33333396911621,
"blob_id": "f36eb50d3f3989c1882ea9fa91d4963de4887a5b",
"content_id": "1d28b41f1c80623a2d0ccb826615842d7bb97ea8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 210,
"license_type": "no_license",
"max_line_length": 74,
"num_lines": 9,
"path": "/goodmorning.py",
"repo_name": "Eve-AI/greetings-package",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n# -*- coding:utf-8 -*-\n\nimport utils\n\ndef run(string, entities):\n\t\"\"\"goodmorning module of greetings package\"\"\"\n\n\treturn utils.output('end', 'goodmorning', utils.translate('goodmorning'))\n"
},
{
"alpha_fraction": 0.6711711883544922,
"alphanum_fraction": 0.6756756901741028,
"avg_line_length": 23.66666603088379,
"blob_id": "044f7ee14377d7a6dc375b4d4983ac059d372ed8",
"content_id": "8c36c97937bf1d54b3938a486af714d48a2165b7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 222,
"license_type": "no_license",
"max_line_length": 82,
"num_lines": 9,
"path": "/nice_to_see_you.py",
"repo_name": "Eve-AI/greetings-package",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n# -*- coding:utf-8 -*-\n\nimport utils\n\ndef run(string, entities):\n\t\"\"\"nice_to_see_you module of greetings package\"\"\"\n\n\treturn utils.output('end', 'nice_to_see_you', utils.translate('nice_to_see_you'))\n"
},
{
"alpha_fraction": 0.6755555272102356,
"alphanum_fraction": 0.6800000071525574,
"avg_line_length": 24,
"blob_id": "87e0309ea9ccd3a2671606d6c50b757f0a8c4f9d",
"content_id": "5a86acf95429b9d673a16b054f05d7b4249820ec",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 225,
"license_type": "no_license",
"max_line_length": 84,
"num_lines": 9,
"path": "/nice_to_meet_you.py",
"repo_name": "Eve-AI/greetings-package",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n# -*- coding:utf-8 -*-\n\nimport utils\n\ndef run(string, entities):\n\t\"\"\"nice_to_meet_you module of greetings package\"\"\"\n\n\treturn utils.output('end', 'nice_to_meet_you', utils.translate('nice_to_meet_you'))\n"
}
] | 6 |
OmarElraies/green-hub-api | https://github.com/OmarElraies/green-hub-api | 6d6d47ad031fa7cb0b38100f7e888e5aae919c6b | 3b993b3b729639b448bc72b522761a82b7aa2348 | 6d3d060beb53dc35106134c7bf306758afbd5826 | refs/heads/master | 2020-04-24T20:24:39.097868 | 2019-03-16T19:07:09 | 2019-03-16T19:07:09 | 172,242,422 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.5636363625526428,
"alphanum_fraction": 0.7272727489471436,
"avg_line_length": 17.33333396911621,
"blob_id": "570e3cf03cf1878a0b0890cecf99f39f558d01d0",
"content_id": "96878525d5fa10dff216416ac190488feaa849e4",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 55,
"license_type": "permissive",
"max_line_length": 26,
"num_lines": 3,
"path": "/requirements.txt",
"repo_name": "OmarElraies/green-hub-api",
"src_encoding": "UTF-8",
"text": "Django==2.1.5\ndjangorestframework==3.9.1\nPillow==5.4.1\n"
},
{
"alpha_fraction": 0.714525580406189,
"alphanum_fraction": 0.714525580406189,
"avg_line_length": 32.97142791748047,
"blob_id": "53b12301277020c720bdca4aa5429d5964087ef6",
"content_id": "e5579ab2b745d4c5b73d8547ba588201f93ea10c",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1191,
"license_type": "permissive",
"max_line_length": 102,
"num_lines": 35,
"path": "/greenhub_api/garden/serializers.py",
"repo_name": "OmarElraies/green-hub-api",
"src_encoding": "UTF-8",
"text": "from rest_framework import serializers\nfrom .models import Plants, Category, SubCategory, Places\nfrom django.contrib.auth.models import User\n\n\nclass PlantsSerializer(serializers.HyperlinkedModelSerializer):\n owner = serializers.ReadOnlyField(source='owner.username')\n class Meta:\n model = Plants\n fields = '__all__'\n\nclass CategorySerializer(serializers.HyperlinkedModelSerializer):\n owner = serializers.ReadOnlyField(source='owner.username')\n class Meta:\n model = Category\n fields = '__all__'\n\nclass SubCategorySerializer(serializers.HyperlinkedModelSerializer):\n owner = serializers.ReadOnlyField(source='owner.username')\n class Meta:\n model = SubCategory\n fields = '__all__'\n\nclass PlacesSerializer(serializers.HyperlinkedModelSerializer):\n owner = serializers.ReadOnlyField(source='owner.username')\n class Meta:\n model = Places\n fields = '__all__'\n\nclass UserSerializer(serializers.HyperlinkedModelSerializer):\n plants = serializers.HyperlinkedRelatedField(many=True, view_name='plants-detail', read_only=True)\n\n class Meta:\n model = User\n fields = ('url', 'id', 'username', 'plants')\n\n\n"
},
{
"alpha_fraction": 0.6336353421211243,
"alphanum_fraction": 0.6404303312301636,
"avg_line_length": 33.6274528503418,
"blob_id": "08f75f81dfcb1f6cf4242daba873abc800815005",
"content_id": "648f9e62f99056920fd60ef272109ffc9913fa83",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1766,
"license_type": "permissive",
"max_line_length": 118,
"num_lines": 51,
"path": "/greenhub_api/plants/models.py",
"repo_name": "OmarElraies/green-hub-api",
"src_encoding": "UTF-8",
"text": "from django.db import models\nfrom model_utils.models import TimeStampedModel\nfrom model_utils import Choices\nfrom django.contrib.auth.models import User\n\nclass Plants(TimeStampedModel):\n name = models.CharField(max_length=255)\n owner = models.ForeignKey('auth.User', related_name='plants', on_delete=models.CASCADE, blank=True, null=True)\n photo = models.ImageField()\n last_watering_time = models.DateTimeField()\n place = models.ForeignKey('Places', on_delete=models.CASCADE, blank=True, null=True)\n category = models.ForeignKey('Category', on_delete=models.CASCADE, blank=True, null=True)\n \n class Meta:\n ordering = ('created',)\n\n def __str__(self):\n return self.name\n\nclass Places(TimeStampedModel):\n name = models.CharField(max_length=255)\n owner = models.ForeignKey('auth.User', related_name='places', on_delete=models.CASCADE, blank=True, null=True)\n def __str__(self):\n return self.name\n\nclass Category(TimeStampedModel):\n SEASON_CHOICES = (\n ('SP', 'Spring',),\n ('SU', 'Summer',),\n ('A', 'autumn',),\n ('W', 'Winter',),\n )\n SUNLIGHT_CHOICES = (\n ('SL', 'Sunlight',),\n ('SH', 'Shadow',),\n )\n SOIL_CHOICES = (\n ('SA', 'sand',),\n ('SI', 'silt',),\n ('C', 'clay',),\n )\n\n name = models.CharField(max_length=255)\n owner = models.ForeignKey('auth.User', related_name='categories', on_delete=models.CASCADE, blank=True, null=True)\n water_every = models.TimeField()\n sunlight = models.CharField(max_length=2, choices=SUNLIGHT_CHOICES,)\n soil = models.CharField(max_length=2, choices=SOIL_CHOICES,)\n season = models.CharField(max_length=2, choices=SEASON_CHOICES,)\n\n def __str__(self):\n return self.name\n"
},
{
"alpha_fraction": 0.57001793384552,
"alphanum_fraction": 0.578994631767273,
"avg_line_length": 49.6363639831543,
"blob_id": "1011d5d2a419e242e52b0f39b56d8afda8d681da",
"content_id": "508ca0f503f875c0567563a2df86a92541b902b2",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3342,
"license_type": "permissive",
"max_line_length": 170,
"num_lines": 66,
"path": "/greenhub_api/garden/migrations/0001_initial.py",
"repo_name": "OmarElraies/green-hub-api",
"src_encoding": "UTF-8",
"text": "# Generated by Django 2.1.5 on 2019-03-12 15:37\n\nfrom django.conf import settings\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n migrations.swappable_dependency(settings.AUTH_USER_MODEL),\n ]\n\n operations = [\n migrations.CreateModel(\n name='Category',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('name', models.CharField(max_length=255)),\n ('water_every', models.TimeField()),\n ('sunlight', models.CharField(choices=[('SL', 'Sunlight'), ('SH', 'Shadow')], max_length=2)),\n ('soil', models.CharField(choices=[('SA', 'sand'), ('SI', 'silt'), ('C', 'clay')], max_length=2)),\n ('season', models.CharField(choices=[('SP', 'Spring'), ('SU', 'Summer'), ('A', 'autumn'), ('W', 'Winter')], max_length=2)),\n ('owner', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='category', to=settings.AUTH_USER_MODEL)),\n ],\n ),\n migrations.CreateModel(\n name='Places',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('name', models.CharField(max_length=255)),\n ('owner', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='places', to=settings.AUTH_USER_MODEL)),\n ],\n ),\n migrations.CreateModel(\n name='Plants',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('created', models.DateTimeField(auto_now_add=True)),\n ('name', models.CharField(max_length=255)),\n ('photo', models.ImageField(upload_to='')),\n ('last_watering_time', models.DateTimeField()),\n ('category', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='garden.Category')),\n ('owner', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='plants', to=settings.AUTH_USER_MODEL)),\n ('place', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='garden.Places')),\n ],\n options={\n 'ordering': ('created',),\n },\n ),\n migrations.CreateModel(\n name='SubCategory',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('name', models.CharField(max_length=255)),\n ('owner', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='subcategory', to=settings.AUTH_USER_MODEL)),\n ],\n ),\n migrations.AddField(\n model_name='category',\n name='sub_category',\n field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='garden.SubCategory'),\n ),\n ]\n"
},
{
"alpha_fraction": 0.7352941036224365,
"alphanum_fraction": 0.7352941036224365,
"avg_line_length": 36.77777862548828,
"blob_id": "72bb2d8d9cf606dbea754032fe648980bf87f00f",
"content_id": "c99f8287a3fac86a098e6199cb725ae62cb34469",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 340,
"license_type": "permissive",
"max_line_length": 102,
"num_lines": 9,
"path": "/greenhub_api/accounts/serializers.py",
"repo_name": "OmarElraies/green-hub-api",
"src_encoding": "UTF-8",
"text": "from rest_framework import serializers\nfrom django.contrib.auth.models import User\n\nclass UserSerializer(serializers.HyperlinkedModelSerializer):\n plants = serializers.HyperlinkedRelatedField(many=True, view_name='plants-detail', read_only=True)\n\n class Meta:\n model = User\n fields = ('url', 'id', 'username', 'plants')\n"
},
{
"alpha_fraction": 0.7837837934494019,
"alphanum_fraction": 0.7837837934494019,
"avg_line_length": 33,
"blob_id": "13d93a24859a227ab1ff7cc8350494a8acae4be0",
"content_id": "66fcbb355ce515f6c4d2f724c8a6178ffe21f2ce",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 407,
"license_type": "permissive",
"max_line_length": 62,
"num_lines": 12,
"path": "/greenhub_api/plants/urls.py",
"repo_name": "OmarElraies/green-hub-api",
"src_encoding": "UTF-8",
"text": "from django.urls import path, include\nfrom rest_framework.routers import DefaultRouter\nfrom plants import views\n\n# Create a router and register our viewsets with it.\nrouter = DefaultRouter()\nrouter.register(r'plants', views.PlantsViewSet)\nrouter.register(r'category', views.CategoryViewSet)\n# The API URLs are now determined automatically by the router.\nurlpatterns = [\n path('', include(router.urls)),\n]"
},
{
"alpha_fraction": 0.8051282167434692,
"alphanum_fraction": 0.8051282167434692,
"avg_line_length": 23.375,
"blob_id": "a9c8c887a356cd1b1388bd528476354c734e0e90",
"content_id": "453b78bfa44fa022b511dbd56e20b9f7029a3fa6",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 195,
"license_type": "permissive",
"max_line_length": 44,
"num_lines": 8,
"path": "/greenhub_api/plants/admin.py",
"repo_name": "OmarElraies/green-hub-api",
"src_encoding": "UTF-8",
"text": "from django.contrib import admin\n\n# Register your models here.\nfrom .models import Plants, Category, Places\n\nadmin.site.register(Plants)\nadmin.site.register(Category)\nadmin.site.register(Places)\n"
},
{
"alpha_fraction": 0.7028112411499023,
"alphanum_fraction": 0.721552848815918,
"avg_line_length": 40.5,
"blob_id": "f1536aca98b58764cf8b7ee87ef58626e6533f76",
"content_id": "2f56bfd5524fa3c178a46d89a482cbf7d0b66589",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 747,
"license_type": "permissive",
"max_line_length": 159,
"num_lines": 18,
"path": "/greenhub_api/accounts/urls.py",
"repo_name": "OmarElraies/green-hub-api",
"src_encoding": "UTF-8",
"text": "from django.urls import include, path, re_path\nfrom rest_auth.views import PasswordResetConfirmView\n\nfrom rest_framework.routers import DefaultRouter\nfrom accounts import views\n\n# Create a router and register our viewsets with it.\nrouter = DefaultRouter()\nrouter.register(r'user', views.UserViewSet)\n# The API URLs are now determined automatically by the router.\n\nurlpatterns = [\n\tpath('', include(router.urls)),\n path('rest-auth/', include('rest_auth.urls')),\n path('rest-auth/registration/', include('rest_auth.registration.urls')),\n\tre_path(r'^rest-auth/password/reset/confirm/(?P<uidb64>[0-9A-Za-z_\\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$', PasswordResetConfirmView.as_view(),\n name='password_reset_confirm'),\n]\n"
},
{
"alpha_fraction": 0.7836644649505615,
"alphanum_fraction": 0.7836644649505615,
"avg_line_length": 31.428571701049805,
"blob_id": "c516edef2f352ce5938a0d76d3a5fa1e1776c436",
"content_id": "dc33c2c4bc049516c156e94bf1eef881673fef9c",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 453,
"license_type": "permissive",
"max_line_length": 62,
"num_lines": 14,
"path": "/greenhub_api/garden/urls.py",
"repo_name": "OmarElraies/green-hub-api",
"src_encoding": "UTF-8",
"text": "from django.urls import path, include\nfrom rest_framework.routers import DefaultRouter\nfrom garden import views\n\n# Create a router and register our viewsets with it.\nrouter = DefaultRouter()\nrouter.register(r'garden', views.PlantsViewSet)\nrouter.register(r'category', views.CategoryViewSet)\nrouter.register(r'users', views.UserViewSet)\n\n# The API URLs are now determined automatically by the router.\nurlpatterns = [\n path('', include(router.urls)),\n]"
},
{
"alpha_fraction": 0.7336217761039734,
"alphanum_fraction": 0.7336217761039734,
"avg_line_length": 35.772727966308594,
"blob_id": "f1b4005f0ca52a310169a87cc6ff143f4a09e0eb",
"content_id": "ab4eb5df366fbfb130ffab5e4760196952163afc",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1618,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 44,
"path": "/greenhub_api/plants/views.py",
"repo_name": "OmarElraies/green-hub-api",
"src_encoding": "UTF-8",
"text": "from plants.models import Plants, Places, Category\nfrom plants.permissions import IsOwnerOrReadOnly\nfrom plants.serializers import PlantsSerializer, PlacesSerializer, CategorySerializer\nfrom django.contrib.auth.models import User\nfrom rest_framework import permissions\nfrom rest_framework import renderers\nfrom rest_framework import viewsets\nfrom rest_framework.decorators import action\nfrom rest_framework.response import Response\n\n\nclass PlantsViewSet(viewsets.ModelViewSet):\n \"\"\"\n This viewset automatically provides `list`, `create`, `retrieve`,\n `update` and `destroy` actions.\n \"\"\"\n queryset = Plants.objects.all()\n serializer_class = PlantsSerializer\n permission_classes = (permissions.IsAuthenticatedOrReadOnly,\n IsOwnerOrReadOnly,)\n\n def perform_create(self, serializer):\n serializer.save(owner=self.request.user)\n\n\nclass PlacesViewSet(viewsets.ModelViewSet):\n \"\"\"\n This viewset automatically provides `list` and `detail` actions.\n \"\"\"\n queryset = Places.objects.all()\n serializer_class = PlacesSerializer\n permission_classes = (permissions.IsAuthenticatedOrReadOnly,\n IsOwnerOrReadOnly,)\n def perform_create(self, serializer):\n serializer.save(owner=self.request.user)\n\n\nclass CategoryViewSet(viewsets.ModelViewSet):\n queryset = Category.objects.all()\n serializer_class = CategorySerializer\n permission_classes = (permissions.IsAuthenticatedOrReadOnly,\n IsOwnerOrReadOnly,)\n def perform_create(self, serializer):\n serializer.save(owner=self.request.user)\n"
},
{
"alpha_fraction": 0.8207171559333801,
"alphanum_fraction": 0.8207171559333801,
"avg_line_length": 34.85714340209961,
"blob_id": "cc6ba936efb984708ce5b1ae2dfe238e203d0538",
"content_id": "1395ee5bbabc2c01b328f37a870657b585180fb3",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 251,
"license_type": "permissive",
"max_line_length": 49,
"num_lines": 7,
"path": "/greenhub_api/accounts/views.py",
"repo_name": "OmarElraies/green-hub-api",
"src_encoding": "UTF-8",
"text": "from django.contrib.auth.models import User\nfrom rest_framework import viewsets\nfrom accounts.serializers import UserSerializer\n\nclass UserViewSet(viewsets.ReadOnlyModelViewSet):\n queryset = User.objects.all()\n serializer_class = UserSerializer\n"
},
{
"alpha_fraction": 0.7106825113296509,
"alphanum_fraction": 0.7106825113296509,
"avg_line_length": 29.545454025268555,
"blob_id": "741721eef921848d6a69339ff21ad90cbc460334",
"content_id": "2d2ce23c7cfc9699d187ba44cc26431ad783cabb",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 674,
"license_type": "permissive",
"max_line_length": 65,
"num_lines": 22,
"path": "/greenhub_api/plants/serializers.py",
"repo_name": "OmarElraies/green-hub-api",
"src_encoding": "UTF-8",
"text": "from rest_framework import serializers\nfrom .models import Plants, Category, Places\n\n\nclass PlantsSerializer(serializers.HyperlinkedModelSerializer):\n owner = serializers.ReadOnlyField(source='owner.username')\n class Meta:\n model = Plants\n fields = '__all__'\n\nclass CategorySerializer(serializers.HyperlinkedModelSerializer):\n owner = serializers.ReadOnlyField(source='owner.username')\n class Meta:\n model = Category\n fields = '__all__'\n\n\nclass PlacesSerializer(serializers.HyperlinkedModelSerializer):\n owner = serializers.ReadOnlyField(source='owner.username')\n class Meta:\n model = Places\n fields = '__all__'\n\n\n"
},
{
"alpha_fraction": 0.7767857313156128,
"alphanum_fraction": 0.7767857313156128,
"avg_line_length": 36,
"blob_id": "d090b132748caac188d87cac186e3a19393e06fc",
"content_id": "c2e2bda47ce56681849c95dec3506b21ac917dd1",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 112,
"license_type": "permissive",
"max_line_length": 73,
"num_lines": 3,
"path": "/README.md",
"repo_name": "OmarElraies/green-hub-api",
"src_encoding": "UTF-8",
"text": "# Green Hub [Django REST Framework]\n\nGreenHub is a very useful app for keeping your plans data and watering it \n"
},
{
"alpha_fraction": 0.7388764023780823,
"alphanum_fraction": 0.7388764023780823,
"avg_line_length": 35.49180221557617,
"blob_id": "ef9af12b5ff186a0b0b67480759f125563fbdab6",
"content_id": "7187e46ef9099b6e23f55b79cee8dfe769cdb045",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2225,
"license_type": "permissive",
"max_line_length": 83,
"num_lines": 61,
"path": "/greenhub_api/garden/views.py",
"repo_name": "OmarElraies/green-hub-api",
"src_encoding": "UTF-8",
"text": "from garden.models import Plants, Places, Category, SubCategory\nfrom garden.permissions import IsOwnerOrReadOnly\nfrom garden.serializers import PlantsSerializer, SubCategorySerializer\nfrom garden.serializers import UserSerializer, PlacesSerializer, CategorySerializer\nfrom django.contrib.auth.models import User\nfrom rest_framework import permissions\nfrom rest_framework import renderers\nfrom rest_framework import viewsets\nfrom rest_framework.decorators import action\nfrom rest_framework.response import Response\n\n\nclass PlantsViewSet(viewsets.ModelViewSet):\n \"\"\"\n This viewset automatically provides `list`, `create`, `retrieve`,\n `update` and `destroy` actions.\n\n Additionally we also provide an extra `highlight` action.\n \"\"\"\n queryset = Plants.objects.all()\n serializer_class = PlantsSerializer\n permission_classes = (permissions.IsAuthenticatedOrReadOnly,\n IsOwnerOrReadOnly,)\n\n def perform_create(self, serializer):\n serializer.save(owner=self.request.user)\n\n\nclass PlacesViewSet(viewsets.ModelViewSet):\n \"\"\"\n This viewset automatically provides `list` and `detail` actions.\n \"\"\"\n queryset = Places.objects.all()\n serializer_class = PlacesSerializer\n permission_classes = (permissions.IsAuthenticatedOrReadOnly,\n IsOwnerOrReadOnly,)\n def perform_create(self, serializer):\n serializer.save(owner=self.request.user)\n\n\nclass CategoryViewSet(viewsets.ModelViewSet):\n queryset = Category.objects.all()\n serializer_class = CategorySerializer\n permission_classes = (permissions.IsAuthenticatedOrReadOnly,\n IsOwnerOrReadOnly,)\n def perform_create(self, serializer):\n serializer.save(owner=self.request.user)\n\n\nclass SubCategoryViewSet(viewsets.ModelViewSet):\n queryset = SubCategory.objects.all()\n serializer_class = SubCategorySerializer\n permission_classes = (permissions.IsAuthenticatedOrReadOnly,\n IsOwnerOrReadOnly,)\n def perform_create(self, serializer):\n serializer.save(owner=self.request.user)\n\n\nclass UserViewSet(viewsets.ReadOnlyModelViewSet):\n queryset = User.objects.all()\n serializer_class = UserSerializer"
}
] | 14 |
pyrooka/fallout-terminal-cracker | https://github.com/pyrooka/fallout-terminal-cracker | 7fff7bddd9549feaabeb575952f62f0b0efb8ef4 | 99bbb06bc2acff3c42d513d34667944327373367 | 62e7c62c066275010b39bfeeeb78179a84c7ec1c | refs/heads/master | 2020-12-02T01:40:23.828965 | 2016-08-29T11:54:06 | 2016-08-29T11:54:06 | 66,739,032 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.5894502997398376,
"alphanum_fraction": 0.5964410305023193,
"avg_line_length": 25.453781127929688,
"blob_id": "cca5d27fb4b13fbcb9aa5560cb9dad18c963f12b",
"content_id": "57f547d517b25c94be08289d5c8ac101708d4dd3",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3147,
"license_type": "permissive",
"max_line_length": 118,
"num_lines": 119,
"path": "/fallout_terminal_cracker.py",
"repo_name": "pyrooka/fallout-terminal-cracker",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python\n\nimport sys, re\n\nbasewords = [] #default list\nanswers = [] #list of possible codes\n\n#print help for the lame guyz outside\ndef printHelp(case):\n if case == 1:\n print \"Use from terminal/cmd with one parameter: list of the words separate by enter.\"\n if case == 2:\n print \"Input format: \\\"yourword\\\" \\\"likeness\\\" (without qoutes)\"\n\n#narrow the answers list\ndef narrowList():\n\n global answers\n global basewords\n\n print \"You can choose from these words: \" + str(basewords) # all remained words\n print \"\"\n print \"The answer is in these words: \" + str(answers) # possible asnwers\n print \"\"\n\n result = raw_input(\"Result(word likeness): \") #prompt to input the last tries word and its likeness\n\n # check the input\n if not inputCheck(result):\n printHelp(2)\n return False\n\n # set the word and the likeness value from the user input\n if \" \" in result:\n temp_list = result.split(\" \")\n word = temp_list[0]\n likeness = int(temp_list[1])\n else:\n word = result\n likeness = 0\n\n # delete all appereance of the input word from the answers list\n answers = filter(lambda a: a != word, answers)\n basewords = filter(lambda a: a != word, basewords)\n\n # if the likeness is 0, just deleted the word from the list, nothing else to do\n if likeness == 0:\n return\n\n # if likeness not 0 lets continue\n for answer in answers:\n like = 0 # if a letter equals, increase this by one\n for i in range(0,len(answer)):\n if(answer[i] == word[i]):\n like += 1\n # after get all of the equals return and the like is the same or bigger then the likeness let it in the list.\n # else delete it\n\n if like < likeness:\n answers = filter(lambda a: a != answer, answers)\n\n\n# check is the user input correct..never trust in ppl.except urself. except if ur name anuka. meh never trust in anuka\ndef inputCheck(inp):\n global basewords\n\n if \" \" in inp:\n l = inp.split(\" \")\n if l[0] not in basewords:\n return False\n r = re.match(\"[a-zA-Z]* [0-9]*\", inp)\n if not r:\n return False\n r = re.match(\"[a-zA-Z]*\", inp)\n if not r:\n return False\n\n return True\n\ndef main(file):\n\n global basewords\n global answers\n\n # open the word file for read\n try:\n f = open(file, \"r\")\n except:\n print \"Cannot open %s.\" % file\n sys.exit()\n\n # fill up basewords list without the new line character\n for line in f:\n basewords.append(line.replace(\"\\n\", \"\"))\n\n answers = basewords # atm the possible answer is the default list\n printHelp(2)\n while True:\n narrowList() # lets start narrowing the our answer list\n\n # if only 1 answer left no more trying\n if len(answers) == 1:\n print \"Congratulation! The solution is: %s\" % (answers[0])\n break\n\n print \"The possible answers are: \" + str(answers)\n sys.exit()\n\n\n\n\n\n\nif __name__ == \"__main__\":\n if len(sys.argv) != 2:\n printHelp(1)\n sys.exit()\n\n main(sys.argv[1])"
},
{
"alpha_fraction": 0.747474730014801,
"alphanum_fraction": 0.7575757503509521,
"avg_line_length": 32,
"blob_id": "9d7963b76ccd56569babc7abb40377367200e304",
"content_id": "db01a11b84a683b8ee916dd2117713a588394098",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 99,
"license_type": "permissive",
"max_line_length": 86,
"num_lines": 3,
"path": "/README.md",
"repo_name": "pyrooka/fallout-terminal-cracker",
"src_encoding": "UTF-8",
"text": "# README #\n\nThis is a simple script which help you to win the terminal cracking game in Fallout 4.\n"
}
] | 2 |
SUTHARRAM/Cricket-Database-System | https://github.com/SUTHARRAM/Cricket-Database-System | 89147ead7a3bcd6ac4502f6c58393ed6f98d164e | 9ecfe0444613bb69ac497bbfc772a4f393e3b43f | 8e58042e474fcf533bf6900a9fed1ac868874cee | refs/heads/master | 2020-08-09T21:34:02.385257 | 2019-10-10T12:47:46 | 2019-10-10T12:47:46 | 214,179,995 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6358620524406433,
"alphanum_fraction": 0.6537930965423584,
"avg_line_length": 35.7088623046875,
"blob_id": "eb5033502269a2cc32a6808662c2011fd8b67087",
"content_id": "aa2969eb7dded40d78cf4aa10f277837168b19dd",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2900,
"license_type": "no_license",
"max_line_length": 79,
"num_lines": 79,
"path": "/delete_team.py",
"repo_name": "SUTHARRAM/Cricket-Database-System",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n\n# Form implementation generated from reading ui file 'delete_team.ui'\n#\n# Created by: PyQt5 UI code generator 5.12.1\n#\n# WARNING! All changes made in this file will be lost!\n\nfrom PyQt5 import QtCore, QtGui, QtWidgets\nimport pymysql\n\n\nclass Ui_delete_team(object):\n def __init__(self):\n super().__init__()\n self.db = pymysql.connect(\"localhost\", \"ram\", \"password\", \"CRICKET_DB\")\n\n def delete_team(self):\n self.team_id = self.inputTeamID.text()\n\n self.sql = \"DELETE FROM Teams WHERE Teams.Team_ID = %s\"\n self.arg = (self.team_id)\n\n try:\n with self.db.cursor() as cr:\n cr.execute(self.sql,self.arg)\n self.db.commit()\n finally:\n self.error_message = QtWidgets.QErrorMessage()\n self.error_message.setWindowTitle(\"success\")\n self.error_message.showMessage(\"Team Deleted successfully!\") \n \n self.inputTeamID.clear()\n def setupUi(self, delete_team):\n delete_team.setObjectName(\"delete_team\")\n delete_team.resize(800, 600)\n self.centralwidget = QtWidgets.QWidget(delete_team)\n self.centralwidget.setObjectName(\"centralwidget\")\n self.btnDelete = QtWidgets.QPushButton(self.centralwidget)\n self.btnDelete.setGeometry(QtCore.QRect(330, 360, 89, 25))\n self.btnDelete.setObjectName(\"btnDelete\")\n self.label = QtWidgets.QLabel(self.centralwidget)\n self.label.setGeometry(QtCore.QRect(156, 250, 111, 20))\n self.label.setObjectName(\"label\")\n self.inputTeamID = QtWidgets.QLineEdit(self.centralwidget)\n self.inputTeamID.setGeometry(QtCore.QRect(490, 250, 151, 25))\n self.inputTeamID.setObjectName(\"inputTeamID\")\n delete_team.setCentralWidget(self.centralwidget)\n self.menubar = QtWidgets.QMenuBar(delete_team)\n self.menubar.setGeometry(QtCore.QRect(0, 0, 800, 22))\n self.menubar.setObjectName(\"menubar\")\n delete_team.setMenuBar(self.menubar)\n self.statusbar = QtWidgets.QStatusBar(delete_team)\n self.statusbar.setObjectName(\"statusbar\")\n delete_team.setStatusBar(self.statusbar)\n\n self.retranslateUi(delete_team)\n QtCore.QMetaObject.connectSlotsByName(delete_team)\n\n def retranslateUi(self, delete_team):\n _translate = QtCore.QCoreApplication.translate\n delete_team.setWindowTitle(_translate(\"delete_team\", \"MainWindow\"))\n \n self.btnDelete.setText(_translate(\"delete_team\", \"DELETE\"))\n self.btnDelete.clicked.connect(self.delete_team)\n \n self.label.setText(_translate(\"delete_team\", \"ENTER TEAM ID\"))\n\n\n\n\nif __name__ == \"__main__\":\n import sys\n app = QtWidgets.QApplication(sys.argv)\n delete_team = QtWidgets.QMainWindow()\n ui = Ui_delete_team()\n ui.setupUi(delete_team)\n delete_team.show()\n sys.exit(app.exec_())\n"
},
{
"alpha_fraction": 0.6134899854660034,
"alphanum_fraction": 0.6367942690849304,
"avg_line_length": 39.290077209472656,
"blob_id": "f178b34e2356613896b1b0ddfb68e096d1320283",
"content_id": "e78efd67d86149c474653dced877b12875313115",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5278,
"license_type": "no_license",
"max_line_length": 103,
"num_lines": 131,
"path": "/view_team.py",
"repo_name": "SUTHARRAM/Cricket-Database-System",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n\n# Form implementation generated from reading ui file 'view_team.ui'\n#\n# Created by: PyQt5 UI code generator 5.12.1\n#\n# WARNING! All changes made in this file will be lost!\n\nfrom PyQt5 import QtCore, QtGui, QtWidgets\nimport pymysql\n\nclass Ui_view_team(object):\n def __init__(self):\n super().__init__()\n self.db = pymysql.connect(\"localhost\", \"ram\", \"password\", \"CRICKET_DB\")\n \n def view_team(self):\n self.tableWidget.setRowCount(2)\n self.tableWidget.setItem(0,0, QtWidgets.QTableWidgetItem(\"TEAM ID\"))\n self.tableWidget.setItem(0,1, QtWidgets.QTableWidgetItem(\"TEAM NAME\"))\n self.tableWidget.setItem(0,2, QtWidgets.QTableWidgetItem(\"NO OF PLAYERS\"))\n self.tableWidget.setItem(0,3, QtWidgets.QTableWidgetItem(\"RANKING\"))\n\n self.team_id = self.inputTeamID.text()\n\n self.sql = \"SELECT * FROM Teams WHERE Teams.Team_ID = %s \"\n\n self.arg = (self.team_id)\n\n try:\n with self.db.cursor() as cr:\n cr.execute(self.sql,self.arg)\n self.result = cr.fetchone()\n self.col = 0\n for i in self.result:\n self.tableWidget.setItem(1,self.col,QtWidgets.QTableWidgetItem(str(i)))\n self.col = self.col+1\n finally:\n self.col = 0\n\n def view_teams(self):\n self.tableWidget.setRowCount(1)\n self.tableWidget.setItem(0,0, QtWidgets.QTableWidgetItem(\"TEAM ID\"))\n self.tableWidget.setItem(0,1, QtWidgets.QTableWidgetItem(\"TEAM NAME\"))\n self.tableWidget.setItem(0,2, QtWidgets.QTableWidgetItem(\"NO OF PLAYERS\"))\n self.tableWidget.setItem(0,3, QtWidgets.QTableWidgetItem(\"RANKING\"))\n\n self.sql = \"SELECT * FROM Teams \"\n\n try:\n with self.db.cursor() as cr:\n cr.execute(self.sql)\n self.result = cr.fetchall()\n for row_no, row_data in enumerate(self.result):\n self.tableWidget.insertRow(row_no+1)\n for col_no, data in enumerate(row_data):\n self.tableWidget.setItem(row_no+1,col_no,QtWidgets.QTableWidgetItem(str(data)))\n finally:\n pass\n\n\n def setupUi(self, view_team):\n view_team.setObjectName(\"view_team\")\n view_team.resize(800, 600)\n self.centralwidget = QtWidgets.QWidget(view_team)\n self.centralwidget.setObjectName(\"centralwidget\")\n self.btnSearch = QtWidgets.QPushButton(self.centralwidget)\n self.btnSearch.setGeometry(QtCore.QRect(600, 20, 89, 25))\n self.btnSearch.setObjectName(\"btnSearch\")\n self.label = QtWidgets.QLabel(self.centralwidget)\n self.label.setGeometry(QtCore.QRect(130, 20, 111, 20))\n self.label.setObjectName(\"label\")\n self.inputTeamID = QtWidgets.QLineEdit(self.centralwidget)\n self.inputTeamID.setGeometry(QtCore.QRect(340, 20, 151, 25))\n self.inputTeamID.setObjectName(\"inputTeamID\")\n self.tableWidget = QtWidgets.QTableWidget(self.centralwidget)\n self.tableWidget.setGeometry(QtCore.QRect(55, 70, 691, 421))\n \n self.tableWidget.setRowCount(5000)\n self.tableWidget.setColumnCount(4)\n self.tableWidget.setObjectName(\"tableWidget\")\n #################### Change Column Width ################################\n \n self.tableWidget.setColumnWidth(0,150)\n self.tableWidget.setColumnWidth(1,400)\n self.tableWidget.setColumnWidth(2,150)\n self.tableWidget.setColumnWidth(3,150)\n\n self.line = QtWidgets.QFrame(self.centralwidget)\n self.line.setGeometry(QtCore.QRect(57, 50, 681, 20))\n self.line.setFrameShape(QtWidgets.QFrame.HLine)\n self.line.setFrameShadow(QtWidgets.QFrame.Sunken)\n self.line.setObjectName(\"line\")\n self.btnSearchAll = QtWidgets.QPushButton(self.centralwidget)\n self.btnSearchAll.setGeometry(QtCore.QRect(360, 510, 89, 25))\n self.btnSearchAll.setObjectName(\"btnSearchAll\")\n 
view_team.setCentralWidget(self.centralwidget)\n self.menubar = QtWidgets.QMenuBar(view_team)\n self.menubar.setGeometry(QtCore.QRect(0, 0, 800, 22))\n self.menubar.setObjectName(\"menubar\")\n view_team.setMenuBar(self.menubar)\n self.statusbar = QtWidgets.QStatusBar(view_team)\n self.statusbar.setObjectName(\"statusbar\")\n view_team.setStatusBar(self.statusbar)\n\n self.retranslateUi(view_team)\n QtCore.QMetaObject.connectSlotsByName(view_team)\n\n def retranslateUi(self, view_team):\n _translate = QtCore.QCoreApplication.translate\n view_team.setWindowTitle(_translate(\"view_team\", \"MainWindow\"))\n \n self.btnSearch.setText(_translate(\"view_team\", \"SEARCH\"))\n self.btnSearch.clicked.connect(self.view_team)\n \n self.label.setText(_translate(\"view_team\", \"ENTER TEAM ID\"))\n \n self.btnSearchAll.setText(_translate(\"view_team\", \"SEARCH ALL\"))\n self.btnSearchAll.clicked.connect(self.view_teams)\n\n\n\n\nif __name__ == \"__main__\":\n import sys\n app = QtWidgets.QApplication(sys.argv)\n view_team = QtWidgets.QMainWindow()\n ui = Ui_view_team()\n ui.setupUi(view_team)\n view_team.show()\n sys.exit(app.exec_())\n"
},
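The `view_team.py` record above passes `self.arg = (self.team_id)` to `cursor.execute`; the parentheses there do not make a tuple, so a bare string is handed over. pymysql happens to accept a scalar parameter, but the unambiguous, conventional form is a one-element tuple. A minimal sketch of that convention (the connection values are placeholders, not real credentials; table and column names are those from the listing):

```python
import pymysql

# Placeholder connection values; substitute your own server details.
db = pymysql.connect(host="localhost", user="ram",
                     password="password", database="CRICKET_DB")

team_id = "1"  # sample value, as if read from the line edit
with db.cursor() as cr:
    # (team_id) is just team_id in parentheses; (team_id,) is a 1-tuple.
    # Passing a tuple keeps the call shape the same for any placeholder count.
    cr.execute("SELECT * FROM Teams WHERE Team_ID = %s", (team_id,))
    row = cr.fetchone()  # None when no team matches, so guard before iterating
    print(row)
```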
{
"alpha_fraction": 0.5488380193710327,
"alphanum_fraction": 0.5663113594055176,
"avg_line_length": 42.35606002807617,
"blob_id": "265d13cb7dddae1e2338350673376ab6749fbb8b",
"content_id": "29c14699f581b2eaf8256c4dee74547aaec0d155",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5723,
"license_type": "no_license",
"max_line_length": 107,
"num_lines": 132,
"path": "/player_position.py",
"repo_name": "SUTHARRAM/Cricket-Database-System",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n\n# Form implementation generated from reading ui file 'player_position.ui'\n#\n# Created by: PyQt5 UI code generator 5.12.1\n#\n# WARNING! All changes made in this file will be lost!\n\n## playerID, player_name, team_name, Player_Position Cloumns in table\n\n\nfrom PyQt5 import QtCore, QtGui, QtWidgets\nimport pymysql\n\n\nclass Ui_players_position(object):\n\n def __init__(self):\n super().__init__()\n self.db = pymysql.connect(\"localhost\", \"ram\", \"password\", \"CRICKET_DB\")\n\n def view_player_position(self):\n self.game_id = self.inputGameID.text()\n self.tableWidget.setRowCount(1)\n self.tableWidget.setItem(0,0, QtWidgets.QTableWidgetItem(\"PLAYER ID\"))\n self.tableWidget.setItem(0,1, QtWidgets.QTableWidgetItem(\"PLAYER NAME\"))\n self.tableWidget.setItem(0,2, QtWidgets.QTableWidgetItem(\"TEAM NAME\"))\n self.tableWidget.setItem(0,3, QtWidgets.QTableWidgetItem(\"PLAYER POSITION\"))\n\n if(self.game_id != \"\"):\n\n self.sql = \" SELECT TM.P_ID AS P_ID, \"\\\n \" TM.P_N AS P_N, \"\\\n \"Teams.Team_Name as T_N, \"\\\n \"TM.P_P AS P_P \"\\\n \"FROM \"\\\n \"(SELECT T.G_ID AS G_ID, \"\\\n \"T.P_ID AS P_ID,\"\\\n \"T.P_P AS P_P,\"\\\n \" T.T_ID AS T_ID,\"\\\n \"Players.Player_Name as P_N\"\\\n \" FROM \"\\\n \" (SELECT Player_Game.Game_ID as G_ID, \"\\\n \" Player_Game.Player_ID as P_ID,\"\\\n \" Player_Game.Player_Position as P_P,\"\\\n \"Team_Player.Team_ID as T_ID \"\\\n \" FROM Player_Game\"\\\n \" JOIN Team_Player\"\\\n \" ON Team_Player.Player_ID = Player_Game.Player_ID) as T \"\\\n \" JOIN Players \"\\\n \" ON T.P_ID = Players.Player_ID) AS TM \"\\\n \"JOIN Teams \"\\\n \" ON TM.T_ID = Teams.Team_ID \"\\\n \" WHERE TM.G_ID = %s \"\n self.arg = (self.game_id)\n\n try:\n with self.db.cursor() as cr:\n cr.execute(self.sql,self.arg)\n self.result = cr.fetchall()\n for row_no, row_data in enumerate(self.result):\n self.tableWidget.insertRow(row_no+1)\n for col_no, data in enumerate(row_data):\n self.tableWidget.setItem(row_no+1,col_no,QtWidgets.QTableWidgetItem(str(data)))\n finally:\n pass\n\n\n\n \n def setupUi(self, players_position):\n players_position.setObjectName(\"players_position\")\n players_position.resize(800, 600)\n self.centralwidget = QtWidgets.QWidget(players_position)\n self.centralwidget.setObjectName(\"centralwidget\")\n self.label = QtWidgets.QLabel(self.centralwidget)\n self.label.setGeometry(QtCore.QRect(156, 20, 111, 20))\n self.label.setObjectName(\"label\")\n self.btnSearch = QtWidgets.QPushButton(self.centralwidget)\n self.btnSearch.setGeometry(QtCore.QRect(570, 20, 89, 25))\n self.btnSearch.setObjectName(\"btnSearch\")\n self.inputGameID = QtWidgets.QLineEdit(self.centralwidget)\n self.inputGameID.setGeometry(QtCore.QRect(330, 20, 151, 25))\n self.inputGameID.setObjectName(\"inputGameID\")\n self.line = QtWidgets.QFrame(self.centralwidget)\n self.line.setGeometry(QtCore.QRect(50, 40, 701, 20))\n self.line.setFrameShape(QtWidgets.QFrame.HLine)\n self.line.setFrameShadow(QtWidgets.QFrame.Sunken)\n self.line.setObjectName(\"line\")\n \n self.tableWidget = QtWidgets.QTableWidget(self.centralwidget)\n self.tableWidget.setGeometry(QtCore.QRect(45, 60, 711, 471))\n self.tableWidget.setRowCount(5000)\n self.tableWidget.setColumnCount(4)\n self.tableWidget.setObjectName(\"tableWidget\")\n ########## set column width ###################\n\n self.tableWidget.setColumnWidth(0,150)\n self.tableWidget.setColumnWidth(1,400)\n self.tableWidget.setColumnWidth(2,400)\n self.tableWidget.setColumnWidth(3,150)\n \n 
players_position.setCentralWidget(self.centralwidget)\n self.menubar = QtWidgets.QMenuBar(players_position)\n self.menubar.setGeometry(QtCore.QRect(0, 0, 800, 22))\n self.menubar.setObjectName(\"menubar\")\n players_position.setMenuBar(self.menubar)\n self.statusbar = QtWidgets.QStatusBar(players_position)\n self.statusbar.setObjectName(\"statusbar\")\n players_position.setStatusBar(self.statusbar)\n\n self.retranslateUi(players_position)\n QtCore.QMetaObject.connectSlotsByName(players_position)\n\n def retranslateUi(self, players_position):\n _translate = QtCore.QCoreApplication.translate\n players_position.setWindowTitle(_translate(\"players_position\", \"MainWindow\"))\n self.label.setText(_translate(\"players_position\", \"ENTER GAME ID\"))\n \n self.btnSearch.setText(_translate(\"players_position\", \"SEARCH\"))\n self.btnSearch.clicked.connect(self.view_player_position)\n\n\n\nif __name__ == \"__main__\":\n import sys\n app = QtWidgets.QApplication(sys.argv)\n players_position = QtWidgets.QMainWindow()\n ui = Ui_players_position()\n ui.setupUi(players_position)\n players_position.show()\n sys.exit(app.exec_())\n"
},
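Every viewer in this listing spends row 0 of the `QTableWidget` on column titles and offsets all data rows by one. Qt has a dedicated header row for exactly this; a small sketch of the alternative, with hypothetical function and variable names:

```python
from PyQt5 import QtWidgets

def fill_table(table: QtWidgets.QTableWidget, rows):
    """Sketch: `rows` stands in for the tuples from cursor.fetchall()."""
    table.setColumnCount(4)
    # Titles live in the header widget, not in a sacrificed data row.
    table.setHorizontalHeaderLabels(
        ["PLAYER ID", "PLAYER NAME", "TEAM NAME", "PLAYER POSITION"])
    table.setRowCount(len(rows))  # exactly as many rows as results
    for r, row_data in enumerate(rows):
        for c, value in enumerate(row_data):
            table.setItem(r, c, QtWidgets.QTableWidgetItem(str(value)))
```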
{
"alpha_fraction": 0.6371803879737854,
"alphanum_fraction": 0.6551485657691956,
"avg_line_length": 35.17499923706055,
"blob_id": "d664987fce375b4da28a3c40b72ed6fb75c4106d",
"content_id": "8f891e38089ffd3dd38e92c3edcc219e4aaba101",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2894,
"license_type": "no_license",
"max_line_length": 79,
"num_lines": 80,
"path": "/delete_game.py",
"repo_name": "SUTHARRAM/Cricket-Database-System",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n\n# Form implementation generated from reading ui file 'delete_game.ui'\n#\n# Created by: PyQt5 UI code generator 5.12.1\n#\n# WARNING! All changes made in this file will be lost!\n\nfrom PyQt5 import QtCore, QtGui, QtWidgets\nimport pymysql\n\n\nclass Ui_delete_game(object):\n def __init__(self):\n super().__init__()\n self.db = pymysql.connect(\"localhost\", \"ram\", \"password\", \"CRICKET_DB\")\n\n def delete_game(self):\n self.game_id = self.inputGameID.text()\n\n self.sql = \"DELETE FROM Games WHERE Games.Game_ID = %s\"\n self.arg = (self.game_id)\n\n try:\n with self.db.cursor() as cr:\n cr.execute(self.sql,self.arg)\n self.db.commit()\n finally:\n self.error_message = QtWidgets.QErrorMessage()\n self.error_message.setWindowTitle(\"success\")\n self.error_message.showMessage(\"Game Deleted successfully!\") \n \n self.inputGameID.clear()\n\n def setupUi(self, delete_game):\n delete_game.setObjectName(\"delete_game\")\n delete_game.resize(800, 600)\n self.centralwidget = QtWidgets.QWidget(delete_game)\n self.centralwidget.setObjectName(\"centralwidget\")\n self.btnDelete = QtWidgets.QPushButton(self.centralwidget)\n self.btnDelete.setGeometry(QtCore.QRect(330, 360, 89, 25))\n self.btnDelete.setObjectName(\"btnDelete\")\n self.label = QtWidgets.QLabel(self.centralwidget)\n self.label.setGeometry(QtCore.QRect(166, 260, 111, 20))\n self.label.setObjectName(\"label\")\n self.inputGameID = QtWidgets.QLineEdit(self.centralwidget)\n self.inputGameID.setGeometry(QtCore.QRect(442, 260, 191, 25))\n self.inputGameID.setObjectName(\"inputGameID\")\n delete_game.setCentralWidget(self.centralwidget)\n self.menubar = QtWidgets.QMenuBar(delete_game)\n self.menubar.setGeometry(QtCore.QRect(0, 0, 800, 22))\n self.menubar.setObjectName(\"menubar\")\n delete_game.setMenuBar(self.menubar)\n self.statusbar = QtWidgets.QStatusBar(delete_game)\n self.statusbar.setObjectName(\"statusbar\")\n delete_game.setStatusBar(self.statusbar)\n\n self.retranslateUi(delete_game)\n QtCore.QMetaObject.connectSlotsByName(delete_game)\n\n def retranslateUi(self, delete_game):\n _translate = QtCore.QCoreApplication.translate\n delete_game.setWindowTitle(_translate(\"delete_game\", \"MainWindow\"))\n \n self.btnDelete.setText(_translate(\"delete_game\", \"DELETE\"))\n self.btnDelete.clicked.connect(self.delete_game)\n\n self.label.setText(_translate(\"delete_game\", \"ENTER GAME ID\"))\n\n\n\n\nif __name__ == \"__main__\":\n import sys\n app = QtWidgets.QApplication(sys.argv)\n delete_game = QtWidgets.QMainWindow()\n ui = Ui_delete_game()\n ui.setupUi(delete_game)\n delete_game.show()\n sys.exit(app.exec_())\n"
},
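`delete_game.py` above shows its success dialog from a `finally:` block, so the message appears even when the DELETE raises or matches no row. A sketch of one way to report only what actually happened, using `cursor.rowcount` (the function shape and return strings here are illustrative, not from the repo):

```python
import pymysql

def delete_game(db, game_id):
    """Delete one game and report the real outcome."""
    try:
        with db.cursor() as cr:
            cr.execute("DELETE FROM Games WHERE Game_ID = %s", (game_id,))
            deleted = cr.rowcount  # how many rows the DELETE matched
        db.commit()
    except pymysql.MySQLError as exc:
        return f"delete failed: {exc}"
    return "Game deleted successfully!" if deleted else "No game with that ID."
```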
{
"alpha_fraction": 0.5491088628768921,
"alphanum_fraction": 0.5673214793205261,
"avg_line_length": 42.6761360168457,
"blob_id": "2df21cdee0d3d91876a59c8a30425edd6fc25be1",
"content_id": "7430e9c357795bdb119614ab57e0cd2750f2aeb3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 7687,
"license_type": "no_license",
"max_line_length": 105,
"num_lines": 176,
"path": "/view_players.py",
"repo_name": "SUTHARRAM/Cricket-Database-System",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n\n# Form implementation generated from reading ui file 'view_players.ui'\n#\n# Created by: PyQt5 UI code generator 5.12.1\n#\n# WARNING! All changes made in this file will be lost!\n\nfrom PyQt5 import QtCore, QtGui, QtWidgets\nimport pymysql\n\n\nclass Ui_players_details(object):\n def __init__(self):\n super().__init__()\n self.db = pymysql.connect(\"localhost\", \"ram\", \"password\", \"CRICKET_DB\")\n\n def view_player(self):\n self.tableWidget.setRowCount(2)\n self.tableWidget.setItem(0,0, QtWidgets.QTableWidgetItem(\"PLAYER ID\"))\n self.tableWidget.setItem(0,1, QtWidgets.QTableWidgetItem(\"PLAYER NAME\"))\n self.tableWidget.setItem(0,2, QtWidgets.QTableWidgetItem(\"BATING STYLE\"))\n self.tableWidget.setItem(0,3, QtWidgets.QTableWidgetItem(\"BOLING STYLE\"))\n self.tableWidget.setItem(0,4, QtWidgets.QTableWidgetItem(\"JERSEY NO\"))\n self.tableWidget.setItem(0,5, QtWidgets.QTableWidgetItem(\"TEAM NAME\"))\n\n ##### taking player id from input section ###############\n self.player_id = self.inputPlayerID.text()\n self.sql = \"SELECT T.P_ID,\"\\\n \"T.P_N ,\"\\\n \"T.BT_S ,\"\\\n \"T.BO_S ,\"\\\n \"T.J_N ,\"\\\n \"Teams.Team_Name \"\\\n \"FROM \"\\\n \"(SELECT Players.Player_ID as P_ID, \"\\\n \"Players.Player_Name as P_N, \"\\\n \"Players.Bating_Style as BT_S, \"\\\n \"Players.Boling_Style as BO_S, \"\\\n \"Players.Player_No as J_N, \"\\\n \"Team_Player.Team_ID as T_ID \"\\\n \"FROM Players \"\\\n \"JOIN Team_Player \"\\\n \"ON Players.Player_ID = Team_Player.Player_ID) as T \"\\\n \"JOIN Teams \"\\\n \"ON T.T_ID = Teams.Team_ID \"\\\n \"WHERE T.P_ID = %s\"\n self.arg = (self.player_id)\n\n try:\n with self.db.cursor() as cr:\n cr.execute(self.sql,self.arg)\n self.result = cr.fetchone()\n self.col = 0\n for i in self.result:\n self.tableWidget.setItem(1,self.col,QtWidgets.QTableWidgetItem(str(i)))\n self.col = self.col+1\n finally:\n self.col = 0\n\n\n def view_players(self):\n self.tableWidget.setRowCount(1)\n self.tableWidget.setItem(0,0, QtWidgets.QTableWidgetItem(\"PLAYER ID\"))\n self.tableWidget.setItem(0,1, QtWidgets.QTableWidgetItem(\"PLAYER NAME\"))\n self.tableWidget.setItem(0,2, QtWidgets.QTableWidgetItem(\"BATING STYLE\"))\n self.tableWidget.setItem(0,3, QtWidgets.QTableWidgetItem(\"BOLING STYLE\"))\n self.tableWidget.setItem(0,4, QtWidgets.QTableWidgetItem(\"JERSEY NO\"))\n self.tableWidget.setItem(0,5, QtWidgets.QTableWidgetItem(\"TEAM NAME\"))\n\n self.sql = \"SELECT T.P_ID,\"\\\n \"T.P_N ,\"\\\n \"T.BT_S ,\"\\\n \"T.BO_S ,\"\\\n \"T.J_N ,\"\\\n \"Teams.Team_Name \"\\\n \"FROM \"\\\n \"(SELECT Players.Player_ID as P_ID, \"\\\n \"Players.Player_Name as P_N, \"\\\n \"Players.Bating_Style as BT_S, \"\\\n \"Players.Boling_Style as BO_S, \"\\\n \"Players.Player_No as J_N, \"\\\n \"Team_Player.Team_ID as T_ID \"\\\n \"FROM Players \"\\\n \"JOIN Team_Player \"\\\n \"ON Players.Player_ID = Team_Player.Player_ID) as T \"\\\n \"JOIN Teams \"\\\n \"ON T.T_ID = Teams.Team_ID \"\n\n try:\n with self.db.cursor() as cr:\n cr.execute(self.sql)\n self.result = cr.fetchall()\n for row_no, row_data in enumerate(self.result):\n self.tableWidget.insertRow(row_no+1)\n for col_no, data in enumerate(row_data):\n self.tableWidget.setItem(row_no+1, col_no, QtWidgets.QTableWidgetItem(str(data)))\n \n finally:\n pass\n \n\n\n def setupUi(self, players_details):\n players_details.setObjectName(\"players_details\")\n players_details.resize(800, 600)\n self.centralwidget = QtWidgets.QWidget(players_details)\n self.centralwidget.setObjectName(\"centralwidget\")\n 
self.label = QtWidgets.QLabel(self.centralwidget)\n self.label.setGeometry(QtCore.QRect(110, 20, 121, 17))\n self.label.setObjectName(\"label\")\n self.inputPlayerID = QtWidgets.QLineEdit(self.centralwidget)\n self.inputPlayerID.setGeometry(QtCore.QRect(310, 20, 161, 25))\n self.inputPlayerID.setObjectName(\"inputPlayerID\")\n self.btnSearch = QtWidgets.QPushButton(self.centralwidget)\n self.btnSearch.setGeometry(QtCore.QRect(600, 20, 89, 25))\n self.btnSearch.setObjectName(\"btnSearch\")\n self.line = QtWidgets.QFrame(self.centralwidget)\n self.line.setGeometry(QtCore.QRect(37, 50, 731, 20))\n self.line.setFrameShape(QtWidgets.QFrame.HLine)\n self.line.setFrameShadow(QtWidgets.QFrame.Sunken)\n self.line.setObjectName(\"line\")\n \n self.tableWidget = QtWidgets.QTableWidget(self.centralwidget)\n self.tableWidget.setGeometry(QtCore.QRect(35, 70, 731, 421))\n self.tableWidget.setRowCount(5000)\n self.tableWidget.setColumnCount(6)\n self.tableWidget.setObjectName(\"tableWidget\")\n\n ########### Change Column Width #############################\n\n self.tableWidget.setColumnWidth(0,200)\n self.tableWidget.setColumnWidth(1,200)\n self.tableWidget.setColumnWidth(2,200)\n self.tableWidget.setColumnWidth(3,200)\n self.tableWidget.setColumnWidth(4,100)\n self.tableWidget.setColumnWidth(5,400)\n\n \n self.btnSearchAll = QtWidgets.QPushButton(self.centralwidget)\n self.btnSearchAll.setGeometry(QtCore.QRect(330, 510, 141, 25))\n self.btnSearchAll.setObjectName(\"btnSearchAll\")\n players_details.setCentralWidget(self.centralwidget)\n self.menubar = QtWidgets.QMenuBar(players_details)\n self.menubar.setGeometry(QtCore.QRect(0, 0, 800, 22))\n self.menubar.setObjectName(\"menubar\")\n players_details.setMenuBar(self.menubar)\n self.statusbar = QtWidgets.QStatusBar(players_details)\n self.statusbar.setObjectName(\"statusbar\")\n players_details.setStatusBar(self.statusbar)\n\n self.retranslateUi(players_details)\n QtCore.QMetaObject.connectSlotsByName(players_details)\n\n def retranslateUi(self, players_details):\n _translate = QtCore.QCoreApplication.translate\n players_details.setWindowTitle(_translate(\"players_details\", \"MainWindow\"))\n self.label.setText(_translate(\"players_details\", \"ENTER PLAYER ID\"))\n \n self.btnSearch.setText(_translate(\"players_details\", \"SEARCH\"))\n self.btnSearch.clicked.connect(self.view_player)\n\n self.btnSearchAll.setText(_translate(\"players_details\", \"VIEW ALL PLAYERS\"))\n self.btnSearchAll.clicked.connect(self.view_players)\n\n\n\n\nif __name__ == \"__main__\":\n import sys\n app = QtWidgets.QApplication(sys.argv)\n players_details = QtWidgets.QMainWindow()\n ui = Ui_players_details()\n ui.setupUi(players_details)\n players_details.show()\n sys.exit(app.exec_())\n"
},
{
"alpha_fraction": 0.6462838053703308,
"alphanum_fraction": 0.6638513803482056,
"avg_line_length": 35.543209075927734,
"blob_id": "34af9b1886267dfb47aba9d4d36f2ca161b38008",
"content_id": "9af6d1ef2e086a229182d54e907ac90e96bbfc8a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2960,
"license_type": "no_license",
"max_line_length": 79,
"num_lines": 81,
"path": "/delete_player.py",
"repo_name": "SUTHARRAM/Cricket-Database-System",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n\n# Form implementation generated from reading ui file 'delete_player.ui'\n#\n# Created by: PyQt5 UI code generator 5.12.1\n#\n# WARNING! All changes made in this file will be lost!\n\nfrom PyQt5 import QtCore, QtGui, QtWidgets\nimport pymysql\n\n\nclass Ui_delete_player(object):\n def __init__(self):\n super().__init__()\n self.db = pymysql.connect(\"localhost\", \"ram\", \"password\", \"CRICKET_DB\")\n\n def delete_player(self):\n self.player_id = self.inputPlayerID.text()\n\n self.sql = \"DELETE FROM Players WHERE Player_ID = %s\"\n\n self.arg = (self.player_id)\n\n try:\n with self.db.cursor() as cr:\n cr.execute(self.sql,self.arg)\n self.db.commit()\n finally:\n self.error_message = QtWidgets.QErrorMessage()\n self.error_message.setWindowTitle(\"success\")\n self.error_message.showMessage(\"Player Deleted successfully!\") \n\n self.inputPlayerID.clear()\n \n def setupUi(self, delete_player):\n delete_player.setObjectName(\"delete_player\")\n delete_player.resize(800, 600)\n self.centralwidget = QtWidgets.QWidget(delete_player)\n self.centralwidget.setObjectName(\"centralwidget\")\n self.btnDelete = QtWidgets.QPushButton(self.centralwidget)\n self.btnDelete.setGeometry(QtCore.QRect(310, 350, 89, 25))\n self.btnDelete.setObjectName(\"btnDelete\")\n self.label = QtWidgets.QLabel(self.centralwidget)\n self.label.setGeometry(QtCore.QRect(146, 230, 121, 20))\n self.label.setObjectName(\"label\")\n self.inputPlayerID = QtWidgets.QLineEdit(self.centralwidget)\n self.inputPlayerID.setGeometry(QtCore.QRect(450, 230, 141, 25))\n self.inputPlayerID.setObjectName(\"inputPlayerID\")\n delete_player.setCentralWidget(self.centralwidget)\n self.menubar = QtWidgets.QMenuBar(delete_player)\n self.menubar.setGeometry(QtCore.QRect(0, 0, 800, 22))\n self.menubar.setObjectName(\"menubar\")\n delete_player.setMenuBar(self.menubar)\n self.statusbar = QtWidgets.QStatusBar(delete_player)\n self.statusbar.setObjectName(\"statusbar\")\n delete_player.setStatusBar(self.statusbar)\n\n self.retranslateUi(delete_player)\n QtCore.QMetaObject.connectSlotsByName(delete_player)\n\n def retranslateUi(self, delete_player):\n _translate = QtCore.QCoreApplication.translate\n delete_player.setWindowTitle(_translate(\"delete_player\", \"MainWindow\"))\n \n self.btnDelete.setText(_translate(\"delete_player\", \"DELETE\"))\n self.btnDelete.clicked.connect(self.delete_player)\n\n self.label.setText(_translate(\"delete_player\", \"ENTER PLAYER ID\"))\n\n\n\n\nif __name__ == \"__main__\":\n import sys\n app = QtWidgets.QApplication(sys.argv)\n delete_player = QtWidgets.QMainWindow()\n ui = Ui_delete_player()\n ui.setupUi(delete_player)\n delete_player.show()\n sys.exit(app.exec_())\n"
},
{
"alpha_fraction": 0.5100917220115662,
"alphanum_fraction": 0.5889908075332642,
"avg_line_length": 37.89285659790039,
"blob_id": "b925cab4271c4cb275caf66057f7e868948f750f",
"content_id": "db6395c98b87a8f71b5e0246b6d01fcd1761b042",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "SQL",
"length_bytes": 1090,
"license_type": "no_license",
"max_line_length": 70,
"num_lines": 28,
"path": "/sql/data.sql",
"repo_name": "SUTHARRAM/Cricket-Database-System",
"src_encoding": "UTF-8",
"text": "/*INSERT INTO Players(Player_Name,Bating_Style,Boling_Style,Player_No)\n\tVALUES (\"Mahendra Singh Dhoni\", \"Right Hander\",\"Right Hander\",7),\n\t\t(\"Virat Kohli\", \"Right Hander\", \"Right Hander\", 18),\n\t\t(\"Rohit Sharma\", \"Right Hander\",\"Right Hander\",45),\n\t\t(\"Shikhar Dhawan\",\"Right Hander\",\"Left Hander\",25),\n\t\t(\"KL Rahul\", \"Left Hander\",\"Left Hander\",1),\n\t\t(\"Dineash Kartik\",\"Right Hander\", \"Left Hander\", 19),\n\t\t(\"Ravindra Jadeja\",\"Left Hander\",\"Left Hander\",8),\n\t\t(\"Hardik Pandya\",\"Left Hander\", \"Left Hander\",33),\n\t\t(\"Kedar Jadhav\", \"Right Hander\",\"Right Hander\",81),\n\t\t(\"Vijay Shankar\",\"Left Hander\",\"Left Hander\",59),\n\t\t(\"Bhuvneshwar Kumar\",\"Right Hander\",\"Right Hander\",15),\n\t\t(\"Jasprit Bumrah\",\"Left Hander\", \"Left Hander\",93),\n\t\t(\"Mohammed Shami\",\"Right Hander\", \"Right Hander\",11),\n\t\t(\"Yuzvendra Chahal\",\"Left Hander\",\"Left Hander\",6),\n\t\t(\"Kuldeep Yadav\", \"Left Hander\", \"Left Hander\",23); \n\n\nINSERT INTO Teams(Team_Name,NoOfPlayers,Team_Rank)\n\tVALUES (\"Indian Cricket Team\", 15, 1);\n\n*/\n\nINSERT INTO Team_Player(Team_ID, Player_ID, Player_Position)\n\tVALUES (1,1,1),(1,2,2),(1,3,3),(1,4,4),\n\t\t(1,5,5),(1,6,6),(1,7,7),(1,8,8),\n\t\t(1,9,9),(1,10,10),(1,11,11),(1,12,12),\n\t\t(1,13,13),(1,14,14),(1,15,15); \n"
},
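The `Team_Player` INSERT in `data.sql` spells out fifteen `(team, player, position)` tuples by hand. From Python the same bulk insert is normally written with `executemany`; a sketch mirroring those values (connection details are placeholders):

```python
import pymysql

db = pymysql.connect(host="localhost", user="ram",
                     password="password", database="CRICKET_DB")

# One (Team_ID, Player_ID, Player_Position) row per player, as in data.sql.
rows = [(1, p, p) for p in range(1, 16)]

with db.cursor() as cr:
    cr.executemany(
        "INSERT INTO Team_Player(Team_ID, Player_ID, Player_Position) "
        "VALUES (%s, %s, %s)", rows)
db.commit()  # pymysql does not autocommit by default
```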
{
"alpha_fraction": 0.6703878045082092,
"alphanum_fraction": 0.6886016726493835,
"avg_line_length": 34.45833206176758,
"blob_id": "6bd35aa2420f6cf631aa763c3ee60918b0fa5fb9",
"content_id": "3d501ead97f03ef3a36b06a83da90d1edd4fe787",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1702,
"license_type": "no_license",
"max_line_length": 77,
"num_lines": 48,
"path": "/about_author.py",
"repo_name": "SUTHARRAM/Cricket-Database-System",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n\n# Form implementation generated from reading ui file 'about_author.ui'\n#\n# Created by: PyQt5 UI code generator 5.12.1\n#\n# WARNING! All changes made in this file will be lost!\n\nfrom PyQt5 import QtCore, QtGui, QtWidgets\n\n\nclass Ui_about_author(object):\n def setupUi(self, about_author):\n about_author.setObjectName(\"about_author\")\n about_author.resize(800, 600)\n self.centralwidget = QtWidgets.QWidget(about_author)\n self.centralwidget.setObjectName(\"centralwidget\")\n self.label = QtWidgets.QLabel(self.centralwidget)\n self.label.setGeometry(QtCore.QRect(310, 160, 121, 17))\n self.label.setObjectName(\"label\")\n about_author.setCentralWidget(self.centralwidget)\n self.menubar = QtWidgets.QMenuBar(about_author)\n self.menubar.setGeometry(QtCore.QRect(0, 0, 800, 22))\n self.menubar.setObjectName(\"menubar\")\n about_author.setMenuBar(self.menubar)\n self.statusbar = QtWidgets.QStatusBar(about_author)\n self.statusbar.setObjectName(\"statusbar\")\n about_author.setStatusBar(self.statusbar)\n\n self.retranslateUi(about_author)\n QtCore.QMetaObject.connectSlotsByName(about_author)\n\n def retranslateUi(self, about_author):\n _translate = QtCore.QCoreApplication.translate\n about_author.setWindowTitle(_translate(\"about_author\", \"MainWindow\"))\n self.label.setText(_translate(\"about_author\", \"ABOUT AUTHOR\"))\n\n\n\n\nif __name__ == \"__main__\":\n import sys\n app = QtWidgets.QApplication(sys.argv)\n about_author = QtWidgets.QMainWindow()\n ui = Ui_about_author()\n ui.setupUi(about_author)\n about_author.show()\n sys.exit(app.exec_())\n"
},
{
"alpha_fraction": 0.6976743936538696,
"alphanum_fraction": 0.7441860437393188,
"avg_line_length": 41,
"blob_id": "ec13c04339049a8561203c89f71e1e5ecefe7c88",
"content_id": "a88f7f2fbd999a3c8348ad4da01687663310caed",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "SQL",
"length_bytes": 43,
"license_type": "no_license",
"max_line_length": 41,
"num_lines": 1,
"path": "/sql/delete_player.sql",
"repo_name": "SUTHARRAM/Cricket-Database-System",
"src_encoding": "UTF-8",
"text": "DELETE FROM Players WHERE Player_ID = 44; \n"
},
{
"alpha_fraction": 0.7192205786705017,
"alphanum_fraction": 0.7351638674736023,
"avg_line_length": 25.23255729675293,
"blob_id": "7501977d713fe5d675c63732725e3bf974e3e255",
"content_id": "a3afd6c8069c6b352b3e42468e9317efcc1878c7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "SQL",
"length_bytes": 1129,
"license_type": "no_license",
"max_line_length": 72,
"num_lines": 43,
"path": "/sql/tables.sql",
"repo_name": "SUTHARRAM/Cricket-Database-System",
"src_encoding": "UTF-8",
"text": "CREATE TABLE Teams(\n\tTeam_ID INT PRIMARY KEY AUTO_INCREMENT,\n\tTeam_Name VARCHAR(255) NOT NULL,\n\tNoOfPlayers INT NOT NULL,\n\tTeam_Rank INT NOT NULL\n); \n\nCREATE TABLE Players(\n\tPlayer_ID INT PRIMARY KEY AUTO_INCREMENT,\n\tPlayer_Name VARCHAR(255) NOT NULL,\n\tBating_Style VARCHAR(255) NOT NULL, \n\tBoling_Style VARCHAR(255) NOT NULL,\n\tPlayer_No INT NOT NULL\n); \n\nCREATE TABLE Games(\n\tGame_ID INT PRIMARY KEY AUTO_INCREMENT,\n\tGame_Date DATE,\n\tStadium_Name VARCHAR(255) NOT NULL,\n\tFirst_Team_ID INT NOT NULL,\n\tSecond_Team_ID INT NOT NULL,\n\tGame_Status VARCHAR(300) NOT NULL\n); \n\n/* RELATIONS **************/\n\nCREATE TABLE Team_Player(\n\tTeam_ID INT,\n\tPlayer_ID INT,\n\tPlayer_Position INT NOT NULL,\n\tPRIMARY KEY(Team_ID,Player_ID),\n\tFOREIGN KEY (Team_ID) REFERENCES Teams(Team_ID) ON DELETE CASCADE,\n\tFOREIGN KEY (Player_ID) REFERENCES Players(Player_ID) ON DELETE CASCADE\n); \n\nCREATE TABLE Player_Game(\n\tPlayer_ID INT,\n\tGame_ID INT,\n\tPlayer_Position INT NOT NULL,\n\tPRIMARY KEY(Player_ID,Game_ID),\n\tFOREIGN KEY(Player_ID) REFERENCES Players(Player_ID) ON DELETE CASCADE,\n\tFOREIGN KEY(Game_ID) REFERENCES Games(Game_ID) ON DELETE CASCADE\n); \n"
},
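Both junction tables in `tables.sql` declare `ON DELETE CASCADE`, so removing a team, player, or game removes its junction rows automatically; no application-side cleanup is needed. Reading back through a junction table is a plain two-table join. A sketch of a roster query over that schema (connection values are placeholders, `ROSTER_SQL` is my own name):

```python
import pymysql

db = pymysql.connect(host="localhost", user="ram",
                     password="password", database="CRICKET_DB")

# List one team's roster via the Team_Player junction table.
ROSTER_SQL = (
    "SELECT p.Player_Name, tp.Player_Position "
    "FROM Team_Player tp "
    "JOIN Players p ON p.Player_ID = tp.Player_ID "
    "WHERE tp.Team_ID = %s "
    "ORDER BY tp.Player_Position"
)

with db.cursor() as cr:
    cr.execute(ROSTER_SQL, (1,))  # 1 = sample Team_ID
    for name, position in cr.fetchall():
        print(position, name)
```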
{
"alpha_fraction": 0.6199915409088135,
"alphanum_fraction": 0.6475467681884766,
"avg_line_length": 44.890323638916016,
"blob_id": "467ee6dfde5813b856563fa9e528d381ca10210b",
"content_id": "efa5867d9b5a16e2a6825b8a29de64737a21e7b1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 7113,
"license_type": "no_license",
"max_line_length": 110,
"num_lines": 155,
"path": "/register_game.py",
"repo_name": "SUTHARRAM/Cricket-Database-System",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n\n# Form implementation generated from reading ui file 'register_game.ui'\n#\n# Created by: PyQt5 UI code generator 5.12.1\n#\n# WARNING! All changes made in this file will be lost!\n\nfrom PyQt5 import QtCore, QtGui, QtWidgets\nfrom registation_players_for_game import Ui_register_players_for_game\nimport pymysql\n\n\nclass Ui_register_game(object):\n def __init__(self):\n super().__init__()\n self.db = pymysql.connect(\"localhost\", \"ram\", \"password\", \"CRICKET_DB\")\n\n def register_game(self):\n self.game_date = self.inputDate.text()\n self.stadium = self.inputStadiumName.text()\n self.first_team_id = self.inputFirstTeamID.text()\n self.second_team_id = self.inputSecondTeamID.text()\n self.winner = self.inputWinnerTeamName.text()\n\n if(self.game_date != \"\" and self.stadium != \"\" and\n self.first_team_id != \"\" and self.second_team_id != \"\" and self.winner != \"\"):\n\n self.sql = \"INSERT INTO Games(Game_Date,Stadium_Name,First_Team_ID, Second_Team_ID, Game_Status) \"\\\n \"VALUES (%s,%s,%s,%s,%s) \"\n self.arg = (self.game_date,self.stadium, self.first_team_id,self.second_team_id,self.winner)\n\n try:\n with self.db.cursor() as cr:\n cr.execute(self.sql,self.arg)\n self.db.commit()\n finally:\n self.error_message = QtWidgets.QErrorMessage()\n self.error_message.setWindowTitle(\"success\")\n self.error_message.showMessage(\"Game Registered successfully!\")\n\n self.inputDate.clear()\n self.inputFirstTeamID.clear()\n self.inputSecondTeamID.clear()\n self.inputWinnerTeamName.clear()\n self.inputStadiumName.clear()\n\n def register_players_for_game(self):\n self.view_player_window = QtWidgets.QMainWindow()\n self.ui = Ui_register_players_for_game()\n self.ui.setupUi(self.view_player_window)\n self.view_player_window.show()\n\n def setupUi(self, register_game):\n register_game.setObjectName(\"register_game\")\n register_game.resize(800, 600)\n self.centralwidget = QtWidgets.QWidget(register_game)\n self.centralwidget.setObjectName(\"centralwidget\")\n self.label = QtWidgets.QLabel(self.centralwidget)\n self.label.setGeometry(QtCore.QRect(280, 20, 211, 20))\n font = QtGui.QFont()\n font.setPointSize(16)\n self.label.setFont(font)\n self.label.setObjectName(\"label\")\n self.line = QtWidgets.QFrame(self.centralwidget)\n self.line.setGeometry(QtCore.QRect(270, 40, 221, 20))\n self.line.setFrameShape(QtWidgets.QFrame.HLine)\n self.line.setFrameShadow(QtWidgets.QFrame.Sunken)\n self.line.setObjectName(\"line\")\n self.label_2 = QtWidgets.QLabel(self.centralwidget)\n self.label_2.setGeometry(QtCore.QRect(160, 80, 141, 17))\n self.label_2.setObjectName(\"label_2\")\n self.label_3 = QtWidgets.QLabel(self.centralwidget)\n self.label_3.setGeometry(QtCore.QRect(160, 140, 111, 17))\n self.label_3.setObjectName(\"label_3\")\n self.label_4 = QtWidgets.QLabel(self.centralwidget)\n self.label_4.setGeometry(QtCore.QRect(160, 200, 111, 17))\n self.label_4.setObjectName(\"label_4\")\n self.label_5 = QtWidgets.QLabel(self.centralwidget)\n self.label_5.setGeometry(QtCore.QRect(160, 260, 131, 17))\n self.label_5.setObjectName(\"label_5\")\n self.label_6 = QtWidgets.QLabel(self.centralwidget)\n self.label_6.setGeometry(QtCore.QRect(160, 330, 161, 17))\n self.label_6.setObjectName(\"label_6\")\n \n self.btnSave = QtWidgets.QPushButton(self.centralwidget)\n self.btnSave.setGeometry(QtCore.QRect(350, 450, 89, 25))\n self.btnSave.setObjectName(\"btnSave\")\n\n ######################################################################\n\n self.btnRegisterPlayerForGame = 
QtWidgets.QPushButton(self.centralwidget)\n self.btnRegisterPlayerForGame.setGeometry(300,500,200,27)\n self.btnRegisterPlayerForGame.setObjectName(\"btnRegisterPlayerForGame\")\n\n ##########################################################################\n \n \n self.inputDate = QtWidgets.QLineEdit(self.centralwidget)\n self.inputDate.setGeometry(QtCore.QRect(470, 70, 181, 25))\n self.inputDate.setObjectName(\"inputDate\")\n self.inputStadiumName = QtWidgets.QLineEdit(self.centralwidget)\n self.inputStadiumName.setGeometry(QtCore.QRect(470, 130, 181, 25))\n self.inputStadiumName.setObjectName(\"inputStadiumName\")\n self.inputFirstTeamID = QtWidgets.QLineEdit(self.centralwidget)\n self.inputFirstTeamID.setGeometry(QtCore.QRect(470, 190, 181, 25))\n self.inputFirstTeamID.setObjectName(\"inputFirstTeamID\")\n self.inputSecondTeamID = QtWidgets.QLineEdit(self.centralwidget)\n self.inputSecondTeamID.setGeometry(QtCore.QRect(470, 260, 181, 25))\n self.inputSecondTeamID.setObjectName(\"inputSecondTeamID\")\n self.inputWinnerTeamName = QtWidgets.QLineEdit(self.centralwidget)\n self.inputWinnerTeamName.setGeometry(QtCore.QRect(470, 330, 181, 25))\n self.inputWinnerTeamName.setObjectName(\"inputWinnerTeamName\")\n register_game.setCentralWidget(self.centralwidget)\n self.menubar = QtWidgets.QMenuBar(register_game)\n self.menubar.setGeometry(QtCore.QRect(0, 0, 800, 22))\n self.menubar.setObjectName(\"menubar\")\n register_game.setMenuBar(self.menubar)\n self.statusbar = QtWidgets.QStatusBar(register_game)\n self.statusbar.setObjectName(\"statusbar\")\n register_game.setStatusBar(self.statusbar)\n\n self.retranslateUi(register_game)\n QtCore.QMetaObject.connectSlotsByName(register_game)\n\n def retranslateUi(self, register_game):\n _translate = QtCore.QCoreApplication.translate\n register_game.setWindowTitle(_translate(\"register_game\", \"MainWindow\"))\n self.label.setText(_translate(\"register_game\", \"REGISTER NEW GAME\"))\n self.label_2.setText(_translate(\"register_game\", \"ENTER GAME DATE\"))\n self.label_3.setText(_translate(\"register_game\", \"STADIUM NAME\"))\n self.label_4.setText(_translate(\"register_game\", \"FIRST TEAM ID\"))\n self.label_5.setText(_translate(\"register_game\", \"SECOND TEAM ID\"))\n self.label_6.setText(_translate(\"register_game\", \"WINNER TEAM NAME\"))\n \n \n self.btnSave.setText(_translate(\"register_game\", \"SAVE\"))\n self.btnSave.clicked.connect(self.register_game)\n\n #################################################################\n\n self.btnRegisterPlayerForGame.setText(_translate(\"register_game\", \"Rgister Players For Game\"))\n self.btnRegisterPlayerForGame.clicked.connect(self.register_players_for_game)\n\n\n\n\nif __name__ == \"__main__\":\n import sys\n app = QtWidgets.QApplication(sys.argv)\n register_game = QtWidgets.QMainWindow()\n ui = Ui_register_game()\n ui.setupUi(register_game)\n register_game.show()\n sys.exit(app.exec_())\n"
},
{
"alpha_fraction": 0.6276971101760864,
"alphanum_fraction": 0.6537858247756958,
"avg_line_length": 42.94827651977539,
"blob_id": "2e4c843e8ec8ba37da14811698fd574049f71e5f",
"content_id": "d4e435692622b66f7232e76ee4a7a2827cc50c52",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5098,
"license_type": "no_license",
"max_line_length": 84,
"num_lines": 116,
"path": "/register_team.py",
"repo_name": "SUTHARRAM/Cricket-Database-System",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n\n# Form implementation generated from reading ui file 'register_team.ui'\n#\n# Created by: PyQt5 UI code generator 5.12.1\n#\n# WARNING! All changes made in this file will be lost!\n\nfrom PyQt5 import QtCore, QtGui, QtWidgets\nimport pymysql\n\n\nclass Ui_register_team(object):\n def __init__(self):\n super().__init__()\n self.db = pymysql.connect(\"localhost\", \"ram\", \"password\", \"CRICKET_DB\")\n\n def register_team(self):\n self.team_name = self.inputTeamName.text()\n self.no_of_player = self.inputNoOfPlayers.text()\n self.ranking = self.inputTeamRanking.text()\n\n if(self.team_name != \"\" and self.no_of_player != \"\" and self.ranking != \"\"):\n\n self.sql = \"INSERT INTO Teams(Team_Name,NoOfPlayers,Team_Rank) \"\\\n \"VALUES(%s,%s,%s) \"\n self.arg = (self.team_name,self.no_of_player,self.ranking)\n\n try:\n with self.db.cursor() as cr:\n cr.execute(self.sql,self.arg)\n self.db.commit()\n finally:\n self.error_message = QtWidgets.QErrorMessage()\n self.error_message.setWindowTitle(\"success\")\n self.error_message.showMessage(\"Team Registered successfully!\") \n\n self.inputNoOfPlayers.clear()\n self.inputTeamName.clear()\n self.inputTeamRanking.clear() \n else:\n self.error_message = QtWidgets.QErrorMessage()\n self.error_message.setWindowTitle(\"error\")\n self.error_message.showMessage(\"Please provid valid data!\") \n \n def setupUi(self, register_team):\n register_team.setObjectName(\"register_team\")\n register_team.resize(800, 600)\n self.centralwidget = QtWidgets.QWidget(register_team)\n self.centralwidget.setObjectName(\"centralwidget\")\n self.label = QtWidgets.QLabel(self.centralwidget)\n self.label.setGeometry(QtCore.QRect(280, 20, 211, 17))\n font = QtGui.QFont()\n font.setPointSize(16)\n self.label.setFont(font)\n self.label.setObjectName(\"label\")\n self.line = QtWidgets.QFrame(self.centralwidget)\n self.line.setGeometry(QtCore.QRect(267, 50, 231, 20))\n self.line.setFrameShape(QtWidgets.QFrame.HLine)\n self.line.setFrameShadow(QtWidgets.QFrame.Sunken)\n self.line.setObjectName(\"line\")\n self.label_2 = QtWidgets.QLabel(self.centralwidget)\n self.label_2.setGeometry(QtCore.QRect(170, 130, 141, 17))\n self.label_2.setObjectName(\"label_2\")\n self.label_3 = QtWidgets.QLabel(self.centralwidget)\n self.label_3.setGeometry(QtCore.QRect(170, 220, 141, 17))\n self.label_3.setObjectName(\"label_3\")\n self.label_4 = QtWidgets.QLabel(self.centralwidget)\n self.label_4.setGeometry(QtCore.QRect(170, 310, 141, 17))\n self.label_4.setObjectName(\"label_4\")\n self.inputTeamName = QtWidgets.QLineEdit(self.centralwidget)\n self.inputTeamName.setGeometry(QtCore.QRect(460, 130, 191, 25))\n self.inputTeamName.setObjectName(\"inputTeamName\")\n self.inputNoOfPlayers = QtWidgets.QLineEdit(self.centralwidget)\n self.inputNoOfPlayers.setGeometry(QtCore.QRect(462, 220, 191, 25))\n self.inputNoOfPlayers.setObjectName(\"inputNoOfPlayers\")\n self.inputTeamRanking = QtWidgets.QLineEdit(self.centralwidget)\n self.inputTeamRanking.setGeometry(QtCore.QRect(460, 310, 191, 25))\n self.inputTeamRanking.setObjectName(\"inputTeamRanking\")\n self.btnSave = QtWidgets.QPushButton(self.centralwidget)\n self.btnSave.setGeometry(QtCore.QRect(360, 440, 89, 25))\n self.btnSave.setObjectName(\"btnSave\")\n register_team.setCentralWidget(self.centralwidget)\n self.menubar = QtWidgets.QMenuBar(register_team)\n self.menubar.setGeometry(QtCore.QRect(0, 0, 800, 22))\n self.menubar.setObjectName(\"menubar\")\n register_team.setMenuBar(self.menubar)\n self.statusbar = 
QtWidgets.QStatusBar(register_team)\n self.statusbar.setObjectName(\"statusbar\")\n register_team.setStatusBar(self.statusbar)\n\n self.retranslateUi(register_team)\n QtCore.QMetaObject.connectSlotsByName(register_team)\n\n def retranslateUi(self, register_team):\n _translate = QtCore.QCoreApplication.translate\n register_team.setWindowTitle(_translate(\"register_team\", \"MainWindow\"))\n self.label.setText(_translate(\"register_team\", \"REGISTER NEW TEAM\"))\n self.label_2.setText(_translate(\"register_team\", \"ENTER TEAM NAME\"))\n self.label_3.setText(_translate(\"register_team\", \"NO OF PLAYERS\"))\n self.label_4.setText(_translate(\"register_team\", \"RANKING\"))\n \n self.btnSave.setText(_translate(\"register_team\", \"SAVE\"))\n self.btnSave.clicked.connect(self.register_team)\n\n\n\n\nif __name__ == \"__main__\":\n import sys\n app = QtWidgets.QApplication(sys.argv)\n register_team = QtWidgets.QMainWindow()\n ui = Ui_register_team()\n ui.setupUi(register_team)\n register_team.show()\n sys.exit(app.exec_())\n"
},
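`Teams.Team_ID` is `AUTO_INCREMENT`, so after the INSERT in `register_team` the generated key is already available on the cursor. The `welcome.py` record later in this listing instead re-reads the newest `Player_ID` with `SELECT ... ORDER BY ... DESC LIMIT 1`, which can race if two clients insert at once. A sketch of the `cursor.lastrowid` route (sample values, placeholder connection):

```python
import pymysql

db = pymysql.connect(host="localhost", user="ram",
                     password="password", database="CRICKET_DB")

with db.cursor() as cr:
    cr.execute(
        "INSERT INTO Teams(Team_Name, NoOfPlayers, Team_Rank) "
        "VALUES (%s, %s, %s)",
        ("Sample Team", 15, 2))      # sample values, not repo data
    new_team_id = cr.lastrowid       # AUTO_INCREMENT key this INSERT produced
db.commit()
print("registered team", new_team_id)
```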
{
"alpha_fraction": 0.58003169298172,
"alphanum_fraction": 0.5977081656455994,
"avg_line_length": 43.825138092041016,
"blob_id": "7680a5562fc29459fbf38822ebaef2efc210e0c3",
"content_id": "945fcb633b545fd0da329ea3cf2a3e4f9b4e1825",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 8203,
"license_type": "no_license",
"max_line_length": 112,
"num_lines": 183,
"path": "/view_game.py",
"repo_name": "SUTHARRAM/Cricket-Database-System",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n\n# Form implementation generated from reading ui file 'view_game.ui'\n#\n# Created by: PyQt5 UI code generator 5.12.1\n#\n# WARNING! All changes made in this file will be lost!\n\n# gameID, gameDate, StadiumName, FirstTeamName, secondTeamName, winnerTeam\n\nfrom PyQt5 import QtCore, QtGui, QtWidgets\nfrom player_position import Ui_players_position\nimport pymysql\n\n\nclass Ui_view_game(object):\n def __init__(self):\n super().__init__()\n self.db = pymysql.connect(\"localhost\", \"ram\", \"password\", \"CRICKET_DB\")\n \n def view_game(self):\n self.tableWidget.setRowCount(2)\n self.tableWidget.setItem(0,0, QtWidgets.QTableWidgetItem(\"GAME ID\"))\n self.tableWidget.setItem(0,1, QtWidgets.QTableWidgetItem(\"GAME DATE\"))\n self.tableWidget.setItem(0,2, QtWidgets.QTableWidgetItem(\"STADIUM NAME\"))\n self.tableWidget.setItem(0,3, QtWidgets.QTableWidgetItem(\"FIRST TEAM NAME\"))\n self.tableWidget.setItem(0,4, QtWidgets.QTableWidgetItem(\"SECOND TEAM NAME\"))\n self.tableWidget.setItem(0,5, QtWidgets.QTableWidgetItem(\"WINNER TEAM NAME\"))\n\n self.game_id = self.lineEdit.text()\n if(self.game_id != \"\"):\n\n self.sql = \"SELECT T.G_ID, T.G_D, T.S_N, T.FT_N, Teams.Team_Name as ST_N, T.G_S AS WINNER \"\\\n \"FROM \"\\\n \"(SELECT Games.Game_ID as G_ID, \"\\\n \"Games.Game_Date as G_D,\"\\\n \"Games.Stadium_Name as S_N,\"\\\n \"Teams.Team_Name as FT_N, \"\\\n \"Games.Game_Status as G_S, \"\\\n \"Games.Second_Team_ID as ST_ID \"\\\n \"FROM Games \"\\\n \"JOIN Teams \"\\\n \"ON Games.First_Team_ID = Teams.Team_ID) as T \"\\\n \"JOIN Teams \"\\\n \"ON T.ST_ID = Teams.Team_ID \"\\\n \"WHERE T.G_ID = %s \"\n self.arg = (self.game_id)\n\n try:\n with self.db.cursor() as cr:\n cr.execute(self.sql,self.arg)\n self.result = cr.fetchone()\n self.col = 0\n for i in self.result:\n self.tableWidget.setItem(1,self.col,QtWidgets.QTableWidgetItem(str(i)))\n self.col = self.col+1\n finally:\n self.col = 0\n\n def view_games(self):\n self.tableWidget.setRowCount(1)\n self.tableWidget.setItem(0,0, QtWidgets.QTableWidgetItem(\"GAME ID\"))\n self.tableWidget.setItem(0,1, QtWidgets.QTableWidgetItem(\"GAME DATE\"))\n self.tableWidget.setItem(0,2, QtWidgets.QTableWidgetItem(\"STADIUM NAME\"))\n self.tableWidget.setItem(0,3, QtWidgets.QTableWidgetItem(\"FIRST TEAM NAME\"))\n self.tableWidget.setItem(0,4, QtWidgets.QTableWidgetItem(\"SECOND TEAM NAME\"))\n self.tableWidget.setItem(0,5, QtWidgets.QTableWidgetItem(\"WINNER TEAM NAME\"))\n\n self.sql = \"SELECT T.G_ID, T.G_D, T.S_N, T.FT_N, Teams.Team_Name as ST_N, T.G_S AS WINNER \"\\\n \"FROM \"\\\n \"(SELECT Games.Game_ID as G_ID, \"\\\n \"Games.Game_Date as G_D,\"\\\n \"Games.Stadium_Name as S_N,\"\\\n \"Teams.Team_Name as FT_N, \"\\\n \"Games.Game_Status as G_S, \"\\\n \"Games.Second_Team_ID as ST_ID \"\\\n \"FROM Games \"\\\n \"JOIN Teams \"\\\n \"ON Games.First_Team_ID = Teams.Team_ID) as T \"\\\n \"JOIN Teams \"\\\n \"ON T.ST_ID = Teams.Team_ID \"\n\n try:\n with self.db.cursor() as cr:\n cr.execute(self.sql)\n self.result = cr.fetchall()\n for row_no, row_data in enumerate(self.result):\n self.tableWidget.insertRow(row_no+1)\n for col_no, data in enumerate(row_data):\n self.tableWidget.setItem(row_no+1,col_no,QtWidgets.QTableWidgetItem(str(data)))\n finally:\n pass\n\n def view_palyer_position(self):\n self.player_position_window = QtWidgets.QMainWindow()\n self.ui = Ui_players_position()\n self.ui.setupUi(self.player_position_window)\n self.player_position_window.show()\n\n \n def setupUi(self, view_game):\n 
view_game.setObjectName(\"view_game\")\n view_game.resize(800, 600)\n self.centralwidget = QtWidgets.QWidget(view_game)\n self.centralwidget.setObjectName(\"centralwidget\")\n self.lineEdit = QtWidgets.QLineEdit(self.centralwidget)\n self.lineEdit.setGeometry(QtCore.QRect(350, 20, 113, 25))\n self.lineEdit.setObjectName(\"lineEdit\")\n self.pushButton = QtWidgets.QPushButton(self.centralwidget)\n self.pushButton.setGeometry(QtCore.QRect(580, 20, 89, 25))\n self.pushButton.setObjectName(\"pushButton\")\n self.label = QtWidgets.QLabel(self.centralwidget)\n self.label.setGeometry(QtCore.QRect(130, 20, 111, 17))\n self.label.setObjectName(\"label\")\n self.line = QtWidgets.QFrame(self.centralwidget)\n self.line.setGeometry(QtCore.QRect(60, 50, 691, 20))\n self.line.setFrameShape(QtWidgets.QFrame.HLine)\n self.line.setFrameShadow(QtWidgets.QFrame.Sunken)\n self.line.setObjectName(\"line\")\n \n self.tableWidget = QtWidgets.QTableWidget(self.centralwidget)\n self.tableWidget.setGeometry(QtCore.QRect(55, 70, 701, 431))\n self.tableWidget.setRowCount(5000)\n self.tableWidget.setColumnCount(6)\n self.tableWidget.setObjectName(\"tableWidget\")\n \n ######### set column width ###################\n\n self.tableWidget.setColumnWidth(0,150)\n self.tableWidget.setColumnWidth(1,150)\n self.tableWidget.setColumnWidth(2,300)\n self.tableWidget.setColumnWidth(3,400)\n self.tableWidget.setColumnWidth(4,400)\n self.tableWidget.setColumnWidth(5,400)\n \n self.pushButton_2 = QtWidgets.QPushButton(self.centralwidget)\n self.pushButton_2.setGeometry(QtCore.QRect(360, 520, 89, 25))\n self.pushButton_2.setObjectName(\"pushButton_2\")\n view_game.setCentralWidget(self.centralwidget)\n self.menubar = QtWidgets.QMenuBar(view_game)\n self.menubar.setGeometry(QtCore.QRect(0, 0, 800, 22))\n self.menubar.setObjectName(\"menubar\")\n self.menuFILE = QtWidgets.QMenu(self.menubar)\n self.menuFILE.setObjectName(\"menuFILE\")\n view_game.setMenuBar(self.menubar)\n self.statusbar = QtWidgets.QStatusBar(view_game)\n self.statusbar.setObjectName(\"statusbar\")\n view_game.setStatusBar(self.statusbar)\n self.actionVIEW_PLAYER_POSITION_IN_GAME = QtWidgets.QAction(view_game)\n self.actionVIEW_PLAYER_POSITION_IN_GAME.setObjectName(\"actionVIEW_PLAYER_POSITION_IN_GAME\")\n self.menuFILE.addAction(self.actionVIEW_PLAYER_POSITION_IN_GAME)\n self.menubar.addAction(self.menuFILE.menuAction())\n\n self.retranslateUi(view_game)\n QtCore.QMetaObject.connectSlotsByName(view_game)\n\n def retranslateUi(self, view_game):\n _translate = QtCore.QCoreApplication.translate\n view_game.setWindowTitle(_translate(\"view_game\", \"MainWindow\"))\n \n self.pushButton.setText(_translate(\"view_game\", \"SEARCH\"))\n self.pushButton.clicked.connect(self.view_game)\n \n self.label.setText(_translate(\"view_game\", \"ENTER GAME ID\"))\n \n self.pushButton_2.setText(_translate(\"view_game\", \"SEARCH ALL\"))\n self.pushButton_2.clicked.connect(self.view_games)\n\n self.menuFILE.setTitle(_translate(\"view_game\", \"FILE\"))\n \n self.actionVIEW_PLAYER_POSITION_IN_GAME.setText(_translate(\"view_game\", \"VIEW PLAYER POSITION IN GAME\"))\n self.actionVIEW_PLAYER_POSITION_IN_GAME.triggered.connect(self.view_palyer_position)\n\n\n\nif __name__ == \"__main__\":\n import sys\n app = QtWidgets.QApplication(sys.argv)\n view_game = QtWidgets.QMainWindow()\n ui = Ui_view_game()\n ui.setupUi(view_game)\n view_game.show()\n sys.exit(app.exec_())\n"
},
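`view_game.py` resolves the two team names by joining `Teams` once inside a derived table and once outside it. The same result is more commonly written by joining `Teams` twice under different aliases; a sketch of that flattened query against the schema above (the aliases `t1`/`t2` are my own, not from the repo):

```python
# Flattened equivalent of the derived-table query in view_game.py:
# join Teams twice, once per foreign key, instead of nesting a subquery.
GAME_SQL = (
    "SELECT g.Game_ID, g.Game_Date, g.Stadium_Name, "
    "t1.Team_Name AS first_team, t2.Team_Name AS second_team, "
    "g.Game_Status AS winner "
    "FROM Games g "
    "JOIN Teams t1 ON t1.Team_ID = g.First_Team_ID "
    "JOIN Teams t2 ON t2.Team_ID = g.Second_Team_ID "
    "WHERE g.Game_ID = %s"
)

# Usage is identical to the other queries in this listing:
#     cr.execute(GAME_SQL, (game_id,))
```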
{
"alpha_fraction": 0.47289973497390747,
"alphanum_fraction": 0.47289973497390747,
"avg_line_length": 27.30769157409668,
"blob_id": "13ce73778e0a61226258ffaaa2590d6ae8ecc80a",
"content_id": "82f386d01be5ead2e1f00062729b7bff38b06bdb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "SQL",
"length_bytes": 738,
"license_type": "no_license",
"max_line_length": 70,
"num_lines": 26,
"path": "/sql/player_position_in_game.sql",
"repo_name": "SUTHARRAM/Cricket-Database-System",
"src_encoding": "UTF-8",
"text": "\nSELECT TM.P_ID AS P_ID,\n TM.P_N AS P_N,\n Teams.Team_Name as T_N,\n TM.P_P AS P_P\n\nFROM\n\n (SELECT T.G_ID AS G_ID,\n T.P_ID AS P_ID,\n T.P_P AS P_P,\n T.T_ID AS T_ID,\n Players.Player_Name as P_N\n FROM\n\n (SELECT Player_Game.Game_ID as G_ID, \n Player_Game.Player_ID as P_ID,\n Player_Game.Player_Position as P_P,\n Team_Player.Team_ID as T_ID\n FROM Player_Game\n JOIN Team_Player\n ON Team_Player.Player_ID = Player_Game.Player_ID) as T \n\n JOIN Players \n ON T.P_ID = Players.Player_ID) AS TM \nJOIN Teams\nON TM.T_ID = Teams.Team_ID; \n"
},
{
"alpha_fraction": 0.507722020149231,
"alphanum_fraction": 0.507722020149231,
"avg_line_length": 24.850000381469727,
"blob_id": "fca1009660e0c3ccbf3062ded06f5403f9cd76d1",
"content_id": "20853cf2cfad6f8e0f4223b03d926be3a5bebfa7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "SQL",
"length_bytes": 518,
"license_type": "no_license",
"max_line_length": 59,
"num_lines": 20,
"path": "/sql/view_player.sql",
"repo_name": "SUTHARRAM/Cricket-Database-System",
"src_encoding": "UTF-8",
"text": "SELECT T.P_ID,\n T.P_N, \n T.BT_S, \n T.BO_S, \n T.J_N, \n Teams.Team_Name\n\nFROM \n\n (SELECT Players.Player_ID as P_ID,\n Players.Player_Name as P_N, \n Players.Bating_Style as BT_S, \n Players.Boling_Style as BO_S, \n Players.Player_No as J_N, \n Team_Player.Team_ID as T_ID \n FROM Players\n JOIN Team_Player \n ON Players.Player_ID = Team_Player.Player_ID) as T \nJOIN Teams\nON T.T_ID = Teams.Team_ID; \n"
},
{
"alpha_fraction": 0.6326059103012085,
"alphanum_fraction": 0.6613607406616211,
"avg_line_length": 37.56435775756836,
"blob_id": "2513af2176c50d787d506e38714427b6ba898f8a",
"content_id": "772e71d26b5822bb73953b93447bfa44bece1d59",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3895,
"license_type": "no_license",
"max_line_length": 78,
"num_lines": 101,
"path": "/login.py",
"repo_name": "SUTHARRAM/Cricket-Database-System",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n\n# Form implementation generated from reading ui file 'login.ui'\n#\n# Created by: PyQt5 UI code generator 5.12.1\n#\n# WARNING! All changes made in this file will be lost!\n\nfrom PyQt5 import QtCore, QtGui, QtWidgets\nfrom welcome import Ui_welcome\n\n\nclass Ui_MainWindow(object):\n def __init__(self):\n super().__init__()\n\n ######## Login Button ##################\n\n def cricket_window(self):\n self.welcome_window = QtWidgets.QMainWindow()\n self.ui = Ui_welcome()\n self.ui.setupUi(self.welcome_window)\n MainWindow.hide()\n self.welcome_window.show()\n \n ######### Cancel Button ###################\n\n def close_window(self):\n sys.exit()\n\n def setupUi(self, MainWindow):\n MainWindow.setObjectName(\"MainWindow\")\n MainWindow.resize(800, 600)\n self.centralwidget = QtWidgets.QWidget(MainWindow)\n self.centralwidget.setObjectName(\"centralwidget\")\n self.label = QtWidgets.QLabel(self.centralwidget)\n self.label.setGeometry(QtCore.QRect(140, 50, 511, 81))\n font = QtGui.QFont()\n font.setPointSize(32)\n self.label.setFont(font)\n self.label.setObjectName(\"label\")\n self.label_2 = QtWidgets.QLabel(self.centralwidget)\n self.label_2.setGeometry(QtCore.QRect(140, 200, 121, 21))\n font = QtGui.QFont()\n font.setPointSize(16)\n self.label_2.setFont(font)\n self.label_2.setObjectName(\"label_2\")\n self.label_3 = QtWidgets.QLabel(self.centralwidget)\n self.label_3.setGeometry(QtCore.QRect(140, 310, 121, 21))\n font = QtGui.QFont()\n font.setPointSize(16)\n self.label_3.setFont(font)\n self.label_3.setObjectName(\"label_3\")\n self.inputUserName = QtWidgets.QLineEdit(self.centralwidget)\n self.inputUserName.setGeometry(QtCore.QRect(420, 200, 221, 25))\n self.inputUserName.setObjectName(\"inputUserName\")\n self.inputPassword = QtWidgets.QLineEdit(self.centralwidget)\n self.inputPassword.setGeometry(QtCore.QRect(420, 310, 221, 25))\n self.inputPassword.setObjectName(\"inputPassword\")\n self.btnLogin = QtWidgets.QPushButton(self.centralwidget)\n self.btnLogin.setGeometry(QtCore.QRect(470, 450, 89, 25))\n self.btnLogin.setObjectName(\"btnLogin\")\n self.btnCancel = QtWidgets.QPushButton(self.centralwidget)\n self.btnCancel.setGeometry(QtCore.QRect(200, 450, 89, 25))\n self.btnCancel.setObjectName(\"btnCancel\")\n MainWindow.setCentralWidget(self.centralwidget)\n self.menubar = QtWidgets.QMenuBar(MainWindow)\n self.menubar.setGeometry(QtCore.QRect(0, 0, 800, 22))\n self.menubar.setObjectName(\"menubar\")\n MainWindow.setMenuBar(self.menubar)\n self.statusbar = QtWidgets.QStatusBar(MainWindow)\n self.statusbar.setObjectName(\"statusbar\")\n MainWindow.setStatusBar(self.statusbar)\n\n self.retranslateUi(MainWindow)\n QtCore.QMetaObject.connectSlotsByName(MainWindow)\n\n def retranslateUi(self, MainWindow):\n _translate = QtCore.QCoreApplication.translate\n MainWindow.setWindowTitle(_translate(\"MainWindow\", \"MainWindow\"))\n self.label.setText(_translate(\"MainWindow\", \"NIT-H CRICKET DATABASE\"))\n self.label_2.setText(_translate(\"MainWindow\", \"USER NAME\"))\n self.label_3.setText(_translate(\"MainWindow\", \"PASSWORD\"))\n \n self.btnLogin.setText(_translate(\"MainWindow\", \"LOGIN\"))\n self.btnLogin.clicked.connect(self.cricket_window)\n\n self.btnCancel.setText(_translate(\"MainWindow\", \"CANCEL\"))\n self.btnCancel.clicked.connect(self.close_window)\n\n\n\n\nif __name__ == \"__main__\":\n import sys\n app = QtWidgets.QApplication(sys.argv)\n MainWindow = QtWidgets.QMainWindow()\n ui = Ui_MainWindow()\n ui.setupUi(MainWindow)\n 
MainWindow.show()\n sys.exit(app.exec_())\n"
},
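One detail worth noticing in `cricket_window` above: the new window is stored on `self` (`self.welcome_window`) before `show()` is called. A window held only in a local variable would be garbage-collected when the method returns and would flash and vanish. A minimal, self-contained sketch of the pattern (the `Switcher` class and its attribute names are mine, not from the repo):

```python
import sys
from PyQt5 import QtWidgets

class Switcher:
    """Sketch of the hide-one-show-another pattern used in login.py."""
    def open_second(self, first: QtWidgets.QMainWindow):
        # Keep the new window alive as an attribute; a local variable
        # would be destroyed as soon as this method returns.
        self.second = QtWidgets.QMainWindow()
        self.second.setWindowTitle("second window")
        first.hide()
        self.second.show()

if __name__ == "__main__":
    app = QtWidgets.QApplication(sys.argv)
    first = QtWidgets.QMainWindow()
    first.show()
    s = Switcher()
    s.open_second(first)
    sys.exit(app.exec_())
```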
{
"alpha_fraction": 0.6292393803596497,
"alphanum_fraction": 0.6434752345085144,
"avg_line_length": 43.64174270629883,
"blob_id": "175ed288fa3025f53b3ad79a4835c739e1f56246",
"content_id": "72a1997769dd0f5a3aa9c0cfea00b1facf7f7250",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 14330,
"license_type": "no_license",
"max_line_length": 140,
"num_lines": 321,
"path": "/welcome.py",
"repo_name": "SUTHARRAM/Cricket-Database-System",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n\n# Form implementation generated from reading ui file 'welcome.ui'\n#\n# Created by: PyQt5 UI code generator 5.12.1\n#\n# WARNING! All changes made in this file will be lost!\n\nfrom PyQt5 import QtCore, QtGui, QtWidgets\nfrom view_players import Ui_players_details\nfrom delete_player import Ui_delete_player\nfrom register_team import Ui_register_team\nfrom view_team import Ui_view_team\nfrom delete_team import Ui_delete_team\nfrom register_game import Ui_register_game\nfrom view_game import Ui_view_game\nfrom delete_game import Ui_delete_game\nfrom about_author import Ui_about_author\n\nimport pymysql\n\n\nclass Ui_welcome(object):\n def __init__(self):\n super().__init__()\n self.db = pymysql.connect(\"localhost\", \"ram\", \"password\", \"CRICKET_DB\")\n\n\n\n ############## Players Section ################################\n\n def register_player(self):\n self.player_name = self.inputPlayerName.text()\n self.batting_style = self.inputBatingStyle.text()\n self.boling_style = self.inputBolingStyle.text()\n self.jersey_no = self.inputJerseyNumber.text()\n self.team_id = self.inputTeamID.text()\n self.player_id = []\n\n if(self.player_name != \"\" and self.batting_style != \"\" and self.boling_style != \"\" and self.jersey_no != \"\" and self.team_id != \"\"):\n #print(\"first run if\")\n self.sql1 = \"INSERT INTO Players(Player_Name,Bating_Style,Boling_Style,Player_No) \"\\\n \"VALUES(%s,%s,%s,%s)\"\n \n self.arg1 = (self.player_name,self.batting_style,self.boling_style,self.jersey_no)\n\n #### query for getting current player id ##################\n self.sql3 = \"SELECT Player_ID FROM Players ORDER BY Player_ID DESC LIMIT 1\" \n\n try:\n with self.db.cursor() as cr:\n cr.execute(self.sql1,self.arg1)\n # self.db.commit()\n cr.execute(self.sql3)\n self.player_id = list(cr.fetchone())\n # print(self.player_id[0])\n #with self.db.cursor() as cr:\n self.sql = \"INSERT INTO Team_Player(Team_ID,Player_ID,Player_Position) VALUES(%s,%s,%s)\"\n self.arg = (self.team_id,self.player_id[0],self.player_id[0])\n cr.execute(self.sql,self.arg)\n self.db.commit()\n finally:\n # print(\"Finally\")\n self.error_message = QtWidgets.QErrorMessage()\n self.error_message.setWindowTitle(\"success\")\n self.error_message.showMessage(\"Player Registered successfully!\") \n \n # else:\n # print(\"then run else\")\n # self.error_message = QtWidgets.QErrorMessage()\n # self.error_message.setWindowTitle(\"Registation Error\")\n # self.error_message.showMessage(\"Please Enter Valid Data !\") \n\n self.inputBatingStyle.clear()\n self.inputBolingStyle.clear()\n self.inputPlayerName.clear()\n self.inputJerseyNumber.clear()\n self.inputTeamID.clear() \n\n\n\n \n \n def view_player(self):\n self.view_player_window = QtWidgets.QMainWindow()\n self.ui = Ui_players_details()\n self.ui.setupUi(self.view_player_window)\n self.view_player_window.show()\n\n def delete_player(self):\n self.delete_player_window = QtWidgets.QMainWindow()\n self.ui = Ui_delete_player()\n self.ui.setupUi(self.delete_player_window)\n self.delete_player_window.show()\n\n ############# Team Section #######################################\n\n def register_team(self):\n self.register_team_window = QtWidgets.QMainWindow()\n self.ui = Ui_register_team()\n self.ui.setupUi(self.register_team_window)\n self.register_team_window.show()\n\n def view_team(self):\n self.view_team_window = QtWidgets.QMainWindow()\n self.ui = Ui_view_team()\n self.ui.setupUi(self.view_team_window)\n self.view_team_window.show()\n \n def 
delete_team(self):\n self.delete_team_window = QtWidgets.QMainWindow()\n self.ui = Ui_delete_team()\n self.ui.setupUi(self.delete_team_window)\n self.delete_team_window.show()\n\n ############ Game Section #########################################\n\n def register_game(self):\n self.register_game_window = QtWidgets.QMainWindow()\n self.ui = Ui_register_game()\n self.ui.setupUi(self.register_game_window)\n self.register_game_window.show()\n\n def view_game(self):\n self.view_game_window = QtWidgets.QMainWindow()\n self.ui = Ui_view_game()\n self.ui.setupUi(self.view_game_window)\n self.view_game_window.show()\n\n def delete_game(self):\n self.delete_game_window = QtWidgets.QMainWindow()\n self.ui = Ui_delete_game()\n self.ui.setupUi(self.delete_game_window)\n self.delete_game_window.show()\n ########### Author Section ########################################\n\n def about_author(self):\n self.about_author_window = QtWidgets.QMainWindow()\n self.ui = Ui_about_author()\n self.ui.setupUi(self.about_author_window)\n self.about_author_window.show()\n\n def setupUi(self, welcome):\n welcome.setObjectName(\"welcome\")\n welcome.resize(1003, 620)\n welcome.setStyleSheet(\"background-image: url(:/newPrefix/cricket.jpeg);\\n\"\n\"background-image: url(:/newPrefix/cricket.jpeg);\")\n self.centralwidget = QtWidgets.QWidget(welcome)\n self.centralwidget.setObjectName(\"centralwidget\")\n self.label = QtWidgets.QLabel(self.centralwidget)\n self.label.setGeometry(QtCore.QRect(330, 0, 351, 21))\n font = QtGui.QFont()\n font.setPointSize(16)\n self.label.setFont(font)\n self.label.setObjectName(\"label\")\n self.label_2 = QtWidgets.QLabel(self.centralwidget)\n self.label_2.setGeometry(QtCore.QRect(390, 40, 231, 20))\n font = QtGui.QFont()\n font.setPointSize(16)\n self.label_2.setFont(font)\n self.label_2.setObjectName(\"label_2\")\n self.label_3 = QtWidgets.QLabel(self.centralwidget)\n self.label_3.setGeometry(QtCore.QRect(240, 110, 101, 21))\n self.label_3.setObjectName(\"label_3\")\n self.label_4 = QtWidgets.QLabel(self.centralwidget)\n self.label_4.setGeometry(QtCore.QRect(240, 190, 101, 17))\n self.label_4.setObjectName(\"label_4\")\n self.label_5 = QtWidgets.QLabel(self.centralwidget)\n self.label_5.setGeometry(QtCore.QRect(240, 270, 101, 17))\n self.label_5.setObjectName(\"label_5\")\n self.label_6 = QtWidgets.QLabel(self.centralwidget)\n self.label_6.setGeometry(QtCore.QRect(240, 360, 81, 17))\n self.label_6.setObjectName(\"label_6\")\n self.inputPlayerName = QtWidgets.QLineEdit(self.centralwidget)\n self.inputPlayerName.setGeometry(QtCore.QRect(570, 110, 201, 25))\n self.inputPlayerName.setObjectName(\"inputPlayerName\")\n self.inputBatingStyle = QtWidgets.QLineEdit(self.centralwidget)\n self.inputBatingStyle.setGeometry(QtCore.QRect(570, 190, 201, 25))\n self.inputBatingStyle.setObjectName(\"inputBatingStyle\")\n self.inputBolingStyle = QtWidgets.QLineEdit(self.centralwidget)\n self.inputBolingStyle.setGeometry(QtCore.QRect(570, 270, 201, 25))\n self.inputBolingStyle.setObjectName(\"inputBolingStyle\")\n self.inputJerseyNumber = QtWidgets.QLineEdit(self.centralwidget)\n self.inputJerseyNumber.setGeometry(QtCore.QRect(570, 360, 201, 25))\n self.inputJerseyNumber.setObjectName(\"inputJerseyNumber\")\n self.label_7 = QtWidgets.QLabel(self.centralwidget)\n self.label_7.setGeometry(QtCore.QRect(240, 440, 61, 17))\n self.label_7.setObjectName(\"label_7\")\n self.inputTeamID = QtWidgets.QLineEdit(self.centralwidget)\n self.inputTeamID.setGeometry(QtCore.QRect(570, 440, 201, 25))\n 
self.inputTeamID.setObjectName(\"inputTeamID\")\n \n self.btnSave = QtWidgets.QPushButton(self.centralwidget)\n self.btnSave.setGeometry(QtCore.QRect(430, 520, 89, 25))\n \n self.btnSave.setObjectName(\"btnSave\")\n self.btnSave.clicked.connect(self.register_player)\n\n welcome.setCentralWidget(self.centralwidget)\n self.menubar = QtWidgets.QMenuBar(welcome)\n self.menubar.setGeometry(QtCore.QRect(0, 0, 1003, 22))\n self.menubar.setObjectName(\"menubar\")\n self.menuPlayers = QtWidgets.QMenu(self.menubar)\n self.menuPlayers.setObjectName(\"menuPlayers\")\n self.menuTeams = QtWidgets.QMenu(self.menubar)\n self.menuTeams.setObjectName(\"menuTeams\")\n self.menuGames = QtWidgets.QMenu(self.menubar)\n self.menuGames.setObjectName(\"menuGames\")\n self.menuAbout_Us = QtWidgets.QMenu(self.menubar)\n self.menuAbout_Us.setObjectName(\"menuAbout_Us\")\n welcome.setMenuBar(self.menubar)\n self.statusbar = QtWidgets.QStatusBar(welcome)\n self.statusbar.setObjectName(\"statusbar\")\n welcome.setStatusBar(self.statusbar)\n self.actionRegister_Player = QtWidgets.QAction(welcome)\n self.actionRegister_Player.setObjectName(\"actionRegister_Player\")\n self.actionView_Players = QtWidgets.QAction(welcome)\n self.actionView_Players.setObjectName(\"actionView_Players\")\n self.actionDelete_Player = QtWidgets.QAction(welcome)\n self.actionDelete_Player.setObjectName(\"actionDelete_Player\")\n self.actionRegister_Team = QtWidgets.QAction(welcome)\n self.actionRegister_Team.setObjectName(\"actionRegister_Team\")\n self.actionView_Teams = QtWidgets.QAction(welcome)\n self.actionView_Teams.setObjectName(\"actionView_Teams\")\n self.actionDelete_Team = QtWidgets.QAction(welcome)\n self.actionDelete_Team.setObjectName(\"actionDelete_Team\")\n self.actionRegister_Game = QtWidgets.QAction(welcome)\n self.actionRegister_Game.setObjectName(\"actionRegister_Game\")\n self.actionView_Games = QtWidgets.QAction(welcome)\n self.actionView_Games.setObjectName(\"actionView_Games\")\n self.actionDelete_Game = QtWidgets.QAction(welcome)\n self.actionDelete_Game.setObjectName(\"actionDelete_Game\")\n self.actionAuthor = QtWidgets.QAction(welcome)\n self.actionAuthor.setObjectName(\"actionAuthor\")\n self.menuPlayers.addAction(self.actionRegister_Player)\n self.menuPlayers.addAction(self.actionView_Players)\n self.menuPlayers.addAction(self.actionDelete_Player)\n self.menuTeams.addAction(self.actionRegister_Team)\n self.menuTeams.addAction(self.actionView_Teams)\n self.menuTeams.addAction(self.actionDelete_Team)\n self.menuGames.addAction(self.actionRegister_Game)\n self.menuGames.addAction(self.actionView_Games)\n self.menuGames.addAction(self.actionDelete_Game)\n self.menuAbout_Us.addAction(self.actionAuthor)\n self.menubar.addAction(self.menuPlayers.menuAction())\n self.menubar.addAction(self.menuTeams.menuAction())\n self.menubar.addAction(self.menuGames.menuAction())\n self.menubar.addAction(self.menuAbout_Us.menuAction())\n\n self.retranslateUi(welcome)\n QtCore.QMetaObject.connectSlotsByName(welcome)\n\n def retranslateUi(self, welcome):\n _translate = QtCore.QCoreApplication.translate\n welcome.setWindowTitle(_translate(\"welcome\", \"MainWindow\"))\n self.label.setText(_translate(\"welcome\", \"WELCOME NITH CRICKET DATABASE\"))\n self.label_2.setText(_translate(\"welcome\", \"REGISTER NEW PLAYER\"))\n self.label_3.setText(_translate(\"welcome\", \"PLAYER NAME\"))\n self.label_4.setText(_translate(\"welcome\", \"BATING STYLE\"))\n self.label_5.setText(_translate(\"welcome\", \"BOLING STYLE\"))\n 
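# the \"BATING\"/\"BOLING\" spellings below mirror the Players table columns\n        # (Bating_Style, Boling_Style) targeted by register_player's INSERT\n        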
self.label_6.setText(_translate(\"welcome\", \"JERSEY NO\"))\n        self.label_7.setText(_translate(\"welcome\", \"TEAM ID\"))\n\n        # btnSave.clicked is already connected in setupUi; connecting it again\n        # here would fire register_player twice per click\n        self.btnSave.setText(_translate(\"welcome\", \"SAVE\"))\n\n        self.menuPlayers.setTitle(_translate(\"welcome\", \"Players\"))\n        self.menuTeams.setTitle(_translate(\"welcome\", \"Teams\"))\n        self.menuGames.setTitle(_translate(\"welcome\", \"Games\"))\n        self.menuAbout_Us.setTitle(_translate(\"welcome\", \"About Us\"))\n\n        #################### Players Sections ################################\n\n        # the registration form is this window's central widget, so the\n        # \"Register Player\" menu entry needs no extra handler\n        self.actionRegister_Player.setText(_translate(\"welcome\", \"Register Player\"))\n\n        self.actionView_Players.setText(_translate(\"welcome\", \"View Players\"))\n        self.actionView_Players.triggered.connect(self.view_player)\n\n        self.actionDelete_Player.setText(_translate(\"welcome\", \"Delete Player\"))\n        self.actionDelete_Player.triggered.connect(self.delete_player)\n\n        ################### Team Section ########################################\n\n        self.actionRegister_Team.setText(_translate(\"welcome\", \"Register Team\"))\n        self.actionRegister_Team.triggered.connect(self.register_team)\n\n        self.actionView_Teams.setText(_translate(\"welcome\", \"View Teams\"))\n        self.actionView_Teams.triggered.connect(self.view_team)\n\n        self.actionDelete_Team.setText(_translate(\"welcome\", \"Delete Team\"))\n        self.actionDelete_Team.triggered.connect(self.delete_team)\n\n        ################ Game Section #######################################\n\n        self.actionRegister_Game.setText(_translate(\"welcome\", \"Register Game\"))\n        self.actionRegister_Game.triggered.connect(self.register_game)\n\n        self.actionView_Games.setText(_translate(\"welcome\", \"View Games\"))\n        self.actionView_Games.triggered.connect(self.view_game)\n\n        self.actionDelete_Game.setText(_translate(\"welcome\", \"Delete Game\"))\n        self.actionDelete_Game.triggered.connect(self.delete_game)\n\n        ################# Author Section ########################################\n\n        self.actionAuthor.setText(_translate(\"welcome\", \"Author\"))\n        self.actionAuthor.triggered.connect(self.about_author)\n\n\nfrom Resources import source\n\n\nif __name__ == \"__main__\":\n    import sys\n    app = QtWidgets.QApplication(sys.argv)\n    welcome = QtWidgets.QMainWindow()\n    ui = Ui_welcome()\n    ui.setupUi(welcome)\n    welcome.show()\n    sys.exit(app.exec_())\n"
},
{
"alpha_fraction": 0.5278970003128052,
"alphanum_fraction": 0.5278970003128052,
"avg_line_length": 24.83333396911621,
"blob_id": "14538e5094c210611c0d284e5e3c5570e2df5ee2",
"content_id": "2ec4ccca0161aa26f7839193c5b2c5e3194ed54c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "SQL",
"length_bytes": 466,
"license_type": "no_license",
"max_line_length": 50,
"num_lines": 18,
"path": "/sql/view_game.sql",
"repo_name": "SUTHARRAM/Cricket-Database-System",
"src_encoding": "UTF-8",
"text": "SELECT T.G_ID, \n T.G_D,\n T.S_N,\n T.FT_N,\n Teams.Team_Name as ST_N,\n T.G_S AS WIN\nFROM\n (SELECT Games.Game_ID as G_ID,\n Games.Game_Date as G_D,\n Games.Stadium_Name as S_N,\n Teams.Team_Name as FT_N,\n Games.Game_Status as G_S,\n Games.Second_Team_ID as ST_ID\n FROM Games\n JOIN Teams\n ON Games.First_Team_ID = Teams.Team_ID) as T \nJOIN Teams\nON T.ST_ID = Teams.Team_ID; \n"
},
{
"alpha_fraction": 0.6434188485145569,
"alphanum_fraction": 0.6678394079208374,
"avg_line_length": 45.46154022216797,
"blob_id": "f97c8c0d0aaf4b803e987664f306804b00ade370",
"content_id": "3268e3875d4c202e1d33383a16e9e5c34baa8c3f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4832,
"license_type": "no_license",
"max_line_length": 103,
"num_lines": 104,
"path": "/registation_players_for_game.py",
"repo_name": "SUTHARRAM/Cricket-Database-System",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n\n# Form implementation generated from reading ui file 'registation_players_for_game.ui'\n#\n# Created by: PyQt5 UI code generator 5.12.1\n#\n# WARNING! All changes made in this file will be lost!\n\nfrom PyQt5 import QtCore, QtGui, QtWidgets\nimport pymysql\n\nclass Ui_register_players_for_game(object):\n \n def __init__(self):\n super().__init__()\n self.db = pymysql.connect(\"localhost\", \"ram\", \"password\", \"CRICKET_DB\")\n\n def register_player(self):\n self.game_id = self.inputGameID.text()\n self.player_id = self.inputPlayerID.text()\n self.position = self.inputPlayerPosition.text()\n\n if(self.game_id != \"\" and self.player_id != \"\" and self.position != \"\"):\n\n self.sql = \"INSERT INTO Player_Game(Player_ID, Game_ID, Player_Position) \"\\\n \" VALUES (%s,%s,%s) \"\n self.arg = (self.player_id, self.game_id, self.position)\n\n try:\n with self.db.cursor() as cr:\n cr.execute(self.sql,self.arg)\n self.db.commit()\n finally:\n self.error_message = QtWidgets.QErrorMessage()\n self.error_message.setWindowTitle(\"success\")\n self.error_message.showMessage(\"Player Registered successfully!\") \n\n self.inputGameID.clear()\n self.inputPlayerID.clear()\n self.inputPlayerPosition.clear()\n\n def setupUi(self, register_players_for_game):\n register_players_for_game.setObjectName(\"register_players_for_game\")\n register_players_for_game.resize(800, 600)\n self.centralwidget = QtWidgets.QWidget(register_players_for_game)\n self.centralwidget.setObjectName(\"centralwidget\")\n self.label = QtWidgets.QLabel(self.centralwidget)\n self.label.setGeometry(QtCore.QRect(290, 20, 201, 20))\n self.label.setObjectName(\"label\")\n self.label_2 = QtWidgets.QLabel(self.centralwidget)\n self.label_2.setGeometry(QtCore.QRect(156, 100, 61, 20))\n self.label_2.setObjectName(\"label_2\")\n self.label_3 = QtWidgets.QLabel(self.centralwidget)\n self.label_3.setGeometry(QtCore.QRect(160, 200, 71, 17))\n self.label_3.setObjectName(\"label_3\")\n self.label_4 = QtWidgets.QLabel(self.centralwidget)\n self.label_4.setGeometry(QtCore.QRect(160, 290, 191, 17))\n self.label_4.setObjectName(\"label_4\")\n self.btnSave = QtWidgets.QPushButton(self.centralwidget)\n self.btnSave.setGeometry(QtCore.QRect(350, 420, 89, 25))\n self.btnSave.setObjectName(\"btnSave\")\n self.inputGameID = QtWidgets.QLineEdit(self.centralwidget)\n self.inputGameID.setGeometry(QtCore.QRect(462, 90, 171, 25))\n self.inputGameID.setObjectName(\"inputGameID\")\n self.inputPlayerID = QtWidgets.QLineEdit(self.centralwidget)\n self.inputPlayerID.setGeometry(QtCore.QRect(460, 190, 171, 25))\n self.inputPlayerID.setObjectName(\"inputPlayerID\")\n self.inputPlayerPosition = QtWidgets.QLineEdit(self.centralwidget)\n self.inputPlayerPosition.setGeometry(QtCore.QRect(460, 290, 171, 25))\n self.inputPlayerPosition.setObjectName(\"inputPlayerPosition\")\n register_players_for_game.setCentralWidget(self.centralwidget)\n self.menubar = QtWidgets.QMenuBar(register_players_for_game)\n self.menubar.setGeometry(QtCore.QRect(0, 0, 800, 22))\n self.menubar.setObjectName(\"menubar\")\n register_players_for_game.setMenuBar(self.menubar)\n self.statusbar = QtWidgets.QStatusBar(register_players_for_game)\n self.statusbar.setObjectName(\"statusbar\")\n register_players_for_game.setStatusBar(self.statusbar)\n\n self.retranslateUi(register_players_for_game)\n QtCore.QMetaObject.connectSlotsByName(register_players_for_game)\n\n def retranslateUi(self, register_players_for_game):\n _translate = QtCore.QCoreApplication.translate\n 
register_players_for_game.setWindowTitle(_translate(\"register_players_for_game\", \"MainWindow\"))\n        self.label.setText(_translate(\"register_players_for_game\", \"REGISTER PLAYER FOR GAME\"))\n        self.label_2.setText(_translate(\"register_players_for_game\", \"GAME ID\"))\n        self.label_3.setText(_translate(\"register_players_for_game\", \"PLAYER ID\"))\n        self.label_4.setText(_translate(\"register_players_for_game\", \"PLAYER POSITION IN GAME\"))\n\n        self.btnSave.setText(_translate(\"register_players_for_game\", \"SAVE\"))\n\n\nif __name__ == \"__main__\":\n    import sys\n    app = QtWidgets.QApplication(sys.argv)\n    register_players_for_game = QtWidgets.QMainWindow()\n    ui = Ui_register_players_for_game()\n    ui.setupUi(register_players_for_game)\n    register_players_for_game.show()\n    sys.exit(app.exec_())\n"
}
] | 19 |
subminu/forlikelion | https://github.com/subminu/forlikelion | 2f98f335420b1f0f02ac7aa128575360a65b1b7e | 954953bde7bf16d14fa6c7635118aabf85285eee | 1d5ce8e7a6fc3e9551f04ee5713f81c9e9e4ee4c | refs/heads/master | 2020-05-18T01:08:10.989869 | 2019-05-13T14:05:34 | 2019-05-13T14:05:34 | 182,933,739 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.512635350227356,
"alphanum_fraction": 0.5595667958259583,
"avg_line_length": 9.692307472229004,
"blob_id": "74135de7745a328dd11de90c65d7311d3d1fc492",
"content_id": "f4465b535010c3b1c0f59f721d922efe068923a3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 277,
"license_type": "no_license",
"max_line_length": 22,
"num_lines": 26,
"path": "/week5/README.md",
"repo_name": "subminu/forlikelion",
"src_encoding": "UTF-8",
"text": "This is my git test\n===\nThis is subtitle\n---\n\n# This is H1\n## This is H2\n### This is H3\n#### This is H4\n##### This is H5\n###### This is H6\n####### This is H7\n\n'''\nThis is test.py\nIt print \"hello world\"\n'''\n1. text1\n2. text2\n3. text3\n\n<code>\n'''\nprint(\"hello world\")\n'''\n</code>"
},
{
"alpha_fraction": 0.608562707901001,
"alphanum_fraction": 0.6116207838058472,
"avg_line_length": 30.190475463867188,
"blob_id": "16ff039230f315cec99bd1397aee22e0921b51cd",
"content_id": "90f419a160bc3f45f5ac248c5cfaeed52e9eb6f4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 654,
"license_type": "no_license",
"max_line_length": 113,
"num_lines": 21,
"path": "/week3/week3app/views.py",
"repo_name": "subminu/forlikelion",
"src_encoding": "UTF-8",
"text": "from django.shortcuts import render\n\n# Create your views here.\ndef home(request):\n return render(request, 'home.html')\ndef about(request):\n return render(request, 'about.html')\ndef count(request):\n full_text = request.GET['fulltext']\n word_list = full_text.split()\n def word_dictionary(list):\n result = {}\n for word in list:\n if not word in result:\n result[word] = 1\n else:\n result[word] += 1\n return result\n word_count = word_dictionary(word_list)\n\n return render(request, 'count.html',{'fulltext':full_text, 'total': len(word_list), 'countWord': word_count})"
},
{
"alpha_fraction": 0.5270270109176636,
"alphanum_fraction": 0.5270270109176636,
"avg_line_length": 24,
"blob_id": "4e10bc318d8a670b1d9f31231cf7f9a859b609bc",
"content_id": "971e388423968bcc9d97734367ceab82bbd8ed15",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 74,
"license_type": "no_license",
"max_line_length": 29,
"num_lines": 3,
"path": "/week5/multifly.py",
"repo_name": "subminu/forlikelion",
"src_encoding": "UTF-8",
"text": "def multifly(a,b):\n print(\"====multifly====\")\n print(\"result :\",a*b)"
},
{
"alpha_fraction": 0.774193525314331,
"alphanum_fraction": 0.774193525314331,
"avg_line_length": 20,
"blob_id": "c7964b4bde4c8645676614418f4f96aca97caa8a",
"content_id": "b9d4ab8de6d729ef605657e5adf15eaaaf4fd764",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 62,
"license_type": "no_license",
"max_line_length": 22,
"num_lines": 3,
"path": "/week5/git_test_week5/test.py",
"repo_name": "subminu/forlikelion",
"src_encoding": "UTF-8",
"text": "print(\"hello world\")\n# it print hello world\n# djlfajfldsjflksj"
},
{
"alpha_fraction": 0.7362637519836426,
"alphanum_fraction": 0.7582417726516724,
"avg_line_length": 17.200000762939453,
"blob_id": "6b409be1314a25a844ec7096edfb5692ce958bc3",
"content_id": "4fad5c1733ca09492baaa5bcea43afc1499f5e6f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 91,
"license_type": "no_license",
"max_line_length": 33,
"num_lines": 5,
"path": "/week3/week3app/apps.py",
"repo_name": "subminu/forlikelion",
"src_encoding": "UTF-8",
"text": "from django.apps import AppConfig\n\n\nclass Week3AppConfig(AppConfig):\n name = 'week3app'\n"
},
{
"alpha_fraction": 0.5346534848213196,
"alphanum_fraction": 0.5346534848213196,
"avg_line_length": 24.5,
"blob_id": "0a9f895f46848a144657ae16263c74a90ced231b",
"content_id": "0bb34522e3e4c17f13f9aef1fa15fb467c9754e6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 101,
"license_type": "no_license",
"max_line_length": 34,
"num_lines": 4,
"path": "/week5/plus.py",
"repo_name": "subminu/forlikelion",
"src_encoding": "UTF-8",
"text": "def plus(a,b):\n print(\"====plus====\")\n print(\"result :\",a+b)\n print(\"edited on plus branch\")"
},
{
"alpha_fraction": 0.49295774102211,
"alphanum_fraction": 0.49295774102211,
"avg_line_length": 23,
"blob_id": "77064755af93c1645af31ac5f8bceec374a4d930",
"content_id": "956e6ac3a7a39623f3c981075982099214308872",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 71,
"license_type": "no_license",
"max_line_length": 27,
"num_lines": 3,
"path": "/week5/divide.py",
"repo_name": "subminu/forlikelion",
"src_encoding": "UTF-8",
"text": "def divide(a,b):\n print(\"====divide====\")\n print(\"result :\", a/b)"
},
{
"alpha_fraction": 0.5480769276618958,
"alphanum_fraction": 0.5480769276618958,
"avg_line_length": 25.25,
"blob_id": "775c40d005e402b99101b3fe55f40cc84c91e2b3",
"content_id": "4810b5a6af9d37ff1c4049838ca3ec6a82c6c0a0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 104,
"license_type": "no_license",
"max_line_length": 35,
"num_lines": 4,
"path": "/week5/minus.py",
"repo_name": "subminu/forlikelion",
"src_encoding": "UTF-8",
"text": "def minus(a,b):\n print(\"====minus====\")\n print(\"result :\",a-b)\n print(\"edited on minus branch\")"
},
{
"alpha_fraction": 0.7362637519836426,
"alphanum_fraction": 0.7582417726516724,
"avg_line_length": 17.200000762939453,
"blob_id": "814d4221f169398bc78c2b9ca94a991125ace680",
"content_id": "35eee7ccbba8b0239cbed6d1c9b256e8f5369553",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 91,
"license_type": "no_license",
"max_line_length": 33,
"num_lines": 5,
"path": "/week4/week4app/apps.py",
"repo_name": "subminu/forlikelion",
"src_encoding": "UTF-8",
"text": "from django.apps import AppConfig\n\n\nclass Week4AppConfig(AppConfig):\n name = 'week4app'\n"
}
] | 9 |
chicm/machine-learning | https://github.com/chicm/machine-learning | bc8e50ebf6e69121ac2018e281f9a2b795d21a73 | 1c12dd9c8abf0737885aac03621909494529f442 | c8bfb04ca8cf0e4463f8f76214bc23e82278692a | refs/heads/master | 2020-07-07T05:39:18.484773 | 2016-09-03T06:18:39 | 2016-09-03T06:18:39 | 67,273,344 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.728654146194458,
"alphanum_fraction": 0.7474674582481384,
"avg_line_length": 32.73170852661133,
"blob_id": "fd603f5469a9c7000dd63e0778ea2eb9d7d1a09d",
"content_id": "97278f719a7c31f16f078c1fcab94f884da0899d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1382,
"license_type": "no_license",
"max_line_length": 93,
"num_lines": 41,
"path": "/sonar/sonar.py",
"repo_name": "chicm/machine-learning",
"src_encoding": "UTF-8",
"text": "import numpy\nimport pandas\nfrom datetime import datetime\nfrom keras.models import Sequential\nfrom keras.layers import Dense\nfrom keras.wrappers.scikit_learn import KerasClassifier\nfrom sklearn.cross_validation import cross_val_score\nfrom sklearn.preprocessing import LabelEncoder\nfrom sklearn.cross_validation import StratifiedKFold\nfrom sklearn.preprocessing import StandardScaler\nfrom sklearn.pipeline import Pipeline\n\nstart_time = datetime.now()\nprint(str(datetime.now()))\nseed = 7\nnumpy.random.seed(seed)\ndf = pandas.read_csv(\"sonar.all-data.txt\", header=None)\n#df.describe()\nds = df.values\nX = ds[:,0:60].astype(float)\nY = ds[:,60]\nencoder = LabelEncoder()\nencoder.fit(Y)\nencoded_Y = encoder.transform(Y)\n#df\n\ndef create_baseline():\n model = Sequential()\n model.add(Dense(60, input_dim=60, init='normal',activation='relu'))\n model.add(Dense(1, init='normal', activation='sigmoid'))\n model.compile(loss='binary_crossentropy', optimizer = 'adam', metrics=['accuracy'])\n return model\n\n\nestimator = KerasClassifier(build_fn=create_baseline, nb_epoch=100, batch_size=5, verbose=0 )\nkfold = StratifiedKFold(y=encoded_Y, n_folds=10, shuffle=True, random_state=seed)\nresults = cross_val_score(estimator, X, encoded_Y, cv=kfold)\nprint(\"Result: %.2f%% (%.2f%%)\" % (results.mean()*100, results.std()*100))\n\nprint(str(datetime.now()))\nprint(str(datetime.now() - start_time))"
}
] | 1 |
yanpeipan/scrapy | https://github.com/yanpeipan/scrapy | 9a7e657a9137dba820d3c019343f40b80627c2ef | a2a350eeed15da32f0d5848ad61c3ddbc55b2f62 | deb0e1b1a38a253fea6bba2391244994560ba0b7 | refs/heads/master | 2021-01-22T10:01:43.069852 | 2017-01-27T14:13:14 | 2017-01-27T14:13:14 | 18,146,732 | 4 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6720554232597351,
"alphanum_fraction": 0.6743649244308472,
"avg_line_length": 23.05555534362793,
"blob_id": "4fb5c9a09c997d4549954ebb174b6a5f4f62347a",
"content_id": "91c321f00c3f3f5604ffe68564760fc4b64cb64b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 433,
"license_type": "no_license",
"max_line_length": 68,
"num_lines": 18,
"path": "/Scrapy/spiders/cntv.py",
"repo_name": "yanpeipan/scrapy",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\nimport scrapy\n\n\nclass CntvSpider(scrapy.Spider):\n\tname = \"cntv\"\n\tallowed_domains = [\"http://tv.cntv.cn\"]\n\tvideoset_search='http://tv.cntv.cn/videoset/search'\n\t#init\n\tdef __init__(self, *args, **kwargs):\n\t\tpass\n\t#start request\n\tdef start_requests(self):\n\t\treturn [scrapy.http.Request(url=getattr(self, 'videoset_search'))]\n\t#parse code\n\tdef parse(self, response):\n\t\tprint response.xpath('//dd[@code]')\n\t\tpass\n"
},
{
"alpha_fraction": 0.6943521499633789,
"alphanum_fraction": 0.7034883499145508,
"avg_line_length": 31.54054069519043,
"blob_id": "c954409862d7ab2e5e21ddc064fcd5132ceb6ca8",
"content_id": "1f9a3a6b342074e51308bf1d634c41a17e128b2b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1204,
"license_type": "no_license",
"max_line_length": 69,
"num_lines": 37,
"path": "/Scrapy/spiders/tudou.py",
"repo_name": "yanpeipan/scrapy",
"src_encoding": "UTF-8",
"text": "#coding=utf-8\nfrom scrapy.spiders import CrawlSpider, Spider\nfrom scrapy.loader import ItemLoader\nfrom scrapy.loader.processors import TakeFirst, MapCompose, Join\nfrom scrapy.exceptions import CloseSpider\nfrom scrapy.selector import Selector\nfrom scrapy.http import FormRequest\nfrom scrapy.http import Request\nfrom Scrapy.items import *\nfrom urlparse import urlparse,parse_qs\nimport json\nimport pymongo\nfrom datetime import datetime, date, time\n\nclass TudouSpider(CrawlSpider):\n\n name = 'tudou'\n allowed_domins = ['http://www.tudou.com']\n list_url = 'http://www.tudou.com/list/index.html'\n\n rate=float(1000)/3600\n\n def __init__(self, category = None, *args, **kwargs):\n if hasattr(self, 'rate'):\n self.download_delay=1/getattr(self, 'rate')\n if category:\n self.category=unicode(category, 'utf-8')\n\n def start_requests(self):\n\treturn [Request(getattr(self, 'list_url'), callback=self.parseList)]\n\n def parseList(self, response):\n\tchannels=response.xpath('//*[@id=\"secMenu\"]/ul/li')\n\tfor channel in channels:\n\t id=channel.xpath('@data-id').extract()\n\t url=channel.xpath('.//a/@href').extract()\n\t name=channel.xpath('.//a/text()').extract()\n"
},
{
"alpha_fraction": 0.505831778049469,
"alphanum_fraction": 0.6065070629119873,
"avg_line_length": 46.911766052246094,
"blob_id": "b7dc67da4420810050d5550a4e43217ed7d2f419",
"content_id": "a08f17c1f2585f2a0bec154381bbc87ebeb50887",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1629,
"license_type": "no_license",
"max_line_length": 168,
"num_lines": 34,
"path": "/Scrapy/spiders/windj007.py",
"repo_name": "yanpeipan/scrapy",
"src_encoding": "UTF-8",
"text": "from scrapy.linkextractors import LinkExtractor\nfrom scrapy.spiders import CrawlSpider, Rule\nfrom Scrapy.items import ProxyItem\nimport re\n\n\nclass Windj007Spider(CrawlSpider):\n name = 'Windj007'\n start_urls = ['http://www.google.ru/search?q=%2B%94%3A8080+%2B%94%3A3128+%2B%94%3A80+filetype%3Atxt&hl=ru&source=hp&btnG=%CF%EE%E8%F1%EA+%E2+Google&gbv=1&d=1',\n 'http://www.google.ru/search?q=%2B%94%3A8080+%2B%94%3A3128+%2B%94%3A80+filetype%3Atxt&hl=ru&source=hp&btnG=%CF%EE%E8%F1%EA+%E2+Google&gbv=1&start=10',\n 'http://www.google.ru/search?q=%2B%94%3A8080+%2B%94%3A3128+%2B%94%3A80+filetype%3Atxt&hl=ru&source=hp&btnG=%CF%EE%E8%F1%EA+%E2+Google&gbv=1&start=20',\n 'http://www.google.ru/search?q=%2B%94%3A8080+%2B%94%3A3128+%2B%94%3A80+filetype%3Atxt&hl=ru&source=hp&btnG=%CF%EE%E8%F1%EA+%E2+Google&gbv=1&start=30',\n 'http://www.google.ru/search?q=%2B%94%3A8080+%2B%94%3A3128+%2B%94%3A80+filetype%3Atxt&hl=ru&source=hp&btnG=%CF%EE%E8%F1%EA+%E2+Google&gbv=1&start=40',\n ]\n\n _address_re = re.compile(r'(\\d{1,4}\\.\\d{1,4}\\.\\d{1,4}\\.\\d{1,4})[^0-9]+(\\d+)')\n rules = (\n Rule(LinkExtractor(restrict_xpaths = '//h3[@class=\"r\"]'),\n callback = 'parse_proxylist',\n follow = True\n ),\n )\n\n def parse_proxylist(self, response):\n\n if response.status >= 400:\n\n return\n\n addresses_parsed = ProxySpider._address_re.finditer(response.body)\n for row in addresses_parsed:\n res = ProxyItem()\n res['ip'] = '%s:%s' % tuple(row.groups())\n yield res\n"
},
{
"alpha_fraction": 0.5139859914779663,
"alphanum_fraction": 0.5699300765991211,
"avg_line_length": 50.890296936035156,
"blob_id": "e6cd6ed7ec24e52667a6f92d92a20e719494430f",
"content_id": "7838446c456796f3c3c706150468aba2d4d91e58",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 12498,
"license_type": "no_license",
"max_line_length": 832,
"num_lines": 237,
"path": "/Scrapy/spiders/Baidupan.py",
"repo_name": "yanpeipan/scrapy",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n\nfrom scrapy.spiders import CrawlSpider\nfrom scrapy.http import Request\nfrom Scrapy.items import *\nimport json\n\nclass BaidupanSpider(CrawlSpider):\n name = 'baidupan'\n uks = []\n allowed_domins = ['https://pan.baidu.com']\n URL_HOT = 'https://pan.baidu.com/pcloud/friend/gethotuserlist?start={start}&limit=24'\n \"\"\"\n {\"errno\":0,\"request_id\":3296180617,\"hotuser_list\":[{\"type\":-1,\"hot_uname\":\"\\u6700\\u7ec8***4\\u4e91\\u76d8\",\"avatar_url\":\"https:\\/\\/ss0.bdstatic.com\\/7Ls0a8Sm1A5BphGlnYG\\/sys\\/portrait\\/item\\/50424c4f.jpg\",\"intro\":\"\\u767e\\u5ea6\\u300a\\u6700\\u7ec8\\u5e7b\\u60f314\\u300b\\u4e91\\u5e73\\u53f0\\u30028\\u670825\\u65e5\\u5f00\\u653e\\u6027\\u6d4b\\u8bd5\\u5f00\\u542f\\uff0c\\u656c\\u8bf7\\u671f\\u5f85\\u3002\",\"follow_count\":0,\"fans_count\":1278735,\"user_type\":4,\"is_vip\":0,\"pubshare_count\":2,\"hot_uk\":1112219283,\"album_count\":3},{\"type\":-1,\"hot_uname\":\"\\u8273*\\u90ed\\u9759\",\"avatar_url\":\"https:\\/\\/ss0.bdstatic.com\\/7Ls0a8Sm1A5BphGlnYG\\/sys\\/portrait\\/item\\/7a567d4d.jpg\",\"intro\":\"\\u90ed\\u9759\\u4e0e15\\u4e2a\\u57ce\\u5e02\\u7684\\u8273\\u9047\",\"follow_count\":0,\"fans_count\":1370108,\"user_type\":4,\"is_vip\":0,\"pubshare_count\":0,\"hot_uk\":1447638178,\"album_count\":0}]}\n \"\"\"\n URL_INFO = 'https://pan.baidu.com/pcloud/user/getinfo?&query_uk={uk}'\n \"\"\"\n {\"errno\":0,\"request_id\":456845460,\"user_info\":{\"avatar_url\":\"https:\\/\\/ss0.bdstatic.com\\/7Ls0a8Sm1A5BphGlnYG\\/sys\\/portrait\\/item\\/decad705.jpg\",\"fans_count\":1,\"follow_count\":1,\"intro\":\"\",\"uname\":\"\\u65adVS\\u5f26\",\"uk\":3389100040,\"album_count\":0,\"pubshare_count\":0,\"tui_user_count\":0,\"c2c_user_sell_count\":0,\"c2c_user_buy_count\":0,\"c2c_user_product_count\":0,\"pair_follow_type\":-1}}\n uname:昵称\n \"\"\"\n #缺少专辑列表\n URL_SHARE_LIMIT = 100\n URL_SHARE = 'https://pan.baidu.com/pcloud/feed/getsharelist?&auth_type=1&start={start}&limit=100&query_uk={uk}' #获得分享列表\n \"\"\"\n {\"feed_type\":\"share\",\"category\":6,\"public\":\"1\",\"shareid\":\"1541924625\",\"data_id\":\"2418757107690953697\",\"title\":\"\\u5723\\u8bde\\u58c1\\u7eb8\\u5927\\u6d3e\\u9001\",\"third\":0,\"clienttype\":0,\"filecount\":1,\"uk\":1798788396,\"username\":\"SONYcity03\",\"feed_time\":1418986714000,\"desc\":\"\",\"avatar_url\":\"http:\\/\\/himg.bdimg.com\\/sys\\/portrait\\/item\\/1b6bf333.jpg\",\"dir_cnt\":1,\"filelist\":[{\"server_filename\":\"\\u5723\\u8bde\\u58c1\\u7eb8\\u5927\\u6d3e\\u9001\",\"category\":6,\"isdir\":1,\"size\":1024,\"fs_id\":870907642649299,\"path\":\"%2F%E5%9C%A3%E8%AF%9E%E5%A3%81%E7%BA%B8%E5%A4%A7%E6%B4%BE%E9%80%81\",\"md5\":\"0\",\"sign\":\"1221d7d56438970225926ad552423ff6a5d3dd33\",\"time_stamp\":1439542024}],\"source_uid\":\"871590683\",\"source_id\":\"1541924625\",\"shorturl\":\"1dDndV6T\",\"vCnt\":34296,\"dCnt\":7527,\"tCnt\":5056,\"like_status\":0,\"like_count\":60,\"comment_count\":19},\n public:公开分享\n title:文件名称\n uk:用户编号\n \"\"\"\n URL_FOLLOW_LIMIT = 24\n URL_FOLLOW = 'https://pan.baidu.com/pcloud/friend/getfollowlist?query_uk={uk}&limit={limit}&start={start}' #获得订阅列表\n \"\"\"\n {\"type\":-1,\"follow_uname\":\"\\u597d\\u55e8\\u597d\\u55e8\\u554a\",\"avatar_url\":\"http:\\/\\/himg.bdimg.com\\/sys\\/portrait\\/item\\/979b832f.jpg\",\"intro\":\"\\u9700\\u8981\\u597d\\u8d44\\u6599\\u52a0994798392\",\"user_type\":0,\"is_vip\":0,\"follow_count\":2,\"fans_count\":2276,\"follow_time\":1415614418,\"pubshare_count\":36,\"follow_uk\":2603342172,\"album_count\":0},\n follow_uname:订阅名称\n 
fans_count:粉丝数\n \"\"\"\n URL_FANS_LIMIT = 24\n URL_FANS = 'https://pan.baidu.com/pcloud/friend/getfanslist?query_uk={uk}&limit={limit}&start={start}' # 获取关注列表\n \"\"\"\n {\"type\":-1,\"fans_uname\":\"\\u62e8\\u52a8\\u795e\\u7684\\u5fc3\\u7eea\",\"avatar_url\":\"http:\\/\\/himg.bdimg.com\\/sys\\/portrait\\/item\\/d5119a2b.jpg\",\"intro\":\"\",\"user_type\":0,\"is_vip\":0,\"follow_count\":8,\"fans_count\":39,\"follow_time\":1439541512,\"pubshare_count\":15,\"fans_uk\":288332613,\"album_count\":0}\n avatar_url:头像\n fans_uname:用户名\n \"\"\"\n # rate: 40page/min\n rate = 360.0 / 60.0\n parse_fans = False\n parse_share_list = True\n parse_share_priority = 0\n\n def __init__(self, *args, **kwargs):\n for k, v in enumerate(kwargs):\n setattr(self, v, kwargs[v])\n if hasattr(self, 'rate'):\n self.download_delay = 1 / getattr(self, 'rate')\n\n def start_requests(self):\n requests = []\n start = 0\n hotUserRequest = Request(\n url=self.URL_HOT.format(start=start),\n callback=self.parseHotUserList,\n meta={'start': start},\n dont_filter=True\n )\n requests.append(hotUserRequest)\n for _,uk in enumerate(self.uks):\n shareListRequest = Request(\n url=self.URL_SHARE.format(uk=uk, start=start, limit=self.URL_SHARE_LIMIT),\n callback=self.parseShareList,\n headers={'Referer':'https://pan.baidu.com/share/home'},\n meta={'uk': uk, 'start': start, 'limit': self.URL_SHARE_LIMIT},\n priority=self.parse_share_priority\n )\n fansRequest = Request(\n url=self.URL_FANS.format(uk=uk, start=start, limit=self.URL_FANS_LIMIT),\n callback=self.parseFans,\n meta={'uk': uk, 'start': start, 'limit': self.URL_FANS_LIMIT}\n )\n followRequest = Request(\n url=self.URL_FOLLOW.format(uk=uk, start=start, limit=self.URL_FOLLOW_LIMIT),\n callback=self.parseFollow,\n meta={'uk': uk, 'start': start, 'limit': self.URL_FOLLOW_LIMIT}\n )\n if self.parse_share_list:\n requests.append(shareListRequest)\n if self.parse_fans:\n requests.append(fansRequest)\n requests.append(followRequest)\n\n return requests\n \"\"\"\n 解析热门用户列表\n \"\"\"\n def parseHotUserList(self, response):\n list = json.loads(response.body_as_unicode())\n if list['errno'] == 0:\n for _, record in enumerate(list['hotuser_list']):\n yield BaidupanHotUserItem(record)\n uk = record['hot_uk']\n if (record['pubshare_count'] > 0 or record['album_count'] > 0) and self.parse_share_list:\n yield Request(\n url=self.URL_SHARE.format(uk=uk, start=0, limit=self.URL_SHARE_LIMIT),\n callback=self.parseShareList,\n headers={'Referer':'https://pan.baidu.com/share/home'},\n meta={'uk': uk, 'start': 0, 'limit': self.URL_SHARE_LIMIT},\n priority=self.parse_share_priority\n )\n if record['fans_count'] > 0 and self.parse_fans:\n yield Request(\n url=self.URL_FANS.format(uk=uk, start=0, limit=self.URL_FANS_LIMIT),\n callback=self.parseFans,\n meta={'uk': uk, 'start': 0, 'limit': self.URL_FANS_LIMIT}\n )\n if record['follow_count'] > 0:\n yield Request(\n url=self.URL_FOLLOW.format(uk=uk, start=0, limit=self.URL_FOLLOW_LIMIT),\n callback=self.parseFollow,\n meta={'uk': uk, 'start': 0, 'limit': self.URL_FOLLOW_LIMIT},\n dont_filter=True\n )\n if len(list) > 0:\n start = response.meta['start'] + 24\n yield Request(\n url=self.URL_HOT.format(start=start),\n callback=self.parseHotUserList,\n meta={'start': start},\n dont_filter=True\n )\n \"\"\"\n 解析分享列表\n \"\"\"\n def parseShareList(self, response):\n list = json.loads(response.body_as_unicode())\n if list['errno'] == 0:\n for _,record in enumerate(list['records']):\n yield BaiduPanShareItem(record)\n # next page\n start = 
response.meta['start']\n totalCount = (int)(list['total_count'])\n if (start + 1) < totalCount and self.parse_share_list:\n uk = response.meta['uk']\n start = start + self.URL_SHARE_LIMIT\n limit = self.URL_SHARE_LIMIT\n yield Request(\n url=self.URL_SHARE.format(uk=uk, start=start, limit=limit),\n headers={'Referer':'https://pan.baidu.com/share/home'},\n callback=self.parseShareList,\n meta={'uk': uk, 'start': start, 'limit': limit},\n priority=self.parse_share_priority\n )\n\n \"\"\"\n 解析粉丝\n \"\"\"\n def parseFans(self, response):\n list = json.loads(response.body_as_unicode())\n print(list)\n if list['errno'] == 0:\n start = response.meta['start']\n for _,record in enumerate(list['fans_list']):\n # 解析粉丝的关注,粉丝,分享列表(start从0开始\n yield BaiduPanFansItem(record)\n uk = record['fans_uk']\n if (record['pubshare_count'] > 0 or record['album_count'] > 0) and self.parse_share_list :\n yield Request(\n url=self.URL_SHARE.format(uk=uk, start=0, limit=self.URL_SHARE_LIMIT),\n callback=self.parseShareList,\n headers={'Referer':'https://pan.baidu.com/share/home'},\n meta={'uk': uk, 'start': 0, 'limit': self.URL_SHARE_LIMIT},\n priority=self.parse_share_priority\n )\n if record['fans_count'] > 0 and self.parse_fans:\n yield Request(\n url=self.URL_FANS.format(uk=uk, start=0, limit=self.URL_FANS_LIMIT),\n callback=self.parseFans,\n meta={'uk': uk, 'start': 0, 'limit': self.URL_FANS_LIMIT}\n )\n if record['follow_count'] > 0:\n yield Request(\n url=self.URL_FOLLOW.format(uk=uk, start=0, limit=self.URL_FOLLOW_LIMIT),\n callback=self.parseFollow,\n meta={'uk': uk, 'start': 0, 'limit': self.URL_FOLLOW_LIMIT}\n )\n\n # next page\n start = response.meta['start']\n totalCount = (int)(list['total_count'])\n if (start + 1) < totalCount and self.parse_fans:\n print('next')\n uk = response.meta['uk']\n start = start + self.URL_FANS_LIMIT\n yield Request(\n url=self.URL_FANS.format(uk=uk, start=start, limit=self.URL_FANS_LIMIT),\n callback=self.parseFans,\n meta={'uk': uk, 'start': start, 'limit': self.URL_FANS_LIMIT}\n )\n \"\"\"\n 解析关注\n \"\"\"\n def parseFollow(self, response):\n list = json.loads(response.body_as_unicode())\n start = response.meta['start']\n if list['errno'] == 0:\n for _,record in enumerate(list['follow_list']):\n yield BaiduPanFollwItem(record)\n # 请求分享列表\n if (record['pubshare_count'] > 0 or record['album_count'] > 0) and self.parse_share_list :\n yield Request(\n url=self.URL_SHARE.format(uk=record['follow_uk'], start=0, limit=self.URL_SHARE_LIMIT),\n callback=self.parseShareList,\n headers={'Referer':'https://pan.baidu.com/share/home'},\n meta={'uk': record['follow_uk'], 'start': 0, 'limit': self.URL_SHARE_LIMIT},\n priority=self.parse_share_priority\n )\n if record['fans_count'] > 0 and self.parse_fans:\n yield Request(\n url=self.URL_FANS.format(uk=record['follow_uk'], start=0, limit=self.URL_FANS_LIMIT),\n callback=self.parseFans,\n meta={'uk': record['follow_uk'], 'start': 0, 'limit': self.URL_FANS_LIMIT}\n )\n if record['follow_count'] > 0:\n yield Request(\n url=self.URL_FOLLOW.format(uk=record['follow_uk'], start=0, limit=self.URL_FOLLOW_LIMIT),\n callback=self.parseFollow,\n meta={'uk': record['follow_uk'], 'start': 0, 'limit': self.URL_FOLLOW_LIMIT}\n )\n # next page\n start = response.meta['start']\n totalCount = (int)(list['total_count'])\n if (start + 1) < totalCount and self.parse_fans:\n uk = response.meta['uk']\n start = start + self.URL_FOLLOW_LIMIT\n yield Request(\n url=self.URL_FOLLOW.format(uk=uk, start=start, limit=self.URL_FOLLOW_LIMIT),\n callback=self.parseFollow,\n 
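# paging cursor: each request advances 'start' by URL_FOLLOW_LIMIT and the\n                    # meta dict carries it to the next callback\n                    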
meta={'uk': uk, 'start': start, 'limit': self.URL_FOLLOW_LIMIT}\n )\n"
},
{
"alpha_fraction": 0.7516778707504272,
"alphanum_fraction": 0.7516778707504272,
"avg_line_length": 15.55555534362793,
"blob_id": "a513c95d3c101b3dfcd91dada1665db594d4dd41",
"content_id": "390be11c81427c84d512a5a689ff0b6909a64659",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 149,
"license_type": "no_license",
"max_line_length": 30,
"num_lines": 9,
"path": "/README.md",
"repo_name": "yanpeipan/scrapy",
"src_encoding": "UTF-8",
"text": "Scrapy\n======\n#Install Dependence\n```\nsudo pip install scrapy\nsudo pip install selenium \nsudo pip install pymongo\nsudo apt-get install phantomjs\n```\n"
},
{
"alpha_fraction": 0.7027027010917664,
"alphanum_fraction": 0.7387387156486511,
"avg_line_length": 21.576271057128906,
"blob_id": "8c34ef213c3de17070a6e85d0a71657f70dbb43b",
"content_id": "3c12a8882d66ba7d247923e77ef63cc310e0c135",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1332,
"license_type": "no_license",
"max_line_length": 72,
"num_lines": 59,
"path": "/Scrapy/settings.py",
"repo_name": "yanpeipan/scrapy",
"src_encoding": "UTF-8",
"text": "# Scrapy settings for Scrapy project\n#\n# For simplicity, this file contains only the most important settings by\n# default. All the other settings are documented here:\n#\n# http://doc.scrapy.org/en/latest/topics/settings.html\n#\n\nBOT_NAME = 'Scrapy'\n\nSPIDER_MODULES = ['Scrapy.spiders']\nNEWSPIDER_MODULE = 'Scrapy.spiders'\n\nITEM_PIPELINES = {\n 'Scrapy.pipelines.MongoPipeline': 100,\n }\n\nDOWNLOADER_MIDDLEWARES = {\n 'scrapy.downloadermiddlewares.retry.RetryMiddleware': 90,\n 'Scrapy.middlewares.ProxyMiddleware': 99,\n 'scrapy_proxies.RandomProxy': 100,\n 'scrapy.downloadermiddlewares.httpproxy.HttpProxyMiddleware': 110,\n 'Scrapy.middlewares.BaiduyunMiddleware': 560,\n }\n\nSPIDER_MIDDLEWARES = {\n }\nCONCURRENT_ITEMS = 100\nCONCURRENT_REQUESTS = 16\nCONCURRENT_REQUESTS_PER_DOMAIN = 8\n\nREACTOR_THREADPOOL_MAXSIZE = 10\n\nCOOKIES_ENABLED = False\n\nCONCURRENT_ITEMS = 1000\n\nLOG_ENABLED = True\n#LOG_FILE = 'ScrapyCrawl.log'\n#LOG_LEVEL = 'INFO'\n\nDOWNLOAD_DELAY = 0.25\n\nGRAPHITE_HOST = '127.0.0.1'\nGRAPHITE_PORT = 2003\n#STATS_CLASS = 'Scrapy.graphite.RedisGraphiteStatsCollector'\nDEPTH_LIMIT = 0\nDEPTH_PRIORITY = 1\nDEPTH_STATS = True\n\nAUTOTHROTTLE_ENABLED = True\nAUTOTHROTTLE_DEBUG = True\nAUTOTHROTTLE_START_DELAY = 1\nAUTOTHROTTLE_MAX_DELAY = 300\n\nRETRY_ENABLED = True\nRETRY_TIMES = 3\n\nPROXY_LIST = '/tmp/ip-good.txt'\n"
},
{
"alpha_fraction": 0.6383700966835022,
"alphanum_fraction": 0.6443123817443848,
"avg_line_length": 34.164180755615234,
"blob_id": "75a107d0cf1c79738bfc63ffa3dfbd1f6dd1a69f",
"content_id": "c5ed4badf803d2fe49d0ff8171509ab432e68db8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2356,
"license_type": "no_license",
"max_line_length": 147,
"num_lines": 67,
"path": "/Scrapy/spiders/proxy.py",
"repo_name": "yanpeipan/scrapy",
"src_encoding": "UTF-8",
"text": "from scrapy.spiders import Spider, Request\nfrom scrapy.selector import Selector\nfrom Scrapy.items import ProxyItem\nfrom selenium import webdriver\nfrom scrapy.selector import HtmlXPathSelector\n\nimport time\n\nclass ProxySpider(Spider):\n name = 'proxy'\n pipelines = ['ProxySpider']\n middlewares = ['Selenium']\n start_urls = ['http://www.baidu.com']\n urls = {\n 'Youdaili':'http://www.youdaili.cn/Daili/http/',\n 'Hidemyass':'https://hidemyass.com/proxy-list/',\n #'Cnproxy':'http://www.cnproxy.com/proxy1.html'\n }\n\n def __init__(self, *args, **kwargs):\n pass\n def parse(self, response):\n if response.status == 200:\n self.url = response.url\n for proxy, url in self.urls.iteritems():\n yield Request(url = url, callback = getattr(self, 'parse' + proxy))\n\n def parseCnproxyDetail(self, response):\n pass\n\n def parseCnproxy(self, response):\n dr=webdriver.PhantomJS()\n dr.get(response.url)\n pageSource = dr.page_source\n dr.close()\n sel = Selector(text = pageSource, type='html')\n trs = sel.xpath('//*[@id=\"proxylisttb\"]/table[3]//tr[1]/following-sibling::tr')\n for key, tr in enumerate(trs):\n result = tr.re(r'(\\d+(?:\\.\\d+){3})(?:.*)(:\\d+)')\n if len(result) == 2:\n proxy = result[0] + result[1]\n yield Request(url=self.url + '?' + proxy, method=\"HEAD\", meta={\"proxy\":'http://' + proxy, \"download_timeout\":10}, callback=self.parseProxy)\n\n def parseHidemyass(self, response):\n return\n\n def parseYoudaili(self, response):\n return\n sel = Selector(response)\n links = sel.xpath('//ul[@class=\"newslist_line\"]/li/a/@href').extract()\n for key, link in enumerate(links):\n yield Request(url = link, callback = self.parseYoudailiDetail)\n return\n\n def parseYoudailiDetail(self, response):\n sel = Selector(response)\n proxys = sel.xpath('//div[@class=\"cont_font\"]/p').re(r\"\\d+.\\d+.\\d+.\\d+:\\d+\")\n for proxy in proxys:\n yield Request(url=self.url + '?' + proxy, method=\"HEAD\", meta={\"proxy\":'http://' + proxy, \"download_timeout\":10}, callback=self.parseProxy)\n\n def parseProxy(self, response):\n proxyItem = ProxyItem()\n proxyItem['ip'] = response.meta['proxy']\n proxyItem['delay'] = response.meta['endTime'] - response.meta['startTime']\n proxyItem['status'] = response.status\n proxyItem['time'] = time.time()\n yield proxyItem\n"
},
{
"alpha_fraction": 0.6361141800880432,
"alphanum_fraction": 0.6437980532646179,
"avg_line_length": 33.377357482910156,
"blob_id": "94c15d1a21905f813c87c102cbc9145a1765aeff",
"content_id": "cf07241f9b248107da347bebac8ca6e094931f17",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1822,
"license_type": "no_license",
"max_line_length": 74,
"num_lines": 53,
"path": "/Scrapy/middlewares.py",
"repo_name": "yanpeipan/scrapy",
"src_encoding": "UTF-8",
"text": "from urlparse import urlparse,parse_qs\nfrom pymongo import MongoClient\nfrom scrapy.downloadermiddlewares.useragent import UserAgentMiddleware\nfrom selenium import webdriver\nimport time\nimport random\nimport json\n\nclass ProxyMiddleware(object):\n def process_request(self, request, spider):\n url = urlparse(request.url)\n if url.path == '/pcloud/friend/gethotuserlist':\n request.meta.pop('proxy', None)\n elif url.path == '/pcloud/friend/getfollowlist':\n request.meta.pop('proxy', None)\n\n\nclass DoubanMiddleware(object):\n def __init__(self):\n mongo = MongoClient().scrapy\n self.proxys = list(mongo.proxy.find({'status':200}))\n\n def process_request(self, request, spider):\n if spider.__class__.__name__ == 'DoubanSpider':\n url = urlparse(request.url)\n params = parse_qs(url.query)\n if url.scheme == 'https':\n if len(url.query) == 0:\n request = request.replace(url = \"%s?apikey=0d58236c3758bc2928086a44a60a347b\" % request.url)\n elif 'apikey' not in parse_qs(url.query):\n request = request.replace(url = \"%s&apikey=0d58236c3758bc2928086a44a60a347b\" % request.url)\n elif url.scheme == 'http':\n pass\n elif 'Selenium' in getattr(spider, 'middlewares', []):\n pass\n #browser = webdriver.Firefox()\n #browser.get(request.url)\n\n def process_response(self, request, response, spider):\n if response.status != 200:\n pass\n return response\n def process_exception(self, request, exception, spider):\n pass\n\nclass BaiduyunMiddleware(object):\n def process_response(self, request, response, spider):\n if spider.__class__.__name__ == 'BaidupanSpider':\n list = json.loads(response.body_as_unicode())\n if list['errno'] != 0:\n time.sleep(300)\n return response.replace(status=500)\n return response\n"
},
{
"alpha_fraction": 0.6018065214157104,
"alphanum_fraction": 0.6042529344558716,
"avg_line_length": 21.806867599487305,
"blob_id": "36beb84da782a9205a94b4e3444297288541a805",
"content_id": "fc8c45dab6524a73061418b4ea0675e2f85f4d25",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5314,
"license_type": "no_license",
"max_line_length": 81,
"num_lines": 233,
"path": "/Scrapy/items.py",
"repo_name": "yanpeipan/scrapy",
"src_encoding": "UTF-8",
"text": "# Define here the models for your scraped items\n#\n# See documentation in:\n# http://doc.scrapy.org/en/latest/topics/items.html\n\nfrom scrapy.item import Item, Field\nfrom scrapy.loader import ItemLoader\nfrom scrapy.loader.processors import Join, MapCompose, TakeFirst,Identity,Compose\nfrom datetime import datetime\n\nclass VideoItem(Item):\n source=Field()\nclass BaidupanHotUserItem(Item):\n uk = Field()\n uname = Field()\n type = Field()\n hot_uname = Field()\n avatar_url = Field()\n intro = Field()\n user_type = Field()\n is_vip = Field()\n follow_count = Field()\n fans_count = Field()\n follow_time = Field()\n pubshare_count = Field()\n hot_uk = Field()\n album_count = Field()\nclass BaiduPanFansItem(Item):\n uk = Field()\n uname = Field()\n type = Field()\n fans_uname = Field()\n avatar_url = Field()\n intro = Field()\n user_type = Field()\n is_vip = Field()\n follow_count = Field()\n fans_count = Field()\n follow_time = Field()\n pubshare_count = Field()\n fans_uk = Field()\n album_count = Field()\nclass BaiduPanFollwItem(Item):\n uk = Field()\n uname = Field()\n type = Field()\n follow_uname = Field()\n avatar_url = Field()\n intro = Field()\n user_type = Field()\n is_vip = Field()\n follow_count = Field()\n fans_count = Field()\n follow_time = Field()\n pubshare_count = Field()\n follow_uk = Field()\n album_count = Field()\nclass BaiduPanShareItem(Item):\n cover_thumb = Field()\n operation = Field()\n album_id = Field()\n feed_type = Field()\n category = Field()\n public = Field()\n shareid = Field()\n data_id = Field()\n title = Field()\n third = Field()\n clienttype = Field()\n filecount = Field()\n uk = Field()\n username = Field()\n feed_time = Field()\n desc = Field()\n avatar_url = Field()\n category_1_cnt = Field()\n category_2_cnt = Field()\n category_3_cnt = Field()\n category_4_cnt = Field()\n category_5_cnt = Field()\n category_6_cnt = Field()\n category_7_cnt = Field()\n category_8_cnt = Field()\n category_9_cnt = Field()\n dir_cnt = Field()\n filelist = Field()\n source_uid = Field()\n source_id = Field()\n shorturl = Field()\n vCnt = Field()\n dCnt = Field()\n tCnt = Field()\n like_status = Field()\n like_count = Field()\n comment_count = Field()\n\nclass CelebrityItem(Item):\n mobile_url = Field()\n aka_en = Field()\n name = Field()\n works = Field()\n gender = Field()\n avatars = Field()\n id = Field()\n aka = Field()\n name_en = Field()\n born_place = Field()\n alt = Field()\n\nclass Person(Item):\n id = Field()\n name = Field()\n icon = Field()\n collect = Field()\n wish = Field()\n\nclass MovieItem(VideoItem):\n rating = Field()\n title = Field()\n collect_count = Field()\n original_title = Field()\n subtype = Field()\n year = Field()\n images = Field()\n alt = Field()\n id = Field()\n reviews_count = Field()\n wish_count = Field()\n douban_site = Field()\n mobile_url= Field()\n title= Field()\n do_count= Field()\n seasons_count= Field()\n schedule_url= Field()\n episodes_count= Field()\n countries= Field()\n genres= Field()\n collect_count= Field()\n casts= Field()\n current_season= Field()\n original_title= Field()\n summary= Field()\n subtype= Field()\n directors= Field()\n comments_count= Field()\n ratings_count= Field()\n aka= Field()\n writers = Field()\n imdb_id = Field()\n tags = Field()\n recommendations = Field()\n comments = Field()\n reviews = Field()\n\nclass ProxyItem(Item):\n ip = Field()\n delay = Field()\n type = Field()\n anonymity = Field()\n status = Field()\n time = Field()\n\nclass streamtypes(Item):\n hd2=Field()\n flv=Field()\n 
hd=Field()\n hd3gp=Field()\n hd3=Field()\n\nclass ShowItem(VideoItem):\n id=Field()\n name=Field()\n link=Field()\n play_link=Field()\n last_play_link=Field()\n poster=Field()\n thumbnail=Field()\n streamtypes=Field()\n hasvideotype=Field()\n completed=Field()\n episode_count=Field(serializer=int)\n episode_updated=Field()\n category=Field()\n view_count=Field(serializer=int)\n source=Field()\n paid=Field()\n published=Field()\n released=Field()\n comment_count=Field(serializer=int)\n favorite_count=Field(serializer=int)\n lastupdate=Field()\n dma=Field()\n type=Field()\n dct=Field()\n algInfo=Field()\n related=Field()\n\nclass ShowLoader(ItemLoader):\n\n default_output_processor=TakeFirst()\n\n streamtypes_out=Identity()\n hasvideotype_out=Identity()\n #published_out=Compose(lambda s:datetime.strptime(s[0], '%Y-%m-%d'))\n\n favorite_count_in=MapCompose(int)\n episode_count_in=MapCompose(int)\n view_count_in=MapCompose(int)\n comment_count_in=MapCompose(int)\n\nclass ShowVideoItem(Item):\n show_id=Field()\n id=Field()\n title=Field()\n link=Field()\n thumbnail=Field()\n duration=Field()\n category=Field()\n view_count=Field()\n favorite_count=Field()\n comment_count=Field()\n up_count=Field()\n down_count=Field()\n stage=Field()\n seq=Field()\n published=Field()\n operation_limit=Field()\n streamtypes=Field()\n state=Field()\n rc_title=Field()\n\nclass UncomplatedItem(Item):\n id=Field()\n"
},
{
"alpha_fraction": 0.5716558694839478,
"alphanum_fraction": 0.580345094203949,
"avg_line_length": 42.00526428222656,
"blob_id": "b6a21ca6b722a41e569c7a639acb5a8340897785",
"content_id": "7412081d2f195dcbc461b0bd3b2f86b1b417d0bb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 8199,
"license_type": "no_license",
"max_line_length": 199,
"num_lines": 190,
"path": "/Scrapy/spiders/youku.py",
"repo_name": "yanpeipan/scrapy",
"src_encoding": "UTF-8",
"text": "#coding=utf-8\nfrom scrapy.spiders import CrawlSpider, Spider\nfrom scrapy.loader import ItemLoader\nfrom scrapy.loader.processors import TakeFirst, MapCompose, Join\nfrom scrapy.exceptions import CloseSpider\nfrom scrapy.selector import Selector\nfrom scrapy.http import FormRequest\nfrom scrapy.http import Request\nfrom Scrapy.items import *\nfrom urlparse import urlparse,parse_qs\nimport scrapy\nimport json\nimport pymongo\nfrom datetime import datetime, date, time\n\nclass YoukuSpider(CrawlSpider):\n name = 'youku'\n #download_delay=3600/1000\n allowed_domins = ['http://www.youku.com', 'https://openapi.youku.com']\n start_urls = []\n\n \"\"\"\n config of youku\n \"\"\"\n client_id='696c961ded023528'\n count='100'\n max_matches=1500\n parse_videos_after_show=False\n category=u\"电影\"\n #1000/hour\n #@link http://open.youku.com/docs/newbieguide.html#id4\n rate=float(1000)/3600\n\n \"\"\"\n Apis\n \"\"\"\n shows_by_category_url='https://openapi.youku.com/v2/shows/by_category.json'\n show_category_url='https://openapi.youku.com/v2/schemas/show/category.json'\n shows_show_url='https://openapi.youku.com/v2/shows/show.json'\n shows_videos_url='https://openapi.youku.com/v2/shows/videos.json'\n\n def __init__(self, category = None, *args, **kwargs):\n self.mongo=pymongo.MongoClient()\n if hasattr(self, 'rate'):\n self.download_delay=1/getattr(self, 'rate')\n if category:\n self.category=unicode(category, 'utf-8')\n for k,v in enumerate(kwargs):\n if not hasattr(self, v):\n setattr(self, v, kwargs[v])\n\n def start_requests(self):\n if hasattr(self, 'type') and getattr(self, 'type') == 'uncompleted videos':\n requests=[]\n for show in self.mongo.scrapy.videos.find({'completed':0}):\n requests.append(self.queryShowsVideos({'show_id':show['id']}))\n return requests\n elif hasattr(self, 'show_id') and hasattr(self, 'videos'):\n #update videos of show which id is `show_id`\n return [self.queryShowsVideos({'show_id':getattr(self, 'show_id')})]\n else:\n #update all\n return [Request(self.show_category_url, callback=self.parseCategory)]\n\n def parseCategory(self, response):\n categories=json.loads(response.body)\n if 'categories' in categories:\n for category in categories['categories']:\n if hasattr(self, 'category') and self.category != category['label']:\n continue\n\t\tdata={'client_id':self.client_id, 'category':category['label'], 'page':'1', 'count':'100'}\n if 'genre' in category:\n if hasattr(self, 'year'):\n\t\t\tpass\n #data['release_year']=getattr(self, 'year')\n if hasattr(self, 'area'):\n\t\t\tpass\n #data['area']=getattr(self, 'area')\n if hasattr(self, 'orderby'):\n\t\t\tpass\n #data['orderby']=getattr(self, 'orderby')\n for genre in category['genre']:\n data['genre']=genre['label']\n yield self.queryShowsByCategory(data)\n else:\n\t\t\tyield self.queryShowsByCategory(data)\n\n def parseShowsByCategory(self, response):\n shows=json.loads(response.body)\n if 'total' in shows:\n shows_total=int(shows['total'])\n if shows_total == 0:\n return\n # add subclass(area, release_year),if total of shows greater than max_matches\n elif shows_total > self.max_matches:\n data=response.meta['formdata']\n #if 'area' not in response.meta['formdata']:\n # for area in self.schemas_unit:\n # data['area']=area\n # yield self.queryShowsByCategory(data)\n if 'release_year' not in response.meta['formdata']:\n years=range(2008, datetime.now().year+1)\n years.append(9999)\n for year in years:\n data['release_year']=str(year)\n yield self.queryShowsByCategory(data)\n\t\t return\n if 'shows' in shows:\n 
for show in shows['shows']:\n if 'id' in show:\n pass\n #yield self.queryShowsVideos({'client_id':self.client_id, 'show_id':str(show['id'])})\n else:\n\t\t print show\n continue\n showItem=ShowItem(source='youku')\n itemLoader = ShowLoader(item=showItem)\n for k in show:\n if k in showItem.fields:\n showItem[k]=show[k]\n itemLoader.add_value(k, show[k])\n yield itemLoader.load_item()\n else:\n raise\n # add subclass(area, release_year),if total of shows greater than max_matches\n for show in shows['shows']:\n #parse videos of show\n if 'id' in show and getattr(self, 'parse_videos_after_show'):\n yield self.queryShowsVideos({'client_id':self.client_id, 'show_id':str(show['id'])})\n showItem=ShowItem(source='youku')\n itemLoader = ShowLoader(item=showItem)\n for k in show:\n if k in showItem.fields:\n showItem[k]=show[k]\n itemLoader.add_value(k, show[k])\n yield itemLoader.load_item()\n #next page\n if \"formdata\" in response.meta and all(key in response.meta['formdata'] for key in ['page', 'count', 'category']):\n page=int(response.meta['formdata']['page'])\n next_page=page+1\n count=int(response.meta['formdata']['count'])\n if next_page*count < self.max_matches and page*count < shows_total:\n data=response.meta['formdata']\n data['page']=str(next_page)\n\t print data\n yield self.queryShowsByCategory(data)\n\n def queryShowsByCategory(self, formdata):\n\tscrapy.log.msg(formdata, level=scrapy.log.INFO)\n #check necessary keys\n if all(key in formdata for key in ['client_id', 'category']): return FormRequest(self.shows_by_category_url, formdata=formdata, callback=self.parseShowsByCategory, meta={'formdata':formdata})\n\n def queryShowsVideos(self, formdata):\n #check necessary keys\n if all(key in formdata for key in ['show_id']):\n formdata['count']=str(formdata['count']) if 'count' in formdata else '100'\n formdata['page']=str(formdata['page']) if 'page' in formdata else '1'\n formdata['client_id']=str(formdata['client_id']) if 'client_id' in formdata else self.client_id\n #formdata['show_videotype']=str(formdata['show_videotype']) if 'show_videotype' in formdata else '正片,预告片,花絮,MV,资讯,首映式'\n formdata['orderby']=str(formdata['orderby']) if 'orderby' in formdata else 'videoseq-asc'\n return FormRequest(self.shows_videos_url, formdata=formdata, callback=self.parseShowsVideos, meta={'formdata':formdata})\n else:\n pass\n\n\n def parseShowsVideos(self, response):\n if 'formdata' not in response.meta or 'show_id' not in response.meta['formdata']:\n return\n #init variables\n formdata=response.meta['formdata']\n videos=json.loads(response.body)\n count=int(formdata['count']) if 'count' in formdata else 20\n page=int(formdata['page']) if 'page' in formdata else 1\n total=int(videos['total']) if 'total' in videos else False\n show_id=response.meta['formdata']['show_id']\n #videos\n if 'videos' in videos:\n for video in videos['videos']:\n showVideoItem=ShowVideoItem({'show_id':show_id})\n for k in video:\n if k in showVideoItem.fields:\n showVideoItem[k]=video[k]\n yield showVideoItem\n #next page\n if total > page*count:\n formdata['page']=str(page+1)\n yield self.queryShowsVideos(formdata)\n\n def parseShow(self, response):\n pass\n"
},
{
"alpha_fraction": 0.5733369588851929,
"alphanum_fraction": 0.5760632753372192,
"avg_line_length": 35.68000030517578,
"blob_id": "61de7a1617a494ceebf005dee62c9b3a613cfc8f",
"content_id": "814704d7beb0f4b822efd4e35f8a805b519d02fc",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3668,
"license_type": "no_license",
"max_line_length": 141,
"num_lines": 100,
"path": "/Scrapy/pipelines.py",
"repo_name": "yanpeipan/scrapy",
"src_encoding": "UTF-8",
"text": "#coding=utf-8\n# Define your item pipelines here\n#\n# Don't forget to add your pipeline to the ITEM_PIPELINES setting\n# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html import os\nimport pymongo\nfrom Scrapy.items import *\nfrom os import path\nfrom datetime import datetime\nfrom scrapy.exporters import BaseItemExporter\nfrom elasticsearch import Elasticsearch\n\nclass BasePipeline(object):\n def __init__(self):\n pass\n\n\"\"\"\nSerializer\n\"\"\"\nclass SerializerPipeline(BasePipeline):\n def process_item(self, item, spider):\n itemExporter=BaseItemExporter()\n for k,v in enumerate(item):\n item[v]=itemExporter.serialize_field(item.fields[v], v, item[v])\n\n\n\"\"\"\nMongoDB\n\"\"\"\nclass MongoPipeline(BasePipeline):\n def __init__(self):\n self.mongo = pymongo.MongoClient()\n self.es = Elasticsearch([\n {'host': '127.0.0.1'},\n ])\n self.es.indices.create(index='baidupan', ignore=400)\n\n def process_item(self, item, spider):\n #upsert youku show\n if isinstance(item, ShowItem) and 'id' in item:\n result=self.mongo.scrapy.videos.update({'id':item['id']}, {'$set':dict(item)}, upsert=True)\n #upsert youku videos when 'ShowVideoItem' == item.__class__.__name__\n if isinstance(item, ShowVideoItem) and 'id' in item and 'show_id' in item:\n result = self.mongo.scrapy.videos.update({'id':item['show_id'], 'videos.id':item['id']}, {'$set':{'videos.$':dict(item)}}, False, True)\n if result['updatedExisting'] == False:\n self.mongo.scrapy.videos.update({'id':item['show_id']}, {'$addToSet':{'videos':dict(item)}}, False, True)\n if 'ProxyItem' == item.__class__.__name__:\n self.mongo.scrapy.proxy.save(dict(item))\n #upsert douban movie\n if isinstance(item, MovieItem):\n if 'comments' in item:\n self.mongo.scrapy.videos.update({'id' : item['id']}, {'$push':{'comments': {'$each': item['comments']}}})\n del(item['comments'])\n self.mongo.scrapy.videos.update({'id' : item['id']}, {'$set':dict(item)}, upsert = True)\n if isinstance(item, CelebrityItem):\n self.mongo.scrapy.celebritys.update({'id' : item['id']}, {'$set':dict(item)}, upsert = True)\n if isinstance(item, BaiduPanShareItem):\n if 'shareid' in item:\n self.es.update('baidupan', 'sharelist', item['shareid'], {\n 'doc': dict(item),\n 'doc_as_upsert': True\n }\n )\n elif 'album_id' in item:\n self.es.update('baidupan', 'album', item['album_id'], {\n 'doc': dict(item),\n 'doc_as_upsert': True\n }\n )\n if isinstance(item, BaiduPanFansItem):\n item['uk'] = item['fans_uk']\n item['uname'] = item['fans_uname']\n item.pop('fans_uk', None)\n item.pop('fans_uname', None)\n self.es.update('baidupan', 'user', item['uk'], {\n 'doc': dict(item),\n 'doc_as_upsert': True\n }\n )\n if isinstance(item, BaiduPanFollwItem):\n item['uk'] = item['follow_uk']\n item['uname'] = item['follow_uname']\n item.pop('follow_uk', None)\n item.pop('follow_uname', None)\n self.es.update('baidupan', 'user', item['uk'], {\n 'doc': dict(item),\n 'doc_as_upsert': True\n }\n )\n if isinstance(item, BaidupanHotUserItem):\n item['uk'] = item['hot_uk']\n item['uname'] = item['hot_uname']\n item.pop('hot_uk', None)\n item.pop('hot_uname', None)\n self.es.update('baidupan', 'user', item['uk'], {\n 'doc': dict(item),\n 'doc_as_upsert': True\n }\n )\n return item\n"
},
{
"alpha_fraction": 0.5713264346122742,
"alphanum_fraction": 0.5747824907302856,
"avg_line_length": 44.6032600402832,
"blob_id": "bc4c8ed90b72d00e62f0e98d720ab166b46c2719",
"content_id": "30a9d0cf1a453e89f478f2295bf95e0ba4b6e9e6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 8391,
"license_type": "no_license",
"max_line_length": 167,
"num_lines": 184,
"path": "/Scrapy/spiders/douban.py",
"repo_name": "yanpeipan/scrapy",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n\nfrom scrapy.spiders import CrawlSpider, Spider\nfrom scrapy.exceptions import CloseSpider\nfrom scrapy .selector import Selector\nfrom pymongo import MongoClient\nfrom scrapy.http import Request\nfrom Scrapy.items import *\nimport urlparse\nimport urllib\nimport json\nfrom datetime import datetime, date, time\nfrom scrapy.loader import ItemLoader\n\n\nclass DoubanSpider(CrawlSpider):\n name = 'douban'\n allowed_domins = ['http://www.douban.com', 'https://api.douban.com']\n start_urls = ['http://movie.douban.com/tag/']\n movie_tag_url = 'http://movie.douban.com/tag/'\n movie_search_url = 'https://api.douban.com/v2/movie/search'\n movei_subject_url = 'https://api.douban.com/v2/movie/subject/'\n # parse movei subject after search movie\n parse_movie_subject = False\n # rate: 40page/min\n rate = 40.0 / 60.0\n\n def __init__(self, *args, **kwargs):\n for k, v in enumerate(kwargs):\n setattr(self, v, kwargs[v])\n if hasattr(self, 'rate'):\n self.download_delay = 1 / getattr(self, 'rate')\n\n def start_requests(self):\n return [Request(self.movie_tag_url, callback=self.parseMovieTag)]\n\n def parseCollect(self, response):\n sel = Selector(response)\n links = sel.xpath('//div[@class=\"grid-view\"]/div')\n links.extract()\n for index, link in enumerate(links):\n movieId = link.xpath('div[@class=\"info\"]//a[contains(@href, \"http://movie.douban.com/subject/\")]').re(\n r\"http://movie.douban.com/subject/(\\d+)/\")\n nextLink = sel.xpath(\n '//div[@class=\"paginator\"]/span[@class=\"next\"]/a/@href').extract()\n if len(nextLink) > 0:\n yield Request(url=nextLink.pop(), callback=self.parseCollect)\n\n def parseCelebrity(self, response):\n celebrity = json.loads(response.body_as_unicode())\n if len(celebrity) > 0:\n celebrityItem = CelebrityItem()\n for k, v in celebrity.iteritems():\n celebrityItem[k] = v\n yield celebrityItem\n\n def parseComment(self, response):\n sel = Selector(response)\n movieItem = MovieItem()\n movieItem['id'] = response.meta['id']\n commentLinks = sel.xpath(\n '//div[@id=\"comments\"]/div[contains(@class, \"comment-item\")]')\n commentLinks.extract()\n comments = []\n for index, commentLink in enumerate(commentLinks):\n comment = {}\n comment['avatar'] = commentLink.xpath(\n 'div[@class=\"avatar\"]/a/img/@src').extract().pop()\n comment['uid'] = commentLink.xpath('div[@class=\"comment\"]//span[@class=\"comment-info\"]/a/@href').re(\n r\"http://movie.douban.com/people/(.*)/\").pop()\n comment['name'] = commentLink.xpath(\n 'div[@class=\"comment\"]//span[@class=\"comment-info\"]/a/text()').extract().pop()\n comment['comment'] = commentLink.xpath(\n 'div[@class=\"comment\"]/p/text()').extract().pop()\n dateStr = commentLink.xpath(\n 'div[@class=\"comment\"]/h3/span[@class=\"comment-info\"]/span/text()').re(r'\\d+-\\d+-\\d+').pop()\n comment['date'] = datetime.strptime(dateStr, \"%Y-%m-%d\")\n comment['vote'] = int(\n commentLink.xpath('div[@class=\"comment\"]//span[@class=\"comment-vote\"]/span[contains(@class, \"votes\")]/text()').extract().pop())\n comments.append(comment)\n movieItem['comments'] = comments\n yield movieItem\n paginator = sel.xpath(\n '//div[@id=\"paginator\"]/a[@class=\"next\"]/@href').extract()\n parsedUrl = urlparse(response.url)\n return # yan dd\n yield Request(url=parsedUrl.scheme + '://' + parsedUrl.netloc + parsedUrl.path + paginator.pop(), callback=self.parseComment, meta={'id': response.meta['id']})\n\n def parseReview(self, response):\n pass\n\n def parseSubject(self, response):\n sel = Selector(response)\n 
        movieItem = MovieItem()\n        movieItem['id'] = response.meta['id']\n        # parse writers\n        writerLinks = sel.xpath('//*[@id=\"info\"]/span[2]/a')\n        writerLinks.extract()\n        writers = []\n        for index, link in enumerate(writerLinks):\n            writerId = link.xpath('@href').re(r\"/celebrity/(\\d+)/\")\n            if len(writerId) > 0:\n                celebrity = writerId.pop()\n            else:\n                celebrity = None\n            writer = {'id': celebrity, 'name':\n                      link.xpath('text()').extract().pop()}\n            writers.append(writer)\n        movieItem['writers'] = writers\n        # parse imdb_id\n        imdbId = sel.xpath('//*[@id=\"info\"]/a').re(\n            r\"http://www.imdb.com/title/(tt\\d+)\")\n        if len(imdbId) > 0:\n            movieItem['imdb_id'] = imdbId.pop()\n        else:\n            movieItem['imdb_id'] = None\n        # parse tags\n        tagLinks = sel.xpath(\"//div[contains(@class, 'tags-body')]/a\")\n        tags = []\n        for i, tagLink in enumerate(tagLinks):\n            tag = tagLink.xpath('text()').extract().pop()\n            num = tagLink.xpath('span/text()').re(r\"\\((\\d+)\\)\").pop()\n            tags.append({'tag': tag, 'num': num})\n        movieItem['tags'] = tags\n        # parse recommendations\n        links = sel.xpath('//*[@id=\"recommendations\"]/div/dl/dd/a')\n        links.extract()\n        recommendations = []\n        for index, recommend in enumerate(links):\n            movieId = recommend.xpath('@href').re(r\"/subject/(\\d+)\").pop()\n            movieTitle = recommend.xpath('text()').extract().pop()\n            recommendations.append({'id': movieId, 'title': movieTitle})\n            # crawl every recommended subject as well\n            yield Request(url='https://api.douban.com/v2/movie/subject/' + movieId, callback=self.parseMovieSubject)\n        movieItem['recommendations'] = recommendations\n        yield movieItem\n\n    def parseMovieSubject(self, response):\n        movie = json.loads(response.body_as_unicode())\n        if len(movie) > 0:\n            movieItem = MovieItem()\n            for k, v in movie.iteritems():\n                movieItem[k] = v\n            yield movieItem\n            for celebrity in (movie['casts'] + movie['directors']):\n                if 'id' in celebrity:\n                    yield Request(url = 'https://api.douban.com/v2/movie/celebrity/' + celebrity['id'], callback = self.parseCelebrity)\n            yield Request(url = 'http://movie.douban.com/subject/' + movie['id'], callback = self.parseSubject, meta = {'id':movie['id']})\n            yield Request(url = 'http://movie.douban.com/subject/' + movie['id'] + '/comments', callback = self.parseComment, meta = {'id':movie['id']})\n            yield Request(url = 'http://movie.douban.com/subject/' + movie['id'] + '/reviews', callback = self.parseReview, meta = {'id':movie['id']})\n\n    def parseMovieList(self, response):\n        movies = json.loads(response.body_as_unicode())\n        for movie in movies['subjects']:\n            movieItem = MovieItem(source='douban')\n            #itemLoader = ItemLoader(item=movieItem, default_output_processor=TakeFirst())\n            for key in movie:\n                if key in movieItem.fields:\n                    movieItem[key] = movie[key]\n            yield movieItem\n            # also fetch the movie subject, when self.parse_movie_subject == True\n            if getattr(self, 'parse_movie_subject'):\n                yield Request(url='https://api.douban.com/v2/movie/subject/' + movie['id'], callback=self.parseMovieSubject)\n        if len(movies['subjects']) <= 0:\n            return\n        # next page\n        url_parts = list(urlparse.urlparse(response.url))\n        query = dict(urlparse.parse_qsl(url_parts[4]))\n        if 'start' in query:\n            query['start'] = int(query['start']) + 20\n        else:\n            query['start'] = 20\n        url_parts[4] = urllib.urlencode(query)\n        nextUrl = urlparse.urlunparse(url_parts)\n        yield Request(url=nextUrl, callback=self.parseMovieList)\n\n    def parseMovieTag(self, response):\n        sel = Selector(response)\n        items = sel.xpath('//table[@class=\"tagCol\"]//td')\n        for item in items:\n            tag = 
item.xpath('a/text()').extract().pop()\n #num=item.xpath('b/text()').re(r\"\\d+\").pop()\n yield Request(url=getattr(self, 'movie_search_url') + '?tag=' + tag, callback=self.parseMovieList)\n"
}
] | 12 |
FrozenInc/bachelor_thesis_kth_2019 | https://github.com/FrozenInc/bachelor_thesis_kth_2019 | cffb5fe8f0b0d4e05fdf3192b05c355c48026fb2 | 680cf1a49c2af4d95ab84b036a3003cbd1ef35e4 | 66a7b45942cdf602d82e80525c9dfa60f1cf167c | refs/heads/master | 2020-04-18T18:45:18.960871 | 2019-05-13T19:46:55 | 2019-05-13T19:46:55 | 167,693,984 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.7112299203872681,
"alphanum_fraction": 0.7540106773376465,
"avg_line_length": 36,
"blob_id": "dc21f6cda59ee8b6c526bd61abf6c80f993d0425",
"content_id": "52d059f01af1964fd876c7ea17116065ac31f1ae",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 187,
"license_type": "no_license",
"max_line_length": 98,
"num_lines": 5,
"path": "/README.md",
"repo_name": "FrozenInc/bachelor_thesis_kth_2019",
"src_encoding": "UTF-8",
"text": "# bachelor_thesis_kth_2019\n\nMade by Kristiyan Lazarov and Badi Mirzai for KTH Bachelor Thesis 2019. \n\nBased on Dorsa Sadigh's code that can be found on: https://github.com/dsadigh/driving-interactions\n\n\n"
},
{
"alpha_fraction": 0.6095574498176575,
"alphanum_fraction": 0.6196035742759705,
"avg_line_length": 43.38554382324219,
"blob_id": "d36ec9448690eea8b75f39c9638d14f2547b697d",
"content_id": "5ea56648167e77f82700590d28f7a7c99ee5f60b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3683,
"license_type": "no_license",
"max_line_length": 136,
"num_lines": 83,
"path": "/src/static_obj.py",
"repo_name": "FrozenInc/bachelor_thesis_kth_2019",
"src_encoding": "UTF-8",
"text": "import numpy as np\nimport utils\nimport theano as th\nimport theano.tensor as tt\nimport theano.tensor.slinalg as ts\nfrom trajectory import Trajectory\nimport feature\n\nclass Car(object):\n def __init__(self, dyn, x0, color='yellow', T=5):\n self.data0 = {'x0': x0} # init state av bilen\n self.bounds = [(-1., 1.), (-1., 1.)] # kollisions boxen for bilen\n self.T = T # hur manga tidsteg fram den ska berakna\n self.dyn = dyn # dynamiken for bilen\n self.traj = Trajectory(T, dyn) # trajectory for bilen som ar utraknad med hjalp av reward, ar i framtiden\n self.traj.x0.set_value(x0) # satter start vardet for trajectory\n self.linear = Trajectory(T, dyn) # samma sak som traj, men i tiden nu \n self.linear.x0.set_value(x0)\n self.color = color # byter farg pa bilen\n self.default_u = np.zeros(self.dyn.nu) # gor en matris av storleken av kontroll variabeln med bara nollor for att ha ne referens\n def reset(self): # resetar alla variabler till deras start varden som finns i __init__\n self.traj.x0.set_value(self.data0['x0']) \n self.linear.x0.set_value(self.data0['x0'])\n for t in range(self.T):\n self.traj.u[t].set_value(np.zeros(self.dyn.nu))\n self.linear.u[t].set_value(self.default_u)\n def move(self): # flyttar fram bilen\n pass\n #self.traj.tick()\n #self.linear.x0.set_value(self.traj.x0.get_value())\n @property # tar value av x0\n def x(self):\n return self.traj.x0.get_value()\n @property # tar value av u[0]\n def u(self):\n return self.traj.u[0].get_value()\n @u.setter # satter en ny value for u[0]\n def u(self, value):\n pass\n self.traj.u[0].set_value(value)\n def control(self, steer, gas): # gor literally ingenting\n pass\n\n\nclass SimpleOptimizerCar(Car): # expanderar Car klassen\n def __init__(self, *args, **vargs):\n Car.__init__(self, *args, **vargs)\n self.bounds = [(-1., 1.), (-1., 1.)]\n self.cache = [] # minns var den har varit forut\n self.index = 0 # minns pa vilken plats den ar\n self.sync = lambda cache: None # lamba kopierar i detta fall cache och satter den lika med None\n def reset(self): # resetar bilen \n Car.reset(self)\n self.index = 0\n @property\n def reward(self): # returnerar rewarden for bilen\n return self._reward\n @reward.setter\n def reward(self, reward): \n # tar fram reward med hjalp av input och reward fran bounded_control\n # bounded_control anvander sig av kollisions boxarna i varlden\n self._reward = reward+100.*feature.bounded_control(self.bounds)\n self.optimizer = None # skapar en tom optimizer\n def control(self, steer, gas):\n print len(self.cache) # VIKTIGT: printar ut vilken tidsteg ar nu\n if self.index<len(self.cache):\n self.u = self.cache[self.index]\n else:\n if self.optimizer is None:\n # skickar self.reward functionet till traj.reward\n # det som faktiskt blir skickad ar self._reward\n # detta behandlas i traj.reward for att far var reward for bilen\n r = self.traj.reward(self.reward)\n # skapar en instans av Maximizer for foljande reward och trajectory\n self.optimizer = utils.Maximizer(r, self.traj.u) #IMPORTANT: slow\n # maximerar rewarden med hjalp av maximizer\n self.optimizer.maximize()\n # cachar vad som har hand\n self.cache.append(self.u)\n # uppdaterar tiden nu\n self.sync(self.cache)\n # gar fram en tidsteg\n self.index += 1"
},
{
"alpha_fraction": 0.5346657633781433,
"alphanum_fraction": 0.5604163408279419,
"avg_line_length": 36.56230545043945,
"blob_id": "d2efa9510e24e53de077e610229ae498d30da93b",
"content_id": "bfa3ff49093db182372eddd51a8a1d8c5b0ab01a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 24116,
"license_type": "no_license",
"max_line_length": 204,
"num_lines": 642,
"path": "/src/plot.py",
"repo_name": "FrozenInc/bachelor_thesis_kth_2019",
"src_encoding": "UTF-8",
"text": "import pickle\nimport subprocess\nimport matplotlib\nmatplotlib.use('Agg')\nimport matplotlib.font_manager as font_manager\nimport csv\n\n#fontpath = './transfonter/cmr10.ttf' # Elis: was './transfonter/Palatino-Roman.ttf'\nfontpath = './transfonter/FCR.ttf' # Elis: was './transfonter/Palatino-Roman.ttf'\nprop = font_manager.FontProperties(fname=fontpath)\nmatplotlib.rcParams['font.family'] = prop.get_name()\n\nfrom pylab import *\n\nLIGHT_GRAY = (.6, .6, .6)\nDARK_GRAY = (.4, .4, .4)\nLIGHT_ORANGE = (1., .6, .4)\nDARK_ORANGE = (1., .4, 0.)\nPURPLE = (0.6, 0., 0.8)\nLIGHT_BLUE = (0.0, 0.8, 1.)\nDARK_BLUE = (0.0, 0.4, 1.)\n\ndt = 0.1\n\n# TODO: check the report and see which plot function is used\n\ndef ls(pattern):\n output = subprocess.check_output(\"ls {}\".format(pattern), shell=True).splitlines()\n return output\n\n\ndef load(filename, human_is_first=True):\n # load a saved .pickle file\n with open(filename) as f:\n ret = pickle.load(f)\n # human first, robot later\n # it unpacks the .pickle file\n u, x = ret\n # ur, xr = robot\n # uh, xh = human\n # uo, xo = obstacle (not used)\n\n #human_is_first = True # do this if human is a follower\n if(human_is_first):\n uh, ur, uo = u\n xh, xr, xo = x\n else:\n ur, uh, uo = u\n xr, xh, xo = x\n \n t = arange(len(xh))*dt\n if filename.split('/')[0] in ['data', 'unique_data']:\n user = '0'\n world = filename.split('/')[-1].split('-')[0]\n condition = 'purple'\n if world == 'world2':\n condition = 'purple (left)'\n elif world == 'world3':\n condition = 'purple (right)'\n traj = 0\n else:\n #user = int(filename.split('/')[1].split('-')[0][1:])\n user = 1\n traj = 1\n world = filename.split('/')[-1].split('-')[0]\n #traj = int(filename.split('/')[-1].split('-')[-1].split('.')[0])\n condition = {\n 'world0': 'gray',\n 'world1': 'orange',\n 'world2': 'orange',\n 'world3': 'blue',\n 'world4': 'orange',\n 'world5': 'gray',\n 'test': '',\n 'world_kex.pickle': 'orange'\n }[world]\n # human is uh and xh\n # robot is ur and xr\n # we need to make sure that it works with our code where we always have the follower first, but the human/robot changes\n return {\n 'uh': asarray(uh), 'ur': asarray(ur), 'xh': asarray(xh), 'xr': asarray(xr), 't': t,\n 'user': user,\n 'condition': condition,\n 'world': world,\n 'traj': traj\n }\n\ndef isempty(data):\n return len(data['t'])==0\n\ndef extend(a, w):\n if len(a)>=w:\n return a[:w]\n return concatenate([a, nan*ones(w-len(a))])\n\ndef cextend(a, w):\n if len(a)>=w:\n return a[:w]\n return concatenate([a, asarray([a[-1]]*(w-len(a)))])\n\n#worlds = ['world{}'.format(i) for i in range(6)] + ['test']\n\"\"\"worlds = ['world_kex']\ndatasets = {}\nfor w in worlds:\n #print(ls(\"saved_data/*/{}*\".format(w)))\n print(ls(\"saved_data/{}*\".format(w)))\n datasets[w] = [load(x) for x in ls(\"saved_data/{}*\".format(w))]\n datasets[w] = [data for data in datasets[w] if not isempty(data)]\n\nfor w, dataset in datasets.items():\n print '{}: {} samples'.format(w, len(dataset))\n\nprint '-'*20\n\"\"\"\n\n\n\ndef plotAnimate():\n T = 30\n #opt = load('data/world4-opt.pickle')\n opt = load('data/world_kex_swithing_leader.pickle')\n def setup():\n figure(figsize=(5, 5))\n gca().spines['right'].set_visible(False)\n gca().spines['top'].set_visible(False)\n gca().spines['left'].set_visible(True)\n gca().spines['bottom'].set_visible(True)\n gca().xaxis.set_ticks_position('bottom')\n gca().yaxis.set_ticks_position('left')\n xlim(-0.6, 1.)\n ylim(-0.4, 1.2)\n def animate(frame, w, col1, col2):\n if frame<0:\n return\n dataset = 
datasets[w]\n x = np.stack(extend(data['xr'][:, 0], T) for data in dataset)\n y = np.stack(extend(data['xh'][:, 1], T) for data in dataset)\n mx = nanmean(x, axis=0)\n my = nanmean(y, axis=0)\n s = sqrt(nanstd(x, axis=0)**2+nanstd(y, axis=0)**2)\n n = (~isnan(mx)).sum(axis=0)\n s = s/sqrt(n)\n frame = min(frame, len(my))\n fill_betweenx(my[:frame], (mx-s)[:frame], (mx+s)[:frame], color=col2)\n plot(mx[:frame], my[:frame], color=col1, linewidth=3.)\n return frame == len(my)\n def anim_purp(frame):\n if frame<0:\n return\n frame = min(frame, len(opt['xr']))\n plot(opt['xr'][:frame, 0], opt['xh'][:frame, 1], color=LIGHT_BLUE, linewidth=3)\n return frame == len(opt['xr'])\n f = [0, 0, 0]\n ind = 0\n while ind<len(f):\n setup()\n r = [False, False, False]\n r[0] = anim_purp(f[0])\n r[1] = animate(f[1], 'world_kex', DARK_ORANGE, LIGHT_ORANGE)\n r[2] = animate(f[2], 'world_kex', DARK_GRAY, LIGHT_GRAY)\n if r[ind]:\n ind += 1\n savefig('images/plot-{:04d}.png'.format(sum(f)), transparent=True)\n if ind==len(f):\n break\n f[ind] += 1\n\n\ndef plot45():\n T = 30\n plots = {}\n def setup(flag1=True, flag2=False):\n gca().spines['right'].set_visible(flag2)\n gca().spines['top'].set_visible(flag2)\n gca().spines['left'].set_visible(flag1)\n gca().spines['bottom'].set_visible(flag1)\n #gca().spines['bottom'].set_position('zero')\n #gca().spines['left'].set_smart_bounds(True)\n #gca().spines['bottom'].set_smart_bounds(True)\n gca().xaxis.set_ticks_position('bottom')\n gca().yaxis.set_ticks_position('left')\n if not flag1 and not flag2:\n tick_params(\n axis='x',\n which='both', # both major and minor ticks are affected\n bottom='off', # ticks along the bottom edge are off\n top='off', # ticks along the top edge are off\n labelbottom='off')\n tick_params(\n axis='y',\n which='both', # both major and minor ticks are affected\n bottom='off', # ticks along the bottom edge are off\n top='off', # ticks along the top edge are off\n labelbottom='off')\n gca().get_xaxis().set_ticks([])\n gca().get_yaxis().set_ticks([])\n figure(figsize=(9, 5))\n opt = load('data/world4-opt.pickle')\n subplot(1, 2, 1, aspect='equal')\n setup(False, False)\n #axis('equal')\n xlim(-0.8, 0.8)\n ylim(-0.5, 1.1)\n xlabel('\\nx of autonomous vehicle\\n(a)')\n ylabel('y of human driven vehicle')\n for w, color in [('world4', LIGHT_ORANGE), ('world5', LIGHT_GRAY)]:\n dataset = datasets[w]\n for data in dataset[::2]:\n plot(data['xr'][:, 0], data['xh'][:, 1], color=color, linewidth=0.7)#, linestyle='--' if w=='world2' else '-')\n plot(opt['xr'][:, 0], opt['xh'][:, 1], color=PURPLE, linewidth=1)#, linestyle='--' if w=='world2' else '-')\n gca().add_patch(Rectangle((-0.065, -0.065), 0.13, 0.13, color=LIGHT_BLUE))\n #annotate('intersection', xy=(0, 0), xytext=(0.1, 0.3), arrowprops=dict(facecolor='black', shrink=0.01))\n subplot(1, 2, 2, aspect='equal')\n setup(True)\n #axis('equal')\n xlim(-0.8, 0.8)\n ylim(-0.5, 1.1)\n xlabel('x of autonomous vehicle\\n(b)')\n ylabel('y of human driven vehicle')\n plots['intersection'] = gca().add_patch(Rectangle((-0.065, -0.065), 0.13, 0.13, color=LIGHT_BLUE))\n for w, col1, col2 in [('world4', DARK_ORANGE, LIGHT_ORANGE), ('world5', DARK_GRAY, LIGHT_GRAY)]:\n dataset = datasets[w]\n x = np.stack(extend(data['xr'][:, 0], T) for data in dataset)\n y = np.stack(extend(data['xh'][:, 1], T) for data in dataset)\n mx = nanmean(x, axis=0)\n my = nanmean(y, axis=0)\n s = sqrt(nanstd(x, axis=0)**2+nanstd(y, axis=0)**2)\n n = (~isnan(mx)).sum(axis=0)\n s = s/sqrt(n)\n fill_betweenx(my, mx-s, mx+s, color=col2)\n 
plots[w], = plot(mx, my, color=col1, linewidth=3.)\n plots['opt'], = plot(opt['xr'][:, 0], opt['xh'][:, 1], color=PURPLE, linewidth=3)#, linestyle='--' if w=='world2' else '-')\n figlegend((plots['opt'], plots['world5'], plots['world4'], plots['intersection']), ('Learned Human Model', 'Avoid Human', 'Affect Human', 'Intersection'), 'upper center', ncol=4, fontsize=12)\n savefig('plots/plot45.pdf')\n\ndef plot23():\n T = 50\n plots = {}\n def setup(flag1=True, flag2=False):\n gca().spines['right'].set_visible(flag2)\n gca().spines['top'].set_visible(flag2)\n gca().spines['left'].set_visible(flag1)\n gca().spines['bottom'].set_visible(flag1)\n #gca().spines['bottom'].set_position('zero')\n #gca().spines['left'].set_smart_bounds(True)\n #gca().spines['bottom'].set_smart_bounds(True)\n gca().xaxis.set_ticks_position('bottom')\n gca().yaxis.set_ticks_position('left')\n if not flag1 and not flag2:\n tick_params(\n axis='x',\n which='both', # both major and minor ticks are affected\n bottom='off', # ticks along the bottom edge are off\n top='off', # ticks along the top edge are off\n labelbottom='off')\n tick_params(\n axis='y',\n which='both', # both major and minor ticks are affected\n bottom='off', # ticks along the bottom edge are off\n top='off', # ticks along the top edge are off\n labelbottom='off')\n gca().get_xaxis().set_ticks([])\n gca().get_yaxis().set_ticks([])\n figure(figsize=(9, 7))\n opt1 = load('data/world2-opt.pickle')\n opt2 = load('data/world3-opt.pickle')\n subplot(1, 2, 1)\n setup(False, False)\n ylim(0, 2.)\n xlim(-0.4, 0.3)\n xlabel('\\n\\n(a)')\n for w, color in [('world2', LIGHT_ORANGE), ('world3', LIGHT_BLUE), ('world0', LIGHT_GRAY)]:\n dataset = datasets[w]\n for data in dataset[::2]:\n plot(data['xh'][:, 0]+(0.13 if w=='world0' else 0.), data['xh'][:, 1], color=color, linewidth=0.7)#, linestyle='--' if w=='world2' else '-')\n plot(opt1['xh'][:, 0], opt1['xh'][:, 1], color=PURPLE, linewidth=1)#, linestyle='--' if w=='world2' else '-')\n plot(opt2['xh'][:, 0], opt2['xh'][:, 1], color=PURPLE, linewidth=1)#, linestyle='--' if w=='world2' else '-')\n subplot(1, 2, 2)\n setup(True)\n ylim(0, 2.)\n xlim(-0.3, 0.3)\n xlabel('x\\n(b)')\n ylabel('y')\n for w, col1, col2 in [('world2', DARK_ORANGE, LIGHT_ORANGE), ('world3', DARK_BLUE, LIGHT_BLUE), ('world0', DARK_GRAY, LIGHT_GRAY)]:\n dataset = datasets[w]\n x = np.stack(extend(data['xh'][:, 0], T) for data in dataset)\n if w=='world0':\n x = x+0.13\n y = np.stack(extend(data['xh'][:, 1], T) for data in dataset)\n mx = nanmean(x, axis=0)\n my = nanmean(y, axis=0)\n s = nanstd(x, axis=0)\n n = (~isnan(mx)).sum(axis=0)\n s = s/sqrt(n)\n fill_betweenx(my, mx-s, mx+s, color=col2)\n plots[w], = plot(mx, my, color=col1, linewidth=3.)\n plots['opt'], = plot(opt1['xh'][:, 0], opt1['xh'][:, 1], color=PURPLE, linewidth=3)#, linestyle='--' if w=='world2' else '-')\n plot(opt2['xh'][:, 0], opt2['xh'][:, 1], color=PURPLE, linewidth=3)#, linestyle='--' if w=='world2' else '-')\n figlegend((plots['opt'], plots['world0'], plots['world2'], plots['world3']), ('Learned Human Model', 'Avoid Human', 'Affect Human (Left)', 'Affect Human (Right)'), 'upper center', ncol=4, fontsize=10)\n savefig('plots/plot23.pdf')\n\n\n\n\ndef plot01():\n T = dt*35\n def setup(flag1=True, flag2=False):\n gca().spines['right'].set_visible(flag2)\n gca().spines['top'].set_visible(flag2)\n gca().spines['left'].set_visible(flag1)\n gca().spines['bottom'].set_visible(flag1)\n #gca().spines['bottom'].set_position('zero')\n #gca().spines['left'].set_smart_bounds(True)\n 
#gca().spines['bottom'].set_smart_bounds(True)\n gca().xaxis.set_ticks_position('bottom')\n gca().yaxis.set_ticks_position('left')\n if not flag1 and not flag2:\n tick_params(\n axis='x',\n which='both', # both major and minor ticks are affected\n bottom='off', # ticks along the bottom edge are off\n top='off', # ticks along the top edge are off\n labelbottom='off')\n tick_params(\n axis='y',\n which='both', # both major and minor ticks are affected\n bottom='off', # ticks along the bottom edge are off\n top='off', # ticks along the top edge are off\n labelbottom='off')\n gca().get_xaxis().set_ticks([])\n gca().get_yaxis().set_ticks([])\n xlim(0., T)\n #opt = load('unique_data/world1-opt.pickle')\n opt = load('saved_data/world_kex.pickle')\n \n figure(figsize=(9, 7))\n subplot(2, 2, 1)\n xlabel('(a)')\n setup(False, False)\n ylabel('speed')\n for w, color in [('world_kex', LIGHT_GRAY), ('world_kex', LIGHT_ORANGE)]:\n dataset = datasets[w]\n for data in dataset[::2]:\n plot(data['t'], data['xh'][:, 3], color=color, linewidth=0.7)\n plot(opt['t'], opt['xh'][:, 3], color=PURPLE, linewidth=1.)\n subplot(2, 2, 3)\n setup()\n xlabel('time (s)\\n(b)')\n ylabel('average speed')\n for w, col1, col2 in [('world_kex', DARK_GRAY, LIGHT_GRAY), ('world_kex', DARK_ORANGE, LIGHT_ORANGE)]:\n dataset = datasets[w]\n d = np.stack(extend(data['xh'][:, 3], T/dt+1) for data in dataset)\n m = nanmean(d, axis=0)\n t = arange(len(m))*dt\n s = nanstd(d, axis=0)\n n = (~isnan(d)).sum(axis=0)\n s = s/sqrt(n)\n fill_between(t, m-s, m+s, color=col2)\n plot(t, m, color=col1, linewidth=3)\n plot(opt['t'], opt['xh'][:, 3], color=PURPLE, linewidth=3.)\n subplot(2, 2, 4)\n setup()\n xlabel('time (s)\\n(d)')\n ylabel('average latitude')\n plots = {}\n for w, col1, col2 in [('world_kex', DARK_GRAY, LIGHT_GRAY), ('world_kex', DARK_ORANGE, LIGHT_ORANGE)]:\n dataset = datasets[w]\n d = np.stack(extend(data['xh'][:, 1], T/dt+1) for data in dataset)\n m = nanmean(d, axis=0)\n t = arange(len(m))*dt\n s = nanstd(d, axis=0)\n n = (~isnan(d)).sum(axis=0)\n s = s/sqrt(n)\n fill_between(t, m-s, m+s, color=col2)\n plots[w], = plot(t, m, color=col1, linewidth=3)\n plots['opt'], = plot(opt['t'], opt['xh'][:, 1], color=PURPLE, linewidth=3.)\n subplot(2, 2, 2)\n setup(False, False)\n ylabel('latitude')\n xlabel('(c)')\n ylim(0., 3.)\n for w, color in [('world_kex', LIGHT_GRAY), ('world_kex', LIGHT_ORANGE)]:\n dataset = datasets[w]\n for data in dataset[::2]:\n plot(data['t'], data['xh'][:, 1], color=color, linewidth=0.7)\n plot(opt['t'], opt['xh'][:, 1], color=PURPLE, linewidth=1.)\n figlegend((plots['opt'], plots['world_kex'], plots['world_kex']), ('Learned Human Model', 'Avoid Human', 'Affect Human'), 'upper center', ncol=3)\n savefig('plots/world01.pdf', transparent=True)\n\ndef plotNumbers():\n def with_score(d, f):\n ret = dict(d)\n ret['score'] = f(d)\n return ret\n def measure1(data):\n L = 50\n return mean(cextend(data['xh'][:, 3], L)**2)\n\n f1 = open('csvs/dataI.csv', 'w')\n writer = csv.DictWriter(f1, extrasaction='ignore', fieldnames=[\n 'user', 'traj', 'condition', 'score'\n ])\n writer.writeheader()\n writer.writerow(with_score(load('data/world1-opt.pickle'), measure1))\n for data in datasets['world0']:\n writer.writerow(with_score(data, measure1))\n for data in datasets['world1']:\n writer.writerow(with_score(data, measure1))\n\n print 'Scenario I (mean of speed^2 over 5 seconds)'\n a = mean(asarray([measure1(data) for data in datasets['world0']]))\n b = mean(asarray([measure1(data) for data in datasets['world1']]))\n c = 
measure1(load('data/world1-opt.pickle'))\n print 'World0 (Gray)', a\n print 'World1 (Orange)', b\n print 'Optimum (Purple)', c\n print '-'*10\n\n\n def measure2(data):\n L = 50\n return mean(cextend(data['xh'][:, 0], L))\n\n f2 = open('csvs/dataII.csv', 'w')\n writer = csv.DictWriter(f2, extrasaction='ignore', fieldnames=[\n 'user', 'traj', 'condition', 'score'\n ])\n writer.writeheader()\n writer.writerow(with_score(load('data/world2-opt.pickle'), measure2))\n writer.writerow(with_score(load('data/world3-opt.pickle'), measure2))\n for data in datasets['world0']:\n writer.writerow(with_score(data, lambda x: measure2(x)+0.13))\n for data in datasets['world2']:\n writer.writerow(with_score(data, measure2))\n for data in datasets['world3']:\n writer.writerow(with_score(data, measure2))\n\n print 'Scenario II (mean of x over 5 seconds)'\n print 'World0 (Gray)', mean(asarray([measure2(data) for data in datasets['world0']]))+0.13\n print 'World2 (Orange)', mean(asarray([measure2(data) for data in datasets['world2']]))\n print 'Optimum left (Purple)', measure2(load('data/world2-opt.pickle'))\n print 'World3 (Blue)', mean(asarray([measure2(data) for data in datasets['world3']]))\n print 'Optimum right (Purple)', measure2(load('data/world3-opt.pickle'))\n print '-'*10\n\n\n\n def measure3(data):\n th = nonzero(data['xh'][:, 1]>0.)[0]\n if len(th)==0:\n return 0.\n tr = nonzero(data['xr'][:, 0]>0.)[0]\n if len(tr)==0:\n return 1.\n return 1. if th[0]<tr[0] else 0.\n\n f3 = open('csvs/dataIII.csv', 'w')\n writer = csv.DictWriter(f3, extrasaction='ignore', fieldnames=[\n 'user', 'traj', 'condition', 'score'\n ])\n writer.writeheader()\n writer.writerow(with_score(load('data/world4-opt.pickle'), measure3))\n for data in datasets['world5']:\n writer.writerow(with_score(data, measure3))\n for data in datasets['world4']:\n writer.writerow(with_score(data, measure3))\n\n print 'Scenario III (mean of x over 5 seconds)'\n print 'World5 (Gray)', mean(asarray([measure3(data) for data in datasets['world5']]))\n print 'World4 (Orange)', mean(asarray([measure3(data) for data in datasets['world4']]))\n print 'Optimum (Purple)', measure3(load('data/world4-opt.pickle'))\n\n\ndef kex_plot_1(name = \"\", human_is_first=True):\n # black and white, works\n #COLOR1 = (0.8, 0.8, 0.8)\n #COLOR2 = (0., 0., 0.)\n\n COLOR_ROBOT = (0. 
, 0.23, 0.68) # Ataneo blue\n COLOR_HUMAN = (0.29, 1.00, 0.00) # Chlorophyll green\n LIGHT_BLUE = COLOR_HUMAN\n LIGHT_ORANGE = COLOR_ROBOT\n \n T = dt*35\n def setup(flag1=True, flag2=True):\n # fix the axis\n gca().spines['right'].set_visible(flag2)\n gca().spines['top'].set_visible(flag2)\n gca().spines['left'].set_visible(flag1)\n gca().spines['bottom'].set_visible(flag1)\n gca().xaxis.set_ticks_position('bottom')\n gca().yaxis.set_ticks_position('left')\n\n if not flag1 and not flag2:\n tick_params(\n axis='x',\n which='both', # both major and minor ticks are affected\n bottom='off', # ticks along the bottom edge are off\n top='off', # ticks along the top edge are off\n labelbottom='off')\n tick_params(\n axis='y',\n which='both', # both major and minor ticks are affected\n bottom='off', # ticks along the bottom edge are off\n top='off', # ticks along the top edge are off\n labelbottom='off')\n gca().get_xaxis().set_ticks([])\n gca().get_yaxis().set_ticks([])\n xlim(0., T)\n #opt = load('saved_data/world_kex.pickle')\n\n\n #data = datasets['world_kex'][0]\n data = 0\n opt = 0\n datasets = 0\n\n\n opt = load('data/'+name+'.pickle', human_is_first)\n #print opt\n #exit()\n #print datasets\n #exit()\n data = opt\n\n # Specifies the position, name and labels of the first graph\n # Speed Graph\n figure(figsize=(9, 7))\n subplot(2, 2, 1)\n ylabel('Speed [m/s]')\n xlabel('Time [s] (a)')\n setup()\n ylim(0., 10.)\n xlim(0., 3.)\n sp1 = data['xh']\n sp2 = data['xr']\n for i in range(len(sp1)):\n sp1[i] = sp1[i]/0.13\n sp2[i] = sp2[i]/0.13\n\n s1 = plot(data['t'], data['xh'][:, 3], color=LIGHT_BLUE, linewidth=1.)\n s2 = plot(data['t'], data['xr'][:, 3], color=LIGHT_ORANGE, linewidth=1.)\n follow_match = matplotlib.patches.Patch(color=LIGHT_BLUE, label='Human')\n leader_match = matplotlib.patches.Patch(color=LIGHT_ORANGE, label='Robot')\n matplotlib.pyplot.legend(handles=[follow_match, leader_match])\n \n # Graph for the x-pos\n subplot(2, 2, 2)\n setup()\n ylabel('X Position [m]')\n xlabel('Time [s] (b)')\n ylim(0., 25.)\n xlim(0., 3.)\n x1 = plot(data['t'], data['xh'][:, 1], color=LIGHT_BLUE, linewidth=1.)\n x2 = plot(data['t'], data['xr'][:, 1], color=LIGHT_ORANGE, linewidth=1.)\n follow_match = matplotlib.patches.Patch(color=LIGHT_BLUE, label='Human')\n leader_match = matplotlib.patches.Patch(color=LIGHT_ORANGE, label='Robot')\n matplotlib.pyplot.legend(handles=[follow_match, leader_match])\n\n # Graph for the acceleration\n subplot(2, 2, 3)\n setup()\n ylabel('Acceleration [m/s^2]')\n xlabel('Time [s] (c)')\n ylim(-20., 20.)\n xlim(0., 3.)\n speed_human = data['xh'][:, 3]\n speed_robot = data['xr'][:, 3]\n accel_human = [0.0]\n accel_robot = [0.0]\n # we take the speeds and \n #print speed_human\n #exit()\n for i in range(1, len(speed_human)):\n accel_human.append((speed_human[i]-speed_human[i-1])*10)\n accel_robot.append((speed_robot[i]-speed_robot[i-1])*10)\n #print speed_human[i]\n #print accel_human[i]\n #print speed_human\n\n a1 = plot(data['t'], accel_human, color=LIGHT_BLUE, linewidth=1.)\n a2 = plot(data['t'], accel_robot, color=LIGHT_ORANGE, linewidth=1.)\n \n #a1 = plot(data['t'], data['xh'][:, 2], color=LIGHT_BLUE, linewidth=1.)\n #a2 = plot(data['t'], data['xr'][:, 2], color=LIGHT_ORANGE, linewidth=1.)\n follow_match = matplotlib.patches.Patch(color=LIGHT_BLUE, label='Human')\n leader_match = matplotlib.patches.Patch(color=LIGHT_ORANGE, label='Robot')\n matplotlib.pyplot.legend(handles=[follow_match, leader_match])\n \n \n # Graph for the y-pos\n subplot(2, 2, 4)\n setup()\n 
ylabel('Y Position [m]')\n xlabel('Time [s] (d)')\n ylim(-2., 2.)\n xlim(0., 3.)\n y1 = plot(data['t'], data['xh'][:, 0], color=LIGHT_BLUE, linewidth=1.)\n y2 = plot(data['t'], data['xr'][:, 0], color=LIGHT_ORANGE, linewidth=1.)\n follow_match = matplotlib.patches.Patch(color=LIGHT_BLUE, label='Human')\n leader_match = matplotlib.patches.Patch(color=LIGHT_ORANGE, label='Robot')\n matplotlib.pyplot.legend(handles=[follow_match, leader_match])\n \n\n \n #savefig('plots/world_behind_human_follower.pdf', transparent=True)\n #savefig('plots/world_behind_human_leader.pdf', transparent=True)\n #savefig('plots/world_infront_human_follower.pdf', transparent=True)\n #savefig('plots/world_infront_human_leader.pdf', transparent=True)\n #savefig('plots/world_switch_human_follower.pdf', transparent=True)\n #savefig('plots/world_switch_human_leader.pdf', transparent=True)\n savefig('plots/'+name+'.pdf', transparent=True)\n \n#plotAnimate()\n\n#plotNumbers()\n#show()\n#plot01()\n#plot23()\n#plot45()\n\n\n#kex_plot_1()\n\nnames = ['world_behind_human_follower',\n'world_behind_human_leader',\n'world_infront_human_follower',\n'world_infront_human_leader',\n'world_switch_human_follower',\n'world_switch_human_leader']\n\nhuman_is_first =[True, False, True, False, True, False]\n\nload_location = 'data'\nload_format = '.pickle'\n\nsave_location = 'plots'\nsave_format = '.pdf'\n\nfor i in range(len(names)):\n print \"Fixing graph: \", names[i]\n kex_plot_1(names[i], human_is_first[i])\n\n"
},
{
"alpha_fraction": 0.5544484257698059,
"alphanum_fraction": 0.5652827024459839,
"avg_line_length": 35.546241760253906,
"blob_id": "df9077b820d60142bc0f7d3604573d279de1e813",
"content_id": "65a53e0d4488c06152f4abdd57c468a6be66c9bc",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 12645,
"license_type": "no_license",
"max_line_length": 136,
"num_lines": 346,
"path": "/src/car.py",
"repo_name": "FrozenInc/bachelor_thesis_kth_2019",
"src_encoding": "UTF-8",
"text": "import numpy as np\nimport utils\nimport theano as th\nimport theano.tensor as tt\nimport theano.tensor.slinalg as ts\nfrom trajectory import Trajectory, Trajectory2\n#from trajectory import Trajectory2 as Trajectory\nimport feature\n\nclass Car(object):\n def __init__(self, dyn, x0, color='yellow', T=5):\n self.data0 = {'x0': x0} # init state av bilen\n self.bounds = [(-1., 1.), (-1., 1.)] # kollisions boxen for bilen\n self.T = T # hur manga tidsteg fram den ska berakna\n self.dyn = dyn # dynamiken for bilen \n self.traj = Trajectory2(T, dyn) # trajectory for bilen som ar utraknad med hjalp av reward, ar i framtiden\n self.traj.x0.set_value(x0) # satter start vardet for trajectory\n self.linear = Trajectory2(T, dyn) # samma sak som traj, men i tiden nu \n self.linear.x0.set_value(x0)\n self.color = color # byter farg pa bilen\n self.default_u = np.zeros(self.dyn.nu) # gor en matris av storleken av kontroll variabeln med bara nollor for att ha ne referens\n self.movable = True\n def reset(self): # resetar alla variabler till deras start varden som finns i __init__\n self.traj.x0.set_value(self.data0['x0']) \n self.linear.x0.set_value(self.data0['x0'])\n for t in range(self.T):\n self.traj.u[t].set_value(np.zeros(self.dyn.nu))\n self.linear.u[t].set_value(self.default_u)\n def move(self): # flyttar fram bilen\n self.traj.tick()\n self.linear.x0.set_value(self.traj.x0.get_value())\n @property # tar value av x0\n def x(self):\n return self.traj.x0.get_value()\n @property # tar value av u[0]\n def u(self):\n return self.traj.u[0].get_value()\n @u.setter # satter en ny value for u[0]\n def u(self, value):\n if self.movable:\n self.traj.u[0].set_value(value)\n else:\n pass\n def control(self, steer, gas): # gor literally ingenting\n pass\n\n\n\nclass UserControlledCar(Car): # klassen for en bil som kan koras av en riktig person\n # expanderar pa klassen Car\n def __init__(self, *args, **vargs):\n Car.__init__(self, *args, **vargs)\n self.bounds = [(-1., 1.), (-1., 1.)]\n self.follow = None\n self.fixed_control = None\n self._fixed_control = None\n def fix_control(self, ctrl): # kan kora med ctrl, aka input fran user\n self.fixed_control = ctrl\n self._fixed_control = ctrl\n def control(self, steer, gas): # tar in steer och gas fran user och anvander de for att kora\n if self.fixed_control is not None:\n self.u = self.fixed_control[0]\n print self.fixed_control[0]\n if len(self.fixed_control)>1:\n self.fixed_control = self.fixed_control[1:]\n elif self.follow is None:\n self.u = [steer, gas]\n else:\n u = self.follow.u[0].get_value()\n if u[1]>=1.:\n u[1] = 1.\n if u[1]<=-1.:\n u[1] = -1.\n self.u = u\n def reset(self): # resetar bilen\n Car.reset(self)\n self.fixed_control = self._fixed_control\n\nclass SimpleOptimizerCar(Car): # expanderar Car klassen\n def __init__(self, *args, **vargs):\n Car.__init__(self, *args, **vargs)\n self.bounds = [(-1., 1.), (-1., 1.)]\n self.cache = [] # minns var den har varit forut\n self.index = 0 # minns pa vilken plats den ar\n self.sync = lambda cache: None # lamba kopierar i detta fall cache och satter den lika med None\n #self.r_temp = 0\n def reset(self): # resetar bilen \n Car.reset(self)\n self.index = 0\n @property\n def reward(self): # returnerar rewarden for bilen\n return self._reward\n @reward.setter\n def reward(self, reward): \n # tar fram reward med hjalp av input och reward fran bounded_control\n # bounded_control anvander sig av kollisions boxarna i varlden\n self._reward = reward+100.*feature.bounded_control(self.bounds)\n 
        self.optimizer = None # clears the optimizer so it is rebuilt for the new reward\n    def control(self, steer, gas):\n        print len(self.cache) # IMPORTANT: prints the current time step\n        self.cache.append(None) # also serves as a step counter for the exit below\n        if len(self.cache) >= 30:\n            print \"Exiting\"\n            exit()\n        if self.movable == False:\n            self.index += 1\n\n            # just to test, but the simple optimizer seems to be able to find the other cars and get a reward depending on that\n            #print self.traj.reward(self.reward).eval()\n            #IMPORTANT\n            return\n\n\n        if self.index<len(self.cache):\n            self.u = self.cache[self.index]\n        else:\n            if self.optimizer is None:\n                # passes self.reward (really self._reward) to traj.reward,\n                # which turns it into this car's trajectory reward\n                r = self.traj.reward(self.reward)\n                # creates a Maximizer instance for this reward and trajectory\n                self.optimizer = utils.Maximizer(r, self.traj.u) #IMPORTANT: slow\n            # maximizes the reward with the maximizer\n            self.optimizer.maximize()\n            # caches what happened\n            self.cache.append(self.u)\n            # updates the current time\n            self.sync(self.cache)\n        # advances one time step\n        self.index += 1\n\nclass NestedOptimizerCar(Car):\n    # skip for now; double-check with Elis whether we should keep it\n    def __init__(self, *args, **vargs):\n        Car.__init__(self, *args, **vargs)\n        self.bounds = [(-3., 3.), (-2., 2.)]\n    @property\n    def human(self):\n        return self._human\n    @human.setter\n    def human(self, value):\n        self._human = value\n        self.traj_h = Trajectory(self.T, self.human.dyn)\n    def move(self):\n        Car.move(self)\n        self.traj_h.tick()\n    @property\n    def rewards(self):\n        return self._rewards\n    @rewards.setter\n    def rewards(self, vals):\n        self._rewards = vals\n        self.optimizer = None\n    def control(self, steer, gas):\n        if self.optimizer is None:\n            reward_h, reward_r = self.rewards\n            reward_h = self.traj_h.reward(reward_h)\n            reward_r = self.traj.reward(reward_r)\n            self.optimizer = utils.NestedMaximizer(reward_h, self.traj_h.u, reward_r, self.traj.u)\n        self.traj_h.x0.set_value(self.human.x)\n        self.optimizer.maximize(bounds = self.bounds)\n
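\n# How the leader/follower pair below fits together: the Leader runs\n# utils.NestedMaximizer to optimize its own controls while predicting the\n# follower's best response (traj_h); Follower2 then executes the plan the\n# leader predicted for it instead of running an optimizer of its own.\n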
\n# DONE: Fix collision with object\n# TODO: Fix it to actually behave as a follower\nclass NestedOptimizerCarFollower(Car):\n    # skip for now; double-check with Elis whether we should keep it\n    def __init__(self, *args, **vargs):\n        Car.__init__(self, *args, **vargs)\n        #self.bounds = [(-3., 3.), (-2., 2.)]\n        self.bounds = [(-3., 3.), (-2., 2.)]\n        self.leader1 = None\n\n    # Obstacle-----\n    @property\n    def obstacle(self):\n        return self._obstacle\n    @obstacle.setter\n    def obstacle(self, value):\n        self._obstacle = value\n        self.traj_o = Trajectory2(self.T, self.obstacle.dyn)\n        self.r_temp = 0\n    # -------------\n\n    # Leader --------\n    @property\n    def leader(self):\n        return self._leader\n    @leader.setter\n    def leader(self, value):\n        self._leader = value\n        self.traj_h = Trajectory2(self.T, self.leader.dyn)\n    #----------------\n\n    # Move and update traj for leader and obstacle---\n    def move(self):\n        Car.move(self)\n        #self.traj_h.tick()\n        #self.traj_o.tick()\n    # -----------------------------------------------\n    @property\n    def rewards(self):\n        return self._rewards\n    @rewards.setter\n    def rewards(self, vals):\n        self._rewards = vals\n        self.optimizer = None\n    def control(self, steer, gas):\n        if self.optimizer is None:\n            #if True:\n            reward_h, reward_r, reward_o = self.rewards\n            self.t_temp = reward_o\n            #reward_h = reward_h + reward_o\n\n            reward_h = self.traj_h.reward(reward_h)\n            reward_r = self.traj.reward(reward_r)\n            reward_o = self.traj_o.reward(reward_o)\n\n            self.optimizer = utils.NestedMaximizer(reward_h, self.traj_h.u, reward_r, self.traj.u)\n\n        self.traj_h.x0.set_value(self.leader.x)\n        self.traj_o.x0.set_value(self.obstacle.x)\n        self.optimizer.maximize(bounds = self.bounds)\n        #self.traj.u[0].set_value(self.traj_h.u[0].get_value())\n\n        # These should be identical, but they aren't\n        print \"What leader thinks: \", self.leader1.traj_h.u[0].get_value()\n        print \"What itself thinks: \", self.traj.u[0].get_value()\n\n        #print \"What leader thinks: \", self.leader1.traj_h.x0.get_value()\n        #print \"What itself thinks: \", self.traj.x0.get_value()\n\n        #print \"Difference between leader and follower commands: \", (self.leader1.traj_h.u[0].get_value()-self.traj.u[0].get_value())\n\n        #print self.obstacle.x\n        #print self.traj_o.reward(self.t_temp).eval()\n\n\nclass NestedOptimizerCarFollower2(Car):\n    # skip for now; double-check with Elis whether we should keep it
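\n    # Note: control() below copies the control sequence the leader already computed\n    # for this car (leader.traj_h.u) rather than running its own optimization.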
\n    def __init__(self, *args, **vargs):\n        Car.__init__(self, *args, **vargs)\n        #self.bounds = [(-2., 2.), (-2., 2.)] #[(-3., 3.), (-2., 2.)]\n        self.bounds = [(-1., 1.), (-1., 1.)] #[(-3., 3.), (-2., 2.)]\n\n        self.leader = None\n\n    # Move and update traj for leader and obstacle---\n    def move(self):\n        Car.move(self)\n    # -----------------------------------------------\n    @property\n    def rewards(self):\n        return self._rewards\n    @rewards.setter\n    def rewards(self, vals):\n        self._rewards = vals\n        self.optimizer = None\n    def control(self, steer, gas):\n        if self.optimizer is None:\n            #if True:\n            reward_h, reward_r = self.rewards\n            #reward_h = reward_h + reward_o\n\n            #reward_h = self.traj_h.reward(reward_h)\n            #reward_r = self.traj.reward(reward_r)\n            #reward_o = self.traj_o.reward(reward_o)\n\n            self.optimizer = 1\n            #self.optimizer = utils.NestedMaximizer(reward_h, self.traj_h.u, reward_r, self.traj.u)\n\n            print \"----------------------\"\n            for i in range(len(self.traj.u)):\n                self.traj.u[i].set_value(self.leader.traj_h.u[i].get_value())\n            #self.traj.u[1].set_value(self.leader.traj_h.u[1].get_value())\n            #self.traj.u[2].set_value(self.leader.traj_h.u[2].get_value())\n            #print self.leader.traj_h.u[0].get_value()\n            print self.traj.u[0].get_value()\n\n\n# DONE: Fix collision with object\n# TODO: Fix it to actually behave as a leader\nclass NestedOptimizerCarLeader(Car):\n    # skip for now; double-check with Elis whether we should keep it\n    def __init__(self, *args, **vargs):\n        Car.__init__(self, *args, **vargs)\n        #self.bounds = [(-2., 2.), (-2., 2.)] #[(-3., 3.), (-2., 2.)]\n        self.bounds = [(-1., 1.), (-1., 1.)] #[(-3., 3.), (-2., 2.)]\n\n    # Obstacle-----\n    @property\n    def obstacle(self):\n        return self._obstacle\n    @obstacle.setter\n    def obstacle(self, value):\n        self._obstacle = value\n        self.traj_o = Trajectory2(self.T, self.obstacle.dyn)\n    # -------------\n\n    # Follower --------\n    @property\n    def follower(self):\n        return self._follower\n    @follower.setter\n    def follower(self, value):\n        self._follower = value\n        self.traj_h = Trajectory2(self.T, self.follower.dyn)\n    # -----------------\n\n    # Move and update traj for follower and obstacle---\n    def move(self):\n        Car.move(self)\n        self.traj_h.tick()\n        self.traj_o.tick()\n    # -----------------------------------------------\n\n    @property\n    def rewards(self):\n        return self._rewards\n    @rewards.setter\n    def rewards(self, vals):\n        self._rewards = vals\n        self.optimizer = None\n    def control(self, steer, gas):\n        if self.optimizer is None:\n            reward_h, reward_r = self.rewards\n\n            reward_r = self.traj.reward(reward_r)\n            reward_h = self.traj_h.reward(reward_h)\n\n            # reward_r is for the leader\n            # reward_h is for the follower\n            self.optimizer = utils.NestedMaximizer(reward_h, self.traj_h.u, reward_r, self.traj.u)\n\n            self.traj_h.x0.set_value(self.follower.x)\n            self.traj_o.x0.set_value(self.obstacle.x)\n            self.optimizer.maximize(bounds = self.bounds)\n\n            #print \"Directly from leader: \", self.traj_h.u[0].get_value()\n"
},
{
"alpha_fraction": 0.5474563241004944,
"alphanum_fraction": 0.5942524075508118,
"avg_line_length": 40.532257080078125,
"blob_id": "cd7523c3bce8749c06a85ddaef87f0084624d273",
"content_id": "369a059f97d5b9224e89c0500e98d00dccfa1a44",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 25750,
"license_type": "no_license",
"max_line_length": 197,
"num_lines": 620,
"path": "/src/world.py",
"repo_name": "FrozenInc/bachelor_thesis_kth_2019",
"src_encoding": "UTF-8",
"text": "import lane\nimport car\nimport math\nimport feature\nimport dynamics\nimport visualize\nimport utils\nimport sys\nimport theano as th\nimport theano.tensor as tt\nimport numpy as np\nimport shelve\n\nimport static_obj # a copy of the car class that s unable to move\n\nth.config.optimizer_verbose = True\nth.config.allow_gc = False\nth.config.optimizer = 'fast_compile'\n\nclass Object(object):\n def __init__(self, name, x):\n self.name = name\n self.x = np.asarray(x)\n\nclass World(object):\n def __init__(self):\n # alla objekt som existerar i en world\n self.cars = []\n self.lanes = []\n self.roads = []\n self.fences = []\n self.objects = []\n def simple_reward(self, trajs=None, lanes=None, roads=None, fences=None, speed=1., speed_import=1.):\n # skapar simple reward for en bil\n if lanes is None:\n lanes = self.lanes\n if roads is None:\n roads = self.roads\n if fences is None:\n fences = self.fences\n if trajs is None:\n trajs = [c.linear for c in self.cars]\n elif isinstance(trajs, car.Car):\n trajs = [c.linear for c in self.cars if c!=trajs]\n elif isinstance(trajs, static_obj.Car):\n trajs = [c.linear for c in self.cars if c!=trajs]\n r = 0.1*feature.control()\n theta = [1., -50., 10., 10., -60.] # Simple model\n # theta = [.959, -46.271, 9.015, 8.531, -57.604]\n # skapar alla lanes, fences, roads, speed och trajectory for alla bilar\n for lane in lanes:\n r = r+theta[0]*lane.gaussian()\n for fence in fences:\n # increase the negative reward for the fences so that the cars dont go outside of the road\n #r = r+theta[1]*fence.gaussian()*1000000\n r = r+theta[1]*fence.gaussian()\n if roads == None:\n pass\n else:\n for road in roads:\n r = r+theta[2]*road.gaussian(10.)\n if speed is not None:\n r = r+speed_import*theta[3]*feature.speed(speed)\n try:#quick fix, if there is just 1 car it will not be a list\n for traj in trajs:\n r = r+theta[4]*traj.gaussian()\n except:\n r = r+theta[4]*trajs.gaussian()\n return r\n\n\ndef world_kex(know_model = True):\n dyn = dynamics.CarDynamics2(0.1)\n world = World()\n\n clane = lane.StraightLane([0., -1.], [0., 1.], 0.13)\n world.lanes += [clane, clane.shifted(1)]\n #world.roads += [clane, clane.shifted(1)]\n world.fences += [clane.shifted(2), clane.shifted(-1)]\n\n # both behind: pos=0.027 (both begind) and pos=0.028 (different behaviours)\n # both infront: pos=0.128 (different behaviours) and pos=0.129 (both infront) to switch\n # We run:\n # T_stepls = 3\n # step_per_u = 2\n # speed = 0.80\n # we have 3 different results:\n # 1. both begind\n # 2. both infront\n # 3. 
different behaviour depending on role\n # to get the distance take the pos/0.13 to get it to irl meters\n\n\n left_is_follower = False\n pos = 0.15\n\n #pos = 0.15\n #pos=0.0\n\n\n T_steps = 3\n speed = 0.80\n #pos = 0.128\n\n #pos = 0.028\n \n\n\n # THIS WORKS\n # steps per u is 2\n #left_is_follower = False\n #T_steps = 3\n #pos = 0.10 #WORKS\n #speed = 0.80\n\n # Demonstration\n left_color = \"green\"\n right_color = \"blue-dark\"\n \n # Real\n #follower_color = \"yellow\"\n #leader_color = \"red\"\n\n # Follower must alwasy be created first, otherwise it won't move\n if left_is_follower:\n world.cars.append(car.NestedOptimizerCarFollower2(dyn, [-0.13, pos, math.pi/2., speed], color=left_color, T=T_steps))\n\n world.cars.append(car.NestedOptimizerCarLeader(dyn, [-0.0, 0.0, math.pi/2., speed], color=right_color, T=T_steps))\n else:\n world.cars.append(car.NestedOptimizerCarFollower2(dyn, [-0.0, 0.0, math.pi/2., speed], color=right_color, T=T_steps))\n\n world.cars.append(car.NestedOptimizerCarLeader(dyn, [-0.13, pos, math.pi/2., speed], color=left_color, T=T_steps))\n\n #world.cars.append(car.SimpleOptimizerCar(dyn, [-0.13, 2, math.pi/4., 0.], color='blue'))\n \n # THE OBSTACLE IT WORKS WITH\n #world.cars.append(car.SimpleOptimizerCar(dyn, [-0.20, 1, math.pi/4., 0.], color='blue'))\n\n # THE OBSTACLE FOR DEMONSTRATIONS\n world.cars.append(car.SimpleOptimizerCar(dyn, [-0.20, 0.7, math.pi/4., 0.], color='gray'))\n\n # default_u for the cars\n world.cars[0].default_u = np.asarray([0., 1.])\n world.cars[1].default_u = np.asarray([0., 1.])\n \n # Reward and default for the Obstacle ---\n world.cars[2].reward = world.simple_reward(world.cars[2], speed=0.)\n world.cars[2].default_u = np.asarray([0., 0.])\n world.cars[2].movable = False\n\n # tells the cars who is the follower and who is the leader\n world.cars[0].leader = world.cars[1]\n world.cars[1].follower = world.cars[0]\n world.cars[0].obstacle = world.cars[2]\n world.cars[1].obstacle = world.cars[2]\n\n r_leader = world.simple_reward([world.cars[1].traj_h, world.cars[1].traj_o, world.cars[1].traj_o], speed=speed)\n # leader doesnt need bounded controls, only the follower\n\n r_follower = world.simple_reward([world.cars[1].traj, world.cars[1].traj_o, world.cars[1].traj_o], speed=speed)+100.*feature.bounded_control(world.cars[0].bounds)\n\n r_o = 0.\n #r_o = world.simple_reward([world.cars[0].traj_o], speed=0.)\n\n world.cars[0].rewards = (r_leader, r_follower)\n world.cars[1].rewards = (r_follower, r_leader)\n # ------------------------------------\n\n return world\n\n\n\ndef world_kex_old(know_model=True):\n dyn = dynamics.CarDynamics2(0.1)\n #dyn.dt = 1.0\n #dyn.fiction = 0.0\n world = World()\n # clane = lane.StraightLane([0., -1.], [0., 1.], 0.13)\n # world.lanes += [clane, clane.shifted(1), clane.shifted(-1)]\n # world.roads += [clane]\n # world.fences += [clane.shifted(2), clane.shifted(-2), clane.shifted(2.5), clane.shifted(-2.5)]\n\n clane = lane.StraightLane([0., -1.], [0., 1.], 0.13)\n world.lanes += [clane, clane.shifted(1)]\n #world.roads += [clane, clane.shifted(1)]\n world.fences += [clane.shifted(2), clane.shifted(-1)]\n\n human_is_follower = False\n\n # CAR 0 = Human\n # CAR 1 = Robot\n # CAR 2 = Obstacle\n\n # IMPORTANT: Folower must be created first\n # depending on what our human is, follower or leader we create the cars differently\n if human_is_follower:\n\n # Create the cars-----\n # Human Car\n #world.cars.append(car.NestedOptimizerCarFollower(dyn, [-0.13, 0.0, math.pi/2., 0.5], color='red', T=3))\n 
world.cars.append(car.NestedOptimizerCarFollower2(dyn, [-0.13, 0.0, math.pi/2., 0.5], color='red', T=3))\n \n # Robot Car\n world.cars.append(car.NestedOptimizerCarLeader(dyn, [-0., 0., math.pi/2., 0.5], color='yellow', T=3))\n #world.cars[0].leader = world.cars[1]\n #world.cars[0].leader1 = world.cars[1]\n # --------------------\n else:\n # Create the cars-----\n # Human Car\n world.cars.append(car.NestedOptimizerCarFollower2(dyn, [0., 0., math.pi/2., 0.5], color='yellow', T=3))\n world.cars.append(car.NestedOptimizerCarLeader(dyn, [-0.13, 0.0, math.pi/2., 0.5], color='red', T=3))\n # Robot Car\n #world.cars.append(car.NestedOptimizerCarFollower(dyn, [0., 0., math.pi/2., 0.5], color='yellow', T=3))\n #world.cars[1].leader = world.cars[0]\n #world.cars[1].leader1 = world.cars[0]\n # --------------------\n \n \n # Obstacle Car\n #world.cars.append(car.SimpleOptimizerCar(dyn, [-0.13, 0.5, math.pi/2., 0.5], color='blue')) # doesnt work because it cant force the car to turn around\n world.cars.append(car.SimpleOptimizerCar(dyn, [-0.13, 2, math.pi/4., 0.], color='blue'))\n # --------------------\n \n # Reward and default for the Human ---\n # speed did not change here\n # world.cars[0].reward = world.simple_reward(world.cars[0], speed=0.6)\n world.cars[0].default_u = np.asarray([0., 1.])\n # ------------------------------------\n\n # Reward and default for the Robot ---\n # world.cars[1].reward = world.simple_reward(world.cars[1], speed=0.6)\n world.cars[1].default_u = np.asarray([0., 1.])\n # ------------------------------------\n\n # Reward and default for the Obstacle ---\n world.cars[2].reward = world.simple_reward(world.cars[2], speed=0.)\n world.cars[2].default_u = np.asarray([0., 0.])\n world.cars[2].movable = False\n # ------------------------------------\n\n # CAR 0 = Human\n # CAR 1 = Robot\n # CAR 2 = Obstacle\n\n if human_is_follower:\n world.cars[0].leader = world.cars[1]\n world.cars[0].obstacle = world.cars[2]\n world.cars[1].follower = world.cars[0]\n world.cars[1].obstacle = world.cars[2]\n else:\n world.cars[1].follower = world.cars[0]\n world.cars[1].obstacle = world.cars[2]\n world.cars[0].leader = world.cars[1]\n world.cars[0].obstacle = world.cars[2]\n\n # CAR 0 = Human\n # CAR 1 = Robot\n # CAR 2 = Obstacle\n\n # TODO: Fix this part, unsure how to make the world.simplereward\n # calculates the dynamic(chaning) rewards for the cars depending on their speed and collision with other cars and obstacles\n\n #TODO: this is what is wrong, they need to be the same\n # TODO: cars dont want to slow down, find a solution that works\n if human_is_follower: \n # HUMAN\n #r_h = world.simple_reward([world.cars[1].traj], speed=0.6)+100.*feature.bounded_control(world.cars[0].bounds)+world.simple_reward(world.cars[0].traj_o, speed=0.) 
# Reward for the human\n r_h = world.simple_reward([world.cars[1].traj], speed=0.80)+100.*feature.bounded_control(world.cars[0].bounds)+1*world.simple_reward(world.cars[1].traj_o, speed=0.80) # Reward for the human\n\n # ROBOT\n \n r_r = world.simple_reward([world.cars[1].traj_h], speed=0.8)+100.*feature.bounded_control(world.cars[1].bounds)+1*world.simple_reward(world.cars[1].traj_o, speed=0.8) # Reward for the robot\n else:\n # HUMAN\n r_h = world.simple_reward([world.cars[1].traj_h], speed=0.8)+100.*feature.bounded_control(world.cars[0].bounds)+1*world.simple_reward(world.cars[1].traj_o, speed=0.8)# Reward for the human\n\n # ROBOT\n r_r = world.simple_reward([world.cars[1].traj], speed=0.8)+100.*feature.bounded_control(world.cars[1].bounds)+1*world.simple_reward(world.cars[1].traj_o, speed=0.8)# Reward for the robot\n \n r_o = 1.*feature.bounded_control(world.cars[2].bounds)\n #r_o = world.simple_reward([world.cars[0].traj_o], speed=0.)\n\n world.cars[0].rewards = (r_r, r_h, r_o)\n world.cars[1].rewards = (r_h, r_r, r_o)\n # ------------------------------------\n\n return world\n\n\ndef world_kex1(know_model=True):\n start_human= -0.13\n start_robot= -0.00\n dyn = dynamics.CarDynamics(0.1)\n world = World()\n clane = lane.StraightLane([0., -1.], [0., 1.], 0.13)\n world.lanes += [clane, clane.shifted(1), clane.shifted(-1)]\n world.roads += [clane]\n world.fences += [clane.shifted(2), clane.shifted(-2), clane.shifted(2.5), clane.shifted(-2.5)]\n #world.cars.append(car.SimpleOptimizerCar(dyn, [start_human, 0., math.pi/2., 0.5], color='red')) # red car is human\n world.cars.append(car.NestedOptimizerCar(dyn, [start_human, 0., math.pi/2., 0.5], color='red')) # red car is human\n if know_model: # yellow car is the robot that uses nested optimizer to find the way\n world.cars.append(car.NestedOptimizerCar(dyn, [start_robot, 0.0, math.pi/2., 0.5], color='yellow'))\n else:\n world.cars.append(car.SimpleOptimizerCar(dyn, [start_robot, 0.0, math.pi/2., 0.5], color='yellow')) \n world.cars[0].reward = world.simple_reward(world.cars[0], speed=0.6)\n world.cars[0].default_u = np.asarray([0., 1.])\n @feature.feature\n def goal(t, x, u): # doesnt need this\n k = -(10.*(x[0]+0.13)**2+0.5*(x[1]-2.)**2) #ASK Elis\n #print(\"--------\", x[0].auto_name)\n #print(\"--------\", x[1].auto_name)\n #exit()\n return k\n\n # object--------------\n world.cars.append(car.SimpleOptimizerCar(dyn, [-0.13, 0.5, math.pi/2., 0.0], color='blue')) # blue car is obstacle\n #world.cars.append(car.NestedOptimizerCar(dyn, [-0.13, 0.5, math.pi/2., 0.0], color='blue')) # blue car is obstacle\n #print(world.cars)\n #exit()\n world.cars[2].reward = world.simple_reward(world.cars[2], speed=0.0)\n #world.cars[2].reward = 1\n world.cars[2].default_u = np.asarray([0., 0.])\n world.cars[2].movable = False\n\n #------------------\n\n\n if know_model:\n world.cars[1].human = world.cars[0] # [1] is robot, asigns that the robot knows who is the human\n world.cars[1].obstacle = world.cars[2]\n world.cars[0].obstacle = world.cars[2]\n world.cars[0].human = world.cars[1]\n \n\n # reward with respect to the robot trajectory: world.cars[1].traj\n r_h = world.simple_reward([world.cars[1].traj], speed=0.5)+100.*feature.bounded_control(world.cars[0].bounds)+100.*feature.bounded_control(world.cars[2].bounds)\n\n #r_r = 10*goal+world.simple_reward([world.cars[1].traj_h], speed=0.5\n r_r = world.simple_reward([world.cars[1].traj_h], speed=0.5)+100.*feature.bounded_control(world.cars[2].bounds)\n\n r_h2 = world.simple_reward([world.cars[1].traj_h], 
speed=0.5)+100.*feature.bounded_control(world.cars[0].bounds)\n +100.*feature.bounded_control(world.cars[2].bounds)\n #r_r = 10*goal+world.simple_reward([world.cars[1].traj_h], speed=0.5\n r_r2 = world.simple_reward([world.cars[1].traj], speed=0.5)+100.*feature.bounded_control(world.cars[2].bounds)\n\n \n #r_obj = world.simple_reward([world.cars[1].traj_h], speed=0.0)\n world.cars[1].rewards = (r_h, r_r)#ADD: r_object\n world.cars[0].rewards = (r_h2, r_r2) #(optimize on, the car)\n #print(r_h)\n #print(r_r)\n #print(world.cars[1].rewards)\n #exit()\n else:\n r = 10*goal+world.simple_reward([world.cars[0].linear], speed=0.5)\n world.cars[1].reward = r\n \n #world.cars.append(static_obj.SimpleOptimizerCar(dyn, [-0.13, 0.5, math.pi/2., 0.0], color='blue')) # blue car is obstacle)\n\n\n return world\n\n\ndef playground():\n # detta ar en playground varld, den ar tom forutom en person bil\n dyn = dynamics.CarDynamics(0.1)\n world = World()\n clane = lane.StraightLane([0., -1.], [0., 1.], 0.17)\n world.lanes += [clane, clane.shifted(1), clane.shifted(-1)]\n world.roads += [clane]\n world.fences += [clane.shifted(2), clane.shifted(-2)]\n #world.cars.append(car.UserControlledCar(dyn, [0., 0., math.pi/2., 0.], color='orange'))\n world.cars.append(car.UserControlledCar(dyn, [-0.17, -0.17, math.pi/2., 0.], color='white'))\n return world\n\ndef irl_ground():\n dyn = dynamics.CarDynamics(0.1)\n world = World()\n clane = lane.StraightLane([0., -1.], [0., 1.], 0.13)\n world.lanes += [clane, clane.shifted(1), clane.shifted(-1)]\n world.roads += [clane]\n world.fences += [clane.shifted(2), clane.shifted(-2)]\n d = shelve.open('cache', writeback=True)\n cars = [(-.13, .1, .5, 0.13),\n (.02, .4, .8, 0.5),\n (.13, .1, .6, .13),\n (-.09, .8, .5, 0.),\n (0., 1., 0.5, 0.),\n (-.13, -0.5, 0.9, 0.13),\n (.13, -.8, 1., -0.13),\n ]\n def goal(g):\n @feature.feature\n def r(t, x, u):\n return -(x[0]-g)**2\n return r\n for i, (x, y, s, gx) in enumerate(cars):\n if str(i) not in d:\n d[str(i)] = []\n world.cars.append(car.SimpleOptimizerCar(dyn, [x, y, math.pi/2., s], color='yellow'))\n world.cars[-1].cache = d[str(i)]\n def f(j):\n def sync(cache):\n d[str(j)] = cache\n d.sync()\n return sync\n world.cars[-1].sync = f(i)\n for c, (x, y, s, gx) in zip(world.cars, cars):\n c.reward = world.simple_reward(c, speed=s)+10.*goal(gx)\n world.cars.append(car.UserControlledCar(dyn, [0., 0., math.pi/2., 0.7], color='red'))\n world.cars = world.cars[-1:]+world.cars[:-1]\n return world\n\n\n\ndef world_test():\n dyn = dynamics.CarDynamics(0.1)\n world = World()\n clane = lane.StraightLane([0., -1.], [0., 1.], 0.13)\n world.lanes += [clane, clane.shifted(1), clane.shifted(-1)]\n world.roads += [clane]\n world.fences += [clane.shifted(2), clane.shifted(-2)]\n world.cars.append(car.UserControlledCar(dyn, [-0.13, 0., math.pi/2., 0.3], color='red'))\n world.cars.append(car.SimpleOptimizerCar(dyn, [0.0, 0.5, math.pi/2., 0.3], color='yellow'))\n world.cars[1].reward = world.simple_reward(world.cars[1], speed=0.5)\n return world\n\ndef world0():\n dyn = dynamics.CarDynamics(0.1)\n world = World()\n clane = lane.StraightLane([0., -1.], [0., 1.], 0.13)\n world.lanes += [clane, clane.shifted(1), clane.shifted(-1)]\n world.roads += [clane]\n world.fences += [clane.shifted(2), clane.shifted(-2)]\n world.cars.append(car.UserControlledCar(dyn, [-0.13, 0., math.pi/2., 0.3], color='red'))\n world.cars.append(car.NestedOptimizerCar(dyn, [0.0, 0.5, math.pi/2., 0.3], color='yellow'))\n world.cars[1].human = world.cars[0]\n r_h = 
world.simple_reward([world.cars[1].traj])+100.*feature.bounded_control(world.cars[0].bounds)\n @feature.feature\n def human_speed(t, x, u):\n return -world.cars[1].traj_h.x[t][3]**2\n r_r = world.simple_reward(world.cars[1], speed=0.5)\n world.cars[1].rewards = (r_h, r_r)\n return world\n\ndef world1(flag=False):\n dyn = dynamics.CarDynamics(0.1)\n world = World()\n clane = lane.StraightLane([0., -1.], [0., 1.], 0.13)\n world.lanes += [clane, clane.shifted(1), clane.shifted(-1)]\n world.roads += [clane]\n world.fences += [clane.shifted(2), clane.shifted(-2)]\n world.cars.append(car.UserControlledCar(dyn, [-0.13, 0., math.pi/2., 0.3], color='red'))\n world.cars.append(car.NestedOptimizerCar(dyn, [0.0, 0.5, math.pi/2., 0.3], color='yellow'))\n world.cars[1].human = world.cars[0]\n if flag:\n world.cars[0].follow = world.cars[1].traj_h\n r_h = world.simple_reward([world.cars[1].traj], speed_import=.2 if flag else 1., speed=0.8 if flag else 1.)+100.*feature.bounded_control(world.cars[0].bounds)\n @feature.feature\n def human_speed(t, x, u):\n return -world.cars[1].traj_h.x[t][3]**2\n r_r = 300.*human_speed+world.simple_reward(world.cars[1], speed=0.5)\n if flag:\n world.cars[0].follow = world.cars[1].traj_h\n world.cars[1].rewards = (r_h, r_r)\n #world.objects.append(Object('cone', [0., 1.8]))\n return world\n\ndef world2(flag=False):\n dyn = dynamics.CarDynamics(0.1)\n world = World()\n clane = lane.StraightLane([0., -1.], [0., 1.], 0.13)\n world.lanes += [clane, clane.shifted(1), clane.shifted(-1)]\n world.roads += [clane]\n world.fences += [clane.shifted(2), clane.shifted(-2), clane.shifted(2.5), clane.shifted(-2.5)]\n world.cars.append(car.UserControlledCar(dyn, [0., 0., math.pi/2., 0.3], color='red'))\n world.cars.append(car.NestedOptimizerCar(dyn, [0., 0.3, math.pi/2., 0.3], color='yellow'))\n world.cars[1].human = world.cars[0]\n world.cars[0].bounds = [(-3., 3.), (-1., 1.)]\n if flag:\n world.cars[0].follow = world.cars[1].traj_h\n r_h = world.simple_reward([world.cars[1].traj])+100.*feature.bounded_control(world.cars[0].bounds)\n @feature.feature\n def human(t, x, u):\n return -(world.cars[1].traj_h.x[t][0])*10\n r_r = 300.*human+world.simple_reward(world.cars[1], speed=0.5)\n world.cars[1].rewards = (r_h, r_r)\n #world.objects.append(Object('firetruck', [0., 0.7]))\n return world\n\ndef world3(flag=False):\n dyn = dynamics.CarDynamics(0.1)\n world = World()\n clane = lane.StraightLane([0., -1.], [0., 1.], 0.13)\n world.lanes += [clane, clane.shifted(1), clane.shifted(-1)]\n world.roads += [clane]\n world.fences += [clane.shifted(2), clane.shifted(-2), clane.shifted(2.5), clane.shifted(-2.5)]\n world.cars.append(car.UserControlledCar(dyn, [0., 0., math.pi/2., 0.3], color='red'))\n world.cars.append(car.NestedOptimizerCar(dyn, [0., 0.3, math.pi/2., 0.3], color='yellow'))\n world.cars[1].human = world.cars[0]\n world.cars[0].bounds = [(-3., 3.), (-1., 1.)]\n if flag:\n world.cars[0].follow = world.cars[1].traj_h\n r_h = world.simple_reward([world.cars[1].traj])+100.*feature.bounded_control(world.cars[0].bounds)\n @feature.feature\n def human(t, x, u):\n return (world.cars[1].traj_h.x[t][0])*10\n r_r = 300.*human+world.simple_reward(world.cars[1], speed=0.5)\n world.cars[1].rewards = (r_h, r_r)\n #world.objects.append(Object('firetruck', [0., 0.7]))\n return world\n\ndef world4(flag=False):\n dyn = dynamics.CarDynamics(0.1)\n world = World()\n vlane = lane.StraightLane([0., -1.], [0., 1.], 0.13)\n hlane = lane.StraightLane([-1., 0.], [1., 0.], 0.13)\n world.lanes += [vlane, hlane]\n 
world.fences += [hlane.shifted(-1), hlane.shifted(1)]\n world.cars.append(car.UserControlledCar(dyn, [0., -.3, math.pi/2., 0.0], color='red'))\n world.cars.append(car.NestedOptimizerCar(dyn, [-0.3, 0., 0., 0.], color='yellow'))\n world.cars[1].human = world.cars[0]\n world.cars[0].bounds = [(-3., 3.), (-2., 2.)]\n if flag:\n world.cars[0].follow = world.cars[1].traj_h\n world.cars[1].bounds = [(-3., 3.), (-2., 2.)]\n @feature.feature\n def horizontal(t, x, u):\n return -x[2]**2\n r_h = world.simple_reward([world.cars[1].traj], lanes=[vlane], fences=[vlane.shifted(-1), vlane.shifted(1)]*2)+100.*feature.bounded_control(world.cars[0].bounds)\n @feature.feature\n def human(t, x, u):\n return -tt.exp(-10*(world.cars[1].traj_h.x[t][1]-0.13)/0.1)\n r_r = human*10.+horizontal*30.+world.simple_reward(world.cars[1], lanes=[hlane]*3, fences=[hlane.shifted(-1), hlane.shifted(1)]*3+[hlane.shifted(-1.5), hlane.shifted(1.5)]*2, speed=0.9)\n world.cars[1].rewards = (r_h, r_r)\n return world\n\ndef world5():\n dyn = dynamics.CarDynamics(0.1)\n world = World()\n vlane = lane.StraightLane([0., -1.], [0., 1.], 0.13)\n hlane = lane.StraightLane([-1., 0.], [1., 0.], 0.13)\n world.lanes += [vlane, hlane]\n world.fences += [hlane.shifted(-1), hlane.shifted(1)]\n world.cars.append(car.UserControlledCar(dyn, [0., -.3, math.pi/2., 0.0], color='red'))\n world.cars.append(car.NestedOptimizerCar(dyn, [-0.3, 0., 0., 0.0], color='yellow'))\n world.cars[1].human = world.cars[0]\n world.cars[1].bounds = [(-3., 3.), (-2., 2.)]\n @feature.feature\n def horizontal(t, x, u):\n return -x[2]**2\n r_h = world.simple_reward([world.cars[1].traj], lanes=[vlane], fences=[vlane.shifted(-1), vlane.shifted(1)]*2)+100.*feature.bounded_control(world.cars[0].bounds)\n @feature.feature\n def human(t, x, u):\n return -tt.exp(10*(world.cars[1].traj_h.x[t][1]-0.13)/0.1)\n r_r = human*10.+horizontal*2.+world.simple_reward(world.cars[1], lanes=[hlane]*3, fences=[hlane.shifted(-1), hlane.shifted(1)]*3+[hlane.shifted(-1.5), hlane.shifted(1.5)]*2, speed=0.9)\n world.cars[1].rewards = (r_h, r_r)\n return world\n\ndef world6(know_model=True):\n dyn = dynamics.CarDynamics(0.1)\n world = World()\n clane = lane.StraightLane([0., -1.], [0., 1.], 0.13)\n world.lanes += [clane, clane.shifted(1), clane.shifted(-1)]\n world.roads += [clane]\n world.fences += [clane.shifted(2), clane.shifted(-2), clane.shifted(2.5), clane.shifted(-2.5)]\n world.cars.append(car.SimpleOptimizerCar(dyn, [-0.13, 0., math.pi/2., 0.5], color='red'))\n if know_model:\n world.cars.append(car.NestedOptimizerCar(dyn, [0., 0.05, math.pi/2., 0.5], color='yellow'))\n else:\n world.cars.append(car.SimpleOptimizerCar(dyn, [0., 0.05, math.pi/2., 0.5], color='yellow'))\n world.cars[0].reward = world.simple_reward(world.cars[0], speed=0.6)\n world.cars[0].default_u = np.asarray([0., 1.])\n @feature.feature\n def goal(t, x, u):\n return -(10.*(x[0]+0.13)**2+0.5*(x[1]-2.)**2)\n if know_model:\n world.cars[1].human = world.cars[0]\n r_h = world.simple_reward([world.cars[1].traj], speed=0.6)+100.*feature.bounded_control(world.cars[0].bounds)\n r_r = 10*goal+world.simple_reward([world.cars[1].traj_h], speed=0.5)\n world.cars[1].rewards = (r_h, r_r)\n else:\n r = 10*goal+world.simple_reward([world.cars[0].linear], speed=0.5)\n world.cars[1].reward = r\n return world\n\ndef world_features(num=0):\n dyn = dynamics.CarDynamics(0.1)\n world = World()\n clane = lane.StraightLane([0., -1.], [0., 1.], 0.13)\n world.lanes += [clane, clane.shifted(1), clane.shifted(-1)]\n world.roads += [clane]\n 
world.fences += [clane.shifted(2), clane.shifted(-2)]\n world.cars.append(car.UserControlledCar(dyn, [-0.13, 0., math.pi/2., 0.3], color='red'))\n world.cars.append(car.Car(dyn, [0., 0.1, math.pi/2.+math.pi/5, 0.], color='yellow'))\n world.cars.append(car.Car(dyn, [-0.13, 0.2, math.pi/2.-math.pi/5, 0.], color='yellow'))\n world.cars.append(car.Car(dyn, [0.13, -0.2, math.pi/2., 0.], color='yellow'))\n #world.cars.append(car.NestedOptimizerCar(dyn, [0.0, 0.5, math.pi/2., 0.3], color='yellow'))\n return world\n\nif __name__ == '__main__':\n world = playground()\n #world.cars = world.cars[:0]\n vis = visualize.Visualizer(0.1, magnify=1.2)\n vis.main_car = None\n vis.use_world(world)\n vis.paused = True\n @feature.feature\n def zero(t, x, u):\n return 0.\n r = zero\n #for lane in world.lanes:\n # r = r+lane.gaussian()\n #for fence in world.fences:\n # r = r-3.*fence.gaussian()\n r = r - world.cars[0].linear.gaussian()\n #vis.visible_cars = [world.cars[0]]\n vis.set_heat(r)\n vis.run()\n"
},
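The leader/follower rewards in the worlds file above are sums of simple_reward terms plus bounded_control penalties, evaluated along predicted trajectories. A minimal NumPy sketch (not the repo's Theano code; poses and defaults here are illustrative) of the rotated-frame Gaussian proximity penalty that such trajectory features are built on:

import numpy as np

def gaussian_penalty(other_pose, xy, height=0.07, width=0.03):
    # other_pose = (px, py, theta): predicted pose of the car to keep distance from
    px, py, theta = other_pose
    dx, dy = px - xy[0], py - xy[1]
    # project the offset into the other car's heading frame
    dh = np.cos(theta) * dx + np.sin(theta) * dy    # along the heading
    dw = -np.sin(theta) * dx + np.cos(theta) * dy   # across the heading
    return np.exp(-0.5 * (dh**2 / height**2 + dw**2 / width**2))

print(gaussian_penalty((0.0, 0.5, np.pi / 2), [0.0, 0.45]))  # close: ~0.77
print(gaussian_penalty((0.0, 0.5, np.pi / 2), [0.5, -0.5]))  # far: ~0.0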
{
"alpha_fraction": 0.5530685782432556,
"alphanum_fraction": 0.5673285126686096,
"avg_line_length": 45.16666793823242,
"blob_id": "a627af1ec9cfae4a68797f51adfe84028b5f4235",
"content_id": "112e122425927fabf49b21ecd3d32a1a2d91ae78",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5540,
"license_type": "no_license",
"max_line_length": 120,
"num_lines": 120,
"path": "/src/trajectory.py",
"repo_name": "FrozenInc/bachelor_thesis_kth_2019",
"src_encoding": "UTF-8",
"text": "import theano as th\nimport theano.tensor as tt\nimport utils\nimport numpy as np\nimport feature\nimport lane\n\nclass Trajectory(object):\n def __init__(self, T, dyn):\n self.dyn = dyn # dynamiken for systemet\n self.T = T # hur manga steg fram den ska kolla \n self.x0 = utils.vector(dyn.nx) # state vektorn\n self.u = [utils.vector(dyn.nu) for t in range(self.T)] # matris for de nastkommande T stegen\n self.x = [] # vektor for alla states\n z = self.x0 \n for t in range(T): # hitta dynamiken for alla tidsteg den ska plannera for\n z = dyn(z, self.u[t])\n self.x.append(z)\n self.next_x = th.function([], self.x[0]) # konverterar grafen av states till en callable object\n def tick(self):\n self.x0.set_value(self.next_x()) # lagg nasta state som x_0 state\n for t in range(self.T-1):\n self.u[t].set_value(self.u[t+1].get_value()) # lagg nasta kontroll variabel som nuvarande kontroll variabel\n self.u[self.T-1].set_value(np.zeros(self.dyn.nu)) # gor en ny tid T som ar langst bak till nollor\n def gaussian(self, height=.07, width=.03): # gor gausisk fordelning\n @feature.feature # gor en alias for att implementera f\n def f(t, x, u):\n d = (self.x[t][0]-x[0], self.x[t][1]-x[1])\n theta = self.x[t][2]\n dh = tt.cos(theta)*d[0]+tt.sin(theta)*d[1]\n dw = -tt.sin(theta)*d[0]+tt.cos(theta)*d[1]\n return tt.exp(-0.5*(dh*dh/(height*height)+dw*dw/(width*width)))\n return f\n def reward(self, reward):\n # gor en lista\n # listan bestar av summan av alla rewards\n # kallas pa rekursivt\n # raknar ut varje sma reward som finns\n # summerar ihop alla sma reward till stora REWARD\n r = [reward(t, self.x[t], self.u[t]) for t in range(self.T)]\n return sum(r)\n \"\"\"\n g = [utils.grad(r[t], self.x[t]) for t in range(self.T)]\n for t in reversed(range(self.T-1)):\n g[t] = g[t]+tt.dot(g[t+1], utils.jacobian(self.x[t+1], self.x[t]))\n for t in range(self.T):\n g[t] = tt.dot(g[t], utils.jacobian(self.x[t], self.u[t]))+utils.grad(r[t], self.u[t], constants=[self.x[t]])\n return sum(r), {self.u[t]: g[t] for t in range(self.T)}\n \"\"\"\n\nclass Trajectory2(object):\n def __init__(self, Tu, dyn, step_per_u=2):\n self.dyn = dyn # dynamiken for systemet\n self.Tu = Tu\n self.step_per_u = step_per_u\n self.Tx = step_per_u*Tu\n #self.Tx = Tx # hur manga steg fram den ska kolla \n self.x0 = utils.vector(dyn.nx) # state vektorn\n self.u = [utils.vector(dyn.nu) for t in range(self.Tu)] # matris for de nastkommande T stegen\n self.x = [] # vektor for alla states\n z = self.x0 \n for idx in range(Tu): # hitta dynamiken for alla tidsteg den ska plannera for\n for idx_u in range(step_per_u):\n z = dyn(z, self.u[idx])\n self.x.append(z)\n self.next_x = th.function([], self.x[0]) # konverterar grafen av states till en callable object\n def tick(self):\n self.x0.set_value(self.next_x()) # lagg nasta state som x_0 state\n #Below is probab not needed\n for t in range(self.Tu-1):\n self.u[t].set_value(self.u[t+1].get_value()) # lagg nasta kontroll variabel som nuvarande kontroll variabel\n self.u[self.Tu-1].set_value(np.zeros(self.dyn.nu)) # gor en ny tid T som ar langst bak till nollor\n def gaussian(self, height=.07, width=.03): # gor gausisk fordelning\n @feature.feature # gor en alias for att implementera f\n def f(t, x, u):\n d = (self.x[t][0]-x[0], self.x[t][1]-x[1])\n theta = self.x[t][2]\n dh = tt.cos(theta)*d[0]+tt.sin(theta)*d[1]\n dw = -tt.sin(theta)*d[0]+tt.cos(theta)*d[1]\n return tt.exp(-0.5*(dh*dh/(height*height)+dw*dw/(width*width)))\n return f\n def reward(self, reward):\n # gor en lista\n # listan bestar 
av summan av alla rewards\n # kallas pa rekursivt\n # raknar ut varje sma reward som finns\n # summerar ihop alla sma reward till stora REWARD\n r_list = []\n for idx_u in range(self.Tu):\n for idx in range(self.step_per_u):\n t = idx_u*self.step_per_u+idx\n r_list.append(reward(t, self.x[t], self.u[idx_u]))\n #r = [reward(t, self.x[t], self.u[t]) for t in range(self.T)]\n return sum(r_list)\n \"\"\"\n g = [utils.grad(r[t], self.x[t]) for t in range(self.T)]\n for t in reversed(range(self.T-1)):\n g[t] = g[t]+tt.dot(g[t+1], utils.jacobian(self.x[t+1], self.x[t]))\n for t in range(self.T):\n g[t] = tt.dot(g[t], utils.jacobian(self.x[t], self.u[t]))+utils.grad(r[t], self.u[t], constants=[self.x[t]])\n return sum(r), {self.u[t]: g[t] for t in range(self.T)}\n \"\"\"\n\nif __name__ == '__main__':\n from dynamics import CarDynamics\n import math\n dyn = CarDynamics(0.1)\n traj = Trajectory(5, dyn)\n l = lane.StraightLane([0., -1.], [0., 1.], .1)\n reward = feature.speed()+l.feature()#+feature.speed()\n r = traj.reward(reward)\n #traj.x0.value = np.asarray([0., 0., math.pi/2, 1.])\n traj.x0.set_value([0.1, 0., math.pi/2, 1.])\n optimizer = utils.Maximizer(r, traj.u)\n import time\n t = time.time()\n for i in range(1):\n optimizer.maximize(bounds=[(-1., 1.), (-2, 2.)])\n print (time.time()-t)/1.\n print [u.get_value() for u in traj.u]\n"
},
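Trajectory.tick above implements the receding-horizon shift: after one real step, every planned control moves one slot earlier and the freed final slot is zeroed. A tiny NumPy sketch of just that shift (the horizon length and control values are made up):

import numpy as np

T, nu = 3, 2                                   # horizon length, control dimension
u = [np.array([0.1, 0.2]), np.array([0.3, 0.4]), np.array([0.5, 0.6])]

for t in range(T - 1):
    u[t] = u[t + 1].copy()                     # u[t] <- u[t+1]
u[T - 1] = np.zeros(nu)                        # fresh final step starts at zero

print(u)   # [array([0.3, 0.4]), array([0.5, 0.6]), array([0., 0.])]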
{
"alpha_fraction": 0.597726047039032,
"alphanum_fraction": 0.6036816239356995,
"avg_line_length": 35.939998626708984,
"blob_id": "b003b3e41691c93f90338887d2494987bfa82902",
"content_id": "58c81d6934f43b7c46ab26bf727e4721d17f565f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1847,
"license_type": "no_license",
"max_line_length": 109,
"num_lines": 50,
"path": "/src/feature.py",
"repo_name": "FrozenInc/bachelor_thesis_kth_2019",
"src_encoding": "UTF-8",
"text": "import theano as th\nimport theano.tensor as tt\n\nclass Feature(object): #ASK Elis\n def __init__(self, f):\n self.f = f\n def __call__(self, *args): # *args kan vara av vilken storlek som helst\n return self.f(*args) # gor att self.f blir lika med alla argument som har blivit inskickad\n def __add__(self, r):\n return Feature(lambda *args: self(*args)+r(*args)) # addera ihop self arhumenten och other argumenten\n def __radd__(self, r):\n return Feature(lambda *args: r(*args)+self(*args)) # samma sak som __add__ fast a andra hallet\n def __mul__(self, r):\n return Feature(lambda *args: self(*args)*r) # multiplicerar self arg med en siffra\n def __rmul__(self, r):\n return Feature(lambda *args: r*self(*args)) # samma sak som __mul__ men a andra hallet\n def __pos__(self, r): \n return self # returnerar minnes platsen av instansen\n def __neg__(self):\n return Feature(lambda *args: -self(*args)) # byter tecken for argumenten\n def __sub__(self, r):\n return Feature(lambda *args: self(*args)-r(*args)) # self-r\n def __rsub__(self, r):\n return Feature(lambda *args: r(*args)-self(*args)) # r-self\n\ndef feature(f):\n return Feature(f) # returnerar en instans av Feature\n\ndef speed(s=1.):\n @feature\n def f(t, x, u): # bygger up argumenten for hastighet\n return -(x[3]-s)*(x[3]-s)\n return f\n\ndef control():\n @feature\n def f(t, x, u): # bygger upp argumenten for kontrol av bilen\n return -u[0]**2-u[1]**2 \n return f\n\ndef bounded_control(bounds, width=0.05):\n @feature\n def f(t, x, u): # bygger upp argumenten for \"kollisionen\" for alla objekt\n ret = 0.\n for i, (a, b) in enumerate(bounds):\n return -tt.exp((u[i]-b)/width)-tt.exp((a-u[i])/width)\n return f\n\nif __name__ == '__main__':\n pass\n"
},
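Because Feature overloads +, -, and *, reward functions compose like ordinary arithmetic. A self-contained sketch of the same pattern without Theano (MiniFeature is a stand-in for illustration, not the repo's class):

class MiniFeature:
    def __init__(self, f): self.f = f
    def __call__(self, *args): return self.f(*args)
    def __add__(self, r): return MiniFeature(lambda *a: self(*a) + r(*a))
    def __rmul__(self, k): return MiniFeature(lambda *a: k * self(*a))

speed = MiniFeature(lambda t, x, u: -(x[3] - 1.0) ** 2)
effort = MiniFeature(lambda t, x, u: -(u[0] ** 2 + u[1] ** 2))
reward = speed + 10.0 * effort                 # combinators build one callable
print(reward(0, [0, 0, 0, 0.8], [0.1, 0.2]))   # -0.04 + 10*(-0.05) = -0.54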
{
"alpha_fraction": 0.4069293439388275,
"alphanum_fraction": 0.46603259444236755,
"avg_line_length": 32.45454406738281,
"blob_id": "15337460a8d5cec310bbb273239175a00544e523",
"content_id": "c63c0711ab3a3f9494139f3ab8cd50746fc410e4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1472,
"license_type": "no_license",
"max_line_length": 102,
"num_lines": 44,
"path": "/src/dynamics.py",
"repo_name": "FrozenInc/bachelor_thesis_kth_2019",
"src_encoding": "UTF-8",
"text": "import theano as th\nimport theano.tensor as tt\n\n# Det har ar for bilarnas states\nclass Dynamics(object):\n def __init__(self, nx, nu, f, dt=None):\n self.nx = nx # state\n self.nu = nu # kontroll\n self.dt = dt # tid\n if dt is None:\n self.f = f\n else:\n self.f = lambda x, u: x+dt*f(x, u)\n def __call__(self, x, u):\n return self.f(x, u)\n\nclass CarDynamics(Dynamics):\n def __init__(self, dt=0.1, ub=[(-3., 3.), (-1., 1.)], friction=1.):\n def f(x, u):\n return tt.stacklists([\n x[3]*tt.cos(x[2]),\n x[3]*tt.sin(x[2]),\n x[3]*u[0],\n u[1]-x[3]*friction\n ])\n Dynamics.__init__(self, 4, 2, f, dt)\n\nclass CarDynamics2(Dynamics):\n def __init__(self, dt=0.5, ub=[(-0.104, 0.104), (-2*0.0878, 0.0878)], friction=0.007943232248521):\n def f(x,u):\n return tt.stacklists([\n ((u[1]-friction*x[3]**2)*dt**2/2+x[3]*dt)*tt.cos(x[2])+x[0],\n ((u[1]-friction*x[3]**2)*dt**2/2+x[3]*dt)*tt.sin(x[2])+x[1],\n ((u[1]-friction*x[3]**2)*dt**2/2+x[3]*dt)*u[0]+x[2],\n (u[1]-friction*x[3]**2)*dt+x[3]\n ])\n Dynamics.__init__(self, 4, 2, f, dt=None)\n self.dt = dt # haxy solution for setting dt back to value after specifying f.\n\nif __name__ == '__main__':\n dyn = CarDynamics(0.1)\n x = tt.vector()\n u = tt.vector()\n dyn(x, u)\n"
}
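CarDynamics wraps its state derivative in a forward-Euler step, x_{k+1} = x_k + dt*f(x_k, u_k). A plain-NumPy sketch of one such step with the same state layout [px, py, heading, speed] and control [steer, accel] (the example numbers are made up):

import numpy as np

def f(x, u, friction=1.0):
    return np.array([
        x[3] * np.cos(x[2]),      # px'
        x[3] * np.sin(x[2]),      # py'
        x[3] * u[0],              # heading'
        u[1] - x[3] * friction,   # speed'
    ])

def step(x, u, dt=0.1):
    return x + dt * f(x, u)      # one forward-Euler integration step

x = np.array([0.0, 0.0, np.pi / 2, 0.5])    # driving straight up at speed 0.5
print(step(x, np.array([0.0, 1.0])))        # ~[0., 0.05, 1.5708, 0.55]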
] | 8 |
AsbelNgetich/python_ | https://github.com/AsbelNgetich/python_ | 8a1c9d00ca8963fb4c3f445030291f500de1eb78 | 2200a515c4fc34a92652ba94e31923ab46819c57 | 38fcbac6f8f59454df8373d64b8541f9c451bd6a | refs/heads/master | 2023-08-27T03:32:58.326507 | 2021-10-12T12:19:30 | 2021-10-12T12:19:30 | 364,983,687 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.4857482314109802,
"alphanum_fraction": 0.5344418287277222,
"avg_line_length": 21.105262756347656,
"blob_id": "1404ac7fe4647b197c4cf44bccd632b752352772",
"content_id": "207d721a27566d12b7ddca096c1a950fcc450068",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 842,
"license_type": "no_license",
"max_line_length": 78,
"num_lines": 38,
"path": "/fundamentals/functions/functions_Intermediate/update_values.py",
"repo_name": "AsbelNgetich/python_",
"src_encoding": "UTF-8",
"text": "#...........................Update values.....................................\n\n# 1. Change the value 10 in x to 15. Once you're done, x should now\n# be [ [5,2,3], [15,8,9] ].\n\nx = [[5,2,3],[10,8,9]] \n\nx[1][0] = 15\n\nprint(x)\n\n# 2. Change the last_name of the first student from 'Jordan' to 'Bryant'\n\nstudents = [\n {'first_name': 'Michael', 'last_name' : 'Jordan'},\n {'first_name' : 'John', 'last_name' : 'Rosales'}\n]\n\nstudents[0][\"last_name\"] = \"Bryant\"\n\nprint(students)\n\n# 3 In the sports_directory, change 'Messi' to 'Andres'\n\nsports_directory = {\n 'basketball' : ['Kobe', 'Jordan', 'James', 'Curry'], \n 'soccer' : ['Messi', 'Ronaldo', 'Rooney']\n}\n\nsports_directory[\"soccer\"][0] = 'Andres'\nprint(sports_directory[\"soccer\"][0])\n\n# 4 Change the value 20 in z to 30\n\nz = [ {'x': 10, 'y': 20} ]\n\nz[0][\"y\"]=30\nprint(z[0][\"y\"])\n\n\n"
},
{
"alpha_fraction": 0.5766331553459167,
"alphanum_fraction": 0.5766331553459167,
"avg_line_length": 30.215686798095703,
"blob_id": "b58001132d47a323d8519a24233fcd442ed65916",
"content_id": "b971cc93009505ccdb32c8e84609182334c43fe8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1592,
"license_type": "no_license",
"max_line_length": 111,
"num_lines": 51,
"path": "/flask_mysql/crud/users_crud_modularized/flask_app/models/user.py",
"repo_name": "AsbelNgetich/python_",
"src_encoding": "UTF-8",
"text": "from flask_app.config.mysqlconnection import connectToMySQL\n\n\n\nclass User:\n def __init__(self,data):\n self.id = data['id']\n self.first_name = data['first_name']\n self.last_name = data['last_name']\n self.email = data['email']\n self.created_at = data['created_at']\n self.updated_at = data['updated_at']\n\n @classmethod\n def get_all(cls):\n query = \"SELECT * FROM users\"\n my_conn_obj = connectToMySQL('users_schema') \n users_from_db = my_conn_obj.query_db(query)\n users = []\n for u in users_from_db:\n users.append(cls(u))\n return users\n\n @classmethod\n def save(cls,data):\n query = \"INSERT INTO users(first_name,last_name,email) VALUES (%(fn)s,%(ln)s,%(email)s);\"\n my_db = connectToMySQL(\"users_schema\")\n userid = my_db.query_db(query,data)\n return userid\n\n @classmethod\n def update_user(cls,data):\n \n query = \"UPDATE users SET first_name = %(fn)s ,last_name = %(ln)s ,email= %(email)s WHERE id = %(id)s;\"\n my_db = connectToMySQL(\"users_schema\")\n userid = my_db.query_db(query,data)\n return\n\n @classmethod\n def delete_user(cls,data):\n query= \" DELETE FROM users WHERE id= %(id)s;\"\n my_db = connectToMySQL(\"users_schema\")\n my_db.query_db(query,data)\n return\n \n @classmethod\n def get_user(cls,data):\n query= \"SELECT * FROM users WHERE id= %(id)s;\"\n my_db = connectToMySQL(\"users_schema\")\n user_info = my_db.query_db(query,data)\n return user_info\n"
},
{
"alpha_fraction": 0.643856942653656,
"alphanum_fraction": 0.6562985777854919,
"avg_line_length": 21.172412872314453,
"blob_id": "74a6eed8bd1bc8ecd0e59636326851474cbad2fc",
"content_id": "db0f5dfdb289d27aff38e6c90d2a42f278a88b93",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 643,
"license_type": "no_license",
"max_line_length": 68,
"num_lines": 29,
"path": "/flask/great_number_game/server.py",
"repo_name": "AsbelNgetich/python_",
"src_encoding": "UTF-8",
"text": "from flask import Flask, render_template, request, redirect, session\nimport random\n\napp = Flask(__name__)\napp.secret_key = \"myrandomesecretkey\"\n\[email protected]('/')\ndef index():\n # compute_pick = random.randit(1,100)\n\n return render_template('index.html')\n\[email protected]('/compare_numbers', methods=['POST'])\ndef compare_numbers():\n\n print(\"Got number Info\")\n comp_number = random.randint(1,100)\n my_number = int(request.form['number'])\n print(my_number)\n print(comp_number)\n session['comp_number']=comp_number\n session['my_number'] = my_number\n return redirect('/')\n\n\n\n\nif __name__==\"__main__\":\n app.run(debug=True)\n"
},
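The server above stores both numbers in the session but never tells the player how the guess compared. One possible way to close that loop (a sketch, not the project's code; the route name and messages are hypothetical, and the outcome is returned as a bare string so no template is needed):

from flask import Flask, request, redirect, session
import random

app = Flask(__name__)
app.secret_key = "demokey"

@app.route('/')
def index():
    return session.get('outcome', "no guess yet")

@app.route('/guess', methods=['POST'])
def guess():
    # compare the submitted number against a fresh random pick
    comp = random.randint(1, 100)
    mine = int(request.form['number'])
    if mine == comp:
        session['outcome'] = "correct!"
    elif mine < comp:
        session['outcome'] = "too low"
    else:
        session['outcome'] = "too high"
    return redirect('/')

if __name__ == "__main__":
    app.run(debug=True)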
{
"alpha_fraction": 0.5252100825309753,
"alphanum_fraction": 0.5336134433746338,
"avg_line_length": 18.83333396911621,
"blob_id": "811a8ff6363c5ea37b145b28f73b53dcf764c29b",
"content_id": "5d4b95677fdd54227933665e2fca91ff776ce23c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 238,
"license_type": "no_license",
"max_line_length": 46,
"num_lines": 12,
"path": "/fundamentals/oop/playground.py",
"repo_name": "AsbelNgetich/python_",
"src_encoding": "UTF-8",
"text": "\nclass foo():\n def __init__(self, name):\n foo.name = name\n foo.num = boo(45)\nclass boo():\n def __init__(self, number):\n boo.number = number\n\nmy_foo = foo(\"mines\")\n\n\nprint(my_foo.name + \" \" + str(foo.num.number))"
},
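The reason the fix in playground.py matters: assigning through the class name creates one shared attribute, while assigning through self gives each object its own. A quick demonstration with hypothetical classes:

class Shared:
    def __init__(self, n):
        Shared.n = n          # class attribute: one slot shared by all instances

a, b = Shared(1), Shared(2)
print(a.n, b.n)               # 2 2  -- the second call overwrote the first

class PerObject:
    def __init__(self, n):
        self.n = n            # instance attribute: per-object state

a, b = PerObject(1), PerObject(2)
print(a.n, b.n)               # 1 2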
{
"alpha_fraction": 0.6066176295280457,
"alphanum_fraction": 0.609375,
"avg_line_length": 32.96875,
"blob_id": "9c2f772a2c0188cc845f8721c62069524fbd0a5c",
"content_id": "dec6858db2b4deedff13bdc952d4ea680b2a1a9a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1088,
"license_type": "no_license",
"max_line_length": 85,
"num_lines": 32,
"path": "/fundamentals/functions/functions_Intermediate/list_iteration.py",
"repo_name": "AsbelNgetich/python_",
"src_encoding": "UTF-8",
"text": "# Create a function iterateDictionary(some_list) that, given a list of dictionaries, \n# the function loops through each dictionary in the list and prints each key and the \n# associated value. For example, given the following list:\nstudents = [\n {'first_name': 'Michael', 'last_name' : 'Jordan'},\n {'first_name' : 'John', 'last_name' : 'Rosales'},\n {'first_name' : 'Mark', 'last_name' : 'Guillen'},\n {'first_name' : 'KB', 'last_name' : 'Tonel'}\n ]\n\n\ndef iterateDictionary(some_list):\n for k in some_list:\n print(f\"first_name - {k['first_name']}, last_name - {k['last_name']}\")\n\n\niterateDictionary(students)\n\n#3 Get Values From a List of Dictionaries\nold_students = [\n {'first_name': 'Michael', 'last_name' : 'Jordan'},\n {'first_name' : 'John', 'last_name' : 'Rosales'},\n {'first_name' : 'Mark', 'last_name' : 'Guillen'},\n {'first_name' : 'KB', 'last_name' : 'Tonel'}\n ]\n\ndef iterateDictionary2(key_name, some_list):\n for k in some_list:\n print(k[key_name])\n\n\niterateDictionary2(\"last_name\",old_students)\n\n"
},
{
"alpha_fraction": 0.6734693646430969,
"alphanum_fraction": 0.704081654548645,
"avg_line_length": 24.979591369628906,
"blob_id": "bc3c535ac34c4156589107f71b99c461ecef9921",
"content_id": "0c8e1221c0d5dfc34a68de87f2199a99c3d24298",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1274,
"license_type": "no_license",
"max_line_length": 67,
"num_lines": 49,
"path": "/fundamentals/oop/user.py",
"repo_name": "AsbelNgetich/python_",
"src_encoding": "UTF-8",
"text": "\nclass User():\n def __init__(self,name):\n self.name = name\n self.account_balance = 0\n\n def make_deposit(self,amount):\n self.account_balance += amount\n \n def make_withdrawal(self,amount):\n self.account_balance -=amount\n \n def display_user_balance(self):\n return self.account_balance\n\n @classmethod\n def transfer_money(cls,transer_from, transfer_to, cash_amount):\n transer_from.make_withdrawal(cash_amount)\n transfer_to.make_deposit(cash_amount)\n\n\n# 1st user \nrunner = User(\"asbel\")\nrunner.make_deposit(50)\nrunner.make_deposit(70)\nrunner.make_deposit(100)\nrunner.make_withdrawal(40)\nprint(runner.name, runner.display_user_balance())\n\n# # 2nd user\n# jogger = User(\"kim\")\n# jogger.make_deposit(100)\n# jogger.make_deposit(240)\n# jogger.make_withdrawal(80)\n# jogger.make_withdrawal(60)\n# print(jogger.name, jogger.display_user_balance())\n\n# 3rd user\nracer = User(\"Alex\")\nracer.make_deposit(1000)\nracer.make_withdrawal(240)\nracer.make_withdrawal(100)\nracer.make_withdrawal(200)\nprint(racer.name, racer.display_user_balance())\n\n# Tranfer monery from one user to another\nUser.transfer_money(runner,racer,100)\n\nprint(runner.name, runner.display_user_balance())\nprint(racer.name, racer.display_user_balance())\n"
},
{
"alpha_fraction": 0.58908611536026,
"alphanum_fraction": 0.6009204387664795,
"avg_line_length": 26.160715103149414,
"blob_id": "76edb7b3f523ba6568fb2a73a9bf57347d273ca3",
"content_id": "947cfef61e602f436481205f23ee2118ddb7e741",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1521,
"license_type": "no_license",
"max_line_length": 130,
"num_lines": 56,
"path": "/fundamentals/oop/users_with_bank_acounts.py",
"repo_name": "AsbelNgetich/python_",
"src_encoding": "UTF-8",
"text": "class User():\n def __init__(self, name):\n self.name = name\n self.account = BankAccount(int_rate = 0.02, balance = 0)\n\n def make_deposit(self,amount):\n self.account.deposit(amount)\n return self\n \n def make_withdrawal(self,amount):\n self.account.withdraw(amount)\n return self\n \n def display_user_balance(self):\n return self.account.display_account_info()\n\nclass BankAccount():\n\n bank_accounts=[]\n\n def __init__(self, int_rate = 0.07, balance = 0):\n self.int_rate = int_rate\n self.balance = balance\n\n BankAccount.bank_accounts.append(self)\n\n def display_account_info(self):\n return \"Interest rate:\"+ str(self.int_rate) + \" balance: \" + str(self.balance) + \" Interest: \" + str(self.yieldinterest())\n \n def deposit(self,amount):\n self.balance += amount\n return self\n\n def withdraw(self,amount): \n if(self.is_insufficient(amount,self.balance)): \n print(\"You have insufficient funds. You've been fined $5.\")\n self.balance -= 5\n else: \n self.balance -= amount\n return self\n\n def yieldinterest(self):\n interest = self.int_rate * self.balance\n return interest\n\n @staticmethod\n def is_insufficient(amount,balance):\n if(balance-amount < 0):\n return True\n else:\n return False\n\nbig_bell = User(\"asbel\")\nresult = big_bell.account.deposit(1000).withdraw(200).display_account_info()\n\nprint(result)\n"
},
{
"alpha_fraction": 0.6073839068412781,
"alphanum_fraction": 0.6073839068412781,
"avg_line_length": 24.979381561279297,
"blob_id": "e779d1a4924e26e4f82075f8c4f321b9acd68e79",
"content_id": "eec63192ac8158d2ac916919c98a83b690afa99d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2519,
"license_type": "no_license",
"max_line_length": 112,
"num_lines": 97,
"path": "/flask_mysql/crud/users_schema/server.py",
"repo_name": "AsbelNgetich/python_",
"src_encoding": "UTF-8",
"text": "from flask import Flask, render_template, request, redirect\nfrom mysqlconnection import connectToMySQL # import the function that will return an instance of a connection\n\napp = Flask(__name__)\[email protected](\"/users\")\ndef index():\n mysql = connectToMySQL(\"users_schema\")\n users = mysql.query_db(\"SELECT * FROM users;\")\n return render_template(\"index.html\", all_users = users)\n\[email protected](\"/users/new\" )\ndef new_user():\n\n return render_template(\"new_user.html\")\n\[email protected](\"/users/create\", methods=[\"POST\"] )\ndef create_user():\n\n query = \"INSERT INTO users(first_name,last_name,email) VALUES (%(fn)s,%(ln)s,%(email)s);\"\n data={\n \"fn\": request.form[\"fname\"],\n \"ln\": request.form[\"lname\"],\n \"email\": request.form[\"email\"]\n }\n\n my_db = connectToMySQL(\"users_schema\")\n userid = my_db.query_db(query,data)\n\n\n return redirect(\"/users/\" + str(userid))\n\[email protected](\"/users/<int:user_id>\", methods=[\"GET\",\"POST\"] )\ndef user_info(user_id):\n \n query= \"SELECT * FROM users WHERE id= %(id)s;\"\n data={\n \"id\":user_id\n }\n\n mysql = connectToMySQL(\"users_schema\")\n user = mysql.query_db(query,data)\n \n return render_template(\"user_info.html\", my_user=user )\n\[email protected](\"/users/<int:user_id>/edit\", methods=[\"GET\",\"POST\"] )\ndef edit_user_form(user_id ):\n\n print(\"inside user edit function\")\n print(user_id)\n query= \"SELECT * FROM users WHERE id= %(id)s;\"\n data={\n \"id\":user_id\n }\n\n mysql = connectToMySQL(\"users_schema\")\n user = mysql.query_db(query,data)\n \n \n return render_template(\"edit_user.html\", my_user=user)\n\[email protected](\"/users/<int:user_id>/delete\", methods=[\"GET\",\"POST\"] )\ndef delete_user(user_id ):\n\n print(\"inside delete function\")\n print(user_id)\n\n query= \" DELETE FROM users WHERE id= %(id)s;\"\n data={\n \"id\":user_id\n }\n\n mysql = connectToMySQL(\"users_schema\")\n mysql.query_db(query,data)\n \n \n return redirect(\"/users\")\n\[email protected](\"/users/update\", methods=[\"GET\",\"POST\"] )\ndef update_user():\n#\n query = \"UPDATE users SET first_name = %(fn)s ,last_name = %(ln)s ,email= %(email)s WHERE id = %(id)s;\"\n data={\n \"fn\": request.form[\"fname\"],\n \"ln\": request.form[\"lname\"],\n \"email\": request.form[\"email\"],\n \"id\":request.form[\"id\"]\n\n }\n print(\"Inside update form\")\n print(data)\n my_db = connectToMySQL(\"users_schema\")\n my_db.query_db(query,data)\n return redirect(\"/users\")\n\n\nif __name__ == \"__main__\":\n app.run(debug=True)"
},
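Every route above passes user input through %(name)s placeholders and a data dict instead of concatenating it into the SQL string, so the driver escapes the values. A small illustration of why that matters (strings only, nothing is executed here):

email = "x' OR '1'='1"

# Unsafe: interpolation lets crafted input rewrite the query
query_bad = "SELECT * FROM users WHERE email = '" + email + "';"
print(query_bad)   # shows the mangled, injectable statement

# Safe: the placeholder form used by query_db(query, data) above
query_ok = "SELECT * FROM users WHERE email = %(email)s;"
data = {"email": email}
print(query_ok, data)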
{
"alpha_fraction": 0.5759999752044678,
"alphanum_fraction": 0.578000009059906,
"avg_line_length": 25.3157901763916,
"blob_id": "7f9b0d0eb1a646112eee4638b13fbda64c9886e4",
"content_id": "f5ac36556711df80ad6624188c451578d1af2444",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 500,
"license_type": "no_license",
"max_line_length": 92,
"num_lines": 19,
"path": "/fundamentals/oop/self_user.py",
"repo_name": "AsbelNgetich/python_",
"src_encoding": "UTF-8",
"text": "class User():\n def __init__(self,name):\n self.name = name\n self.account_balance = 0\n\n def __str__(self):\n return \"Name: \" + str(self.name) + \" Account balance: \" + str(self.account_balance)\n \n\n def make_deposit(self,amount):\n self.account_balance += amount\n return self\n \n def make_withdrawal(self,amount):\n self.account_balance -=amount\n return self\n \n def display_user_balance(self):\n return self.account_balance\n"
},
{
"alpha_fraction": 0.5965434908866882,
"alphanum_fraction": 0.6209773421287537,
"avg_line_length": 30.018518447875977,
"blob_id": "39c5af31258f3b371ec6f95b9aeae3234e6182e0",
"content_id": "4a62601402eb8c31f02f01c50953cc4e2754f6a9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1678,
"license_type": "no_license",
"max_line_length": 130,
"num_lines": 54,
"path": "/fundamentals/oop/bank_account.py",
"repo_name": "AsbelNgetich/python_",
"src_encoding": "UTF-8",
"text": "\nclass BankAccount():\n bank_accounts=[]\n\n def __init__(self,int_rate = 0.07,balance = 0):\n self.int_rate = int_rate\n self.balance = balance\n BankAccount.bank_accounts.append(self)\n def display_account_info(self):\n return \"Interest rate:\"+ str(self.int_rate) + \" balance: \" + str(self.balance) + \" Interest: \" + str(self.yieldinterest())\n \n def deposit(self,amount):\n self.balance += amount\n return self\n\n def withdraw(self,amount): \n if(self.is_insufficient(amount,self.balance)): \n print(\"You have insufficient funds. You've been fined $5.\")\n self.balance -= 5\n else: \n self.balance -= amount\n return self\n\n def yieldinterest(self):\n interest = self.int_rate * self.balance\n return interest\n\n @staticmethod\n def is_insufficient(amount,balance):\n if(balance-amount < 0):\n return True\n else:\n return False\n\n @classmethod\n def all_accounts(cls):\n for account in cls.bank_accounts:\n print(f\"{str(account)} Interest rate: {account.int_rate} balance: {account.balance}\")\n \n return cls\n\n def __str__(cls):\n return \"Interest rate: \" + str(cls.int_rate) + \" Account balance: \" + str(cls.balance)\n \n\nhobby_account = BankAccount(.08, 70)\nhobby_account.deposit(100).deposit(200).deposit(50).withdraw(20)\nprint(hobby_account.display_account_info())\n\nsavings_account = BankAccount(.05,300)\nsavings_account.deposit(500).deposit(400).withdraw(80).withdraw(60).withdraw(100).withdraw(40)\nprint(savings_account.display_account_info())\n\n\nBankAccount.all_accounts()\n\n\n"
},
{
"alpha_fraction": 0.6254355311393738,
"alphanum_fraction": 0.6254355311393738,
"avg_line_length": 25.90625,
"blob_id": "94a8911ec20a42ab075f23cfb6000e90e4bc7015",
"content_id": "017fd55ec43f5a7486098e7142f25923071e0f83",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1722,
"license_type": "no_license",
"max_line_length": 75,
"num_lines": 64,
"path": "/flask_mysql/crud/users_crud_modularized/flask_app/controllers/users.py",
"repo_name": "AsbelNgetich/python_",
"src_encoding": "UTF-8",
"text": "\nfrom flask_app import app\nfrom flask import render_template,redirect,request,session,flash\nfrom flask_app.config.mysqlconnection import connectToMySQL\nfrom flask_app.models.user import User\n\n# All users\[email protected](\"/users\")\ndef index():\n return render_template(\"index.html\", all_users = User.get_all())\n\n# Create user template/route\[email protected](\"/users/new\" )\ndef new_user():\n return render_template(\"new_user.html\")\n\n# Create user action route\[email protected](\"/users/create\", methods=[\"POST\"] )\ndef create_user():\n data={\n \"fn\": request.form[\"fname\"],\n \"ln\": request.form[\"lname\"],\n \"email\": request.form[\"email\"]\n }\n userid = User.save(data)\n return redirect(\"/users/\" + str(userid))\n\n# Single user information route\[email protected](\"/users/<int:user_id>\", methods=[\"GET\",\"POST\"] )\ndef user_info(user_id): \n data={\n \"id\":user_id\n } \n return render_template(\"user_info.html\", my_user= User.get_user(data) )\n\n\n#route for editing user\[email protected](\"/users/<int:user_id>/edit\", methods=[\"GET\",\"POST\"] )\ndef edit_user_form(user_id ):\n data={\n \"id\":user_id\n }\n \n return render_template(\"edit_user.html\", my_user= User.get_user(data))\n\n# delete user route\[email protected](\"/users/<int:user_id>/delete\", methods=[\"GET\",\"POST\"] )\ndef delete_user(user_id ): \n data={\n \"id\":user_id\n }\n User.delete_user(data)\n return redirect(\"/users\")\n\n# update user route\[email protected](\"/users/update\", methods=[\"GET\",\"POST\"] )\ndef update_user():\n data={\n \"fn\": request.form[\"fname\"],\n \"ln\": request.form[\"lname\"],\n \"email\": request.form[\"email\"],\n \"id\":request.form[\"id\"]\n }\n User.update_user(data)\n return redirect(\"/users\")"
},
{
"alpha_fraction": 0.5401869416236877,
"alphanum_fraction": 0.6280373930931091,
"avg_line_length": 22.2391300201416,
"blob_id": "e341bbed8e2cf4429f72a82bda7e6a24dcdd52f0",
"content_id": "9058e4dacd0dcdc774b3b1a37fe28ee6fd41c2bd",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1070,
"license_type": "no_license",
"max_line_length": 75,
"num_lines": 46,
"path": "/fundamentals/fundamentals/for_loop_basic1.py",
"repo_name": "AsbelNgetich/python_",
"src_encoding": "UTF-8",
"text": "\n\n# 1. Print intergers from 0 to 150\nfor i in range(0,151):\n print(i)\n\n# 2.Print all the multiples of 5 from 5 to 1,000\nfor i in range(5,1001):\n if (i%5)==0:\n print(i)\n\n# 3.Print integers 1 to 100. If divisible by 5, print \"Coding\" \n# instead. If divisible by 10, print \"Coding Dojo\".\nfor i in range(1,101):\n if (i%10)==0:\n print(\"Doding Dojo\")\n elif (i%5)==0:\n print(\"Coding\")\n else:\n print(i)\n\n# 4. Add odd integers from 0 to 500,000, and print the final sum.\nsum = 0\nfor i in range(0,500001):\n if(i%2)!=0:\n sum +=i\nprint(sum)\n\n#5 Print positive numbers starting at 2018, counting down by fours.\nfor i in range(2018,0,-4):\n print(i)\n\n#6 Set three variables: lowNum, highNum, mult. Starting at lowNum\n# and going through highNum, print only the integers that are a multiple\n# of mult. For example, if lowNum=2, highNum=9, and mult=3, the loop should\n# print 3, 6, 9 (on successive lines)\n\nlowNum=1\nhighNum=21\nmult=7\n\nfor i in range(1,22):\n if(i%mult)==0:\n print(i)\n\nx = [5,34,10,1,6]\nx += [100]\nprint(x)"
},
{
"alpha_fraction": 0.6069546937942505,
"alphanum_fraction": 0.6343519687652588,
"avg_line_length": 22.170732498168945,
"blob_id": "8902b56cd7db88a2e3d7a7e2053b9e8dfe107fb6",
"content_id": "582ef8c2c84122b65baf72789ac9408f439e8e9c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 949,
"license_type": "no_license",
"max_line_length": 63,
"num_lines": 41,
"path": "/flask/fundamentals/hello_flask/hello.py",
"repo_name": "AsbelNgetich/python_",
"src_encoding": "UTF-8",
"text": "from flask import Flask\napp = Flask(__name__)\n\n#localhost:5000/ - have it say \"Hello World!\"\[email protected]('/')\ndef hello_word():\n return 'Hello World!'\n\n#localhost:5000/dojo - have it say \"Dojo!\"\[email protected]('/dojo')\ndef coding_dojo():\n return 'Dojo!'\n\n#localhost:5000/say/flask - have it say \"Hi Flask!\"\[email protected]('/say/<name>')\ndef say_flask(name):\n if (name.isnumeric()):\n return \"Please reenter a correct name\" \n else:\n return f'Hi {name}!'\n \n\n\n# localhost:5000/repeat/35/hello - have it say \"hello\" 35 times\[email protected]('/repeat/<number>/<greetings>')\ndef multiply_hello(number,greetings):\n if(number.isnumeric() and greetings.isnumeric() == False):\n return int(number) * (greetings + \" \")\n else:\n return \"Error occured. Please type the correct format\" \n\[email protected](404)\ndef handle_404(e):\n return 'Sorry! No response. Try again'\n\n\n\n\n\nif __name__==\"__main__\":\n app.run(debug=True)"
},
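Instead of accepting any string and checking isnumeric() by hand, Flask's route converters can enforce the type: <int:number> matches only integers and returns a 404 for everything else before the view runs. A sketch of the same /repeat route using a converter (standalone app; the names are illustrative):

from flask import Flask

app = Flask(__name__)

@app.route('/repeat/<int:number>/<string:greeting>')
def repeat(number, greeting):
    # number is already an int here; no manual validation needed
    return (greeting + " ") * number

if __name__ == "__main__":
    app.run(debug=True)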
{
"alpha_fraction": 0.6528878808021545,
"alphanum_fraction": 0.6993204951286316,
"avg_line_length": 25.765151977539062,
"blob_id": "466d65808db557f0d58b0c15a8f0bf40f8505ef7",
"content_id": "0325aa6348cab29cc8bd8ed21461a74e4165513b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3532,
"license_type": "no_license",
"max_line_length": 120,
"num_lines": 132,
"path": "/fundamentals/functions/functions_basic_I.py",
"repo_name": "AsbelNgetich/python_",
"src_encoding": "UTF-8",
"text": "#1 This function returns a 5 that gets printed on the screen\ndef number_of_food_groups():\n return 5\nprint(number_of_food_groups())\n\n#2 prints an error because the 1st function is not defined \ndef number_of_military_branches():\n return 5\nprint(number_of_days_in_a_week_silicon_or_triangle_sides() + number_of_military_branches())\n\n#3 prints 5 because you can only have one return statement in a function. The value\n# of the first return statement is returned and the rest is skipped.\ndef number_of_books_on_hold():\n return 5\n return 10\nprint(number_of_books_on_hold())\n\n#4 Prints 5 because once the program executes the return statement, the rest \n# of the statements in that function are skipped.\ndef number_of_fingers():\n return 5\n print(10)\nprint(number_of_fingers())\n\n#5 Prints 5 then none because when the function gets called on line 28 it prints\n# then number 5 but it doesnt have a return statement. So nothing gets assigned to \n# the variable x since nothing is being returned\ndef number_of_great_lakes():\n print(5)\nx = number_of_great_lakes()\nprint(x)\n\n#6 Throws and error because add function isn't returning values that could be added in \n# the print statement\ndef add(b,c):\n print(b+c)\n\nprint(add(1,2) + add(2,3))\n\n#7 Prints 25 because it casts the numbers to a string and concatenates them\ndef concatenate(b,c):\n return str(b)+str(c)\nprint(concatenate(2,5))\n\n#8 Prints the value of b then returns 10 because b is greater than 10\ndef number_of_oceans_or_fingers_or_continents():\n b = 100\n print(b)\n if b < 10:\n return 5\n else:\n return 10\n return 7\nprint(number_of_oceans_or_fingers_or_continents())\n\n#9 Prints 7 then 14 then 21\n# b is less than c, so it returns 7\n# b is greater than c so it returns 21\n# The last statement adds the returned values\ndef number_of_days_in_a_week_silicon_or_triangle_sides(b,c):\n if b<c:\n return 7\n else:\n return 14\n return 3\nprint(number_of_days_in_a_week_silicon_or_triangle_sides(2,3))\nprint(number_of_days_in_a_week_silicon_or_triangle_sides(5,3))\nprint(number_of_days_in_a_week_silicon_or_triangle_sides(2,3) + number_of_days_in_a_week_silicon_or_triangle_sides(5,3))\n\n#10 Prints 8 because it returns the sum of the two values\ndef addition(b,c):\n return b+c\n return 10\nprint(addition(3,5))\n\n#11 This code would give an error because in line 80 a string is being compared to a number\n# if it were correct syntax, the print out would have been 500 500 True or false 500\n# because the variable b in the function foobar() would have been assigned a boolean value\n# its also only local in foobar\nb = 500\nprint(b)\ndef foobar():\n b = True # \"keyword operator from-rainbow\">= 300\n print(b)\nprint(b)\nfoobar()\nprint(b)\n\n#12 Prints 500 500 300 500 foobar() returns the 300 but doesnt get assigned to anything\nb = 500\nprint(b)\ndef foobar():\n b = 300\n print(b)\n return b\nprint(b)\nfoobar()\nprint(b)\n\n#13 Prints 500 500 300 300 foobar() returns the 300 but doesnt get assigned to b\nb = 500\nprint(b)\ndef foobar():\n b = 300\n print(b)\n return b\nprint(b)\nb=foobar()\nprint(b)\n\n#14 Prints 1 then calls on bar() which prints 3 then continues to print 2\ndef foo():\n print(1)\n bar()\n print(2)\ndef bar():\n print(3)\nfoo()\n\n#15 prints 1 then calls bar()-which prints 3 and returns 5. which is then printed\n# and returns 10 that's assigned to y and printed\n#1 3 5 10\ndef foo():\n print(1)\n x = bar()\n print(x)\n return 10\ndef bar():\n print(3)\n return 5\ny = foo()\nprint(y)"
},
{
"alpha_fraction": 0.5571512579917908,
"alphanum_fraction": 0.5671160817146301,
"avg_line_length": 34.9052619934082,
"blob_id": "76dfa8f9bd8c4ee9a2a6394ad6ee495638d87584",
"content_id": "7f5b63ad63900a521c3d22c8de5ee8de543d80f8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3412,
"license_type": "no_license",
"max_line_length": 136,
"num_lines": 95,
"path": "/flask_mysql/validation/login_and_registration/flask_app/models/user.py",
"repo_name": "AsbelNgetich/python_",
"src_encoding": "UTF-8",
"text": "from flask_app.config.mysqlconnection import connectToMySQL\nfrom flask import flash\nfrom flask_app import app\nfrom flask_bcrypt import Bcrypt \nimport re\n\nbcrypt = Bcrypt(app)\n\nclass User:\n def __init__(self,data):\n self.id = data['id']\n self.first_name = data['first_name']\n self.last_name = data['last_name']\n self.email = data['email']\n self.password = data['password']\n self.created_at = data['created_at']\n self.updated_at = data['updated_at']\n\n # register user class method\n @classmethod\n def register_user(cls,data):\n\n hashed_pw = bcrypt.generate_password_hash(data['password'])\n data['hashed_pw']= hashed_pw\n\n query = \"INSERT INTO users (first_name,last_name,email,password) VALUES (%(first_name)s,%(last_name)s,%(email)s,%(hashed_pw)s);\"\n my_db = connectToMySQL(\"login_and_registration\")\n userid = my_db.query_db(query,data)\n return userid\n\n # get user class method\n @classmethod\n def get_user(cls, data):\n\n query= \"SELECT * FROM users WHERE email= %(email)s;\"\n my_db = connectToMySQL(\"login_and_registration\")\n user_info = my_db.query_db(query,data)\n\n if len(user_info) <= 0:\n return None\n else:\n return User(user_info[0]) \n\n\n @staticmethod\n def validate_user(data):\n is_valid = True \n EMAIL_REGEX = re.compile(r'^[a-zA-Z0-9.+_-]+@[a-zA-Z0-9._-]+\\.[a-zA-Z]+$') \n ALPHABET_REGEX = re.compile(r'^[a-zA-Z]+$')\n PASSWORD_REGEX = re.compile(r'^(?=.*[\\d])(?=.*[A-Z])(?=.*[a-z])^.{8,50}$')\n \n\n\n\n if len(data['first_name']) < 2 or len(data['first_name']) > 50 :\n flash(\"First Name must be at least 2 to 50 characters.\")\n is_valid = False\n if len(data['last_name']) < 2 or len(data['last_name']) > 50:\n flash(\"Last Name must be at least 2 to 50 characters.\")\n is_valid = False\n if not PASSWORD_REGEX.match(data['password']):\n flash(\"Password formart needs to consist of an uppercase, a lowercase and atleast a number!\")\n flash(\"Password must also be at least 8 to 50 characters!\")\n is_valid = False \n\n # if len(data['password']) < 8 or len(data['password']) > 60:\n # flash(\"Password must be at least 8 to 60 characters.\")\n # is_valid = False\n if data['password'] != data['confirm_password'] :\n flash(\"Password and Confirm password fields doesn't match.\")\n is_valid = False\n if not EMAIL_REGEX.match(data['email']):\n flash(\"Invalid email address!\")\n is_valid = False\n if not ALPHABET_REGEX.match(data['first_name']):\n flash(\"First Name can only be alphabetical!\")\n is_valid = False\n if not ALPHABET_REGEX.match(data['last_name']):\n flash(\"Last Name can only be alphabetical!\")\n is_valid = False\n \n return is_valid\n\n @staticmethod\n def validate_login(data):\n is_valid = True \n EMAIL_REGEX = re.compile(r'^[a-zA-Z0-9.+_-]+@[a-zA-Z0-9._-]+\\.[a-zA-Z]+$') \n if not EMAIL_REGEX.match(data['email']):\n flash(\"Please enter a valid email address!\")\n is_valid = False\n if data['password'] == \"\":\n flash(\"Password fields cannot be blank! \")\n is_valid = False\n\n return is_valid\n\n"
},
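The PASSWORD_REGEX above uses two lookaheads plus a length range: at least one digit, one uppercase letter, one lowercase letter, and 8-50 characters total. A quick standalone check with made-up passwords:

import re

PASSWORD_REGEX = re.compile(r'^(?=.*[\d])(?=.*[A-Z])(?=.*[a-z])^.{8,50}$')

for pw in ["short1A", "alllowercase1", "Valid4Password", "NOLOWER123"]:
    print(pw, bool(PASSWORD_REGEX.match(pw)))
# short1A False (7 chars), alllowercase1 False (no uppercase),
# Valid4Password True, NOLOWER123 False (no lowercase)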
{
"alpha_fraction": 0.7253731489181519,
"alphanum_fraction": 0.7253731489181519,
"avg_line_length": 50.61538314819336,
"blob_id": "0d5ae28f65e468a619365c2511a3fb15a98a6b9b",
"content_id": "aee46c8d92b819e9e3bd793481104856fff1020a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 670,
"license_type": "no_license",
"max_line_length": 168,
"num_lines": 13,
"path": "/fundamentals/fundamentals/printouts.py",
"repo_name": "AsbelNgetich/python_",
"src_encoding": "UTF-8",
"text": "capitals = {\"Washington\":\"Olympia\",\"California\":\"Sacramento\",\"Idaho\":\"Boise\",\"Illinois\":\"Springfield\",\"Texas\":\"Austin\",\"Oklahoma\":\"Oklahoma City\",\"Virginia\":\"Richmond\"}\n# another way to iterate through the keys\nfor key in capitals.keys():\n print(key)\n# output: Washington, California, Idaho, Illinois, Texas, Oklahoma, Virginia\n#to iterate through the values\nfor val in capitals.values():\n print(val)\n# output: Olympia, Sacramento, Boise, Springfield, Austin, Oklahoma City, Richmond\n#to iterate through both keys and values\nfor key, val in capitals.items():\n print(key, \" = \", val)\n# output: Washington = Olympia, California = Sacramento, Idaho = Boise, etc"
},
{
"alpha_fraction": 0.5509123206138611,
"alphanum_fraction": 0.5679811835289001,
"avg_line_length": 27.33333396911621,
"blob_id": "0a4236b99c86a219f7ca0ee5e5490afb3f237757",
"content_id": "64455847a0fa94594f3d071a6bda706242fbf8b2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1699,
"license_type": "no_license",
"max_line_length": 109,
"num_lines": 60,
"path": "/fundamentals/fundamentals/rock_paper_scissors.py",
"repo_name": "AsbelNgetich/python_",
"src_encoding": "UTF-8",
"text": "import random\n\n\nplayer_total=0\ncomputer_total=0\n\nwhile True:\n player_input = int(input(\"Please pick a number. Pick #1 for Rock #2 for paper and #3. for scissors): \"))\n\n\n player_pick=\"\"\n if player_input==1:\n player_pick=\"Rock\"\n elif player_input==2:\n player_pick=\"Paper\"\n elif player_input==3:\n player_pick=\"scissors\"\n\n computer_input = random.randint(1,3)\n computer_pick=\"\"\n\n if computer_input==1:\n computer_pick=\"Rock\"\n elif computer_input==2:\n computer_pick=\"Paper\"\n elif computer_input==3:\n computer_pick=\"scissors\"\n\n print(f\"\\nYou picked {player_pick}, and the computer picked {computer_pick}.\\n\")\n\n if player_input == computer_input:\n print(f\"Both players selected {player_pick}. It's a tie\")\n elif player_input == 1:\n if computer_input == 3:\n player_total +=1\n print(\"Rock beats Scissors. You win.\")\n else:\n computer_total +=1\n print(\"Paper covers rock. You lose.\")\n elif player_input == 2:\n if computer_input == 1:\n player_total +=1\n print(\"Paper covers rock. You win.\")\n else:\n computer_total +=1\n print(\"Scissors cuts paper! You lose.\")\n elif player_input == 3:\n if computer_input == 2:\n player_total +=1\n print(\"Scissors cuts paper. You win.\")\n else:\n computer_total +=1\n print(\"Rock beats scissors. You lose.\")\n \n if player_total==3:\n print(\"You have 3 points, you won the game!\")\n break\n elif computer_total==3:\n print(\"The computer has 3 points, you lost to a Computer :(\")\n break"
},
{
"alpha_fraction": 0.5820587277412415,
"alphanum_fraction": 0.5820587277412415,
"avg_line_length": 23.612245559692383,
"blob_id": "d30b09a24e89896b2be343b28a228ac64527e2ef",
"content_id": "c9de425f31bf85b13f468216f27f6c9d2482f6c5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2419,
"license_type": "no_license",
"max_line_length": 83,
"num_lines": 98,
"path": "/flask_mysql/validation/login_and_registration/flask_app/controllers/users.py",
"repo_name": "AsbelNgetich/python_",
"src_encoding": "UTF-8",
"text": "from flask_app import app\nfrom flask import render_template,redirect,request,session,flash\nfrom flask_app.config.mysqlconnection import connectToMySQL\nfrom flask_app.models.user import User\nfrom flask_bcrypt import Bcrypt \n\n\nbcrypt = Bcrypt(app)\n\n\n# home page\[email protected](\"/\")\ndef index():\n return render_template(\"login_page.html\")\n\n\n# register user route\[email protected](\"/register\", methods = [\"POST\"])\ndef register_user():\n\n data={\n \"first_name\": request.form[\"first_name\"],\n \"last_name\": request.form[\"last_name\"],\n \"email\": request.form[\"email\"],\n \"password\": request.form[\"password\"],\n \"confirm_password\": request.form[\"confirm_password\"]\n }\n\n isvalid = User.validate_user(data)\n\n if isvalid:\n new_email = { \n \"email\": request.form[\"email\"]\n }\n user = User.get_user(new_email)\n if user == None: \n User.register_user(data)\n session['user_first_name'] = request.form[\"first_name\"]\n return redirect(\"/success\") \n else:\n flash(\"Your email has already been registered!\")\n return redirect(\"/\")\n\n return redirect(\"/\")\n\n#Login route\[email protected](\"/login\", methods = [\"POST\"] )\ndef login_user():\n\n data= {\n \"email\": request.form[\"email\"],\n \"password\": request.form[\"password\"]\n \n }\n #check for validations\n isvalid = User.validate_login(data)\n\n if isvalid:\n new_data = { \n \"email\": request.form[\"email\"]\n }\n user = User.get_user(new_data)\n if user == None:\n flash(\"Your email is invalid!\")\n return redirect('/')\n \n if not bcrypt.check_password_hash(user.password, request.form['password']):\n flash('Your Password is incorrect!')\n return redirect('/')\n \n session['user_id'] = user.id\n session['user_first_name'] = user.first_name\n session['user_email']= user.email\n return redirect(\"/success\") \n else:\n return redirect('/')\n\n \n \n\n\n#success route\[email protected](\"/success\")\ndef main_page():\n\n if not session.get(\"user_first_name\") is None:\n return render_template(\"main_page.html\")\n else:\n return redirect('/')\n\n\n#Log out route\[email protected](\"/logout\")\ndef logout_user():\n #clear sessions\n session.clear()\n # redirect back to login page\n return redirect(\"/\")\n\n\n\n\n\n\n\n"
},
{
"alpha_fraction": 0.6291161179542542,
"alphanum_fraction": 0.6308492422103882,
"avg_line_length": 32.47058868408203,
"blob_id": "6b09a391f93cb698f0c816a15a9592ba43de2859",
"content_id": "a69a53a427cc3255ecfed848a3685b80f9728ad1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 577,
"license_type": "no_license",
"max_line_length": 93,
"num_lines": 17,
"path": "/fundamentals/functions/functions_Intermediate/dictionary_iteration.py",
"repo_name": "AsbelNgetich/python_",
"src_encoding": "UTF-8",
"text": "\n# 4Create a function printInfo(some_dict) that given a dictionary whose values are \n# all lists, prints the name of each key along with the size of its list, and then\n# prints the associated values within each key's list\n\ndojo = {\n 'locations': ['San Jose', 'Seattle', 'Dallas', 'Chicago', 'Tulsa', 'DC', 'Burbank'],\n 'instructors': ['Michael', 'Amy', 'Eduardo', 'Josh', 'Graham', 'Patrick', 'Minh', 'Devon']\n}\n \ndef printInfo(some_dict):\n \n for key in some_dict:\n n= len(some_dict[key])\n \n print(n, key, some_dict[key])\n\nprintInfo(dojo)\n "
},
{
"alpha_fraction": 0.6825344562530518,
"alphanum_fraction": 0.6996060609817505,
"avg_line_length": 30.73958396911621,
"blob_id": "bee47c9ffbb0c7d62b5d11a6987e3188574ac316",
"content_id": "06752b2eac59b6eb5f2e2128fe9eda9a2e43da0a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3046,
"license_type": "no_license",
"max_line_length": 127,
"num_lines": 96,
"path": "/fundamentals/fundamentals/file.py",
"repo_name": "AsbelNgetich/python_",
"src_encoding": "UTF-8",
"text": "num1 = 42 # variable declaration, initialize number\nnum2 = 2.3 # variable declaration, initialize float\nboolean = True # variable declaration, initialize boolean\nstring = 'Hello World' # variable declaration, initialize string\npizza_toppings = ['Pepperoni', 'Sausage', 'Jalepenos', 'Cheese', 'Olives'] # variable declaration, initialize list\nperson = {'name': 'John', 'location': 'Salt Lake', 'age': 37, 'is_balding': False}# variable declaration, initialize dictionary\nfruit = ('blueberry', 'strawberry', 'banana')# variable declaration, initialize Tuple\nprint(type(fruit))#type check\nprint(pizza_toppings[1]) #log statement,access value\npizza_toppings.append('Mushrooms') #add value to a list\nprint(person['name'])#log statement,access value\nperson['name'] = 'George' #change value\nperson['eye_color'] = 'blue' #change value\nprint(fruit[2])#log statement,access value\n\n\n\nif num1 > 45:# conditional if statement checks if value greater than 45\n print(\"It's greater\")# log statement\nelse: # conditional else\n print(\"It's lower\")log statement\n\nif len(string) < 5: # if statement\n print(\"It's a short word!\") #log statement\nelif len(string) > 15: #elseif statement\n print(\"It's a long word!\") #log statement\nelse: #else\n print(\"Just right!\") # log statement\n\n\nfor x in range(5): #for loop, starts from 0 to 4\n print(x) #log statement\n\n\nfor x in range(2,5): #for loop, starts from 2 to 4\n print(x) #log statement\n\n\nfor x in range(2,10,3): #for loop, starts from 2 to 9 with increments of 3\n print(x) # log statement\n\n\nx = 0 # variable declarization and initialization\nwhile(x < 5): # while loop with a condition. x less than 5\n print(x) # log statement\n x += 1 # x is incremented by 1\n\npizza_toppings.pop() #Delete value\npizza_toppings.pop(1) #Delete item at a specific index\n\nprint(person) # log statement\nperson.pop('eye_color') # Eelete specific item\nprint(person) # log statement\n\nfor topping in pizza_toppings: # for loop\n if topping == 'Pepperoni': #if statement\n continue # continue\n print('After 1st if statement') #log statement\n if topping == 'Olives': # if statement\n break #breaks out of the loop if condition is met\n\ndef print_hello_ten_times(): # function definition\n for num in range(10): # for loop\n print('Hello') #log statement\n\nprint_hello_ten_times() # functin call\n\ndef print_hello_x_times(x): # function definition with a parameter\n for num in range(x): # for loop\n print('Hello') # log statement\n\nprint_hello_x_times(4) # function call with arguments\n\n\n\"\"\" function definition with, variable declaration\nand initialization as a parameter \"\"\"\ndef print_hello_x_or_ten_times(x = 10): \n for num in range(x): #for loop\n print('Hello') #for loop\n\nprint_hello_x_or_ten_times() #function call\nprint_hello_x_or_ten_times(4) # fuction call with arguments\n\n\n\"\"\"\nBonus section\n\"\"\"\n\n# print(num3)\n# num3 = 72\n# fruit[0] = 'cranberry'\n# print(person['favorite_team'])\n# print(pizza_toppings[7])\n# print(boolean)\n# fruit.append('raspberry')\n# fruit.pop(1)"
},
{
"alpha_fraction": 0.6192373633384705,
"alphanum_fraction": 0.6243597269058228,
"avg_line_length": 26.841270446777344,
"blob_id": "34640dfaebbb069b0e7b72add77e3cf480f0fddd",
"content_id": "3e6ad7b6a9809612d0474fdbf1fa9e2892eecc26",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1757,
"license_type": "no_license",
"max_line_length": 87,
"num_lines": 63,
"path": "/fundamentals/functions/functions_Intermediate/students_and_grades.py",
"repo_name": "AsbelNgetich/python_",
"src_encoding": "UTF-8",
"text": "\n\n#Ask how many students\n#Make a loop that loops the amount of entries\n#Enter student name, grade and select a course\n#ask to view report when complete.\n#If they want a report, print the list.\n\nnum_of_students = int(input(\"How many students do you have? \"))\n\ndef iscorrect_name(s_name):\n if s_name.isalpha():\n return True\n else:\n return False\n\ndef iscorrect_grade(s_grade):\n if s_grade.isnumeric():\n return True\n else:\n return False\n\ndef iscorrect_course(s_course):\n if (s_course >= 1) and (s_course <= 3):\n return True\n else:\n return False\n\n\ndef create_student():\n \n student_name = input(\"Student's Name: \") \n while(iscorrect_name(student_name)== False):\n student_name = input(\"You made an Error. Please re-enter Student's Name: \") \n\n student_grade = input(\"student's Grade: \")\n while(iscorrect_grade(student_grade)== False):\n print(student_grade)\n student_grade = input(\"You made an Error. Please re-enter Student's Grade: \") \n\n\n course_picked = int(input(\"Select a course: 1 - Math, 2 - Science, 3 - History: \"))\n while(iscorrect_course(course_picked) == False):\n course_picked = int(input(\"You made an Error. Please re-select course: \"))\n\n if course_picked == 1:\n course = \"Math\"\n elif course_picked == 2:\n course = \"Science\"\n elif course_picked == 3:\n course = \"History\"\n\n student={}\n student[\"name\"] = student_name\n student[\"grade\"] = student_grade\n student[\"course\"] = course \n return student\n\nwhole_class=[]\nfor i in range(0,num_of_students):\n x = create_student()\n whole_class.append(x)\n\nfor s in whole_class:\n print(f\"Name: {s['name']}, Grade: {s['grade']}, Course: {s['course']}\")\n\n"
},
{
"alpha_fraction": 0.5637891292572021,
"alphanum_fraction": 0.596256673336029,
"avg_line_length": 36.29999923706055,
"blob_id": "a7b83118aab420d189253d2b86c324e24603b88e",
"content_id": "efe841b265af93bb52ee6dabfe5bd131bd5d4d70",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2618,
"license_type": "no_license",
"max_line_length": 89,
"num_lines": 70,
"path": "/fundamentals/functions/functions_basic_II.py",
"repo_name": "AsbelNgetich/python_",
"src_encoding": "UTF-8",
"text": "\n# 1 ...............................................................\n\n# Create a function that accepts a number as an input. Return a new list \n# that counts down by one, from the number (as the 0th element) down to 0 \n# (as the last element).Example: countdown(5) should return [5,4,3,2,1,0]\n\ndef countdown(list_size):\n new_list=[]\n for i in range(list_size,-1,-1):\n new_list.append(i)\n return new_list\nprint(countdown(7))\n\n# 2..............................................................................\n\n# Create a function that will receive a list with two numbers. Print the first value \n#and return the second.\n# Example: print_and_return([1,2]) should print 1 and return 2\n\ndef print_and_return(numbers):\n print(numbers[0])\n return numbers[1]\nprint(print_and_return([8,15]))\n\n# 3................................................................................\n\n#Create a function that accepts a list and returns the sum of the first value in the \n#list plus the list's length.Example: first_plus_length([1,2,3,4,5]) should return 6 \n#(first value: 1 + length: 5)\n\ndef my_add_function(nums):\n return len(nums) + nums[0]\nprint(my_add_function([2,6,8,4,9,8,4]))\n\n# 4..................................................................................\n\n#Values Greater than Second - Write a function that accepts a list and creates a new \n#list containing only the values from the original list that are greater than its 2nd \n#value. Print how many values this is and then return the new list. If the list has less \n#than 2 elements, have the function return False\n# Example: values_greater_than_second([5,2,3,2,1,4]) should print 3 and return [5,3,4]\n# Example: values_greater_than_second([3]) should return False\n\ndef greater_than(sample):\n if len(sample)<2:\n return False\n else:\n brand_new_list=[]\n total=0\n for i in range(0,len(sample)):\n if sample[i]>sample[1]:\n total +=1\n brand_new_list.append(sample[i])\n print(total)\n return brand_new_list\n\nprint(greater_than([10,5,8,20,2,7,9]))\n\n# Write a function that accepts two integers as parameters: size and value. \n# The function should create and return a list whose length is equal to the \n# given size, and whose values are all the given value Example: length_and_value(4,7) \n# should return [7,7,7,7] \n# Example: length_and_value(6,2) should return [2,2,2,2,2,2]\n\ndef length_and_value(size,value):\n new_creation=[]\n for i in range(0,size):\n new_creation.append(value)\n return new_creation\nprint(length_and_value(5,4))\n\n\n\n\n\n\n"
},
{
"alpha_fraction": 0.612500011920929,
"alphanum_fraction": 0.6156250238418579,
"avg_line_length": 16.66666603088379,
"blob_id": "941dfce1d10a249e09c50f134ea1f07cdfa918f6",
"content_id": "82bb3c02a5d85d6bb339fbe255ec08ae12392ad5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 320,
"license_type": "no_license",
"max_line_length": 52,
"num_lines": 18,
"path": "/flask/fundamentals/checker_board/server.py",
"repo_name": "AsbelNgetich/python_",
"src_encoding": "UTF-8",
"text": "from flask import Flask, render_template\nimport math\n\napp = Flask(__name__)\n\[email protected]('/')\ndef index():\n return render_template(\"index.html\")\n\[email protected]('/<int:x>')\ndef flexible_row(x):\n x = int(x/2)\n \n return render_template(\"flexible_rows.html\",x=x)\n\n\nif __name__==\"__main__\":\n app.run(debug=True)\n\n\n"
},
{
"alpha_fraction": 0.6140888333320618,
"alphanum_fraction": 0.6186829805374146,
"avg_line_length": 17.685714721679688,
"blob_id": "36f08a86087c3e3ed7a590c1ab44b4a4d8236215",
"content_id": "ba402a0513e4abab91346cb5e8ab63b11bd4d6ce",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 653,
"license_type": "no_license",
"max_line_length": 68,
"num_lines": 35,
"path": "/flask/counter/server.py",
"repo_name": "AsbelNgetich/python_",
"src_encoding": "UTF-8",
"text": "from flask import Flask, request, redirect, render_template, session\n\napp = Flask(__name__)\napp.secret_key = \"myverysecretkey\" \n\[email protected]('/')\ndef index():\n\n if 'counter' not in session:\n session['counter'] = 0\n else:\n session['counter'] +=1\n return render_template('index.html')\n\[email protected]('/destroy_session')\ndef destroy_session():\n\n session.clear()\n\n return redirect('/')\n\[email protected]('/reset_counter')\ndef reset_counter():\n session.clear()\n return redirect('/')\n\[email protected]('/add_visits')\ndef add_visits():\n session['counter'] +=2\n return redirect('/')\n\n\n\nif __name__ == '__main__':\n app.run(debug=True)"
},
{
"alpha_fraction": 0.6340000033378601,
"alphanum_fraction": 0.6340000033378601,
"avg_line_length": 16.89285659790039,
"blob_id": "2873a23edfe3f5c065b5df62d9b1eef69ff68f83",
"content_id": "f16b23b55b703e106a8b827d20e757a7ea40e7df",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 500,
"license_type": "no_license",
"max_line_length": 68,
"num_lines": 28,
"path": "/flask/fundamentals/playground/server.py",
"repo_name": "AsbelNgetich/python_",
"src_encoding": "UTF-8",
"text": "from flask import Flask , render_template\n\napp = Flask(__name__)\n\[email protected]('/')\ndef index():\n return render_template(\"index.html\")\n\[email protected]('/play')\ndef play():\n\n return render_template(\"play.html\")\n\[email protected]('/play/<int:x>')\ndef play_times(x):\n\n return render_template(\"play_times.html\", x = x)\n\n\n\[email protected]('/play/<int:x>/<color>')\ndef colored_boxes(x,color):\n\n return render_template(\"colored_boxes.html\", x = x, color=color)\n\n\nif __name__==\"__main__\":\n app.run(debug= True)"
}
] | 25 |
samuelcolvin/sasstastic | https://github.com/samuelcolvin/sasstastic | 65100779e5978642466123a80e44e2a863dfb774 | fac226d6dafab978de509986787e5b3472f3587a | dfec04d9589462131c8ef3343399c3a2ab762ac1 | refs/heads/master | 2023-08-31T08:02:01.293730 | 2020-05-27T11:12:12 | 2020-05-27T11:12:12 | 265,602,562 | 27 | 2 | MIT | 2020-05-20T15:08:40 | 2021-04-19T04:29:06 | 2021-05-13T20:53:57 | Python | [
{
"alpha_fraction": 0.5825655460357666,
"alphanum_fraction": 0.5839830040931702,
"avg_line_length": 31.813953399658203,
"blob_id": "66dfbe8cdceb50a92c3c04bf14311deaf44ebaa2",
"content_id": "8eaf2af7a87c75169ba850ff8825832fd3b1262c",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1411,
"license_type": "permissive",
"max_line_length": 112,
"num_lines": 43,
"path": "/sasstastic/logs.py",
"repo_name": "samuelcolvin/sasstastic",
"src_encoding": "UTF-8",
"text": "import logging\nimport logging.config\n\nimport click\n\n\nclass ClickHandler(logging.Handler):\n formats = {\n logging.DEBUG: {'fg': 'white', 'dim': True},\n logging.INFO: {'fg': 'green'},\n logging.WARN: {'fg': 'yellow'},\n }\n\n def emit(self, record):\n log_entry = self.format(record)\n if record.levelno == logging.INFO and log_entry.startswith('>>'):\n click.secho(log_entry[2:], fg='cyan')\n else:\n fmt = self.formats.get(record.levelno, {'fg': 'red'})\n click.secho(log_entry, **fmt)\n\n\ndef log_config(log_level: str) -> dict:\n \"\"\"\n Setup default config. for dictConfig.\n :param log_level: str name or django debugging int\n :return: dict suitable for ``logging.config.dictConfig``\n \"\"\"\n assert log_level in {'DEBUG', 'INFO', 'WARNING', 'ERROR'}, f'wrong log level {log_level}'\n return {\n 'version': 1,\n 'disable_existing_loggers': True,\n 'formatters': {'default': {'format': '%(message)s'}, 'indent': {'format': ' %(message)s'}},\n 'handlers': {\n 'sasstastic': {'level': log_level, 'class': 'sasstastic.logs.ClickHandler', 'formatter': 'default'},\n },\n 'loggers': {'sasstastic': {'handlers': ['sasstastic'], 'level': log_level, 'propagate': False}},\n }\n\n\ndef setup_logging(log_level):\n config = log_config(log_level)\n logging.config.dictConfig(config)\n"
},
{
"alpha_fraction": 0.6252729296684265,
"alphanum_fraction": 0.6258187890052795,
"avg_line_length": 32.009010314941406,
"blob_id": "8222c81056b7bfe20bd7aa2d32639453a500a94d",
"content_id": "f83dc5e2ace8d7be04dc06ffb58a6cfdeff9cd57",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3664,
"license_type": "permissive",
"max_line_length": 102,
"num_lines": 111,
"path": "/sasstastic/config.py",
"repo_name": "samuelcolvin/sasstastic",
"src_encoding": "UTF-8",
"text": "import logging\nimport re\nfrom pathlib import Path\nfrom typing import Any, Dict, List, Optional, Pattern\n\nimport yaml\nfrom pydantic import BaseModel, HttpUrl, ValidationError, validator\nfrom pydantic.error_wrappers import display_errors\n\nfrom .common import SasstasticError, is_file_path\n\ntry:\n from yaml import CLoader as Loader\nexcept ImportError:\n from yaml import Loader\n\n__all__ = 'SourceModel', 'DownloadModel', 'ConfigModel', 'load_config'\nlogger = logging.getLogger('sasstastic.config')\n\n\nclass SourceModel(BaseModel):\n url: HttpUrl\n extract: Optional[Dict[Pattern, Optional[Path]]] = None\n to: Optional[Path] = None\n\n @validator('url', pre=True)\n def remove_spaces_from_url(cls, v):\n return v and v.replace(' ', '')\n\n @validator('extract', each_item=True)\n def check_extract_path(cls, v):\n if v is not None and v.is_absolute():\n raise ValueError('extract path may not be absolute, remove the leading slash')\n return v\n\n @validator('to', always=True)\n def check_to(cls, v, values):\n if values.get('extract'):\n # extracting, to can be None\n return v\n elif is_file_path(v):\n # to is already a valid path\n return v\n elif v is not None and v.is_absolute():\n raise ValueError('path may not be absolute, remove the leading slash')\n\n try:\n url: HttpUrl = values['url']\n except KeyError:\n return v\n else:\n filename = (url.path or '/').rsplit('/', 1)[1]\n if not filename.endswith(('.css', '.sass', '.scss')):\n raise ValueError(f'no filename found in url \"{url}\" and file path not given via \"to\"')\n return (v or Path('.')) / filename\n\n\nclass DownloadModel(BaseModel):\n dir: Path\n sources: List[SourceModel]\n\n\nclass ConfigModel(BaseModel):\n download: Optional[DownloadModel] = None\n build_dir: Path\n output_dir: Path\n lock_file: Path = Path('.sasstastic.lock')\n include_files: Pattern = re.compile(r'^[^_].+\\.(?:css|sass|scss)$')\n exclude_files: Optional[Pattern] = None\n replace: Optional[Dict[Pattern, Dict[Pattern, str]]] = None\n file_hashes: bool = False\n dev_mode: bool = True\n config_file: Path\n\n @classmethod\n def parse_obj(cls, config_file: Path, obj: Dict[str, Any]) -> 'ConfigModel':\n if isinstance(obj, dict):\n obj['config_file'] = config_file\n m: ConfigModel = super().parse_obj(obj)\n\n config_directory = config_file.parent\n if not m.download.dir.is_absolute():\n m.download.dir = config_directory / m.download.dir\n\n if not m.build_dir.is_absolute():\n m.build_dir = config_directory / m.build_dir\n\n if not m.output_dir.is_absolute():\n m.output_dir = config_directory / m.output_dir\n\n if not m.lock_file.is_absolute():\n m.lock_file = config_directory / m.lock_file\n return m\n\n\ndef load_config(config_file: Path) -> ConfigModel:\n if not config_file.is_file():\n logger.error('%s does not exist', config_file)\n raise SasstasticError('config files does not exist')\n try:\n with config_file.open('r') as f:\n data = yaml.load(f, Loader=Loader)\n except yaml.YAMLError as e:\n logger.error('invalid YAML file %s:\\n%s', config_file, e)\n raise SasstasticError('invalid YAML file')\n\n try:\n return ConfigModel.parse_obj(config_file, data)\n except ValidationError as exc:\n logger.error('Error parsing %s:\\n%s', config_file, display_errors(exc.errors()))\n raise SasstasticError('error parsing config file')\n"
},
{
"alpha_fraction": 0.5424354076385498,
"alphanum_fraction": 0.723247230052948,
"avg_line_length": 19.846153259277344,
"blob_id": "47558fa4669bb1616af667bc0a72c6b5c493ae1b",
"content_id": "a72adf3a201460e8d55f0dc4b0e8c9571095968c",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 271,
"license_type": "permissive",
"max_line_length": 48,
"num_lines": 13,
"path": "/tests/requirements.txt",
"repo_name": "samuelcolvin/sasstastic",
"src_encoding": "UTF-8",
"text": "black==19.10b0\ncoverage==5.0.3\nflake8==3.7.9\nflake8-quotes==2.1.1\nisort==4.3.21\nmypy==0.761\ngit+https://github.com/PyCQA/pycodestyle@5c60447\ngit+https://github.com/PyCQA/pyflakes@c688d2b\npytest==5.3.5\npytest-cov==2.8.1\npytest-mock==2.0.0\npytest-sugar==0.9.2\ntwine==3.1.1\n"
},
{
"alpha_fraction": 0.5489886403083801,
"alphanum_fraction": 0.5572060942649841,
"avg_line_length": 33.89706039428711,
"blob_id": "904e589feaaba1e194b5642c7b2121f78f0cbf4b",
"content_id": "19f2f83839787e669001c430cb8cf3f5466004b3",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 9504,
"license_type": "permissive",
"max_line_length": 120,
"num_lines": 272,
"path": "/sasstastic/compile.py",
"repo_name": "samuelcolvin/sasstastic",
"src_encoding": "UTF-8",
"text": "import hashlib\nimport json\nimport logging\nimport re\nimport shutil\nimport tempfile\nfrom contextlib import contextmanager\nfrom pathlib import Path\nfrom time import time\nfrom typing import Optional, Union\n\nimport click\nimport sass\n\nfrom .common import SasstasticError\nfrom .config import ConfigModel\n\n__all__ = ('compile_sass',)\nlogger = logging.getLogger('sasstastic.compile')\nSTARTS_DOWNLOAD = re.compile('^(?:DOWNLOAD|DL)/')\nSTARTS_SRC = re.compile('^SRC/')\n\n\ndef compile_sass(config: ConfigModel, alt_output_dir: Optional[Path] = None, dev_mode: Optional[bool] = None):\n if dev_mode is None:\n dev_mode = config.dev_mode\n else:\n dev_mode = dev_mode\n mode = 'dev' if dev_mode else 'prod'\n out_dir: Path = alt_output_dir or config.output_dir\n logger.info('\\ncompiling \"%s/\" to \"%s/\" (mode: %s)', config.build_dir, out_dir, mode)\n with tmpdir() as tmp_path:\n SassCompiler(config, tmp_path, dev_mode).build()\n fast_move(tmp_path, out_dir)\n\n\nclass SassCompiler:\n def __init__(self, config: ConfigModel, tmp_out_dir: Path, dev_mode: bool):\n self._config = config\n self._build_dir = config.build_dir\n self._tmp_out_dir = tmp_out_dir\n self._dev_mode = dev_mode\n self._src_dir = self._build_dir\n self._replace = config.replace or {}\n self._download_dir = config.download.dir\n self._importers = [(5, self._clever_imports)]\n\n dir_hash = hashlib.md5(str(self._build_dir).encode()).hexdigest()\n self._size_cache_file = Path(tempfile.gettempdir()) / 'grablib_cache.{}.json'.format(dir_hash)\n\n self._output_style = 'nested' if self._dev_mode else 'compressed'\n\n self._old_size_cache = {}\n self._new_size_cache = {}\n self._errors = 0\n self._files_generated = 0\n\n def build(self) -> None:\n start = time()\n\n if self._dev_mode:\n self._src_dir = out_dir_src = self._tmp_out_dir / '.src'\n\n shutil.copytree(str(self._build_dir), str(out_dir_src))\n files = sum(f.is_file() for f in out_dir_src.glob('**/*'))\n logger.info('>> %28s/* ➤ %-30s %3d files', self._build_dir, '.src/', files)\n\n try:\n self._download_dir = out_dir_src / self._download_dir.relative_to(self._build_dir)\n except ValueError:\n # download dir is not inside the build dir, need to copy libs too\n out_dir_libs = self._tmp_out_dir / '.libs'\n shutil.copytree(str(self._download_dir), str(out_dir_libs))\n files = sum(f.is_file() for f in out_dir_libs.glob('**/*'))\n logger.info('%28s/* ➤ %-30s %3d files', self._download_dir, '.libs/', files)\n self._download_dir = out_dir_src\n\n if self._size_cache_file.exists():\n with self._size_cache_file.open() as f:\n self._old_size_cache = json.load(f)\n\n for path in self._src_dir.glob('**/*.*'):\n self.process_file(path)\n\n with self._size_cache_file.open('w') as f:\n json.dump(self._new_size_cache, f, indent=2)\n\n time_taken = (time() - start) * 1000\n plural = '' if self._files_generated == 1 else 's'\n if not self._errors:\n logger.info('%d css file%s generated in %0.0fms, 0 errors', self._files_generated, plural, time_taken)\n else:\n logger.error(\n '%d css file%s generated in %0.0fms, %d errors', self._files_generated, plural, time_taken, self._errors\n )\n raise SasstasticError('sass errors')\n\n def process_file(self, f: Path):\n if not f.is_file():\n return\n if not self._config.include_files.search(f.name):\n return\n if self._config.exclude_files and self._config.exclude_files.search(str(f)):\n return\n\n if is_relative_to(f, self._download_dir):\n return\n\n rel_path = f.relative_to(self._src_dir)\n css_path = (self._tmp_out_dir / 
rel_path).with_suffix('.css')\n\n map_path = css_path.with_name(css_path.name + '.map') if self._dev_mode else None\n\n try:\n css = sass.compile(\n filename=str(f),\n source_map_filename=map_path and str(map_path),\n output_style=self._output_style,\n precision=10,\n importers=self._importers,\n )\n except sass.CompileError as e:\n self._errors += 1\n logger.error('%s compile error:\\n%s', f, e)\n return\n\n log_msg = None\n file_hashes = self._config.file_hashes\n try:\n css_path.parent.mkdir(parents=True, exist_ok=True)\n if self._dev_mode:\n css, css_map = css\n\n if file_hashes:\n css_path = insert_hash(css_path, css)\n map_path = insert_hash(map_path, css)\n file_hashes = False\n\n # correct the link to map file in css\n css = re.sub(r'/\\*# sourceMappingURL=\\S+ \\*/', f'/*# sourceMappingURL={map_path.name} */', css)\n map_path.write_text(css_map)\n css, log_msg = self._regex_modify(rel_path, css)\n finally:\n self._log_file_creation(rel_path, css_path, css)\n if log_msg:\n logger.debug(log_msg)\n\n if file_hashes:\n css_path = insert_hash(css_path, css)\n css_path.write_text(css)\n self._files_generated += 1\n\n def _regex_modify(self, rel_path, css):\n log_msg = None\n\n for path_regex, regex_map in self._replace.items():\n if re.search(path_regex, str(rel_path)):\n logger.debug('%s has regex replace matches for \"%s\"', rel_path, path_regex)\n for pattern, repl in regex_map.items():\n hash1 = hash(css)\n css = re.sub(pattern, repl, css)\n if hash(css) == hash1:\n log_msg = ' \"{}\" ➤ \"{}\" didn\\'t modify the source'.format(pattern, repl)\n else:\n log_msg = ' \"{}\" ➤ \"{}\" modified the source'.format(pattern, repl)\n return css, log_msg\n\n def _log_file_creation(self, rel_path, css_path, css):\n src, dst = str(rel_path), str(css_path.relative_to(self._tmp_out_dir))\n\n size = len(css.encode())\n p = str(css_path)\n self._new_size_cache[p] = size\n old_size = self._old_size_cache.get(p)\n c = None\n if old_size:\n change_p = (size - old_size) / old_size * 100\n if abs(change_p) > 0.5:\n c = 'green' if change_p <= 0 else 'red'\n change_p = click.style('{:+0.0f}%'.format(change_p), fg=c)\n logger.info('>> %30s ➤ %-30s %9s %s', src, dst, fmt_size(size), change_p)\n if c is None:\n logger.info('>> %30s ➤ %-30s %9s', src, dst, fmt_size(size))\n\n def _clever_imports(self, src_path):\n _new_path = None\n if STARTS_SRC.match(src_path):\n _new_path = self._build_dir / STARTS_SRC.sub('', src_path)\n elif STARTS_DOWNLOAD.match(src_path):\n _new_path = self._download_dir / STARTS_DOWNLOAD.sub('', src_path)\n\n return _new_path and [(str(_new_path),)]\n\n\n@contextmanager\ndef tmpdir():\n d = tempfile.mkdtemp()\n try:\n yield Path(d)\n finally:\n shutil.rmtree(d)\n\n\ndef _move_dir(src: str, dst: str, exists: bool):\n if exists:\n shutil.rmtree(dst)\n shutil.move(src, dst)\n\n\ndef fast_move(src_dir: Path, dst_dir: Path):\n \"\"\"\n Move all files and directories from src_dir to dst_dir, files are moved first. 
This tries to be relatively fast.\n \"\"\"\n\n to_move = []\n to_rename = []\n for src_path in src_dir.iterdir():\n if src_path.is_file():\n to_rename.append((src_path, dst_dir / src_path.relative_to(src_dir)))\n else:\n assert src_path.is_dir(), src_path\n dst = dst_dir / src_path.relative_to(src_dir)\n to_move.append((str(src_path), str(dst), dst.exists()))\n\n dst_dir.mkdir(parents=True, exist_ok=True)\n s = time()\n # files in the root of src_dir are moved first, these are generally the scss files which\n # should be updated first to avoid styles not changing when a browser reloads\n for src, dst in to_rename:\n src.rename(dst)\n for src, dst, exists in to_move:\n if exists:\n shutil.rmtree(dst)\n shutil.move(src, dst)\n logger.debug('filed from %s/ to %s/ in %0.1fms', src_dir, dst_dir, (time() - s) * 1000)\n\n\ndef insert_hash(path: Path, content: Union[str, bytes], *, hash_length=7):\n \"\"\"\n Insert a hash based on the content into the path after the first dot.\n\n hash_length 7 matches git commit short references\n \"\"\"\n if isinstance(content, str):\n content = content.encode()\n hash_ = hashlib.md5(content).hexdigest()[:hash_length]\n if '.' in path.name:\n new_name = re.sub(r'\\.', f'.{hash_}.', path.name, count=1)\n else:\n new_name = f'{path.name}.{hash_}'\n return path.with_name(new_name)\n\n\nKB, MB = 1024, 1024 ** 2\n\n\ndef fmt_size(num):\n if num <= KB:\n return f'{num:0.0f}B'\n elif num <= MB:\n return f'{num / KB:0.1f}KB'\n else:\n return f'{num / MB:0.1f}MB'\n\n\ndef is_relative_to(p1: Path, p2: Path) -> bool:\n try:\n p1.relative_to(p2)\n except ValueError:\n return False\n else:\n return True\n"
},
{
"alpha_fraction": 0.6871508359909058,
"alphanum_fraction": 0.6876164078712463,
"avg_line_length": 34.79999923706055,
"blob_id": "8caacfb24469a94f39135c9126905010592e6285",
"content_id": "6e2979a376c86b464cce4f8e969f645758da4756",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2148,
"license_type": "permissive",
"max_line_length": 117,
"num_lines": 60,
"path": "/sasstastic/cli.py",
"repo_name": "samuelcolvin/sasstastic",
"src_encoding": "UTF-8",
"text": "import logging\nfrom pathlib import Path\nfrom typing import Optional\n\nimport typer\n\nfrom .config import SasstasticError, load_config\nfrom .logs import setup_logging\nfrom .main import download_and_compile, watch\nfrom .version import VERSION\n\ncli = typer.Typer()\nlogger = logging.getLogger('sasstastic.cli')\n\n\ndef version_callback(value: bool):\n if value:\n print(f'sasstastic: v{VERSION}')\n raise typer.Exit()\n\n\nOUTPUT_HELP = 'Custom directory to output css files, if omitted the \"output_dir\" field from the config file is used.'\nDEV_MODE_HELP = 'Whether to compile in development or production mode, if omitted the value is taken from config.'\nWATCH_HELP = 'Whether to watch the config file and build directory then download and compile after file changes.'\nVERBOSE_HELP = 'Print more information to the console.'\nVERSION_HELP = 'Show the version and exit.'\n\n\[email protected]()\ndef build(\n config_path: Path = typer.Argument('sasstastic.yml', exists=True, file_okay=True, dir_okay=True, readable=True),\n output_dir: Optional[Path] = typer.Option(\n None, '-o', '--output-dir', file_okay=False, dir_okay=True, readable=True, help=OUTPUT_HELP\n ),\n dev_mode: bool = typer.Option(None, '--dev/--prod', help=DEV_MODE_HELP),\n watch_mode: bool = typer.Option(False, '--watch/--dont-watch', help=WATCH_HELP),\n verbose: bool = typer.Option(False, help=VERBOSE_HELP),\n version: bool = typer.Option(None, '--version', callback=version_callback, is_eager=True, help=VERSION_HELP),\n):\n \"\"\"\n Fantastic SASS and SCSS compilation.\n\n Takes a single argument: a path to a sasstastic.yml config file, or a directory containing a sasstastic.yml file.\n \"\"\"\n setup_logging('DEBUG' if verbose else 'INFO')\n if config_path.is_dir():\n config_path /= 'sasstastic.yml'\n logger.info('config path: %s', config_path)\n try:\n config = load_config(config_path)\n if watch_mode:\n watch(config, output_dir, dev_mode)\n else:\n download_and_compile(config, output_dir, dev_mode)\n except SasstasticError:\n raise typer.Exit(1)\n\n\nif __name__ == '__main__':\n cli()\n"
},
{
"alpha_fraction": 0.7055837512016296,
"alphanum_fraction": 0.7081218361854553,
"avg_line_length": 22.176469802856445,
"blob_id": "b0ae3aaa160c3368f45dbefb1718d50d94f46bc8",
"content_id": "3ce555c08871edee52a7901b0122fc0bde185cf8",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 394,
"license_type": "permissive",
"max_line_length": 44,
"num_lines": 17,
"path": "/sasstastic/__init__.py",
"repo_name": "samuelcolvin/sasstastic",
"src_encoding": "UTF-8",
"text": "# flake8: noqa\nfrom .common import SasstasticError\nfrom .compile import compile_sass\nfrom .config import ConfigModel, load_config\nfrom .download import download_sass\nfrom .main import download_and_compile\nfrom .version import VERSION\n\n__all__ = (\n 'download_sass',\n 'compile_sass',\n 'SasstasticError',\n 'load_config',\n 'ConfigModel',\n 'download_and_compile',\n 'VERSION',\n)\n"
},
{
"alpha_fraction": 0.6666666865348816,
"alphanum_fraction": 0.6811594367027283,
"avg_line_length": 20.230770111083984,
"blob_id": "78dc83feebbe8063ec3780f9135c1e773650d665",
"content_id": "2c8d6ad5e1390e938965fab2560d3a5503bacb50",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 276,
"license_type": "permissive",
"max_line_length": 70,
"num_lines": 13,
"path": "/sasstastic/common.py",
"repo_name": "samuelcolvin/sasstastic",
"src_encoding": "UTF-8",
"text": "import re\nfrom pathlib import Path\nfrom typing import Optional\n\n__all__ = ('SasstasticError', 'is_file_path')\n\n\nclass SasstasticError(RuntimeError):\n pass\n\n\ndef is_file_path(p: Optional[Path]) -> bool:\n return p is not None and re.search(r'\\.[a-zA-Z0-9]{1,5}$', p.name)\n"
},
{
"alpha_fraction": 0.6074513792991638,
"alphanum_fraction": 0.6220302581787109,
"avg_line_length": 30.931034088134766,
"blob_id": "711adbeda64cf63c334729cf50009af9b9216e7a",
"content_id": "5cc4ebfad22867d9cbe5d0903263d2b47e92993d",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1852,
"license_type": "permissive",
"max_line_length": 76,
"num_lines": 58,
"path": "/setup.py",
"repo_name": "samuelcolvin/sasstastic",
"src_encoding": "UTF-8",
"text": "from importlib.machinery import SourceFileLoader\nfrom pathlib import Path\n\nfrom setuptools import setup\n\ndescription = 'Fantastic SASS and SCSS compilation for python'\nTHIS_DIR = Path(__file__).resolve().parent\ntry:\n long_description = THIS_DIR.joinpath('README.md').read_text()\nexcept FileNotFoundError:\n long_description = description\n\n# avoid loading the package before requirements are installed:\nversion = SourceFileLoader('version', 'sasstastic/version.py').load_module()\n\nsetup(\n name='sasstastic',\n version=str(version.VERSION),\n description=description,\n long_description=long_description,\n long_description_content_type='text/markdown',\n classifiers=[\n 'Development Status :: 4 - Beta',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3 :: Only',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3.8',\n 'Intended Audience :: Developers',\n 'Intended Audience :: Information Technology',\n 'Intended Audience :: System Administrators',\n 'License :: OSI Approved :: MIT License',\n 'Operating System :: Unix',\n 'Operating System :: POSIX :: Linux',\n 'Environment :: MacOS X',\n 'Topic :: Internet',\n ],\n author='Samuel Colvin',\n author_email='[email protected]',\n url='https://github.com/samuelcolvin/sasstastic',\n license='MIT',\n packages=['sasstastic'],\n package_data={'sasstastic': ['py.typed']},\n entry_points=\"\"\"\n [console_scripts]\n sasstastic=sasstastic.__main__:cli\n \"\"\",\n python_requires='>=3.7',\n zip_safe=True,\n install_requires=[\n 'libsass>=0.20.0',\n 'httpx>=0.12.1',\n 'pydantic>=1.5',\n 'PyYAML>=5.3.1',\n 'typer>=0.1.0',\n 'watchgod>=0.6',\n ],\n)\n"
},
{
"alpha_fraction": 0.5263158082962036,
"alphanum_fraction": 0.6929824352264404,
"avg_line_length": 13.25,
"blob_id": "f4238fe65cab112125190adaa987be136ebc2e22",
"content_id": "fc1f4f988e68e91285a1796620e650a35d70fb73",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 114,
"license_type": "permissive",
"max_line_length": 25,
"num_lines": 8,
"path": "/requirements.txt",
"repo_name": "samuelcolvin/sasstastic",
"src_encoding": "UTF-8",
"text": "-r tests/requirements.txt\n\nlibsass==0.20.0\nhttpx==0.12.1\npydantic==1.5.1\nPyYAML==5.3.1\ntyper==0.1.0\nwatchgod==0.6\n"
},
{
"alpha_fraction": 0.6755598187446594,
"alphanum_fraction": 0.6760361790657043,
"avg_line_length": 35.18965530395508,
"blob_id": "71db0cf76c53dace5c66ec4c1b1f0d25848cb238",
"content_id": "7144384da609355ded4fb69d83258d3bfd990341",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2099,
"license_type": "permissive",
"max_line_length": 118,
"num_lines": 58,
"path": "/sasstastic/main.py",
"repo_name": "samuelcolvin/sasstastic",
"src_encoding": "UTF-8",
"text": "import asyncio\nimport logging\nfrom pathlib import Path\nfrom typing import Optional\n\nimport watchgod\n\nfrom .compile import compile_sass\nfrom .config import ConfigModel, load_config\nfrom .download import Downloader, download_sass\n\nlogger = logging.getLogger('sasstastic.main')\n__all__ = 'download_and_compile', 'watch', 'awatch'\n\n\ndef download_and_compile(config: ConfigModel, alt_output_dir: Optional[Path] = None, dev_mode: Optional[bool] = None):\n logger.info('build path: %s/', config.build_dir)\n logger.info('output path: %s/', alt_output_dir or config.output_dir)\n\n download_sass(config)\n compile_sass(config, alt_output_dir, dev_mode)\n\n\ndef watch(config: ConfigModel, alt_output_dir: Optional[Path] = None, dev_mode: Optional[bool] = None):\n try:\n asyncio.run(awatch(config, alt_output_dir, dev_mode))\n except KeyboardInterrupt:\n pass\n\n\nasync def awatch(config: ConfigModel, alt_output_dir: Optional[Path] = None, dev_mode: Optional[bool] = None):\n logger.info('build path: %s/', config.build_dir)\n logger.info('output path: %s/', alt_output_dir or config.output_dir)\n\n await Downloader(config).download()\n compile_sass(config, alt_output_dir, dev_mode)\n\n config_file = str(config.config_file)\n async for changes in watch_multiple(config_file, config.build_dir):\n changed_paths = {c[1] for c in changes}\n if config_file in changed_paths:\n logger.info('changes detected in config file, downloading sources...')\n config = load_config(config.config_file)\n await Downloader(config).download()\n\n if changed_paths != {config_file}:\n logger.info('changes detected in the build directory, re-compiling...')\n compile_sass(config, alt_output_dir, dev_mode)\n\n\nasync def watch_multiple(*paths):\n watchers = [watchgod.awatch(p) for p in paths]\n while True:\n done, pending = await asyncio.wait([w.__anext__() for w in watchers], return_when=asyncio.FIRST_COMPLETED)\n for t in pending:\n t.cancel()\n for t in done:\n yield t.result()\n"
},
{
"alpha_fraction": 0.5580581426620483,
"alphanum_fraction": 0.5603408813476562,
"avg_line_length": 38.5843391418457,
"blob_id": "f6fbd627f1a2a58cdd8dd94225db4bf46c5f1763",
"content_id": "5b2a0fb53fa37867275069287ad76837ff2ed752",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 6577,
"license_type": "permissive",
"max_line_length": 116,
"num_lines": 166,
"path": "/sasstastic/download.py",
"repo_name": "samuelcolvin/sasstastic",
"src_encoding": "UTF-8",
"text": "import asyncio\nimport hashlib\nimport json\nimport logging\nimport re\nimport zipfile\nfrom io import BytesIO\nfrom itertools import chain\nfrom pathlib import Path\nfrom typing import Dict, Set, Tuple\n\nfrom httpx import AsyncClient\n\nfrom .common import SasstasticError, is_file_path\nfrom .config import ConfigModel, SourceModel\n\n__all__ = ('download_sass', 'Downloader')\nlogger = logging.getLogger('sasstastic.download')\n\n\ndef download_sass(config: ConfigModel):\n asyncio.run(Downloader(config).download())\n\n\nclass Downloader:\n def __init__(self, config: ConfigModel):\n self._download_dir = config.download.dir\n self._sources = config.download.sources\n self._client = AsyncClient()\n self._lock_check = LockCheck(self._download_dir, config.lock_file)\n\n async def download(self):\n if not self._sources:\n logger.info('\\nno files to download')\n return\n\n to_download = [s for s in self._sources if self._lock_check.should_download(s)]\n if to_download:\n logger.info(\n '\\ndownloading %d files to %s, %d up-to-date',\n len(to_download),\n self._download_dir,\n len(self._sources) - len(to_download),\n )\n try:\n await asyncio.gather(*[self._download_source(s) for s in to_download])\n finally:\n await self._client.aclose()\n self._lock_check.save()\n else:\n logger.info('\\nno new files to download, %d up-to-date', len(self._sources))\n self._lock_check.delete_stale()\n\n async def _download_source(self, s: SourceModel):\n logger.debug('%s: downloading...', s.url)\n r = await self._client.get(s.url)\n if r.status_code != 200:\n logger.error('Error downloading %r, unexpected status code: %s', s.url, r.status_code)\n raise SasstasticError(f'unexpected status code {r.status_code}')\n\n loop = asyncio.get_running_loop()\n if s.extract is None:\n path = await loop.run_in_executor(None, self._save_file, s.to, r.content)\n self._lock_check.record(s, s.to, r.content)\n logger.info('>> downloaded %s ➤ %s', s.url, path)\n else:\n count = await loop.run_in_executor(None, self._extract_zip, s, r.content)\n logger.info('>> downloaded %s ➤ extract %d files', s.url, count)\n\n def _extract_zip(self, s: SourceModel, content: bytes):\n zcopied = 0\n with zipfile.ZipFile(BytesIO(content)) as zipf:\n logger.debug('%s: %d files in zip archive', s.url, len(zipf.namelist()))\n\n for filepath in zipf.namelist():\n if filepath.endswith('/'):\n continue\n regex_pattern, match, file_path = None, None, None\n for r, t in s.extract.items():\n match = r.match(filepath)\n if match:\n regex_pattern, file_path = r, t\n break\n if regex_pattern is None:\n logger.debug('%s: \"%s\" no target found', s.url, filepath)\n elif file_path is None:\n logger.debug('%s: \"%s\" skipping (regex: \"%s\")', s.url, filepath, regex_pattern)\n else:\n if not is_file_path(file_path):\n file_name = match.groupdict().get('filename') or match.groups()[-1]\n file_path = file_path / file_name\n logger.debug('%s: \"%s\" ➤ \"%s\" (regex: \"%s\")', s.url, filepath, file_path, regex_pattern)\n content = zipf.read(filepath)\n self._lock_check.record(s, file_path, content)\n self._save_file(file_path, content)\n zcopied += 1\n return zcopied\n\n def _save_file(self, save_to: Path, content) -> Path:\n p = self._download_dir / save_to\n p.parent.mkdir(parents=True, exist_ok=True)\n p.write_bytes(content)\n return p\n\n\nclass LockCheck:\n \"\"\"\n Avoid downloading unchanged files by consulting a \"lock file\" cache.\n \"\"\"\n\n file_description = (\n \"# this files records information about files downloaded by sasstastic \\n\" # noqa: 
Q000\n \"# to allow unnecessary downloads to be skipped.\\n\" # noqa: Q000\n \"# You should't edit it manually and should include it in version control.\"\n )\n\n def __init__(self, root_dir: Path, lock_file: Path):\n self._root_dir = root_dir\n self._lock_file = lock_file\n if lock_file.is_file():\n lines = (ln for ln in lock_file.read_text().split('\\n') if not re.match(r'\\s*#', ln))\n c = json.loads('\\n'.join(lines))\n self._cache: Dict[str, Set[Tuple[str, str]]] = {k: {tuple(f) for f in v} for k, v in c.items()}\n else:\n self._cache = {}\n self._active: Set[str] = set()\n\n def should_download(self, s: SourceModel) -> bool:\n k = self._hash_source(s)\n files = self._cache.get(k)\n if files is None:\n return True\n else:\n self._active.add(k)\n return not any(self._file_unchanged(*v) for v in files)\n\n def record(self, s: SourceModel, path: Path, content: bytes):\n k = self._hash_source(s)\n r = str(path), hashlib.md5(content).hexdigest()\n self._active.add(k)\n files = self._cache.get(k)\n if files is None:\n self._cache[k] = {r}\n else:\n files.add(r)\n\n def save(self):\n lines = ',\\n'.join(f' \"{k}\": {json.dumps(sorted(v))}' for k, v in self._cache.items() if k in self._active)\n self._lock_file.write_text(f'{self.file_description}\\n{{\\n{lines}\\n}}')\n\n def delete_stale(self):\n d_files = set(chain.from_iterable((p for p, _ in f) for u, f in self._cache.items() if u in self._active))\n for p in self._root_dir.glob('**/*'):\n rel_path = str(p.relative_to(self._root_dir))\n if rel_path not in d_files and p.is_file():\n p.unlink()\n logger.info('>> %s stale and deleted', rel_path)\n\n def _file_unchanged(self, path: str, file_hash: str) -> bool:\n p = self._root_dir / path\n return p.is_file() and hashlib.md5(p.read_bytes()).hexdigest() == file_hash\n\n @staticmethod\n def _hash_source(s: SourceModel):\n j = str(s.url), None if s.extract is None else {str(k): str(v) for k, v in s.extract.items()}, str(s.to)\n return hashlib.md5(json.dumps(j).encode()).hexdigest()\n"
},
{
"alpha_fraction": 0.7225919365882874,
"alphanum_fraction": 0.7334501147270203,
"avg_line_length": 38.65277862548828,
"blob_id": "51b9259794aea85035719426f06a0159a6b2da74",
"content_id": "05363bb524de5d5761c08ed9de28873ccc7293ca",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 2855,
"license_type": "permissive",
"max_line_length": 186,
"num_lines": 72,
"path": "/README.md",
"repo_name": "samuelcolvin/sasstastic",
"src_encoding": "UTF-8",
"text": "# sasstastic\n\n[](https://github.com/samuelcolvin/sasstastic/actions?query=event%3Apush+branch%3Amaster+workflow%3ACI)\n[](https://codecov.io/gh/samuelcolvin/sasstastic)\n[](https://pypi.python.org/pypi/sasstastic)\n[](https://github.com/samuelcolvin/sasstastic)\n[](https://github.com/samuelcolvin/sasstastic/blob/master/LICENSE)\n\n**Fantastic SASS and SCSS compilation for python**\n\n## Installation\n\n```bash\npip install sasstastic\n```\n\nrun\n\n```bash\nsasstastic --help\n```\n\nTo check sasstastic is install and get help info.\n\n## Usage\n\nDefine a config file `sasstastic.yml`:\n\n```yaml\ndownload:\n # downloaded files will be saved in this directory\n dir: styles/.libs\n sources:\n # download a font css file from google fonts and save it to google-fonts.css\n - url: >\n https://fonts.googleapis.com/css?\n family=Merriweather:400,400i,700,700i|Titillium+Web|Ubuntu+Mono&display=swap\n to: google-fonts.css\n\n # download a style sheet from select2, this will be saved to \"select2.css\" as\n # the name can be inferred from the url\n - url: 'https://raw.githubusercontent.com/select2/select2/4.0.13/dist/css/select2.css'\n\n # download the full bootstrap 4 bundle and extract the scss files to the bootstrap/ directory\n - url: https://github.com/twbs/bootstrap/archive/v4.4.1.zip\n extract:\n 'bootstrap-4.4.1/scss/(.+)$': bootstrap/\n\n\n# SCSS and SASS files will be build from this directory\nbuild_dir: styles/\n# and saved to this directory\noutput_dir: css/\n```\n\nThen run `sasstastic` to build your sass files.\n\nnote:\n* if you `sasstastic.yml` file isn't in the current working directory you can pass the path to that file\n as an argument to sasstastic, e.g. `sasstastic path/to/sasstastic.yml` or just `sasstastic path/to/`\n* by default the paths defined in `sasstastic.yml`: `download.dir`, `build_dir` and `output_dir` are \n **relative to the the `sasstastic.yml` file\n* you can override the output directory `ouput_dir` using the `-o` argument to the CLI, see `sasstastic --help`\n for more info\n* sasstastic can build in \"development\" or \"production\" mode:\n * in **development** mode css is not compressed, a map file is created and all files from `build_dir` and \n `download.dir` are copied into `output_dir` so map files work correctly\n * in **production** mode css is compressed, no other files are added to `output_dir`\n\n### Watch mode\n\nYou can watch a directory and config file and run sasstastic when files change using `sasstastic --watch`.\n"
},
{
"alpha_fraction": 0.7175572514533997,
"alphanum_fraction": 0.7213740348815918,
"avg_line_length": 22.81818199157715,
"blob_id": "24f36ece81ea7c69482ff7f91093d37c2105558a",
"content_id": "acf7cf835079de552b223f8caa5935aa40646dc8",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 262,
"license_type": "permissive",
"max_line_length": 65,
"num_lines": 11,
"path": "/tests/test_cli.py",
"repo_name": "samuelcolvin/sasstastic",
"src_encoding": "UTF-8",
"text": "from typer.testing import CliRunner\n\nfrom sasstastic.cli import cli\n\nrunner = CliRunner()\n\n\ndef test_print_commands():\n result = runner.invoke(cli, ['--help'])\n assert result.exit_code == 0\n assert 'Fantastic SASS and SCSS compilation' in result.output\n"
}
] | 13 |
ShreyasGithub/pyspark_ccf | https://github.com/ShreyasGithub/pyspark_ccf | f8e73fa393b27f30b905a067efbc6ff1ec4df108 | 3444bdb05cd633fe694574cd2a5d9cbc99353b6b | 9bbe68d43e6aee4ab82e3c0eac0ba5ad8cfb1c2c | refs/heads/master | 2023-03-30T05:54:15.262080 | 2021-04-06T13:29:50 | 2021-04-06T13:29:50 | 349,717,090 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.65345698595047,
"alphanum_fraction": 0.6795952916145325,
"avg_line_length": 14.389610290527344,
"blob_id": "40bb3478c3460421922f0542c49fa504e1bebc5a",
"content_id": "b92fc2ba521142dc00259b1e61519c0842822372",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1186,
"license_type": "no_license",
"max_line_length": 132,
"num_lines": 77,
"path": "/notebooks/simple_streaming.py",
"repo_name": "ShreyasGithub/pyspark_ccf",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n# coding: utf-8\n\n# In[1]:\n\n\nget_ipython().magic(u'config Completer.use_jedi = False')\n\nfrom pyspark.sql import SparkSession\nimport numpy\nimport pandas\n\nimport os\nos.environ['PYSPARK_PYTHON'] = '/var/www/py_spark_ccf/PY_SPARK_CCF_ENV/bin/python3'\nos.environ['PYSPARK_DRIVER_PYTHON'] = '/var/www/py_spark_ccf/PY_SPARK_CCF_ENV/bin/python3'\nos.getcwd()\n\n\n# In[2]:\n\n\nspark_session = SparkSession.builder.master(\"spark://costrategix-pc:7077\") .appName('simple_streaming').getOrCreate()\n\n\n# In[3]:\n\n\nspark_session.sparkContext.getConf().getAll()\n\n\n# In[4]:\n\n\nfrom pyspark.streaming import StreamingContext\n\nbatch_interval = 10\nstreaming_context = StreamingContext(spark_session.sparkContext, batch_interval)\n\n\n# In[5]:\n\n\nlines_dstream = streaming_context.socketTextStream(\"localhost\", 9999)\n\n\n# In[6]:\n\n\n#shreyas encryptor\nreplace_dict = {\n 'l': '1',\n 's': '5',\n 'e': '9',\n 'a': '6',\n}\n\nencrypted_dstream = lines_dstream.map( lambda line: ''.join([replace_dict[c] if c in replace_dict else c for c in line.lower()]))\n\nencrypted_dstream.pprint()\n\n\n# In[7]:\n\n\nstreaming_context.start()\n\n\n# In[10]:\n\n\n# streaming_context.stop()\n\n\n# In[9]:\n\n\n# nc -lk 9999\n\n"
},
{
"alpha_fraction": 0.6884859800338745,
"alphanum_fraction": 0.6960207223892212,
"avg_line_length": 17.21030044555664,
"blob_id": "0d0d39e906639f6b567cd038eae1e5af898d838c",
"content_id": "058279df8f3830887a5dc29ac136c7c8086505bc",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4247,
"license_type": "no_license",
"max_line_length": 173,
"num_lines": 233,
"path": "/notebooks/product_prediction.py.py",
"repo_name": "ShreyasGithub/pyspark_ccf",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n# coding: utf-8\n\n# In[ ]:\n\n\nget_ipython().magic(u'config Completer.use_jedi = False')\n\nfrom pyspark.sql import SparkSession\nimport numpy\nimport pandas\n\nimport os\nos.environ['PYSPARK_PYTHON'] = '/var/www/py_spark_ccf/PY_SPARK_CCF_ENV/bin/python3'\nos.environ['PYSPARK_DRIVER_PYTHON'] = '/var/www/py_spark_ccf/PY_SPARK_CCF_ENV/bin/python3'\nos.getcwd()\n\n\n# In[ ]:\n\n\nspark_session = SparkSession.builder.master(\"spark://costrategix-pc:7077\") .appName('product_prediction').getOrCreate()\n\n\n# In[ ]:\n\n\nspark_session.sparkContext.getConf().getAll()\n\n\n# In[ ]:\n\n\naudit_data_frame = spark_session.read.csv('../data/audit_data_frame.csv', inferSchema=True, header=True)\n\n\n# In[ ]:\n\n\naudit_data_frame.printSchema()\n\n\n# In[ ]:\n\n\nfor column in audit_data_frame.columns:\n audit_data_frame.select(column).describe().show()\n\n\n# # feature extraction\n\n# In[ ]:\n\n\nfrom tokenizer import tokenize\nfrom pyspark.sql.functions import split\nspark_tokenize = spark_session.udf.register('tokenizer', tokenize)\naudit_data_frame = audit_data_frame.withColumn('INVOICE_PACKAGE_DESCRIPTION_CLEANED',\n split(spark_tokenize('INVOICE_PACKAGE_DESCRIPTION'), \" \"))\naudit_data_frame.head(1)\n\n\n# In[ ]:\n\n\nfrom pyspark.ml.feature import CountVectorizer, NGram, StringIndexer\n\n\n# In[ ]:\n\n\nngram_generator = NGram(n=2, inputCol='INVOICE_PACKAGE_DESCRIPTION_CLEANED',\n outputCol='INVOICE_PACKAGE_DESCRIPTION_NGRAM')\naudit_data_frame = ngram_generator.transform(audit_data_frame)\naudit_data_frame.head(1)\n\n\n# In[ ]:\n\n\ncount_vec_1 = CountVectorizer(inputCol='INVOICE_PACKAGE_DESCRIPTION_CLEANED',outputCol='cnt_vec_1', minDF=4)\naudit_data_frame = count_vec_1.fit(audit_data_frame).transform(audit_data_frame)\naudit_data_frame.head(1)\n\n\n# In[ ]:\n\n\ncount_vec_2 = CountVectorizer(inputCol='INVOICE_PACKAGE_DESCRIPTION_NGRAM',outputCol='cnt_vec_2', minDF=4)\naudit_data_frame = count_vec_2.fit(audit_data_frame).transform(audit_data_frame)\naudit_data_frame.head(1)\n\n\n# # add product_fdc_id\n\n# In[ ]:\n\n\nentity_package_data_frame = pandas.read_csv('../data/catalog_with_price.csv')\n\n\n# In[ ]:\n\n\nentity_package_data_frame.head()\n\n\n# In[ ]:\n\n\npackage_id_product_id_map = entity_package_data_frame.dropna(subset=['PACKAGE_FDC_ID', 'ESD_PRODUCT_FDC_ID']) .set_index('PACKAGE_FDC_ID')['ESD_PRODUCT_FDC_ID'].to_dict()\n\n\n# In[ ]:\n\n\n# package_id_product_id_map\n\n\n# In[ ]:\n\n\nfrom pyspark.sql.types import NullType\naudit_data_frame = audit_data_frame.dropna(subset=['PACKAGE_FDC_ID'])\nget_product_id = spark_session.udf.register('get_product_id',\n lambda package_id: package_id_product_id_map[package_id] \\\n if package_id in package_id_product_id_map else NullType())\naudit_data_frame = audit_data_frame.withColumn('PRODUCT_FDC_ID', get_product_id('PACKAGE_FDC_ID'))\naudit_data_frame = audit_data_frame.dropna(subset=['PRODUCT_FDC_ID'])\naudit_data_frame.head(1)\n\n\n# In[ ]:\n\n\naudit_data_frame.count()\n\n\n# In[ ]:\n\n\nfrom pyspark.ml.feature import StringIndexer\nstr_indexer = StringIndexer(inputCol='PRODUCT_FDC_ID', outputCol='label')\naudit_data_frame = str_indexer.fit(audit_data_frame).transform(audit_data_frame)\naudit_data_frame.head(1)\n\n\n# In[ ]:\n\n\nfrom pyspark.ml.feature import VectorAssembler\nvec_assembler = VectorAssembler(inputCols=['cnt_vec_1', 'cnt_vec_2'], outputCol='features')\naudit_data_frame = vec_assembler.transform(audit_data_frame)\naudit_data_frame.head(1)\n\n\n# # train test split\n\n# In[ 
]:\n\n\nfinal_data = audit_data_frame[['features', 'label']]\nfinal_data.head(1)\n\n\n# In[ ]:\n\n\ntrain_data, test_data = final_data.randomSplit([0.7, 0.3])\n\n\n# # model training\n\n# In[ ]:\n\n\nfrom pyspark.ml.classification import NaiveBayes\n\n\n# In[ ]:\n\n\nmodel = NaiveBayes()\nmodel = model.fit(train_data)\n\n\n# # model evaluation\n\n# In[ ]:\n\n\nfrom pyspark.ml.evaluation import MulticlassClassificationEvaluator\n\n\n# In[ ]:\n\n\nacc_eval = MulticlassClassificationEvaluator()\n\n\n# In[ ]:\n\n\ntest_results = model.transform(test_data)\n\n\n# In[ ]:\n\n\ntest_results = test_results.filter(test_results['prediction'] > 0)\n\n\n# In[ ]:\n\n\ntest_results.count()\n\n\n# In[ ]:\n\n\nprint('F1')\nacc_eval.evaluate(test_results)\n\n\n# In[ ]:\n\n\nprint('accuracy')\nacc_eval.evaluate(test_results, {acc_eval.metricName: \"accuracy\"})\n\n\n# In[ ]:\n\n\n\n\n"
},
{
"alpha_fraction": 0.6607539057731628,
"alphanum_fraction": 0.6662971377372742,
"avg_line_length": 13.516129493713379,
"blob_id": "91bcfdb928d8e64a941ab2149af3a05cf85c53ac",
"content_id": "e47424ed1b46a51ab7d4f3800c9fac64456f69ca",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1804,
"license_type": "no_license",
"max_line_length": 122,
"num_lines": 124,
"path": "/notebooks/explore_spark_data_frames.py",
"repo_name": "ShreyasGithub/pyspark_ccf",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n# coding: utf-8\n\n# In[ ]:\n\n\nget_ipython().magic(u'config Completer.use_jedi = False')\n\n\n# In[ ]:\n\n\nfrom pyspark.sql import SparkSession\nimport numpy\nimport pandas\n\n\n# In[ ]:\n\n\nimport os\nos.environ['PYSPARK_PYTHON'] = '/var/www/py_spark_ccf/PY_SPARK_CCF_ENV/bin/python3'\nos.getcwd()\n\n\n# In[ ]:\n\n\nspark_session = SparkSession.builder.master(\"spark://costrategix-pc:7077\") .appName('explore_data_frame').getOrCreate()\n\n\n# In[ ]:\n\n\nspark_session.sparkContext.getConf().getAll()\n\n\n# In[ ]:\n\n\nfood_data_frame = spark_session.read.csv('../data/food.csv', inferSchema=True, header=True)\n\n\n# In[ ]:\n\n\nfood_data_frame.printSchema()\n\n\n# In[ ]:\n\n\nfood_data_frame.show()\n\n\n# In[ ]:\n\n\nfood_data_frame.groupBy('data_type').count().show()\n\n\n# In[ ]:\n\n\nfood_data_frame.select('description').show()\n\n\n# In[ ]:\n\n\nstr_len = spark_session.udf.register('str_len', lambda text: len(text))\ndescription_len_data_frame = food_data_frame.select(str_len('description'))\ndescription_len_data_frame.show()\n\n\n# In[ ]:\n\n\ndescription_len_data_frame['str_len(description)']\n\n\n# In[ ]:\n\n\nfood_data_frame = food_data_frame.withColumn('description_len',str_len('description'))\nfood_data_frame.show()\n\n\n# In[ ]:\n\n\nfood_data_frame.head()\n\n\n# In[ ]:\n\n\ndef distributed_fuzzy_ratio(text_1, text_2='Metabolizable Energy of Almonds'):\n from fuzzywuzzy import fuzz\n return fuzz.ratio(text_1, text_2)\n \nfuzzy_match = spark_session.udf.register('fuzzy_match', distributed_fuzzy_ratio)\n\n\n# In[ ]:\n\n\nfood_data_frame = food_data_frame.withColumn('match_score', fuzzy_match('description'))\nfood_data_frame.show()\n\n\n# In[ ]:\n\n\nfood_data_frame.filter((food_data_frame['description'].like('%Metab%'))).show()\n\n\n# In[ ]:\n\n\nfood_data_frame.filter(food_data_frame['data_type'] == 'experimental_food').show()\n\n\n# In[ ]:\n\n\n\n\n"
},
{
"alpha_fraction": 0.6885703802108765,
"alphanum_fraction": 0.6930379867553711,
"avg_line_length": 18.240142822265625,
"blob_id": "5d50989fd4cb1f4fec7784720d4ba3e6baaa84f0",
"content_id": "886fda12f157c249390a0eee87beed01c9480af1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5372,
"license_type": "no_license",
"max_line_length": 307,
"num_lines": 279,
"path": "/notebooks/car_price_prediction.py",
"repo_name": "ShreyasGithub/pyspark_ccf",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n# coding: utf-8\n\n# In[ ]:\n\n\n\"\"\"Problem Statement\nA Chinese automobile company Geely Auto aspires to enter the US market by setting up their manufacturing unit there and producing cars locally to give competition to their US and European counterparts.\n\nThey have contracted an automobile consulting company to understand the factors on which the pricing of cars depends. Specifically, they want to understand the factors affecting the pricing of cars in the American market, since those may be very different from the Chinese market. The company wants to know:\n\nWhich variables are significant in predicting the price of a car\nHow well those variables describe the price of a car\nBased on various market surveys, the consulting firm has gathered a large data set of different types of cars across the America market.\"\"\"\n\n\n# In[ ]:\n\n\nget_ipython().magic(u'config Completer.use_jedi = False')\n\nfrom pyspark.sql import SparkSession\nimport numpy\nimport pandas\n\nimport os\nos.environ['PYSPARK_PYTHON'] = '/var/www/py_spark_ccf/PY_SPARK_CCF_ENV/bin/python3'\nos.environ['PYSPARK_DRIVER_PYTHON'] = '/var/www/py_spark_ccf/PY_SPARK_CCF_ENV/bin/python3'\nos.getcwd()\n\n\n# In[ ]:\n\n\nspark_session = SparkSession.builder.master(\"spark://costrategix-pc:7077\") .appName('car_price_prediction').getOrCreate()\n\n\n# In[ ]:\n\n\nspark_session.sparkContext.getConf().getAll()\n\n\n# In[ ]:\n\n\ncar_data_frame = spark_session.read.csv('../data/CarPrice_Assignment.csv', inferSchema=True, header=True)\n\n\n# In[ ]:\n\n\ncar_data_frame.printSchema()\n\n\n# In[ ]:\n\n\ncar_data_frame.show(vertical=True, n=5)\n\n\n# In[ ]:\n\n\ncolumnList = [item[0] for item in car_data_frame.dtypes if not item[1].startswith('string')]\nfor column in columnList:\n car_data_frame.select(column).describe().show()\n\n\n# # data profiling\n\n# In[ ]:\n\n\nfrom pyspark.sql.functions import isnull, when, count, col\nnacounts = car_data_frame.select([count(when(isnull(c), c)).alias(c) for c in car_data_frame.columns])\nnacounts.show(vertical=True)\n\n\n# # feature extraction\n\n# In[ ]:\n\n\nfrom pyspark.ml.feature import StringIndexer\nfrom pyspark.ml.feature import OneHotEncoder\n\n\n# In[ ]:\n\n\ncar_data_frame.columns\n\n\n# In[ ]:\n\n\ncategorical_columns = ['fueltype',\n 'aspiration',\n 'doornumber',\n 'carbody',\n 'drivewheel',\n 'enginelocation',\n'enginetype',\n 'cylindernumber',\n'fuelsystem']\n\n\n# In[ ]:\n\n\nstring_index_encoder = StringIndexer(inputCols=categorical_columns,\n outputCols=[c + '_str_ind' for c in categorical_columns],\n stringOrderType='alphabetAsc')\ntrain_car_data_frame = string_index_encoder.fit(car_data_frame).transform(car_data_frame)\ntrain_car_data_frame.head(1)\n\n\n# In[ ]:\n\n\none_hot_encoder = OneHotEncoder(inputCols=[c + '_str_ind' for c in categorical_columns],\n outputCols=[c + '_vec' for c in categorical_columns],\n dropLast=False)\ntrain_car_data_frame = one_hot_encoder.fit(train_car_data_frame).transform(train_car_data_frame)\ntrain_car_data_frame.head(1)\n\n\n# In[ ]:\n\n\nnumeric_columns = ['wheelbase',\n 'carlength',\n 'carwidth',\n 'carheight',\n 'curbweight',\n'enginesize',\n 'boreratio',\n 'stroke',\n 'compressionratio',\n 'horsepower',\n 'peakrpm',\n 'citympg',\n 'highwaympg',]\n\n\n# In[ ]:\n\n\nfrom pyspark.ml.feature import VectorAssembler\n\nvector_assembler = VectorAssembler(inputCols=[c + '_vec' for c in categorical_columns] + numeric_columns,\n outputCol='unscaled_features')\nvector_data = 
vector_assembler.transform(train_car_data_frame)\n\n\n# In[ ]:\n\n\nvector_data.head(1)\n\n\n# In[ ]:\n\n\nvector_data = vector_data.withColumnRenamed('price', 'label')\nfinal_data = vector_data[['unscaled_features', 'label']]\nfinal_data.head(1)\n\n\n# In[ ]:\n\n\nfrom pyspark.ml.feature import StandardScaler\nscaler = StandardScaler(inputCol='unscaled_features', outputCol='features')\nfinal_data = scaler.fit(final_data).transform(final_data)\n \nfinal_data.head(1)\n\n\n# # split train/test\n\n# In[ ]:\n\n\ntrain_data, test_data = final_data.randomSplit([0.7,0.3])\n\n\n# # Model training\n\n# In[ ]:\n\n\nfrom pyspark.ml.regression import RandomForestRegressor\nmodel = RandomForestRegressor(numTrees=100)\nmodel = model.fit(train_data)\n\n\n# # model evaluation\n\n# In[ ]:\n\n\nmodel.featureImportances\n\n\n# In[ ]:\n\n\nfrom pyspark.ml.evaluation import RegressionEvaluator\n\n\n# In[ ]:\n\n\ntest_results = model.transform(test_data)\n\n\n# In[ ]:\n\n\nevaluator = RegressionEvaluator()\nprint('RMSE')\nevaluator.evaluate(test_results)\n\n\n# In[ ]:\n\n\nprint('R_sqr')\nevaluator.evaluate(test_results, {evaluator.metricName: \"r2\"})\n\n\n# In[ ]:\n\n\nprint('MAE')\nevaluator.evaluate(test_results, {evaluator.metricName: \"mae\"})\n\n\n# In[ ]:\n\n\ncar_data_frame.select('price').describe().show()\n\n\n# In[ ]:\n\n\ntransformed_column_names = []\nfor column in [c for c in categorical_columns]:\n for row in car_data_frame.select(column).distinct().orderBy(column).collect():\n# print(column, row[column])\n transformed_column_names.append(column + '_' + row[column])\n\ntransformed_column_names = transformed_column_names + numeric_columns\ntransformed_column_names\n\n\n# In[ ]:\n\n\nlen(transformed_column_names)\n\n\n# In[ ]:\n\n\nfor ind, importance in enumerate(model.featureImportances):\n print(transformed_column_names[ind], round(importance, 2))\n\n\n# In[ ]:\n\n\nfrom pyspark.sql.functions import desc\n\n\n# In[ ]:\n\n\n\n\n"
},
{
"alpha_fraction": 0.6816208362579346,
"alphanum_fraction": 0.7049823999404907,
"avg_line_length": 17.660232543945312,
"blob_id": "cb416990fdb6d083a03ad3307d4109becf6d0710",
"content_id": "0d556c90b0038c51c76edce9c6a807f54f2713f4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4837,
"license_type": "no_license",
"max_line_length": 173,
"num_lines": 259,
"path": "/notebooks/product_prediction.py",
"repo_name": "ShreyasGithub/pyspark_ccf",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n# coding: utf-8\n\n# In[48]:\n\n\nget_ipython().magic(u'config Completer.use_jedi = False')\n\nfrom pyspark.sql import SparkSession\nimport numpy\nimport pandas\n\nimport os\nos.environ['PYSPARK_PYTHON'] = '/var/www/py_spark_ccf/PY_SPARK_CCF_ENV/bin/python3'\nos.environ['PYSPARK_DRIVER_PYTHON'] = '/var/www/py_spark_ccf/PY_SPARK_CCF_ENV/bin/python3'\nos.getcwd()\n\n\n# In[49]:\n\n\nspark_session = SparkSession.builder.master(\"spark://costrategix-pc:7077\") .appName('product_prediction').getOrCreate()\n\n\n# In[50]:\n\n\nspark_session.sparkContext.getConf().getAll()\n\n\n# In[51]:\n\n\naudit_data_frame = spark_session.read.csv('../data/audit_data_frame_2021_04_02.csv',\n inferSchema=True, header=True)\n\n\n# In[52]:\n\n\naudit_data_frame.printSchema()\n\n\n# In[53]:\n\n\nfor column in audit_data_frame.columns:\n audit_data_frame.select(column).describe().show()\n\n\n# # feature extraction\n\n# In[54]:\n\n\nfrom tokenizer import tokenize\nfrom pyspark.sql.functions import split\nspark_tokenize = spark_session.udf.register('tokenizer', tokenize)\naudit_data_frame = audit_data_frame.withColumn('INVOICE_PACKAGE_DESCRIPTION_CLEANED',\n split(spark_tokenize('INVOICE_PACKAGE_DESCRIPTION'), \" \"))\naudit_data_frame.head(1)\n\n\n# In[55]:\n\n\nfrom pyspark.ml.feature import CountVectorizer, NGram, StringIndexer\n\n\n# In[56]:\n\n\nngram_generator = NGram(n=2, inputCol='INVOICE_PACKAGE_DESCRIPTION_CLEANED',\n outputCol='INVOICE_PACKAGE_DESCRIPTION_NGRAM')\naudit_data_frame = ngram_generator.transform(audit_data_frame)\naudit_data_frame.head(1)\n\n\n# In[57]:\n\n\ncount_vec_1 = CountVectorizer(inputCol='INVOICE_PACKAGE_DESCRIPTION_CLEANED',outputCol='cnt_vec_1', minDF=4)\naudit_data_frame = count_vec_1.fit(audit_data_frame).transform(audit_data_frame)\naudit_data_frame.head(1)\n\n\n# In[58]:\n\n\ncount_vec_2 = CountVectorizer(inputCol='INVOICE_PACKAGE_DESCRIPTION_NGRAM',outputCol='cnt_vec_2', minDF=4)\naudit_data_frame = count_vec_2.fit(audit_data_frame).transform(audit_data_frame)\naudit_data_frame.head(1)\n\n\n# # add product_fdc_id\n\n# In[59]:\n\n\nentity_package_data_frame = pandas.read_csv('../data/catalog_with_price.csv')\n\n\n# In[60]:\n\n\nentity_package_data_frame.head()\n\n\n# In[61]:\n\n\npackage_id_product_id_map = entity_package_data_frame.dropna(subset=['PACKAGE_FDC_ID', 'ESD_PRODUCT_FDC_ID']) .set_index('PACKAGE_FDC_ID')['ESD_PRODUCT_FDC_ID'].to_dict()\n\n\n# In[62]:\n\n\n# package_id_product_id_map\n\n\n# In[63]:\n\n\nfrom pyspark.sql.types import NullType\naudit_data_frame = audit_data_frame.dropna(subset=['PACKAGE_FDC_ID'])\nget_product_id = spark_session.udf.register('get_product_id',\n lambda package_id: package_id_product_id_map[package_id] \\\n if package_id in package_id_product_id_map else NullType())\naudit_data_frame = audit_data_frame.withColumn('PRODUCT_FDC_ID', get_product_id('PACKAGE_FDC_ID'))\naudit_data_frame = audit_data_frame.dropna(subset=['PRODUCT_FDC_ID'])\naudit_data_frame.head(1)\n\n\n# In[64]:\n\n\naudit_data_frame.count()\n\n\n# # data exploration\n\n# In[65]:\n\n\naudit_data_frame.createOrReplaceTempView(\"table1\")\nspark_session.sql(\"\"\"\nselect PRODUCT_FDC_ID from table1\ngroup by PRODUCT_FDC_ID having count(*) > 100;\n\"\"\").count()\n\n\n# In[66]:\n\n\nproduct_row_list = spark_session.sql(\"\"\"\nselect PRODUCT_FDC_ID from table1\ngroup by PRODUCT_FDC_ID having count(*) > 100;\n\"\"\").collect()\n\nproduct_list = [row['PRODUCT_FDC_ID'] for row in product_row_list]\naudit_data_frame = 
audit_data_frame.filter(audit_data_frame['PRODUCT_FDC_ID'].isin(product_list))\naudit_data_frame.count()\n\n\n# In[67]:\n\n\nfrom pyspark.ml.feature import StringIndexer\nstr_indexer = StringIndexer(inputCol='PRODUCT_FDC_ID', outputCol='label')\naudit_data_frame = str_indexer.fit(audit_data_frame).transform(audit_data_frame)\naudit_data_frame.head(1)\n\n\n# In[68]:\n\n\nfrom pyspark.ml.feature import VectorAssembler\nvec_assembler = VectorAssembler(inputCols=['cnt_vec_1', 'cnt_vec_2'], outputCol='features')\naudit_data_frame = vec_assembler.transform(audit_data_frame)\naudit_data_frame.head(1)\n\n\n# # train test split\n\n# In[69]:\n\n\nfinal_data = audit_data_frame[['features', 'label']]\nfinal_data.head(1)\n\n\n# In[70]:\n\n\ntrain_data, test_data = final_data.randomSplit([0.7, 0.3])\n\n\n# # model training\n\n# In[71]:\n\n\nfrom pyspark.ml.classification import NaiveBayes\n\n\n# In[72]:\n\n\nmodel = NaiveBayes()\nmodel = model.fit(train_data)\n\n\n# # model evaluation\n\n# In[73]:\n\n\nfrom pyspark.ml.evaluation import MulticlassClassificationEvaluator\n\n\n# In[74]:\n\n\nacc_eval = MulticlassClassificationEvaluator()\n\n\n# In[75]:\n\n\ntest_results = model.transform(test_data)\n\n\n# In[76]:\n\n\ntest_results = test_results.filter(test_results['prediction'] > 0)\n\n\n# In[77]:\n\n\ntest_results.count()\n\n\n# In[78]:\n\n\nprint('F1')\nacc_eval.evaluate(test_results)\n\n\n# In[79]:\n\n\nprint('accuracy')\nacc_eval.evaluate(test_results, {acc_eval.metricName: \"accuracy\"})\n\n\n# In[ ]:\n\n\n\n\n"
},
{
"alpha_fraction": 0.6470588445663452,
"alphanum_fraction": 0.6789737343788147,
"avg_line_length": 12.244812965393066,
"blob_id": "ee3ae868d8331f165cf6ee74d815170d75155732",
"content_id": "110d4bf37ae7729172fcefe8f5fc833c7a988107",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3196,
"license_type": "no_license",
"max_line_length": 131,
"num_lines": 241,
"path": "/notebooks/movie_reccomendation_system.py",
"repo_name": "ShreyasGithub/pyspark_ccf",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n# coding: utf-8\n\n# In[1]:\n\n\nget_ipython().magic(u'config Completer.use_jedi = False')\n\nfrom pyspark.sql import SparkSession\nimport numpy\nimport pandas\n\nimport os\nos.environ['PYSPARK_PYTHON'] = '/var/www/py_spark_ccf/PY_SPARK_CCF_ENV/bin/python3'\nos.environ['PYSPARK_DRIVER_PYTHON'] = '/var/www/py_spark_ccf/PY_SPARK_CCF_ENV/bin/python3'\nos.getcwd()\n\n\n# In[2]:\n\n\nspark_session = SparkSession.builder.master(\"spark://costrategix-pc:7077\") .appName('movie_reccomendation_system').getOrCreate()\n\n\n# In[3]:\n\n\nspark_session.sparkContext.getConf().getAll()\n\n\n# In[4]:\n\n\nratings_data_frame = spark_session.read.csv('../data/ratings.csv', inferSchema=True, header=True)\n\n\n# In[5]:\n\n\nratings_data_frame.count()\n\n\n# In[6]:\n\n\nratings_data_frame.printSchema()\n\n\n# In[7]:\n\n\nratings_data_frame.show(vertical=True, n=5)\n\n\n# In[8]:\n\n\nratings_data_frame = ratings_data_frame.dropna()\nratings_data_frame.count()\n\n\n# In[9]:\n\n\nratings_data_frame.select('userId').distinct().count()\n\n\n# In[10]:\n\n\nratings_data_frame.select('movieId').distinct().count()\n\n\n# In[11]:\n\n\nratings_data_frame.createOrReplaceTempView(\"table1\")\nspark_session.sql(\"\"\"\nselect movieId from table1\ngroup by movieId having count(*) > 10000;\n\"\"\").count()\n\n\n# In[12]:\n\n\nmovie_row_list = spark_session.sql(\"\"\"\nselect movieId from table1\ngroup by movieId having count(*) > 10000;\n\"\"\").collect()\n\nmovie_list = [row['movieId'] for row in movie_row_list]\n\n\n# In[13]:\n\n\nratings_data_frame = ratings_data_frame.filter(ratings_data_frame['movieId'].isin(movie_list))\n\n\n# In[14]:\n\n\nratings_data_frame.count()\n\n\n# In[15]:\n\n\nspark_session.sql(\"\"\"\nselect userId from table1\ngroup by userId having count(*) > 1000;\n\"\"\").count()\n\n\n# In[16]:\n\n\nuser_row_list = spark_session.sql(\"\"\"\nselect userId from table1\ngroup by userId having count(*) > 1000;\n\"\"\").collect()\n\nuser_list = [row['userId'] for row in user_row_list]\n\n\n# In[17]:\n\n\nratings_data_frame = ratings_data_frame.filter(ratings_data_frame['userId'].isin(user_list))\n\n\n# In[18]:\n\n\nratings_data_frame.count()\n\n\n# In[19]:\n\n\ntrain_data, test_data = ratings_data_frame.randomSplit([0.7, 0.3])\n\n\n# In[20]:\n\n\nfrom pyspark.ml.recommendation import ALS\nmodel = ALS(maxIter=10, userCol=\"userId\", itemCol=\"movieId\", ratingCol=\"rating\")\n\n\n# In[21]:\n\n\nmodel = model.fit(train_data)\n\n\n# In[22]:\n\n\ntest_data.head(1)\n\n\n# In[23]:\n\n\ntest_user_data = test_data.filter(test_data['userId'] == 229)\n\n\n# In[24]:\n\n\ntest_user_data.collect()\n\n\n# In[25]:\n\n\nsingle_user = test_user_data.select(['movieId','userId'])\n\n\n# In[26]:\n\n\nreccomendations = model.transform(single_user)\nreccomendations.orderBy('movieId').collect()\n\n\n# In[27]:\n\n\nfrom pyspark.ml.evaluation import RegressionEvaluator\n\n\n# In[28]:\n\n\ntest_data.count()\n\n\n# In[29]:\n\n\ntest_results = model.transform(test_data)\n\n\n# In[30]:\n\n\ntest_results.head(5)\n\n\n# In[31]:\n\n\nevaluator = RegressionEvaluator(labelCol='rating', predictionCol='prediction')\nprint('RMSE')\nevaluator.evaluate(test_results)\n\n\n# In[32]:\n\n\nprint('R_sqr')\nevaluator.evaluate(test_results, {evaluator.metricName: \"r2\"})\n\n\n# In[33]:\n\n\nprint('MAE')\nevaluator.evaluate(test_results, {evaluator.metricName: \"mae\"})\n\n\n# In[34]:\n\n\ntest_data.select('rating').describe().show()\n\n\n# In[ ]:\n\n\n\n\n"
},
{
"alpha_fraction": 0.45374271273612976,
"alphanum_fraction": 0.49207305908203125,
"avg_line_length": 44.29090881347656,
"blob_id": "cbd3947139d1ca1191b4d3ab864972a9a5af3fbf",
"content_id": "65d04937ffc227cf4b2bd15f3c67650eba2c59bd",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4983,
"license_type": "no_license",
"max_line_length": 151,
"num_lines": 110,
"path": "/notebooks/tokenizer.py",
"repo_name": "ShreyasGithub/pyspark_ccf",
"src_encoding": "UTF-8",
"text": "import re\n\ndef token_based_regex_processing(token):\n token = re.sub(r'^([0-9\\.]+\\/|\\/[0-9\\.]+)$', lambda match: re.sub(r'(\\/)', r'', match.group(0), flags=re.U), token, re.U)\n token = re.sub(r'([\\\\\\/]$|^[\\\\\\/])', r'', token, re.U)\n token = re.sub(r'(^[\\.]+$)', r'', token, re.U)\n token = re.sub(r'^([0-9]+\\.)$', lambda match: re.sub(r'([0-9]+\\.)', r'\\g<1>0', match.group(0), re.U), token, re.U)\n token = re.sub(r'(^\\.[0-9]+$)', lambda match: re.sub(r'(\\.[0-9]+)', r'0\\1', match.group(0), re.U), token, re.U)\n\n token = re.sub(r'^([0-9\\.]+\\/[0-9\\.]+)\\/([0-9\\.]+\\/[0-9\\.]+)$', r'\\1 \\2', token, re.U)\n \n \"750.000 = 750\"\n token = re.sub(r'^([0-9]+\\.[0]+)$', lambda match: str(int(float(match.group(0)))), token, re.U)\n \n return token\n\ndef remove_duplicate_tokens(token_list):\n result_token_list = []\n \n for token in token_list:\n if token not in result_token_list or re.search(r'^[0-9\\.]+$', token) != None:\n result_token_list.append(token)\n \n return ' '.join(result_token_list)\n\n\n''' if there is dot(.) in the string it is replaced with space '''\ndef replace_dot(sample):\n return re.sub(r'([^0-9\\s]+\\.[^0-9\\s]*)', lambda match: re.sub(r'([\\.])', r' ', match.group(0), re.U) , sample, re.U)\n\n\n\n'''\nThis function will apply some regex rules like remove unwanted caracters, reduce sequential spaces and\n will split sample into tokens by space.\n'''\ndef tokenize(sample):\n \n #print('raw string:')\n #print(sample)\n \n #if type(sample) != unicode:\n # sample = sample.decode('utf8')\n \n sample = sample.lower()\n sample = re.sub(r'([^0-9a-z\\.\\s\\(\\)\\\\\\/\\-\\&#]+)', r'', sample, re.U)\n# sample = re.sub(r'([\\&])', r' \\1 ', sample, re.U)\n sample = re.sub(r'(\\([^\\(\\)]+\\))', lambda match: re.sub(r'([\\(\\)]+)', r' ', match.group(0), re.U), sample, re.U)\n sample = re.sub(r'([#])', r' no ', sample, re.U)\n sample = re.sub(r'([0-9\\.]+\\-[0-9\\.]+\\-*[0-9\\.]*)', lambda match: re.sub(r'([\\-])', r'/', match.group(0), re.U), sample, re.U)\n sample = re.sub(r'([a-z]+&[a-z]+)', lambda match: re.sub(r'([&])', r' and ', match.group(0), re.U), sample, re.U)\n sample = re.sub(r'([\\-])', r' ', sample, re.U)\n \n '''1x5 to 1/5'''\n sample = re.sub(r'([0-9]+[x][0-9]+)', lambda match: re.sub(r'([x])', r'/', match.group(0), re.U), sample, re.U)\n sample = re.sub(r'([0-9\\.]+[a-z]+)', lambda match: re.sub(r'([0-9\\.]+)', r'\\1 ', match.group(0), re.U), sample, re.U)\n \n \n '''specific to package and volume'''\n sample = re.sub(r'(\\s*[\\\\\\/]\\s*)', lambda match: match.group(0).strip(), sample, re.U)\n sample = re.sub(r'([0-9\\.]+\\/[a-z]+)', lambda match: re.sub(r'([\\/])', r'\\/1 ', match.group(0), re.U), sample, re.U)\n sample = re.sub(r'([a-z]+\\/|\\/[a-z]+)', lambda match: re.sub(r'([\\/])', r' ', match.group(0), re.U), sample, re.U)\n# sample = re.sub(r'([0-9\\.]+\\&[0-9\\.]+)', lambda match:\\\n# re.sub(r'([\\&])', r' ', match.group(0), re.U), sample, re.U)\n# sample = re.sub(r'([0-9\\.\\\\\\/a-z]+)', lambda match:\\\n# re.sub(r'([0-9\\.\\\\\\/]+)', r' \\1 ', match.group(0), re.U), sample, re.U)\n sample = re.sub(r'([a-z]+[0-9\\.]+)', lambda match: re.sub(r'([0-9\\.]+)', r' \\1', match.group(0), re.U), sample, re.U)\n \n sample = re.sub(r'(\\')', r'', sample, re.U)\n sample = re.sub(r'([\\\\\\/]+)', r'/', sample, re.U)\n \n '''12.13.5 = 12/23.5'''\n sample = re.sub(r'([0-9\\.]+\\.[0-9\\.]+\\.[0-9\\.]+)', lambda match: re.sub(r'([\\.])', r'/', match.group(0), count=1, flags=re.U), sample, re.U)\n \n '''12/.75 = 12/0.75'''\n sample = 
re.sub(r'\\/\\.[0-9]+', lambda match: re.sub(r'\\/\\.', r'/0.', match.group(0), re.U), sample, re.U)\n \n ''' ./20 = 20'''\n sample = re.sub(r'[^0-9]\\.\\/[0-9]', lambda match: match.group(0).replace('/', ' '), sample, re.U)\n \n '''.1.0 = 1.0'''\n sample = re.sub(r'\\.[0-9]+\\.[0-9]+', lambda match: match.group(0).lstrip('.'), sample, re.U)\n \n '''.750 = .75'''\n sample = re.sub(r'\\.[1-9]+[0]+', lambda match: match.group(0).rstrip('0'), sample, re.U)\n \n '''750.000 = 750'''\n sample = re.sub(r'([0-9]+\\.[0]+)', lambda match: match.group(0).rstrip('0')[:-1], sample, re.U)\n\n \n '''4/6/12 = 4/6 12'''\n split_slash = lambda splits: '/'.join(splits[:2]) + ' ' + splits[2]\n sample = re.sub(r'[0-9]+/[0-9]+/[0-9\\.]+', lambda match: split_slash(match.group(0).split('/')), sample, re.U)\n \n sample = replace_dot(sample)\n \n sample = re.sub(r'(\\s+)', r' ', sample, re.U)\n \n #print('after regex processing:')\n #print(sample)\n \n token_list = [token.strip() for token in sample.strip().split(' ')]\n \n token_list = [token_based_regex_processing(token) for token in token_list]\n \n #print('after token based regex processing:')\n #print(token_list)\n \n return remove_duplicate_tokens(token_list)\n# return token_list\n\n"
},
{
"alpha_fraction": 0.8059701323509216,
"alphanum_fraction": 0.8507462739944458,
"avg_line_length": 10.333333015441895,
"blob_id": "a1d998ef9c310df3a0f2bb3db8ed05f27da7b2f3",
"content_id": "cc83fc68cc9b38c5460cb4e8288c7c88bd988830",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 67,
"license_type": "no_license",
"max_line_length": 18,
"num_lines": 6,
"path": "/requirements.txt",
"repo_name": "ShreyasGithub/pyspark_ccf",
"src_encoding": "UTF-8",
"text": "numpy\npandas\npyspark==3.0.2\nipykernel\nfuzzywuzzy\npython-Levenshtein"
}
] | 8 |
blackishgray/url_shortner | https://github.com/blackishgray/url_shortner | 03756b0110bf63b659c9540a47fb1cf8b680ffe1 | 52e69a5aefe0ae9411d549b38032f89c053f63cd | 2aa084932ba4121bbb888d7190b31a8054a1c325 | refs/heads/master | 2023-08-28T02:17:33.335301 | 2021-10-31T12:52:18 | 2021-10-31T12:52:18 | 423,146,574 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.48245614767074585,
"alphanum_fraction": 0.6842105388641357,
"avg_line_length": 15.285714149475098,
"blob_id": "d63ba74da43818c3a5974754556701d635bf1444",
"content_id": "84f1e184cc37be256618fad86e53e541f2445460",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 228,
"license_type": "no_license",
"max_line_length": 23,
"num_lines": 14,
"path": "/requirements.txt",
"repo_name": "blackishgray/url_shortner",
"src_encoding": "UTF-8",
"text": "antiorm==1.2.1\nclick==8.0.3\ncolorama==0.4.4\ndb==0.1.1\ndb-sqlite3==0.0.1\nFlask==2.0.2\nFlask-SQLAlchemy==2.4.4\ngreenlet==1.1.2\nitsdangerous==2.0.1\nJinja2==3.0.2\nMarkupSafe==2.0.1\npsycopg2==2.9.1\nSQLAlchemy==1.3.19\nWerkzeug==2.0.2\n"
},
{
"alpha_fraction": 0.6568118333816528,
"alphanum_fraction": 0.6604074835777283,
"avg_line_length": 27.454545974731445,
"blob_id": "b6454f0514b97b9e26fad00c6ae82afb506e7727",
"content_id": "dd712a56273acdb96e5552c3028fbb0f460b0951",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2503,
"license_type": "no_license",
"max_line_length": 69,
"num_lines": 88,
"path": "/app.py",
"repo_name": "blackishgray/url_shortner",
"src_encoding": "UTF-8",
"text": "from flask import Flask, render_template, url_for, request, redirect\nimport os \nfrom flask_sqlalchemy import SQLAlchemy\nimport random\nimport string\napp = Flask(__name__, static_url_path='/static')\n\napp.config['SQLALCHEMY_DATABASE_URI'] =\"sqlite:///urls.db\"\napp.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False\n\ndb = SQLAlchemy(app)\n\[email protected]_first_request\ndef create_tables():\n db.create_all()\n\nclass Urls(db.Model):\n id_ = db.Column(\"id_\", db.Integer, primary_key=True)\n long = db.Column(\"long\", db.String())\n short = db.Column(\"short\", db.String(10))\n\n def __init__(self, long, short):\n self.long = long\n self.short = short\n\n\[email protected]('/')\ndef index():\n\treturn render_template('index.html')\n\ndef shorten_url():\n\tletters = string.ascii_lowercase + string.ascii_uppercase\n\twhile True:\n\t\trand_letters = random.choices(letters, k=3)\n\t\trand_letters = \"\".join(rand_letters)\n\t\tshort_url = Urls.query.filter_by(short=rand_letters).first()\n\t\tif not short_url:\n\t\t\treturn rand_letters\n\[email protected]('/url_process', methods=['POST', 'GET'])\ndef url_process():\n\tif request.method == 'POST':\n\t\turl_received = request.form[\"url_pro\"]\n\t\tfound_url = Urls.query.filter_by(long=url_received).first()\n\t\tif found_url:\n\t\t\treturn redirect(url_for(\"display_short_url\", url=found_url.short))\n\t\telse:\n\t\t\tshort_url = shorten_url()\n\t\t\tprint(short_url)\n\t\t\tnew_url = Urls(url_received, short_url)\n\t\t\tdb.session.add(new_url)\n\t\t\tdb.session.commit()\n\t\t\treturn redirect(url_for(\"display_short_url\", url=short_url))\n\telse:\n\t\treturn render_template('index.html')\n\[email protected]('/display/<url>')\ndef display_short_url(url):\n return render_template('results.html', short_url_display=url)\n\[email protected]('/<short_url>')\ndef redirection(short_url):\n long_url = Urls.query.filter_by(short=short_url).first()\n if long_url:\n return redirect(long_url.long)\n else:\n return f'<h1>Url doesnt exist</h1>'\n\[email protected]_processor\ndef override_url_for():\n return dict(url_for=dated_url_for)\n\n\[email protected]('/all_url')\ndef display_all():\n return render_template('all.html', vals=Urls.query.all())\n\ndef dated_url_for(endpoint, **values):\n if endpoint == 'static':\n filename = values.get('filename', None)\n if filename:\n file_path = os.path.join(app.root_path,\n endpoint, filename)\n values['q'] = int(os.stat(file_path).st_mtime)\n return url_for(endpoint, **values)\n\nif __name__=='__main__':\n\tapp.run(port=5000, debug=True)"
}
] | 2 |
voje/aoc2018 | https://github.com/voje/aoc2018 | 2ae9337237d0df78e2052845921965b760a2a53a | 3fbf40f7bf0e0237e1c81acc33e179145a107237 | a5c143f61d851673026097f55cedb8c3d6dcd0ff | refs/heads/master | 2020-04-09T02:50:11.746729 | 2019-01-14T13:56:23 | 2019-01-14T13:56:23 | 159,956,583 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.7616279125213623,
"alphanum_fraction": 0.7674418687820435,
"avg_line_length": 33.400001525878906,
"blob_id": "cbfcf9e34016eac2c22a2a6d62388046444fa75d",
"content_id": "8ad50a8e71d5c3e41249a3438ac10bb57018ecb8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 172,
"license_type": "no_license",
"max_line_length": 81,
"num_lines": 5,
"path": "/day5/README.md",
"repo_name": "voje/aoc2018",
"src_encoding": "UTF-8",
"text": "# Day5\n\nFor some reason, my answer is wrong. \nI've double checked with a script from reddit and tripple checked my input data. \nIs it because I skipped the last challenge?\n"
},
{
"alpha_fraction": 0.4492512345314026,
"alphanum_fraction": 0.4625623822212219,
"avg_line_length": 17.212121963500977,
"blob_id": "4bff01ede3a896f733494417212e2a6448aee3ce",
"content_id": "679ca4d301fdb8ef1ce22bbeaaf211be6f8bc966",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 601,
"license_type": "no_license",
"max_line_length": 45,
"num_lines": 33,
"path": "/day5/day5.py",
"repo_name": "voje/aoc2018",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python\n\n# wrong answer ...\n\nimport math\n\ndef match(a, b):\n matches = (abs(ord(a) - ord(b)) == 32)\n # print(\"{} {} {}\".format(a, b, matches))\n return matches\n\ndef compact(txt):\n i = 0\n while i < (len(txt)-1):\n if match(txt[i], txt[i+1]):\n txt = txt[:i] + txt[i+2:]\n else:\n i += 1\n return txt\n\nif __name__ == \"__main__\":\n\n with open(\"input_0.txt\") as f:\n txt = f.read()\n\n change = True\n while change:\n l = len(txt)\n txt = compact(txt) \n change = (len(txt) != l)\n\n print(len(txt))\n print (txt)\n"
},
{
"alpha_fraction": 0.5636363625526428,
"alphanum_fraction": 0.7939394116401672,
"avg_line_length": 29,
"blob_id": "c3f3f663cd0fdb2b19f57811b53ef2f16bce97bd",
"content_id": "313daac8a5c254956a46dadcf02b42c84094dc98",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 330,
"license_type": "no_license",
"max_line_length": 124,
"num_lines": 11,
"path": "/README.md",
"repo_name": "voje/aoc2018",
"src_encoding": "UTF-8",
"text": "# AoC 2018\n\n## Trick with curl:\nYou need to be logged in to receive input data. \nCurl works if you pass the session cookie: \n```bash\n$ curl --cookie \"session=53616c7465645f5fb618b3273f1d40f2e956c7bbd8a2bfbd4399e3d9746d25ac715c11e518d84ab27b0932ce8e1bb83a\" \\\nhttps://adventofcode.com/2018/day/1/input\n```\nTest 123\nAnother test 123\n"
},
{
"alpha_fraction": 0.514340341091156,
"alphanum_fraction": 0.533460795879364,
"avg_line_length": 19.076923370361328,
"blob_id": "b0baf6ae38684246fb64566904329c7d6c40bdc3",
"content_id": "3c94a81a47bdadbdb62fb86e4cacbd8016bcd7f2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 523,
"license_type": "no_license",
"max_line_length": 42,
"num_lines": 26,
"path": "/day2/day2_0.py",
"repo_name": "voje/aoc2018",
"src_encoding": "UTF-8",
"text": "import string\n\nwith open(\"input_0.txt\") as f:\n # get rid of newlines\n indata = [line[:-1] for line in f] \n\nprint(indata[:10])\n\ntwos = 0\nthrees = 0\n\nfor instring in indata:\n tw = False\n thr = False\n for letter in string.ascii_lowercase:\n c = instring.count(letter)\n if c == 2 and not tw:\n twos += 1\n tw = True\n elif c == 3 and not thr:\n threes += 1\n thr = True\n\nprint(\"twos: \", twos)\nprint(\"threes: \", threes)\nprint(\"mul: \", twos * threes)\n\n"
},
{
"alpha_fraction": 0.5548780560493469,
"alphanum_fraction": 0.5670731663703918,
"avg_line_length": 15.896552085876465,
"blob_id": "d7631321da05bd45c61cb32366d06455d7516c1f",
"content_id": "a71ff9605d69d88854629b3aa2cd0e408988baf1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 492,
"license_type": "no_license",
"max_line_length": 56,
"num_lines": 29,
"path": "/day1/day1_1.py",
"repo_name": "voje/aoc2018",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python3\n\ninit = []\n\nwith open(\"input_1.txt\") as f:\n init = [int(line) for line in f]\n\nprint(init)\nnewlist = [sum(init[:(i+1)]) for i,e in enumerate(init)]\nprint(newlist)\n\ndef cycle(lst):\n while True:\n for el in lst:\n yield el\n\nmy_cycle = cycle(init)\n\"\"\"\nfor i in range(20):\n print(my_cycle.next())\n\"\"\"\n\nwhile True:\n new = my_cycle.next() + newlist[-1]\n # print(new)\n if new in newlist:\n print(new)\n break\n newlist.append(new)\n\n\n"
}
] | 5 |
Nivya1/Basics | https://github.com/Nivya1/Basics | 71b62923a804d3405b688f110d1266e563506232 | c86173b88d62b86f21fafa883aa3b8f7ca68d867 | 08cde051b22dea31fbc7f4238c1875d7a6aa8c47 | refs/heads/master | 2021-01-12T07:45:39.376688 | 2016-12-23T02:37:08 | 2016-12-23T02:37:08 | 77,008,341 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.7053003311157227,
"alphanum_fraction": 0.7151943445205688,
"avg_line_length": 59.60869598388672,
"blob_id": "5abeca6ae6f48ed99bbe8c2b4edaf4c9cae942cb",
"content_id": "ee6ae01bc75e2de0a289f04ef7c1ecf647b2e471",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1415,
"license_type": "no_license",
"max_line_length": 242,
"num_lines": 23,
"path": "/login/forms.py",
"repo_name": "Nivya1/Basics",
"src_encoding": "UTF-8",
"text": "import re\r\nfrom django import forms\r\nfrom django.contrib.auth.models import User\r\nfrom django.utils.translation import ugettext_lazy as _\r\n\r\nclass RegistrationForm(forms.Form):\r\n\tusername = forms.RegexField(regex = r'^\\w+$', widget = forms.TextInput(attrs = dict(required = True, max_length = 30)), label = _(\"Username\"), error_messages = {'invalid': _(\"This value must contain only letters, numbers and Underscores.\")})\r\n\temail = forms.EmailField(widget = forms.TextInput(attrs = dict(required = True, max_length = 30)), label = _(\"Email Address\"))\r\n\tpassword1 = forms.CharField(widget = forms.PasswordInput(attrs = dict(required = True, max_length = 30, render_value = False)), label = _(\"Password\"))\r\n\tpassword2 = forms.CharField(widget = forms.PasswordInput(attrs = dict(required = True, max_length = 30, render_value = False)), label = _(\"Password(again)\"))\r\n\r\n\tdef clean_username(self):\r\n\t\ttry:\r\n\t\t\tuser = User.objects.get(username__iexact = self.cleaned_data['username'])\r\n\t\texcept User.DoesNotExist:\r\n\t\t\treturn self.cleaned_data['username']\r\n\t\traise forms.ValidationError(_(\"The username already exists. Please try another one.\"))\r\n\r\n\tdef clean(self):\r\n\t\tif 'password1' in self.cleaned_data and 'password2' in self.cleaned_data:\r\n\t\t\tif self.cleaned_data['password1'] != self.cleaned_data['password2']:\r\n\t\t\t\traise forms.ValidationError(_(\"The two password fields did not match.\"))\r\n\t\treturn self.cleaned_data"
}
] | 1 |
NathinduHimansha/CLI-MEMORY-GAME | https://github.com/NathinduHimansha/CLI-MEMORY-GAME | e3ed337bc72f36afa0831b4f745ab4257637fce0 | 9a94ddea49521f5df031382d9576474497846aa1 | ecb5784857a97ab9de0e3466ff3f19d3b2bfb66f | refs/heads/main | 2023-05-06T00:35:27.364046 | 2021-05-31T17:57:53 | 2021-05-31T17:57:53 | 372,589,951 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.40217629075050354,
"alphanum_fraction": 0.44911134243011475,
"avg_line_length": 26.902833938598633,
"blob_id": "c3f8c1c999659d9b9ba25cd981c6d4a07150d176",
"content_id": "7ed9899dad280c57b412d6753c998d535b6fd768",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 13785,
"license_type": "no_license",
"max_line_length": 163,
"num_lines": 494,
"path": "/Level_02.py",
"repo_name": "NathinduHimansha/CLI-MEMORY-GAME",
"src_encoding": "UTF-8",
"text": "\ndef getinput():\n choice1=True\n choice2=True\n \"function for get user input,validate and check\"\n global selection1,selection2,hold1,hold2,inp1,inp2\n global turncount1, turncount2, turncount3, turncount4, turncount5, turncount6, turncount7, turncount8, turncount9, turncount10, turncount11, turncount12\n\n #input01 and checking\n choice1 = True\n while choice1:\n print()#print some space\n print(\"\\n\".join(\"\\t\".join(\"{:1}\".format(item) for item in row) for row in display))#print the list as a grid\n \n userinput1 = input(\"Select a card: \")\n inp1 = userinput1.upper()#input01\n print()#print some space\n\n if inp1 not in length_check:#input checking\n print(\"You cant select previous card\")\n continue\n choice1=True\n\n if inp1 == \"1\":\n selection1 = cards[0]\n hold1 = \"1\"\n turncount1 += 1\n choice1 = False\n\n elif inp1 == \"2\":\n selection1 = cards[1]\n hold1 = \"2\"\n turncount2 += 1\n choice1 = False\n\n elif inp1 == \"3\":\n selection1 = cards[2]\n hold1 = \"3\"\n turncount3 += 1\n choice1 = False\n\n elif inp1 == \"4\":\n selection1 = cards[3]\n hold1 = \"4\"\n turncount4 += 1\n choice1 = False\n\n elif inp1 == \"5\":\n selection1 = cards[4]\n hold1 = \"5\"\n turncount5 += 1\n choice1 = False\n\n elif inp1 == \"6\":\n selection1 = cards[5]\n hold1 = \"6\"\n turncount6 += 1\n choice1 = False\n\n elif inp1 == \"7\":\n selection1 = cards[6]\n hold1 = \"7\"\n turncount7 += 1\n choice1 = False\n\n elif inp1 == \"8\":\n selection1 = cards[7]\n hold1 = \"8\"\n turncount8 += 1\n choice1 = False\n\n elif inp1 == \"9\":\n selection1 = cards[8]\n hold1 = \"9\"\n turncount9 += 1\n choice1 = False\n\n elif inp1 == \"10\":\n selection1 = cards[9]\n hold1 = \"10\"\n turncount10 += 1\n choice1 = False\n\n elif inp1 == \"11\":\n selection1 = cards[10]\n hold1 = \"11\"\n turncount11 += 1\n choice1 = False\n\n elif inp1 == \"12\":\n selection1 = cards[11]\n hold1 = \"12\"\n turncount12 += 1\n choice1 = False\n\n else:\n selection1 = \"INVALID_1\"\n print(\"Please enter a valid card\")\n choice1 = True\n\n print(\"Your selection is \", selection1)#prints the value of selection\n print() # print some space\n\n #input02 and checking\n choice2 = True\n while choice2:\n userinput2 = input(\"Select a card: \")\n inp2 = userinput2.upper()\n print()#print some space\n\n if inp2 not in length_check:#inputchecking\n print(\"You cant select previous card\")\n continue\n choice2=True\n\n if inp1 == inp2:\n print(\"You cant select the same card\")\n choice2 = True\n\n elif inp2 == \"1\":\n selection2 = cards[0]\n hold2 = \"1\"\n turncount1 += 1\n choice2 = False\n\n elif inp2 == \"2\":\n selection2 = cards[1]\n hold2 = \"2\"\n turncount2 += 1\n choice2 = False\n\n elif inp2 == \"3\":\n selection2 = cards[2]\n hold2 = \"3\"\n turncount3 += 1\n choice2 = False\n\n elif inp2 == \"4\":\n selection2 = cards[3]\n hold2 = \"4\"\n turncount4 += 1\n choice2 = False\n\n elif inp2 == \"5\":\n selection2 = cards[4]\n \n hold2 = \"5\"\n turncount5 += 1\n choice2 = False\n\n elif inp2 == \"6\":\n selection2 = cards[5]\n \n hold2 = \"6\"\n turncount6 += 1\n choice2 = False\n\n elif inp2 == \"7\":\n selection2 = cards[6]\n hold2 = \"7\"\n turncount7 += 1\n choice2 = False\n\n elif inp2 == \"8\":\n selection2 = cards[7]\n hold2 = \"8\"\n turncount8 += 1\n choice2 = False\n\n elif inp2 == \"9\":\n selection2 = cards[8]\n hold2 = \"9\"\n turncount9 += 1\n choice2 = False\n\n elif inp2 == \"10\":\n selection2 = cards[9]\n hold2 = \"10\"\n turncount10 += 1\n choice2 = False\n\n elif inp2 == \"11\":\n 
selection2 = cards[10]\n hold2 = \"11\"\n turncount11 += 1\n choice2 = False\n\n elif inp2 == \"12\":\n selection2 = cards[11]\n hold2 = \"12\"\n turncount12 += 1\n choice2 = False\n\n else:\n selection2 = \"INVALID_2\"\n print(\"Please enter a valid cards\")\n choice2 = True\n\n print(\"Your selection is\", selection2)\n print()\n return ()\n# ..........................................................................\n\ndef inputcheking():\n \"This function for check the inputs matching or not.calculating the turn tiles count and marks\"\n global current_time,end_time,cards,cards_copy,length_check,selection1, selection2,hold1, hold2, inp1,remaintime\n global turncount1, turncount2, turncount3, turncount4, turncount5, turncount6, turncount7, turncount8, turncount9, turncount10, turncount11, turncount12, marks\n\n if selection1 == selection2:#matching\n print(\"Its Matched\")\n marks += 20\n print(\"Your Marks: \", marks)\n print(hold1)\n print(hold2)\n print(length_check)\n length_check.remove(hold1)\n length_check.remove(hold2)\n print(length_check) \n grid()\n current_time=int(time.time())\n remaintime = (end_time-current_time)\n print(\"time remains: \", remaintime, \"seconds\")\n\n else:\n print(\"Its not Matched\")\n \n if inp1 == \"1\":#calculating the count of the turns in tiles\n marks = marks - (turncount1 * 5)\n \n elif inp1 == \"2\":\n marks = marks - (turncount2 * 5)\n\n elif inp1 == \"3\":\n marks = marks - (turncount3 * 5)\n\n elif inp1 == \"4\":\n marks = marks - (turncount4 * 5)\n\n elif inp1 == \"5\":\n marks = marks - (turncount5 * 5)\n\n elif inp1 == \"6\":\n marks = marks - (turncount6 * 5)\n\n elif inp1 == \"7\":\n marks = marks - (turncount7 * 5)\n\n elif inp1 == \"8\":\n marks = marks - (turncount8 * 5)\n\n elif inp1 == \"9\":\n marks = marks - (turncount9 * 5)\n\n elif inp1 == \"10\":\n marks = marks - (turncount10 * 5)\n\n elif inp1 == \"11\":\n marks = marks - (turncount11 * 5)\n\n elif inp1 == \"12\":\n marks = marks - (turncount12 * 5)\n\n if inp2 == \"1\":\n marks = marks - (turncount1 * 5)\n\n elif inp2 == \"2\":\n marks = marks - (turncount2 * 5)\n\n elif inp2 == \"3\":\n marks = marks - (turncount3 * 5)\n\n elif inp2 == \"4\":\n marks = marks - (turncount4 * 5)\n\n elif inp2 == \"5\":\n marks = marks - (turncount5 * 5)\n\n elif inp2 == \"6\":\n marks = marks - (turncount6 * 5)\n\n elif inp2 == \"7\":\n marks = marks - (turncount7 * 5)\n\n elif inp2 == \"8\":\n marks = marks - (turncount8 * 5)\n\n elif inp2 == \"9\":\n marks = marks - (turncount9 * 5)\n\n elif inp2 == \"10\":\n marks = marks - (turncount10 * 5)\n\n elif inp2 == \"11\":\n marks = marks - (turncount11 * 5)\n\n elif inp2 == \"12\":\n marks = marks - (turncount12 * 5)\n\n print(\"Your Marks: \", marks)#marks and time\n current_time=int(time.time())\n remaintime = (end_time-current_time)\n print(\"time remains: \", remaintime, \"seconds\")\n return ()\n# ....................................................................................................\n\ndef grid():\n 'This function for update the grid after input matches'\n global selection1, selection2,hold1,hold2\n\n for x in display:\n if hold1 in x:\n if hold1 == \"1\":\n x[0] = selection1\n\n if hold1 == \"2\":\n x[1] = selection1\n\n if hold1 == \"3\":\n x[2] = selection1\n\n if hold1 == \"4\":\n x[3] = selection1\n\n if hold1 == \"5\":\n x[0] = selection1\n\n if hold1 == \"6\":\n x[1] = selection1\n\n if hold1 == \"7\":\n x[2] = selection1\n\n if hold1 == \"8\":\n x[3] = selection1\n\n if hold1 == \"9\":\n x[0] = selection1\n\n if hold1 
== \"10\":\n x[1] = selection1\n\n if hold1 == \"11\":\n x[2] = selection1\n\n if hold1 == \"12\":\n x[3] = selection1\n\n if hold2 in x:\n if hold2 == \"1\":\n x[0] = selection2\n\n if hold2 == \"2\":\n x[1] = selection2\n\n if hold2 == \"3\":\n x[2] = selection2\n\n if hold2 == \"4\":\n x[3] = selection2\n\n if hold2 == \"5\":\n x[0] = selection2\n\n if hold2 == \"6\":\n x[1] = selection2\n\n if hold2 == \"7\":\n x[2] = selection2\n\n if hold2 == \"8\":\n x[3] = selection2\n\n if hold2 == \"9\":\n x[0] = selection2\n\n if hold2 == \"10\":\n x[1] = selection2\n\n if hold2 == \"11\":\n x[2] = selection2\n\n if hold2 == \"12\":\n x[3] = selection2\n return ()\n# .....................................................................................\n\ndef continuecheck():\n choice3=True\n choice4=True\n \"This function for check the state of play again,exit and level02\"\n global length_check,remaintime,marks\n\n if (remaintime <= 0):#if time over\n print(\"Time Over\")\n print(\"If want to play again press P\")\n print(\"If want to exit press E\")\n\n while (choice3):\n inp3 = str(input(\":\"))\n inp3=inp3.upper()\n\n if (inp3 == \"P\"):\n main()\n choice3=False\n\n elif (inp3 == \"E\"):\n print(\"You select EXIT\")\n sys.exit()\n choice3=False\n\n else:\n print(\"Enter a valid command\")\n choice3 = True\n\n if (len(length_check) == 0):#if all selects by the user\n print(\"Your Marks\",marks)\n print(\"Time Bonus\",remaintime)\n print(\"Total Marks\",(marks+remaintime))\n print(\"Level 02 Succesfully Completed\")\n print(\"To exit press E\")\n\n while (choice4):\n inp4 = str(input(\":\"))\n inp4=inp4.upper()\n \n if (inp4 == \"E\"):\n print(\"You selected EXIT\")\n sys.exit()\n choice4=False\n\n else:\n print(\"enter a valid command\")\n choice4 = True\n\n return ()\n# .......................................................................................................................\n\ndef main():\n import random, time, sys\n \"This functions for asign the variables,check user choice and start the game\"\n global current_time,end_time,maindisplay,cards,cards_copy, selection1, selection2,mainlist,mainlength_check,length_check, display, totaltime,remaintime,marks\n global turncount1, turncount2, turncount3, turncount4, turncount5, turncount6, turncount7, turncount8, turncount9, turncount10, turncount11, turncount12\n\n marks = 0\n \n turncount1 = 0\n turncount2 = 0\n turncount3 = 0\n turncount4 = 0\n turncount5 = 0\n turncount6 = 0\n turncount7 = 0\n turncount8 = 0\n turncount9 = 0\n turncount10 = 0\n turncount11 = 0\n turncount12 = 0\n\n choice0 = True\n\n mainlist = [\"A\", \"A\", \"A\", \"A\", \"B\", \"B\", \"B\", \"B\", \"C\", \"C\", \"C\", \"C\"]\n maindisplay = [[\"1\", \"2\", \"3\", \"4\"],[\"5\", \"6\", \"7\", \"8\"],[\"9\", \"10\", \"11\", \"12\"]]\n mainlength_check=[\"1\",\"2\",\"3\",\"4\",\"5\",\"6\",\"7\",\"8\",\"9\",\"10\",\"11\",\"12\"]\n\n print()\n print(' Welcome to the level 02 in Memory Game.')\n print()\n print(\" To start the game enter Y or to exit enter E\")\n while(choice0):\n startvalue = str(input(\"Your choice:\"))\n start = startvalue.upper()#\n while (start != \"E\"): #to exit the game \n choice0=False\n if (start == \"Y\" or start == \"P\"): # checking the game status\n choice0=False\n current_time=int(time.time())\n end_time=(current_time+50)\n cards = mainlist.copy()\n display = maindisplay.copy()\n length_check = mainlength_check.copy()\n cards = random.sample(cards, len(cards))\n print(cards)#remove\n remaintime = (end_time-current_time)\n\n else:\n 
print(\"Please enter a valid choice\")\n choice0=True\n\n while (len(length_check) != 0) and (remaintime > 0):\n getinput()\n inputcheking()\n continuecheck()\n print(\"You select EXIT\")\n sys.exit()\n return()\n"
}
] | 1 |
veirus/vimrc-to-json | https://github.com/veirus/vimrc-to-json | 6a467ac72a8d8f2e8d4ccf5535d7c569739ea9a3 | 6fa97b46ead6c8e6efb31c457d791df215b1bddd | c47e3327bca0c0bbce10f90f8d3b28d763963443 | refs/heads/master | 2020-04-22T02:20:31.011173 | 2019-02-11T03:20:15 | 2019-02-11T03:20:15 | 170,045,029 | 0 | 0 | MIT | 2019-02-11T00:42:57 | 2019-02-11T00:41:59 | 2019-02-08T21:56:24 | null | [
{
"alpha_fraction": 0.6444844007492065,
"alphanum_fraction": 0.6486810445785522,
"avg_line_length": 30.471698760986328,
"blob_id": "4c4f51e0824030d7967ae3e988784ab6914c49a9",
"content_id": "8bd8c6c57efe50bc23303b6f29438c07f1a6b54c",
"detected_licenses": [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1668,
"license_type": "permissive",
"max_line_length": 66,
"num_lines": 53,
"path": "/vimrc-to-json.py",
"repo_name": "veirus/vimrc-to-json",
"src_encoding": "UTF-8",
"text": "from pathlib import Path\nimport re\nimport json\n\n# Get the path of this file.\npath = Path(__file__).parent\n\nwith open(path / \".vimrc\", \"r\", encoding=\"utf-8\") as f:\n lines = f.readlines()\n\nmaptypes = {\n \"nmap\": \"vim.normalModeKeyBindings\",\n \"vmap\": \"vim.visualModeKeyBindings\",\n \"imap\": \"vim.insertModeKeyBindings\",\n \"nnoremap\": \"vim.normalModeKeyBindingsNonRecursive\",\n \"vnoremap\": \"vim.visualModeKeyBindingsNonRecursive\",\n \"inoremap\": \"vim.insertModeKeyBindingsNonRecursive\",\n}\n\njsondata = {\n \"vim.normalModeKeyBindings\": [],\n \"vim.visualModeKeyBindings\": [],\n \"vim.insertModeKeyBindings\": [],\n \"vim.normalModeKeyBindingsNonRecursive\": [],\n \"vim.visualModeKeyBindingsNonRecursive\": [],\n \"vim.insertModeKeyBindingsNonRecursive\": [],\n}\n\n\n# Parses abc to [\"a\", \"b\", \"c\"] and :wq<CR> to [\":wq\"]\ndef mapToJSONList(mapstring, after=False):\n if after and mapstring.startswith(\":\") and len(mapstring) > 1:\n map_json = re.match(\"(:\\w+)\", mapstring).group(1)\n return {\"command\": [map_json]}\n\n parts = re.findall(\"(<[^>]+>|.)\", mapstring)\n return {\"after\" if after else \"before\": parts}\n\n\n# Get all the mappings and place them in the correct category.\nfor item in lines:\n matches = re.match(\"(^.*map)\\s([\\S]+)\\s+([\\S]+)$\", item)\n if matches:\n maptype = matches.group(1)\n before = mapToJSONList(matches.group(2))\n after = mapToJSONList(matches.group(3), True)\n maptype = maptypes[maptype]\n jsondata[maptype].append({**before, **after})\n\n\n# Write the JSON to settings.json in the same directory.\nwith open(path / \"settings.json\", \"w\") as f:\n json.dump(jsondata, f, indent=4)\n"
}
] | 1 |
mak2salazarjr/arxiv-browse | https://github.com/mak2salazarjr/arxiv-browse | 4161da7c0a7d72084a87ca4743d5a7bbb3ef53fe | 4abd9b41042ab2a6f83f4564ad4035f20e55b700 | 80e957c101fc2569b8c11492b392e4461b39afd2 | refs/heads/master | 2020-12-11T15:55:45.294365 | 2020-01-08T19:55:17 | 2020-01-08T19:55:17 | null | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6471009254455566,
"alphanum_fraction": 0.6481746435165405,
"avg_line_length": 31.870588302612305,
"blob_id": "0f17f73bb04a893fafd05295d84dc25bed4ac700",
"content_id": "73c1c8f165dae92f60376a67c343830d3b9f469f",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2794,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 85,
"path": "/browse/controllers/prevnext/__init__.py",
"repo_name": "mak2salazarjr/arxiv-browse",
"src_encoding": "UTF-8",
"text": "\"\"\"Handle requests to support sequential navigation between arXiv IDs.\"\"\"\n\nfrom flask import url_for\nfrom typing import Tuple, Dict, Any\nfrom werkzeug import MultiDict\nfrom werkzeug.exceptions import InternalServerError, BadRequest\n\nfrom browse.domain.identifier import Identifier, IdentifierException\nfrom browse.services.database import get_sequential_id\nfrom arxiv import status\nfrom arxiv.taxonomy.definitions import ARCHIVES, CATEGORIES_ACTIVE\nfrom arxiv.base import logging\n\n\nResponse = Tuple[Dict[str, Any], int, Dict[str, Any]]\nlogger = logging.getLogger(__name__)\n\n\ndef get_prevnext(request_params: MultiDict) -> Response:\n \"\"\"\n Get the next or previous arXiv ID in the browse context.\n\n The 'id', 'function', and 'context' request parameters are required. The\n 'site' parameter from the classic prevnext is no longer supported.\n\n Parameters\n ----------\n request_params : dict\n\n Returns\n -------\n dict\n Search result response data.\n int\n HTTP status code.\n dict\n Headers to add to the response.\n\n Raises\n ------\n InternalServerError\n Raised when there was an unexpected problem executing the query.\n BadRequest\n Raised when request parameters are missing, invalid, or when an ID\n redirect cannot be returned even when the request parameters are valid.\n\n \"\"\"\n if 'id' not in request_params:\n raise BadRequest('Missing article identifier')\n try:\n arxiv_id = Identifier(request_params['id'])\n except IdentifierException:\n raise BadRequest(f\"Invalid article identifier {request_params['id']}\")\n\n if not ('function' in request_params\n and request_params['function'] in ['prev', 'next']):\n raise BadRequest('Missing or invalid function request')\n\n if 'context' not in request_params:\n raise BadRequest('Missing context')\n context = request_params['context']\n\n if not (context in CATEGORIES_ACTIVE\n or context in ARCHIVES or context == 'all'):\n raise BadRequest('Invalid context')\n\n is_next = request_params['function'] == 'next'\n try:\n seq_id = get_sequential_id(paper_id=arxiv_id,\n is_next=is_next,\n context=context)\n except Exception as ex:\n logger.warning(f'Error getting sequential ID: {ex}')\n raise InternalServerError from ex\n\n if not seq_id:\n raise BadRequest(\n f'No {\"next\" if is_next else \"previous\"} article found for '\n f'{arxiv_id.id} in {context}'\n )\n\n redirect_url = url_for('browse.abstract',\n arxiv_id=seq_id,\n context=context)\n return {}, status.HTTP_301_MOVED_PERMANENTLY, {'Location': redirect_url}\n"
},
{
"alpha_fraction": 0.5395228266716003,
"alphanum_fraction": 0.5659672021865845,
"avg_line_length": 32.77669906616211,
"blob_id": "bae4a6333bffae40e0d7ca57173e3f8b288b56a4",
"content_id": "c79e37f0f50164ede1f9902ba60d3044b0bc1530",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3479,
"license_type": "permissive",
"max_line_length": 97,
"num_lines": 103,
"path": "/browse/controllers/prevnext/tests.py",
"repo_name": "mak2salazarjr/arxiv-browse",
"src_encoding": "UTF-8",
"text": "\"\"\"Tests for prevnext controller, :mod:`browse.controllers.prevnext`.\"\"\"\n\nfrom unittest import TestCase, mock\nfrom werkzeug import MultiDict\nfrom werkzeug.exceptions import BadRequest\nfrom browse.controllers import prevnext\n\n\nclass TestPrevNextController(TestCase):\n \"\"\"Tests for :func:`.get_prevnext`.\"\"\"\n\n def test_missing_parameters(self) -> None:\n \"\"\"Test request with missing parameters.\"\"\"\n request_data = MultiDict()\n with self.assertRaises(BadRequest):\n prevnext.get_prevnext(request_data)\n\n request_data = MultiDict({\n 'id': '1801.00001'\n })\n with self.assertRaises(BadRequest):\n prevnext.get_prevnext(request_data)\n\n request_data = MultiDict({\n 'id': '1801.00001',\n 'function': 'next'\n })\n with self.assertRaises(BadRequest):\n prevnext.get_prevnext(request_data)\n\n request_data = MultiDict({\n 'id': '1801.00001',\n 'context': 'cs'\n })\n with self.assertRaises(BadRequest):\n prevnext.get_prevnext(request_data)\n\n request_data = MultiDict({\n 'function': 'prev',\n 'context': 'cs'\n })\n with self.assertRaises(BadRequest):\n prevnext.get_prevnext(request_data)\n\n def test_bad_parameters(self) -> None:\n \"\"\"Test parameters with bad values.\"\"\"\n request_data = MultiDict({\n 'id': 'foo', # invalid\n 'function': 'prev', # valid\n 'context': 'cs.AI' # valid\n })\n with self.assertRaises(BadRequest):\n prevnext.get_prevnext(request_data)\n\n request_data = MultiDict({\n 'id': 'cs/0001001', # valid\n 'function': 'bar', # invalid\n 'context': 'cs' # valid\n })\n with self.assertRaises(BadRequest):\n prevnext.get_prevnext(request_data)\n\n request_data = MultiDict({\n 'id': 'cs/0001001', # valid\n 'function': 'next', # valid\n 'context': 'baz' # invalid\n })\n with self.assertRaises(BadRequest):\n prevnext.get_prevnext(request_data)\n\n @mock.patch('browse.controllers.prevnext.get_sequential_id')\n @mock.patch('browse.controllers.prevnext.url_for')\n def test_good_parameters(self, mock_url_for, mock_get_sequential_id) -> None: # type: ignore\n \"\"\"Test parameters with good values.\"\"\"\n request_data = MultiDict({\n 'id': '1801.00001',\n 'function': 'next',\n 'context': 'all'\n })\n mock_get_sequential_id.return_value = '1801.00002'\n _, status, headers = prevnext.get_prevnext(request_data)\n self.assertEqual(status, 301)\n\n request_data = MultiDict({\n 'id': '1801.00002',\n 'function': 'prev',\n 'context': 'cs.AI'\n })\n mock_get_sequential_id.return_value = '1801.00001'\n _, status, headers = prevnext.get_prevnext(request_data)\n self.assertEqual(status, 301)\n\n request_data = MultiDict({\n 'id': '1701.00002',\n 'function': 'next',\n 'context': 'physics.gen-ph'\n })\n mock_get_sequential_id.return_value = None\n with self.assertRaises(BadRequest):\n prevnext.get_prevnext(request_data)\n mock_get_sequential_id.return_value = ''\n with self.assertRaises(BadRequest):\n prevnext.get_prevnext(request_data)\n"
},
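The test file above walks the missing/bad-parameter cases one by one. The same cases fit naturally into a Go table-driven test; this is a self-contained sketch against a stand-in validator, not the project's code:

```go
package main

import (
	"net/url"
	"testing"
)

// validPrevNext mirrors the parameter checks the tests above exercise;
// it is a sketch, not the project's actual validation logic.
func validPrevNext(rawQuery string) bool {
	q, err := url.ParseQuery(rawQuery)
	if err != nil {
		return false
	}
	fn := q.Get("function")
	return q.Get("id") != "" && q.Get("context") != "" &&
		(fn == "prev" || fn == "next")
}

func TestBadParameters(t *testing.T) {
	bad := []string{
		"",              // everything missing
		"id=1801.00001", // no function, no context
		"id=cs%2F0001001&function=bar&context=cs", // invalid function
	}
	for _, qs := range bad {
		if validPrevNext(qs) {
			t.Errorf("expected query %q to be rejected", qs)
		}
	}
}
```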
{
"alpha_fraction": 0.5811808109283447,
"alphanum_fraction": 0.5977859497070312,
"avg_line_length": 40.69230651855469,
"blob_id": "aa45f48980767701a64be24ebb44ca9bf6dd323a",
"content_id": "e835ba930c726eb464497f4186d3371af1cd9639",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "HTML",
"length_bytes": 542,
"license_type": "permissive",
"max_line_length": 145,
"num_lines": 13,
"path": "/browse/templates/stats/base.html",
"repo_name": "mak2salazarjr/arxiv-browse",
"src_encoding": "UTF-8",
"text": "{%- extends \"base.html\" -%}\n{# Do not show login status on stats pages for now, per classic #}\n\n{% block head %}\n {{ super() -}}\n <script src=\"{{ url_for('static', filename='js/lib/d3/3.5.17/d3.min.js') }}\" type=\"text/javascript\"></script>\n{% endblock head %}\n\n{% block header_h1 %}<h1><a href=\"{{ url_for('.home') }}\">{{ config['BROWSE_SITE_LABEL'] }}</a> > stats > server usage</h1>{% endblock %}\n\n{%- block content %}\n<p>See also <a href=\"{{ url_for('help') }}/stats\">other arXiv usage statistics</a>.</p>\n{% endblock content %}\n"
},
{
"alpha_fraction": 0.6648089289665222,
"alphanum_fraction": 0.6695860028266907,
"avg_line_length": 77.5,
"blob_id": "0c87019d1c37b89e01cd1e04d748e257306291d6",
"content_id": "216f26fb80361bc2bcb1bd8ed5873f126880bf32",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "HTML",
"length_bytes": 1256,
"license_type": "permissive",
"max_line_length": 319,
"num_lines": 16,
"path": "/browse/templates/stats/monthly_submissions.html",
"repo_name": "mak2salazarjr/arxiv-browse",
"src_encoding": "UTF-8",
"text": "{%- extends \"stats/base.html\" -%}\n\n{% block title %}Monthly Submissions{% endblock %}\n{% block header_h1 %}<h1><a href=\"{{ url_for('.home') }}\">{{ config['BROWSE_SITE_LABEL'] }}</a> > stats > monthly submission rates</h1>{% endblock %}\n\n{% block content %}\n<h2>arXiv Monthly Submission Rates [<a title=\"download monthly submission CSV data\" href=\"{{ url_for('browse.stats', command='get_monthly_submissions') }}\">CSV</a>]</h2>\n{% include \"stats/monthly_submissions_js.html\" %}\n<p><b style=\"color:steelblue\">Blue</b>: Number of new submissions received during each month since {{ arxiv_start_dt.strftime('%B %Y') }}.<br />\n Hover over the graph to see the exact count for a given month.</p>\n<p>Total number of submissions shown in graph as of {{ current_dt.strftime('%B %-d, %Y') }} (after {{ \"%.1f\"|format(arxiv_age_years|float) }} years) = {{ \"{:,}\".format(num_submissions) }}</p>\n<p>\n The total number of submissions excludes {{ \"{:,}\".format(num_migrated) }} articles that were migrated to arXiv rather than being submitted directly, and includes {{ \"{:,}\".format(num_deleted) }} articles that have been deleted. The total number of articles available is {{ \"{:,}\".format(num_submissions_adjusted) }}.\n</p>\n{{ super() }}\n{% endblock content %}\n"
}
] | 4 |
sudha2327/Sum-of-the-digit | https://github.com/sudha2327/Sum-of-the-digit | 2c6e6137910405632199a646d5bd41194b257ace | efb49af251cff4ae7986b502ea7864ea1c3e5c97 | dce5831f498138be4e6fe147eddb98207f2cbcbb | refs/heads/main | 2023-07-09T03:03:17.209603 | 2021-08-06T15:14:44 | 2021-08-06T15:14:44 | 393,414,684 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6512702107429504,
"alphanum_fraction": 0.7251732349395752,
"avg_line_length": 26.0625,
"blob_id": "df8647b223138c140476677a094a8776d12a8a77",
"content_id": "ac783df1852bc473315c2cda7d6353c8fbf8365f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 441,
"license_type": "no_license",
"max_line_length": 311,
"num_lines": 16,
"path": "/README.md",
"repo_name": "sudha2327/Sum-of-the-digit",
"src_encoding": "UTF-8",
"text": "# Sum-of-the-digit\nYou're given an integer N. Write a program to calculate the sum of all the digits of N. Input The first line contains an integer T, the total number of testcases. Then follow T lines, each line contains an integer N. Output For each test case, calculate the sum of digits of N, and display it in a new line. \n\n#Constraints \n1 ≤ T ≤ 1000 1 ≤ N ≤ 1000000 \n\nExample \nInput \n3 \n12345 \n31203 \n2123 \nOutput \n15 \n9 \n8\n"
},
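As a worked example of the problem statement above (12345 → 1+2+3+4+5 = 15), here is a short digit-sum sketch in Go:

```go
package main

import "fmt"

// digitSum adds up the decimal digits of n,
// e.g. digitSum(12345) == 1+2+3+4+5 == 15.
func digitSum(n int) int {
	total := 0
	for n > 0 {
		total += n % 10 // peel off the last digit
		n /= 10         // drop it
	}
	return total
}

func main() {
	for _, n := range []int{12345, 31203, 2123} {
		fmt.Println(digitSum(n)) // prints 15, 9, 8
	}
}
```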
{
"alpha_fraction": 0.5425000190734863,
"alphanum_fraction": 0.550000011920929,
"avg_line_length": 14.956521987915039,
"blob_id": "5f396c8a14c3beb23b477062946cc6fa8c48ea3e",
"content_id": "8ae9e7ce7bdac3ae9f0b0c713f533d775a549f9d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 400,
"license_type": "no_license",
"max_line_length": 45,
"num_lines": 23,
"path": "/sumofdigits.py",
"repo_name": "sudha2327/Sum-of-the-digit",
"src_encoding": "UTF-8",
"text": "\r\n# creating an empty list\r\nlst = []\r\n \r\n# number of elements as input\r\nn = int(input())\r\n \r\n# iterating till the range\r\nfor i in range(0, n):\r\n ele = int(input())\r\n \r\n lst.append(str(ele)) # adding the element\r\n \r\nprint(lst)\r\n\r\n#process\r\n#print(\"process of printing\")\r\nsum=0\r\nfor i in lst:\r\n sum=0\r\n for digit in str(i):\r\n \r\n sum=int(digit)+sum\r\n print(sum)\r\n \r\n\r\n"
}
] | 2 |
hipposareevil/books | https://github.com/hipposareevil/books | d4eef5bbc2bed7d25387eaa648d1138ebc2a08a8 | 8cb0d09fbbfd936064795f068b1aedfcab2165f9 | c2025f234bcc079ef45e9bf94677e503b86c8e11 | refs/heads/master | 2022-12-26T19:28:46.705592 | 2019-07-02T23:02:36 | 2019-07-02T23:02:36 | 74,049,730 | 6 | 1 | null | 2016-11-17T17:14:26 | 2022-08-31T02:05:36 | 2022-12-10T16:07:23 | Java | [
{
"alpha_fraction": 0.5545549988746643,
"alphanum_fraction": 0.5661864280700684,
"avg_line_length": 23.27140235900879,
"blob_id": "1fb106b0191c0c8fe44644a35a2a6ebe453cbdff",
"content_id": "9996197d92697660f54ff570019681d563153934",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 13326,
"license_type": "no_license",
"max_line_length": 142,
"num_lines": 549,
"path": "/test/author.sh",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "\n######################\n#\n# author related calls\n#\n#\n######################\n\n\n\n##########\n# get author by id\n#\n##########\nget_author_by_id() {\n author_id=\"$1\"\n\n result=$(curl -s -w 'CODE%{http_code}CODE' \\\n -X GET ${ROOT_URL}/author/${author_id} \\\n -H \"authorization: $BEARER\" \\\n -H 'cache-control: no-cache' \\\n -H 'content-type: application/json')\n if [ $? -ne 0 ]; then\n error \"Error making GET to /author for author '$authorName'\"\n fi\n\n # get code\n code=$(echo \"$result\" | awk -FCODE '{print $2}' | xargs)\n if [[ \"$code\" -ne \"200\" ]]; then\n error \"Error getting author '$authorName'. code: $code: $error\"\n fi\n\n # strip code\n result=$(echo \"$result\" | sed \"s/CODE${code}CODE//g\")\n echo \"$result\"\n}\n\n\n##########\n# Get all authors\n#\n##########\nget_all_authors() {\n result=$(curl -s -w 'CODE%{http_code}CODE' \\\n -X GET ${ROOT_URL}/author \\\n -H \"authorization: $BEARER\" \\\n -H 'cache-control: no-cache' \\\n -H 'content-type: application/json')\n if [ $? -ne 0 ]; then\n error \"Error making GET to /author for all authors\"\n fi\n\n # get code\n code=$(echo \"$result\" | awk -FCODE '{print $2}' | xargs)\n if [[ \"$code\" -ne \"200\" ]]; then\n error \"Error getting all authors: $code: $error. http_code: $result.\"\n fi\n\n # strip code\n result=$(echo \"$result\" | sed \"s/CODE${code}CODE//g\")\n echo \"$result\"\n}\n\n##########\n# Get all authors w/ offset and limit\n#\n##########\nget_all_authors_with_offset_limit() {\n offset=$1\n limit=$2\n\n result=$(curl -s -w 'CODE%{http_code}CODE' \\\n -X GET \"${ROOT_URL}/author?offset=${offset}&limit=${limit}\"\\\n -H \"authorization: $BEARER\" \\\n -H 'cache-control: no-cache' \\\n -H 'content-type: application/json')\n if [ $? -ne 0 ]; then\n error \"Error making GET to /author for all authors\"\n fi\n\n # get code\n code=$(echo \"$result\" | awk -FCODE '{print $2}' | xargs)\n if [[ \"$code\" -ne \"200\" ]]; then\n error \"Error getting all authors: $code: $error. http_code: $result.\"\n fi\n\n # strip code\n result=$(echo \"$result\" | sed \"s/CODE${code}CODE//g\")\n echo \"$result\"\n}\n\n##########\n# get author by name\n#\n# also takes offset and limit\n##########\nget_author_by_name() {\n authorName=\"$1\"\n offset=\"$2\"\n limit=\"$3\"\n\n query=$(url_encode \"$authorName\")\n\n result=$(curl -s -w 'CODE%{http_code}CODE' \\\n -X GET \"${ROOT_URL}/author?name=$query&offset=${offset}&limit=${limit}\" \\\n -H \"authorization: $BEARER\" \\\n -H 'cache-control: no-cache' \\\n -H 'content-type: application/json')\n if [ $? -ne 0 ]; then\n error \"Error making GET to /author for author '$authorName'\"\n fi\n\n # get code\n code=$(echo \"$result\" | awk -FCODE '{print $2}' | xargs)\n if [[ \"$code\" -ne \"200\" ]]; then\n error \"Error getting author '$authorName'. code: $code: $error\"\n fi\n\n # strip code\n result=$(echo \"$result\" | sed \"s/CODE${code}CODE//g\")\n echo \"$result\"\n}\n\n\n\n##########\n# delete author\n#\n# params:\n# author id\n##########\ndelete_author() {\n author_id=\"$1\"\n\n result=$(curl -s -w 'CODE%{http_code}CODE' \\\n -X DELETE ${ROOT_URL}/author/${author_id} \\\n -H \"authorization: $BEARER\" \\\n -H 'cache-control: no-cache' \\\n -H 'content-type: application/json')\n if [ $? -ne 0 ]; then\n error \"Error making DELETE to /author for author $author_id\"\n fi\n\n # get code\n code=$(echo \"$result\" | awk -FCODE '{print $2}' | xargs)\n if [[ \"$code\" -ne \"200\" ]]; then\n echo \"Error deleting author '$author_id'. 
code: $code: $error\"\n fi\n\n # strip code\n result=$(echo \"$result\" | sed \"s/CODE${code}CODE//g\")\n}\n\n\n\n##########\n# update author\n#\n#########\n_update_author() {\n author_id=\"$1\"\n post_data=\"$2\"\n\nlogit \"Author id: $author_id\"\n\n ##########\n # create in database now\n result=$(curl -s -w 'CODE%{http_code}CODE' \\\n -X PUT \"${ROOT_URL}/author/${author_id}\" \\\n -H 'content-type: application/json' \\\n -H \"authorization: $BEARER\" \\\n -d \"$post_data\" \\\n -H 'cache-control: no-cache')\n if [ $? -ne 0 ]; then\n error \"Error making PUT for test author\"\n fi\n\n # get code\n code=$(echo \"$result\" | awk -FCODE '{print $2}' | xargs)\n if [[ \"$code\" != \"200\" ]]; then\n error \"Error making PUT for test author. code: $code: $result\"\n fi\n\n author_result=$(echo \"$result\" | sed \"s/CODE${code}CODE//g\")\n logit \"author updated\"\n}\n\n##########\n# Update author\n##########\nupdate_author() {\n author_id=$1\n\nread -r -d '' author_data <<EOF\n{\n\"name\":\"Isaac Asimov!\",\n\"olKey\":\"OL34221AXX\",\n\"birthDate\":null,\n\"imageSmall\":\"https://covers.openlibrary.org/a/olid/OL34221A-L.jpg\",\n\"imageLarge\":\"https://covers.openlibrary.org/a/olid/OL34221A-L.jpg\"\n}\nEOF\n\n _update_author $author_id \"$author_data\"\n}\n\n\n##########\n# Create author in db\n# \n##########\n_create_author() {\n post_data=\"$1\"\n\n ##########\n # create in database now\n result=$(curl -s -w 'CODE%{http_code}CODE' \\\n -X POST \"${ROOT_URL}/author\" \\\n -H 'content-type: application/json' \\\n -H \"authorization: $BEARER\" \\\n -d \"$post_data\" \\\n -H 'cache-control: no-cache')\n if [ $? -ne 0 ]; then\n error \"Error making POST for test author\"\n fi\n\n # get code\n code=$(echo \"$result\" | awk -FCODE '{print $2}' | xargs)\n if [[ \"$code\" != \"200\" ]]; then\n if [[ \"$code\" == \"409\" ]]; then\n error \"Author already exists!\"\n else\n error \"Error making POST for test author. 
code: $code: $result\"\n fi\n fi\n\n author_result=$(echo \"$result\" | sed \"s/CODE${code}CODE//g\")\n echo \"$author_result\"\n}\n\n\n#########\n# Create Madeleine Lengle\n#\n#########\ncreate_author_lengle() {\nread -r -d '' author_data <<EOF\n{\n\"name\":\"Madeleine LEngle\",\n\"olKey\":\"OL28188A\",\n\"subjects\":[\"Fiction\",\"In library\",\"Juvenile fiction\",\"Madeleine LEngle\",\"Science fiction\",\"Biography\",\"Fantasy\",\"Adventure and adventurers\"],\n\"birthDate\":null,\n\"imageSmall\":\"https://covers.openlibrary.org/a/olid/OL28188A-S.jpg\",\n\"imageMedium\":\"https://covers.openlibrary.org/a/olid/OL28188A-M.jpg\",\n\"imageLarge\":\"https://covers.openlibrary.org/a/olid/OL28188A-L.jpg\"\n}\nEOF\n\n # create author\n _create_author \"${author_data}\"\n}\n\n#########\n# Create Asimov\n#\n#########\ncreate_author_asimov() {\nread -r -d '' author_data <<EOF\n{\n\"name\":\"Isaac Asimov\",\n\"olKey\":\"OL34221A\",\n\"birthDate\": null,\n\"imageSmall\": null,\n\"goodreadsUrl\": \"https://www.goodreads.com/author/show/16667.Isaac_Asimov\",\n\"imageMedium\":\"https://covers.openlibrary.org/a/olid/OL34221A-M.jpg\",\n\"imageLarge\": null\n}\nEOF\n\n # create author\n _create_author \"${author_data}\"\n}\n\n\n#########\n# Create generic author\n#\n# params:\n# author name\n#########\ncreate_author() {\n name=\"$1\"\nread -r -d '' author_data <<EOF\n{\n\"name\":\"$name\",\n\"olKey\":\"olkey for $name\",\n\"birthDate\": null,\n\"imageSmall\": null,\n\"goodreadsUrl\": \"https://${name}.here\",\n\"imageMedium\":\"image for $name\",\n\"imageLarge\": null\n}\nEOF\n\n # create author\n _create_author \"${author_data}\"\n}\n\n\n\n\n######\n# print info for author\n# \n######\nprint_author_info() {\n author_info=\"$1\"\n name=$(echo \"$author_info\" | jq -r .name)\n id=$(echo \"$author_info\" | jq -r .id)\n\n echo \"Author: '$name', ID: '$id'\"\n}\n\n\n\n########\n# delete all authors\n#\n# param: json with all authors\n########\ndelete_all_authors() {\n # get 1000 authors\n authors=$(get_all_authors_with_offset_limit 0 1000 )\n ids=$(echo \"${authors}\" | jq -r \".data[].id\" )\n num=$(echo \"${authors}\" | jq -r \".data | length\" )\n\n echo \"Delete all ($num) authors.\"\n\n for id in $ids\n do\n $(delete_author $id)\n done\n}\n\n###############\n#\n# Clean all authors\n#\n###############\nauthor::clean() {\n echo \"\"\n delete_all_authors\n}\n\n\n###############\n#\n# Main test\n#\n###############\nauthor::main_test() {\n echo \"Get all authors\"\n all_authors=$(get_all_authors)\n\n echo \"\"\n echo \"----Delete all authors----\"\n delete_all_authors\n\n echo \"\"\n echo \"Create author: 'lengle'\"\n author=$(create_author_lengle)\n authorname=$(echo \"$author\" | jq -r .name) \n assert_string_equals \"Madeleine LEngle\" \"$authorname\" \"Author name\"\n\n echo \"\"\n echo \"Create author: 'asimov'\"\n author_asimov=$(create_author_asimov)\n# print_author_info \"$author_asimov\"\n author_asimov_id=$(echo \"$author_asimov\" | jq -r .id)\n authorname=$(echo \"$author_asimov\" | jq -r .name) \n assert_string_equals \"Isaac Asimov\" \"$authorname\" \"Author name\"\n\n # verify author\n echo \"\"\n echo \"Verifying first author\"\n subjects=$(echo \"$author_asimov\" | jq -r '.subjects | join (\"\")')\n assert_equals $? 
0 \"jq failed on checking author subject\"\n assert_string_equals \"\" \"$subjects\" \"asimov subject list (should be empty)\"\n\n echo \"\"\n echo \"Get author by id $author_asimov_id \"\n # get single author\n author_asimov_single=$(get_author_by_id \"$author_asimov_id\")\n print_author_info \"$author_asimov_single\"\n\n echo \"\"\n echo \"Update author: 'asimov'\"\n update_author $author_asimov_id\n\n echo \"Verifying updated author...\"\n author_asimov_single=$(get_author_by_id \"$author_asimov_id\")\n olkey=$(echo \"$author_asimov_single\" | jq -r .olKey)\n assert_string_equals \"OL34221AXX\" \"$olkey\" \"Updated author olkey\"\n\n echo \"\"\n echo \"Get author by name (should be 1)\"\n authors=$(get_author_by_name \"asi\" 0 100 )\n\n echo \"\"\n echo \"Check limit & offset\"\n limit=$(echo \"$authors\" | jq -r .limit)\n total=$(echo \"$authors\" | jq -r .total)\n offset=$(echo \"$authors\" | jq -r .offset)\n\n assert_equals 1 $limit \"limit number authors\"\n assert_equals 1 $total \"total number authors\"\n assert_equals 0 ${offset} \"offset in authors returned\"\n\n authors=$(echo \"$authors\" | jq -r .data)\n numAuthors=$(echo $authors | jq -r '. | length')\n assert_equals 1 $numAuthors \"Number of authors\"\n\n authorName=$(echo \"$authors\" | jq -r .[0].name)\n\n assert_string_equals \"Isaac Asimov!\" \"$authorName\" \"Author name\"\n\n author::clean\n}\n\n\n###############\n#\n# Main test\n#\n###############\nauthor::test_limit_offset() {\n echo \"\"\n echo \"[[ Author Limit/Offset test ]]\"\n\n # num authors to create\n COUNT=40\n\n echo \"Creating $COUNT authors\"\n\n idx=1\n while [ $idx -le $COUNT ]\n do\n idx=$(( $idx + 1 ))\n authorname=\"author_${idx}\"\n result=$(create_author $authorname)\n done\n\n #######\n # Default returns\n # get authors and see how many\n echo \"\"\n echo \"Testing default limit (20)\"\n\n all_authors=$(get_all_authors)\n total=$(echo \"$all_authors\" | jq -r .total)\n offset=$(echo \"$all_authors\" | jq -r .offset)\n limit=$(echo \"$all_authors\" | jq -r .limit)\n\n echo \"Checking limit/offset/total\"\n assert_equals 0 ${offset} \"offset in authors returned\"\n assert_equals $EXPECTED_DEFAULT_LIMIT $limit \"limit number authors\"\n assert_equals $COUNT $total \"total number authors\"\n\n #######\n # new limit\n echo \"\"\n echo \"Testing new limit\"\n all_authors=$(get_all_authors_with_offset_limit 0 500)\n total=$(echo \"$all_authors\" | jq -r .total)\n offset=$(echo \"$all_authors\" | jq -r .offset)\n limit=$(echo \"$all_authors\" | jq -r .limit)\n\n echo \"Checking limit/offset/total\"\n assert_equals 0 ${offset} \"offset in authors returned\"\n assert_equals $COUNT $limit \"limit number authors\"\n assert_equals $COUNT $total \"total number authors\"\n\n\n #######\n # new offset\n echo \"\"\n echo \"Testing new offset\"\n all_authors=$(get_all_authors_with_offset_limit 10 10)\n total=$(echo \"$all_authors\" | jq -r .total)\n offset=$(echo \"$all_authors\" | jq -r .offset)\n limit=$(echo \"$all_authors\" | jq -r .limit)\n\n echo \"Checking limit/offset/total\"\n assert_equals 10 ${offset} \"offset in authors returned\"\n assert_equals 10 $limit \"limit number authors\"\n assert_equals $COUNT $total \"total number authors\"\n\n #######\n # new offset\n echo \"\"\n echo \"Testing 2nd new offset\"\n all_authors=$(get_all_authors_with_offset_limit 13 2)\n total=$(echo \"$all_authors\" | jq -r .total)\n offset=$(echo \"$all_authors\" | jq -r .offset)\n limit=$(echo \"$all_authors\" | jq -r .limit)\n\n echo \"Checking limit/offset/total\"\n 
assert_equals 13 ${offset} \"offset in authors returned\"\n assert_equals 2 $limit \"limit number authors\"\n assert_equals $COUNT $total \"total number authors\"\n\n\n ##########\n # Test with author name in query\n echo \"\"\n echo \"Testing with author name in query\"\n all_authors=$(get_author_by_name \"author_1\" 2 3 )\n\n total=$(echo \"$all_authors\" | jq -r .total)\n offset=$(echo \"$all_authors\" | jq -r .offset)\n limit=$(echo \"$all_authors\" | jq -r .limit)\n\n echo \"Checking limit/offset/total\"\n assert_equals 2 ${offset} \"offset in authors returned\"\n assert_equals 3 $limit \"limit number authors\"\n assert_equals 10 $total \"total number authors\"\n\n\n author::clean\n\n echo \"[[ Done Author Limit/Offset test ]]\"\n}\n\n\n\n###############\n#\n# Test author endpoint\n#\n###############\ntest_author() {\n echo \"\"\n echo \"[ Author test ]\"\n\n author::main_test\n\n author::test_limit_offset\n\n echo \"[ DoneAuthor test ]\"\n}\n"
},
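The shell helpers above append `CODE%{http_code}CODE` to each curl response so the script can split the HTTP status from the body with awk/sed. In Go, the client returns status and body separately, so no marker is needed; a minimal sketch, where the URL and bearer value are placeholders:

```go
package main

import (
	"fmt"
	"io"
	"net/http"
)

// getWithStatus fetches a URL with a bearer token and returns the
// status code and body separately, the Go analogue of the curl +
// CODE-marker pattern used in the test script above.
func getWithStatus(url, bearer string) (int, string, error) {
	req, err := http.NewRequest("GET", url, nil)
	if err != nil {
		return 0, "", err
	}
	req.Header.Set("Authorization", bearer)
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return 0, "", err
	}
	defer resp.Body.Close()
	body, err := io.ReadAll(resp.Body)
	return resp.StatusCode, string(body), err
}

func main() {
	code, body, err := getWithStatus("http://localhost:8080/author?offset=0&limit=20", "Bearer <token>")
	fmt.Println(code, len(body), err)
}
```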
{
"alpha_fraction": 0.5586552023887634,
"alphanum_fraction": 0.5615164637565613,
"avg_line_length": 23.964284896850586,
"blob_id": "198631b1f148777bddb2c56500db36b46e22b6c5",
"content_id": "1af7b41520f9bed3157fc9a29d65ca14901b2c92",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 1398,
"license_type": "no_license",
"max_line_length": 76,
"num_lines": 56,
"path": "/images/gateway/build.sh",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "#!/bin/bash\n\n############\n# Build nginx image\n# \n############\n\n# Our real directory (so this can be called from outside directories)\nour_directory=\"$( cd \"$( dirname \"${BASH_SOURCE[0]}\" )\" && pwd )\"\n\n# Image we build\nproject=$(cat $our_directory/webservice.name | xargs)\nproject_version=$(cat $our_directory/../webservice.version | xargs)\n\nbase_image_name=\"books.${project}\"\nimage_name=\"${base_image_name}:${project_version}\"\n\n# set build time\nBUILD_TIME=$(date +%Y-%m-%dT%H:%M:%S%Z)\nVERSION_TAG=\"latest\"\n\nthen=$(date +%s)\necho \"[[Building Docker image '$image_name']]\"\n\n# build image\ndocker build -t ${image_name} \\\n --build-arg BUILD_TIME=${BUILD_TIME} \\\n --build-arg VERSION=${VERSION_TAG} \\\n \"$our_directory\" \nbuild_result=$?\n\n# check result\nnow=$(date +%s)\nelapsed=$(expr $now - $then)\n\nif [ $build_result -eq 0 ]; then\n echo \"\"\n echo \"[[Built $image_name in $elapsed second(s)]]\"\n\n # tag as latest\n output=$(docker tag ${image_name} ${base_image_name}:latest)\n tag_result=$?\n if [ $tag_result -eq 0 ]; then\n echo \"[[Tagged \\\"${image_name}\\\" as \\\"${base_image_name}:latest\\\"]]\"\n else\n echo \"[[Unable to tag image as latest!!!!]]\"\n fi\n\n\nelse\n echo \"\"\n echo \"!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\"\n echo \"Unable to build Docker image for $image_name\"\n echo \"!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\"\n exit 1\nfi\n"
},
{
"alpha_fraction": 0.7023575901985168,
"alphanum_fraction": 0.7023575901985168,
"avg_line_length": 18.960784912109375,
"blob_id": "a75e10998b190834078d44de3094806881b92f85",
"content_id": "bd3db76eeaa1610133bc05c50276a8e6d229fe10",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 1018,
"license_type": "no_license",
"max_line_length": 58,
"num_lines": 51,
"path": "/images/query/src/github.com/hipposareevil/query/endpoints.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\n// Base for all responses\ntype errorer interface {\n\terror() error\n}\n\n// interface for holding data\ntype dataHolder interface {\n\t// Get the data\n\tgetData() interface{}\n}\n\n////////////////\n// Responses are passed to 'transport.encodeResponse'\n\n////////////////////\n/// Authors\n// response for query author\ntype authorsResponse struct {\n\tData Authors `json:\"all,omitempty\"`\n\tErr error `json:\"err,omitempty\"`\n}\n\n// authorsResponse.error\nfunc (theResponse authorsResponse) error() error {\n\treturn theResponse.Err\n}\n\n// authorsResponse.getData\nfunc (theResponse authorsResponse) getData() interface{} {\n\treturn theResponse.Data\n}\n\n////////////////////\n/// Titles\n// response for query title\ntype titlesResponse struct {\n\tData Titles `json:\"all,omitempty\"`\n\tErr error `json:\"err,omitempty\"`\n}\n\n// titlesResponse.error\nfunc (theResponse titlesResponse) error() error {\n\treturn theResponse.Err\n}\n\n// titlesResponse.getData\nfunc (theResponse titlesResponse) getData() interface{} {\n\treturn theResponse.Data\n}\n"
},
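The `errorer` and `dataHolder` interfaces above let a shared encoder unwrap any response type before JSON encoding. A self-contained sketch of that pattern; `demoResponse` and `encode` are invented for illustration (the repo's own encoder appears in its transport files):

```go
package main

import (
	"encoding/json"
	"fmt"
	"os"
)

type errorer interface{ error() error }
type dataHolder interface{ getData() interface{} }

// demoResponse satisfies both interfaces, like the response structs above.
type demoResponse struct {
	Data []string
	Err  error
}

func (r demoResponse) error() error         { return r.Err }
func (r demoResponse) getData() interface{} { return r.Data }

func encode(response interface{}) {
	// First check the error channel of the response...
	if e, ok := response.(errorer); ok && e.error() != nil {
		fmt.Println("would write an error payload:", e.error())
		return
	}
	// ...then encode only the wrapped data, not the envelope.
	if h, ok := response.(dataHolder); ok {
		json.NewEncoder(os.Stdout).Encode(h.getData())
	}
}

func main() {
	encode(demoResponse{Data: []string{"author one", "author two"}})
}
```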
{
"alpha_fraction": 0.6826554536819458,
"alphanum_fraction": 0.6840153336524963,
"avg_line_length": 22.93195343017578,
"blob_id": "08dcf6fdaa1a3d0e7a00a8bec59d7f86d8ef4e09",
"content_id": "e4c40c6800753ef3dcc724d55b89f85196469a62",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 8089,
"license_type": "no_license",
"max_line_length": 96,
"num_lines": 338,
"path": "/images/author/src/github.com/hipposareevil/author/transport.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\n// Transport module\n//\n// Contains:\n// - endpoint creation\n// - encode responses to client\n// - decode client requests\n// - structures used. e.g. authorRequest, postAuthorRequest, etc\n\nimport (\n\t\"context\"\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"net/http\"\n\t\"strconv\"\n\n\t\"github.com/gorilla/mux\"\n\n\t\"github.com/go-kit/kit/endpoint\"\n)\n\n//////////////////////////////////////////////////////////\n//\n// Create endpoints\n\n// GET /author/\n// Make endpoint for getting authors\nfunc makeGetAuthorsEndpoint(svc AuthorService) endpoint.Endpoint {\n\treturn func(ctx context.Context, request interface{}) (interface{}, error) {\n\t\t// convert request into a authors specific request\n\t\treq := request.(getAllAuthorsRequest)\n\n\t\t// call actual service with data from the req\n\t\tauthors, err := svc.GetAuthors(req.Offset, req.Limit, req.Name)\n\t\treturn authorsResponse{\n\t\t\tData: authors,\n\t\t\tErr: err,\n\t\t}, nil\n\t}\n}\n\n// GET /author/<author_id>\n// Make endpoint for getting single Author\nfunc makeGetAuthorEndpoint(svc AuthorService) endpoint.Endpoint {\n\treturn func(ctx context.Context, request interface{}) (interface{}, error) {\n\t\t// convert request into a authorRequest\n\t\treq := request.(getAuthorRequest)\n\n\t\t// call actual service with data from the req\n\t\tauthor, err := svc.GetAuthor(req.AuthorId)\n\t\treturn authorResponse{\n\t\t\tData: author,\n\t\t\tErr: err,\n\t\t}, nil\n\t}\n}\n\n// DELETE /author/<author_id>\n// Make endpoint for deleting single Author\nfunc makeDeleteAuthorEndpoint(svc AuthorService) endpoint.Endpoint {\n\treturn func(ctx context.Context, request interface{}) (interface{}, error) {\n\t\t// convert request into a authorRequest\n\t\treq := request.(deleteAuthorRequest)\n\n\t\t// call actual service with data from the req\n\t\terr := svc.DeleteAuthor(req.AuthorId)\n\t\treturn deleteAuthorResponse{\n\t\t\tErr: err,\n\t\t}, nil\n\t}\n}\n\n// POST /author/\n// Make endpoint for creating (via post) a author\nfunc makeCreateAuthorEndpoint(svc AuthorService) endpoint.Endpoint {\n\treturn func(ctx context.Context, request interface{}) (interface{}, error) {\n\t\t// convert request into a createAuthorRequest\n\t\treq := request.(createAuthorRequest)\n\n\t\t// call actual service with data from the req\n\t\tnewAuthor, err := svc.CreateAuthor(req.Name,\n\t\t\treq.BirthDate,\n\t\t\treq.OlKey,\n\t\t\treq.GoodReadsUrl,\n\t\t\treq.ImageSmall,\n\t\t\treq.ImageMedium,\n\t\t\treq.ImageLarge,\n\t\t\treq.Subjects)\n\n\t\treturn createAuthorResponse{\n\t\t\tData: newAuthor,\n\t\t\tErr: err,\n\t\t}, nil\n\t}\n}\n\n// PUT /author/<author_id>\n// Make endpoint for updating (via PUT) a author\nfunc makeUpdateAuthorEndpoint(svc AuthorService) endpoint.Endpoint {\n\treturn func(ctx context.Context, request interface{}) (interface{}, error) {\n\t\t// convert request into a updateAuthorRequest\n\t\treq := request.(updateAuthorRequest)\n\n\t\t// call actual service with data from the req (putAuthorRequest)\n\t\terr := svc.UpdateAuthor(req.Id,\n\t\t\treq.Name,\n\t\t\treq.BirthDate,\n\t\t\treq.OlKey,\n\t\t\treq.GoodReadsUrl,\n\t\t\treq.ImageSmall,\n\t\t\treq.ImageMedium,\n\t\t\treq.ImageLarge,\n\t\t\treq.Subjects)\n\n\t\treturn updateAuthorResponse{\n\t\t\tErr: err,\n\t\t}, nil\n\t}\n}\n\n//////////////////////////////////////////////////////////\n//\n// Decode\n\n// Create a getAllAuthorsRequest from the context and http.Request\n// /author/\n//\n// The getAllAuthorsRequest has 3 variables:\n// - Offset Offset into the query\n// - 
Limit Number of values to return\nfunc decodeGetAllAuthorsRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\trealOffset, realLimit := parseOffsetAndLimit(r)\n\n\t// Get name\n\tr.ParseForm()\n\tvalues := r.Form\n\tname := values.Get(\"name\")\n\n\t// Make request for all authors\n\tvar request getAllAuthorsRequest\n\trequest = getAllAuthorsRequest{\n\t\tOffset: realOffset,\n\t\tLimit: realLimit,\n\t\tName: name,\n\t}\n\n\treturn request, nil\n}\n\n// Create getAuthorRequest\n// /author/id\n//\nfunc decodeGetAuthorRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\tauthorId, err := parseAuthorId(r)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Make request for single author\n\tvar request getAuthorRequest\n\trequest = getAuthorRequest{\n\t\tAuthorId: authorId,\n\t}\n\n\treturn request, nil\n}\n\n// Create deleteAuthorRequest\n// DELETE /author/id\n//\n// The (delete) authorRequest has 2 variables:\n// - AuthorId ID of author taken from the path\nfunc decodeDeleteAuthorRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\tauthorId, err := parseAuthorId(r)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Make request to delete author\n\tvar request deleteAuthorRequest\n\trequest = deleteAuthorRequest{\n\t\tAuthorId: authorId,\n\t}\n\n\treturn request, nil\n}\n\n// Create createAuthorRequest\n// POST /author\nfunc decodeCreateAuthorRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\t///////////////////\n\t// Parse body\n\tvar newAuthor createAuthorRequest\n\tif err := json.NewDecoder(r.Body).Decode(&newAuthor); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn newAuthor, nil\n}\n\n// Create updateAuthorRequest\n// PUT /author/id\nfunc decodeUpdateAuthorRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\tauthorId, err := parseAuthorId(r)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t///////////////////\n\t// Parse body\n\tvar updateAuthor updateAuthorRequest\n\tif err := json.NewDecoder(r.Body).Decode(&updateAuthor); err != nil {\n\t\treturn nil, err\n\t}\n\n\tupdateAuthor.Id = authorId\n\n\treturn updateAuthor, nil\n}\n\n// Decode the common parts of a request:\n// * offset\n// * limit\n//\n// Instead of erroring out, it will return defaults\n//\n// Returns the two values in order: offset & limit\nfunc parseOffsetAndLimit(r *http.Request) (int, int) {\n\t///////////////////\n\t// Parse parameters\n\tr.ParseForm()\n\tvalues := r.Form\n\n\t// Get values from the form, where 'offset' & 'limit' are parameters\n\tvar realOffset int\n\tvar realLimit int\n\n\t// Offset, use a default of 0\n\toffset := values.Get(\"offset\")\n\tif offset != \"\" {\n\t\trealOffset, _ = strconv.Atoi(offset)\n\t} else {\n\t\trealOffset = 0\n\t}\n\n\t// Limit, set a default if it doesn't exist\n\tlimit := values.Get(\"limit\")\n\tif limit != \"\" {\n\t\trealLimit, _ = strconv.Atoi(limit)\n\t} else {\n\t\t// default to get 20\n\t\trealLimit = 20\n\t}\n\n\treturn realOffset, realLimit\n}\n\n// Decode the 'author_id' from the request.\n//\n//// Returns the author id\nfunc parseAuthorId(r *http.Request) (int, error) {\n\t// Demux the gorilla parsing\n\tvars := mux.Vars(r)\n\t// 'author_id' is set in the gorilla handling in main.go\n\tid, ok := vars[\"author_id\"]\n\tif !ok {\n\t\treturn 0, ErrBadRouting\n\t}\n\n\tvar authorId int\n\tif id != \"\" {\n\t\tauthorId, _ = strconv.Atoi(id)\n\t}\n\n\treturn authorId, nil\n}\n\n//////////////////////////////////////////////////////////\n//\n// Encode responses to client\n\n// The 
response can/should be of type errorer and thus can be cast to check if there is an error\nfunc encodeResponse(ctx context.Context, w http.ResponseWriter, response interface{}) error {\n\t// Set JSON type\n\tw.Header().Set(\"Content-Type\", \"application/json; charset=utf-8\")\n\n\t// Check error\n\tif e, ok := response.(errorer); ok {\n\t\t// This is a errorer class, now check for error\n\t\tif err := e.error(); err != nil {\n\t\t\tencodeError(ctx, e.error(), w)\n\t\t\treturn nil\n\t\t}\n\t}\n\n\t// cast to dataHolder to get Data, otherwise just encode the resposne\n\tif holder, ok := response.(dataHolder); ok {\n\t\treturn json.NewEncoder(w).Encode(holder.getData())\n\t} else {\n\t\treturn json.NewEncoder(w).Encode(response)\n\t}\n}\n\n// Write the incoming err into the response writer\nfunc encodeError(_ context.Context, err error, w http.ResponseWriter) {\n\tif err == nil {\n\t\tpanic(\"encodeError with nil error\")\n\t}\n\n\tw.Header().Set(\"Content-Type\", \"application/json; charset=utf-8\")\n\n\t// Write actual error code\n\tcode := codeFrom(err)\n\tw.WriteHeader(code)\n\n\tfmt.Println(\"Sending back error '\" + err.Error() + \"'\")\n\n\t// write out the error message\n\tjson.NewEncoder(w).Encode(map[string]interface{}{\n\t\t\"code\": code,\n\t\t\"message\": err.Error(),\n\t})\n}\n\n// Determine the HTTP error code from the incoming error 'err'\nfunc codeFrom(err error) int {\n\tswitch err {\n\tcase ErrNotFound:\n\t\treturn http.StatusNotFound\n\tcase ErrAlreadyExists:\n\t\treturn http.StatusConflict\n\tcase ErrUnauthorized:\n\t\treturn http.StatusUnauthorized\n\tdefault:\n\t\treturn http.StatusInternalServerError\n\t}\n}\n"
},
{
"alpha_fraction": 0.5439212918281555,
"alphanum_fraction": 0.5453267693519592,
"avg_line_length": 24.85454559326172,
"blob_id": "e21d4a9e0a0717b6d6bd698a5de8689c9292cd6f",
"content_id": "99776bcdfd264a453b87e4e4bbd79810426bb97b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 1423,
"license_type": "no_license",
"max_line_length": 103,
"num_lines": 55,
"path": "/buildfiles/build_golang.sh",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "#################################\n#\n# Build file for golang projects\n# \n#################################\n\n###############\n# Cleans a golang project.\n#\n# Run as a container in case directories are mapped as\n# root and local (non docker) user doesn't have permissions.\n# \n###############\ngolang::clean() {\n echo \"[Removing dep vendor, metadata, and pkg directories]\"\n \n docker run -it \\\n -v ${our_directory}:/go \\\n -w /go alpine \\\n sh -c \"rm -rf src/github.com/hipposareevil/vendor src/github.com/hipposareevil/Gopkg.* pkg/\"\n \n echo \"[Clean complete]\"\n}\n\n##############\n# Run 'dep' for dependencies\n##############\ngolang::run_dep() {\n # Use 'dep' (https://github.com/golang/dep) to manage the packages for this project.\n # This is done via the 'hipposareevil/alpine-dep' image\n # All packages are downloaded into the src/github.com/hipposareevil/vendor directory\n echo \"[Running go's 'dep' against source]\"\n docker run -it \\\n -e GOPATH=/go \\\n -v ${our_directory}:/go \\\n -w /go hipposareevil/alpine-dep \\\n init src/github.com/hipposareevil \n if [ $? -ne 0 ]; then\n echo \"** Error running 'dep' for $image_name **\"\n exit 1\n fi\n\n echo \"[Done grabbing dependencies]\"\n echo \"\"\n}\n\n\n###############\n# Builds a golang project\n# \n###############\ngolang::build() {\n golang::clean\n golang::run_dep\n}\n\n"
},
{
"alpha_fraction": 0.6639189720153809,
"alphanum_fraction": 0.6648859977722168,
"avg_line_length": 26.076059341430664,
"blob_id": "a2a99e8e362c2ef03c20b94cbcb1caad7c6466b7",
"content_id": "41dc71292cabffd7665d688413b012e85009cb60",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 21715,
"license_type": "no_license",
"max_line_length": 177,
"num_lines": 802,
"path": "/images/user_book/src/github.com/hipposareevil/user_book/service.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\n// Book service\n\nimport (\n\t\"database/sql\"\n\t_ \"encoding/json\"\n\t\"errors\"\n\t\"fmt\"\n\t\"strings\"\n\t\"time\"\n\n\t_ \"github.com/go-sql-driver/mysql\"\n\t_ \"io/ioutil\"\n\t_ \"net/http\"\n\t_ \"strconv\"\n)\n\n// Service interface exposed to clients\ntype UserBookService interface {\n\t// GetUserBooks:\n\tGetUserBooks(string, int, int, int, int, string, []string) (UserBooks, error)\n\t// GetUserBook:\n\tGetUserBook(string, int, int) (UserBook, error)\n\n\t// DeleteUserBook:\n\tDeleteUserBook(int, int) error\n\n\t// CreateUserBook (see createUserBookRequest for params)\n\tCreateUserBook(string, int, int, bool, []string, string) (UserBook, error)\n\n\t// UpdateUserBook\n\t// Same as CreateUserBook but the first param is the ID of book to update\n\t// first param: bearer\n\tUpdateUserBook(string, int, int, int, *bool, *[]string, *string) (UserBook, error)\n\n\t//////////////////////////////////////////////////\n\n\t// Helper method to get user books by filter\n\tgetUserBooksByFilter(string, int, int, int, int, string, []string) (UserBooks, error)\n\n\t// Helper method to get all user books with no filters\n\tgetAllUserBooks(string, int, int, int) (UserBooks, error)\n\n\t// get tag mappings for userbook\n\tgetTagMappings(int, int) ([]string, error)\n\n\t// update tag mappings for userbook\n\tupdateTagMappings(string, int, int, []string) ([]string, error)\n\n\t// delete tag mappings\n\tdeleteTagMappings(int, int) error\n}\n\n////////////////////////\n// Actual service\n// This takes the following:\n// - mysqlDb DB for MySQL\n// - cache layer\ntype userbookService struct {\n\tmysqlDb *sql.DB\n\tcache CacheLayer\n}\n\n//////////\n// METHODS on userbookService\n\n////////////////\n// Get UserBook\n//\n// returns:\n// userbook\n// error\nfunc (theService userbookService) GetUserBook(bearer string, userId int, userBookId int) (UserBook, error) {\n\tfmt.Println(\"\")\n\tfmt.Println(\"-- GetUserBook --\")\n\n\t////////////////////\n\t// Get data from mysql\n\t// mysql\n\tif err := theService.mysqlDb.Ping(); err != nil {\n\t\ttheService.mysqlDb.Close()\n\t\treturn UserBook{}, errors.New(\"unable to ping mysql\")\n\t}\n\n\t// Make query\n\tvar userBook UserBook\n\n\t// Scan the DB info into 'book' composite variable\n\terr := theService.mysqlDb.\n\t\tQueryRow(\"SELECT \"+\n\t\t\t\"user_book_id, user_id, book_id, rating, date_added, review \"+\n\t\t\t\"FROM userbook \"+\n\t\t\t\"WHERE user_id=? 
AND user_book_id=?\",\n\t\t\tuserId, userBookId).\n\t\tScan(&userBook.UserBookId, &userBook.UserId, &userBook.BookId, &userBook.Rating, &userBook.DateAdded, &userBook.Review)\n\n\tswitch {\n\tcase err == sql.ErrNoRows:\n\t\treturn UserBook{}, ErrNotFound\n\tcase err != nil:\n\t\tfmt.Println(\"Got error from select: \", err)\n\t\treturn UserBook{}, ErrServerError\n\t}\n\n\t//////////////////\n\t// get tag mappings\n\tvar tags []string\n\ttags, err = theService.getTagMappings(userId, userBookId)\n\tif err != nil {\n\t\tfmt.Println(\"Unable to get tags for userbook: \", userBookId, \" :\", err)\n\t\treturn UserBook{}, ErrServerError\n\t}\n\n\t// set tags\n\tuserBook.Tags = tags\n\n\t//////////////////////////\n\t// Get book information\n\terr = getBookById(theService.cache, bearer, userBook.BookId, &userBook)\n\tif err != nil {\n\t\tfmt.Println(\"Error getting Book information for userbook \", userBookId)\n\t\treturn UserBook{}, err\n\t}\n\n\treturn userBook, nil\n}\n\n////////////////\n// Get user books\n//\n// returns:\n// books\n// error\nfunc (theService userbookService) GetUserBooks(bearer string, userId int, offset int, limit int, bookId int, title string, tags []string) (UserBooks, error) {\n\t////////////////////\n\t// Get data from mysql\n\t// mysql\n\tif err := theService.mysqlDb.Ping(); err != nil {\n\t\ttheService.mysqlDb.Close()\n\t\treturn UserBooks{}, errors.New(\"unable to ping mysql\")\n\t}\n\n\tif len(title) > 0 ||\n\t\tlen(tags) > 0 ||\n\t\tbookId > 0 {\n\t\treturn theService.getUserBooksByFilter(bearer, userId, offset, limit, bookId, title, tags)\n\t} else {\n\t\treturn theService.getAllUserBooks(bearer, userId, offset, limit)\n\t}\n}\n\n////////////////////\n// Get all user books without query params besides offset/limit\n//\nfunc (theService userbookService) getAllUserBooks(bearer string, userId int, offset int, limit int) (UserBooks, error) {\n\tfmt.Println(\"\")\n\tfmt.Println(\"-- GetUserBooks (all) --\")\n\n\t// Get total number of rows\n\tvar totalNumberOfRows int\n\t_ = theService.mysqlDb.QueryRow(\"SELECT COUNT(*) FROM userbook\").Scan(&totalNumberOfRows)\n\n\tif limit > totalNumberOfRows {\n\t\tlimit = totalNumberOfRows\n\t}\n\n\t// Make query\n\tresults, err := theService.mysqlDb.\n\t\tQuery(\"SELECT \"+\n\t\t\t\"user_book_id, user_id, book_id, rating, date_added, review \"+\n\t\t\t\"FROM userbook WHERE \"+\n\t\t\t\"user_id=? 
LIMIT ?,?\", userId, offset, limit)\n\n\tif err != nil {\n\t\tfmt.Println(\"Got error from mysql: \" + err.Error())\n\t\treturn UserBooks{}, errors.New(\"unable to create query in mysql\")\n\t}\n\n\t// slice of UserBook entities\n\tdatum := make([]UserBook, 0, 0)\n\n\t// Parse results\n\tfor results.Next() {\n\t\tvar userBook UserBook\n\n\t\t// For each row, scan the result into our userbook composite object:\n\t\terr = results.\n\t\t\tScan(&userBook.UserBookId, &userBook.UserId, &userBook.BookId, &userBook.Rating, &userBook.DateAdded, &userBook.Review)\n\n\t\tif err != nil {\n\t\t\tfmt.Println(\"Got error from mysql when getting all user books: \" + err.Error())\n\t\t\treturn UserBooks{}, errors.New(\"Unable to scan mysql for all user books.\")\n\t\t}\n\n\t\t//////////////////\n\t\t// Get tag mappings\n\t\tvar tags []string\n\t\ttags, err = theService.getTagMappings(userId, userBook.UserBookId)\n\t\tif err != nil {\n\t\t\tfmt.Println(\"Unable to get tags for userbook: \", userBook.UserBookId, \" :\", err)\n\t\t\treturn UserBooks{}, ErrServerError\n\t\t}\n\n\t\t// set tags\n\t\tuserBook.Tags = tags\n\n\t\t//////////////////////////\n\t\t// Get book information\n\t\terr = getBookById(theService.cache, bearer, userBook.BookId, &userBook)\n\t\tif err != nil {\n\t\t\tfmt.Println(\"Error getting Book information for userbook \", userBook.UserBookId)\n\t\t\treturn UserBooks{}, err\n\t\t}\n\n\t\tdatum = append(datum, userBook)\n\t}\n\n\t// Create Books to return\n\treturnValue := UserBooks{\n\t\tOffset: offset,\n\t\tLimit: limit,\n\t\tTotal: totalNumberOfRows,\n\t\tData: datum,\n\t}\n\n\treturn returnValue, nil\n}\n\n///////////////////////\n// Get user books by filter (query param)\n//\nfunc (theService userbookService) getUserBooksByFilter(bearer string, userId int, offset int, limit int,\n\tdesiredBookId int, desiredTitle string, desiredTags []string) (UserBooks, error) {\n\tfmt.Println(\"\")\n\tfmt.Println(\"-- GetUserBooks (by filter) --\")\n\n\t/////////////////////\n\t// Query for each portion of the filter:\n\t// - tags\n\t// - title\n\t// - bookId\n\n\t// full set of userBook IDs to return\n\tfinalUserBookIds := []int{}\n\n\tuserBookIdsInTagBucket := []int{}\n\tuserBookIdsInTitleBucket := []int{}\n\tuserBookIdsInIdBucket := []int{}\n\n\t//////////////////\n\t// #1: by Tag\n\n\t// tags index by the name\n\tvar tagsInDatabase map[string]Tag\n\tallTags := getAllTags(theService.cache, bearer)\n\ttagsInDatabase = convertTagsJsonToArray(allTags)\n\n\t// Loop through all tags and get userbooks for those\n\t// tags. 
We throw out invalid desiredTags along the way\n\tvar tagIds []int\n\tfor _, currentTag := range desiredTags {\n\t\tif tag, ok := tagsInDatabase[currentTag]; ok {\n\t\t\t// tag is in the database\n\t\t\ttagIds = append(tagIds, tag.ID)\n\t\t}\n\t}\n\n\t// Only query for tags if we have some to look for\n\tif len(tagIds) > 0 {\n\t\t// Get all user_book IDs for the tags\n\n\t\tselectString := \"SELECT user_book_id FROM tagmapping WHERE \" +\n\t\t\t\"tag_id IN (\" + convertIntArrayToCsv(tagIds) + \") \" +\n\t\t\t\" AND user_id=?\"\n\n\t\tresults, err := theService.mysqlDb.Query(selectString,\n\t\t\tuserId)\n\n\t\t// Parse results\n\t\tfor results.Next() {\n\t\t\t// scan the id\n\t\t\tvar userBookId int\n\n\t\t\terr = results.Scan(&userBookId)\n\t\t\tif err != nil {\n\t\t\t\treturn UserBooks{}, errors.New(\"Unable to scan userbook (by tag): \" + err.Error())\n\t\t\t}\n\n\t\t\tuserBookIdsInTagBucket = append(userBookIdsInTagBucket, userBookId)\n\t\t}\n\n\t\tfinalUserBookIds = userBookIdsInTagBucket\n\t}\n\n\t////////////////\n\t// #2: by title\n\n\t// Get list of books for the incoming title\n\tdesiredTitle = strings.TrimSpace(desiredTitle)\n\t// list of book ids we want\n\tvar bookIds []int\n\n\tif len(desiredTitle) > 0 {\n\t\tvar booksByTitle Books\n\t\tbooksByTitle, err := getBooksByTitle(bearer, desiredTitle)\n\t\tif err != nil {\n\t\t\treturn UserBooks{}, ErrServerError\n\t\t}\n\n\t\tfor _, currentBook := range booksByTitle.Data {\n\t\t\t// Get user book ID for this bookid\n\t\t\tbookIds = append(bookIds, currentBook.Id)\n\t\t}\n\t}\n\n\t// only look for book ids if we have some to scan\n\tif len(bookIds) > 0 {\n\t\tbookIdsAsString := convertIntArrayToCsv(bookIds)\n\n\t\t// Get all user_book IDs for these book ids\n\t\tselectString := \"SELECT user_book_id FROM userbook WHERE book_id in (\" +\n\t\t\tbookIdsAsString + \") AND user_id=?\"\n\n\t\tresults, err := theService.mysqlDb.\n\t\t\tQuery(selectString, userId)\n\n\t\tif err != nil {\n\t\t\treturn UserBooks{}, ErrServerError\n\t\t}\n\n\t\t// Parse results\n\t\tfor results.Next() {\n\t\t\t// scan the id\n\t\t\tvar userBookId int\n\n\t\t\terr = results.Scan(&userBookId)\n\t\t\tif err != nil {\n\t\t\t\treturn UserBooks{}, errors.New(\"Unable to scan userbook (by title): \" + err.Error())\n\t\t\t}\n\t\t\tuserBookIdsInTitleBucket = append(userBookIdsInTitleBucket, userBookId)\n\t\t}\n\n\t\tfinalUserBookIds = userBookIdsInTitleBucket\n\n\t\t// update final set\n\t\tif len(tagIds) > 0 {\n\t\t\t// if tags were queried for, union them with the final set\n\t\t\tfinalUserBookIds = Union(finalUserBookIds, userBookIdsInTagBucket)\n\t\t}\n\t}\n\n\t///////////////////\n\t// #3: by book_id\n\tif desiredBookId > 0 {\n\t\t// Get all user_book IDs for these book ids\n\t\tselectString := \"SELECT user_book_id FROM userbook WHERE book_id =? 
AND user_id=?\"\n\n\t\tresults, err := theService.mysqlDb.\n\t\t\tQuery(selectString, desiredBookId, userId)\n\n\t\tif err != nil {\n\t\t\tfmt.Println(\"Got error from mysql when getting book ids: \" + err.Error())\n\t\t\treturn UserBooks{}, ErrServerError\n\t\t}\n\n\t\t// Parse results\n\t\tfor results.Next() {\n\t\t\t// scan the id\n\t\t\tvar userBookId int\n\n\t\t\terr = results.Scan(&userBookId)\n\t\t\tif err != nil {\n\t\t\t\treturn UserBooks{}, errors.New(\"Unable to scan userbook (by title): \" + err.Error())\n\t\t\t}\n\n\t\t\tuserBookIdsInIdBucket = append(userBookIdsInIdBucket, userBookId)\n\t\t}\n\n\t\tfinalUserBookIds = userBookIdsInIdBucket\n\n\t\t// update final set\n\t\tif len(tagIds) > 0 {\n\t\t\t// if tags were queried for, union them with the final set\n\t\t\tfinalUserBookIds = Union(finalUserBookIds, userBookIdsInTagBucket)\n\t\t}\n\t\tif len(desiredTitle) > 0 {\n\t\t\t// if title was queried for, union that with the final set\n\t\t\tfinalUserBookIds = Union(finalUserBookIds, userBookIdsInTitleBucket)\n\t\t}\n\t}\n\n\t////////////////////////\n\t// Now for each user book id, get the real user books\n\n\t// slice of UserBook entities\n\tdatum := make([]UserBook, 0, len(finalUserBookIds))\n\n\t// Only query if there are some IDs to search for\n\tif len(finalUserBookIds) > 0 {\n\t\tselectString := \"SELECT \" +\n\t\t\t\"user_book_id, user_id, book_id, rating, date_added, review \" +\n\t\t\t\"FROM userbook WHERE \" +\n\t\t\t\"user_book_id in (\" + convertIntArrayToCsv(finalUserBookIds) + \")\"\n\n\t\t// Make query\n\t\tresults, err := theService.mysqlDb.Query(selectString)\n\n\t\tif err != nil {\n\t\t\tfmt.Println(\"Got error from mysql when getting all user book ids : \" + err.Error())\n\t\t\treturn UserBooks{}, errors.New(\"unable to create query in mysql\")\n\t\t}\n\n\t\t// Parse results\n\t\tfor results.Next() {\n\t\t\tvar userBook UserBook\n\n\t\t\t// For each row, scan the result into our userbook composite object:\n\t\t\terr = results.\n\t\t\t\tScan(&userBook.UserBookId, &userBook.UserId, &userBook.BookId, &userBook.Rating, &userBook.DateAdded, &userBook.Review)\n\n\t\t\tif err != nil {\n\t\t\t\tfmt.Println(\"Got error from mysql when getting all user books: \" + err.Error())\n\t\t\t\treturn UserBooks{}, errors.New(\"Unable to scan mysql for all user books.\")\n\t\t\t}\n\n\t\t\t//////////////////\n\t\t\t// Get tag mappings\n\t\t\tvar tags []string\n\t\t\ttags, err = theService.getTagMappings(userId, userBook.UserBookId)\n\t\t\tif err != nil {\n\t\t\t\tfmt.Println(\"Unable to get tags for userbook: \", userBook.UserBookId, \" :\", err)\n\t\t\t\treturn UserBooks{}, ErrServerError\n\t\t\t}\n\n\t\t\t// set tags\n\t\t\tuserBook.Tags = tags\n\n\t\t\t//////////////////////////\n\t\t\t// Get book information\n\t\t\terr = getBookById(theService.cache, bearer, userBook.BookId, &userBook)\n\t\t\tif err != nil {\n\t\t\t\tfmt.Println(\"Error getting Book information for userbook \", userBook.UserBookId)\n\t\t\t\treturn UserBooks{}, err\n\t\t\t}\n\n\t\t\tdatum = append(datum, userBook)\n\t\t}\n\t}\n\n\t//////////////\n\t// Update return data\n\n\t// Get the total number of rows\n\trealNumberRows := len(datum)\n\trealLimit := limit\n\n\t// fix offset\n\tif (offset > realNumberRows) || (offset < 0) {\n\t\toffset = 0\n\t}\n\n\t// fix limit\n\tif realLimit < 0 {\n\t\trealLimit = len(datum)\n\t}\n\n\tif realLimit > realNumberRows {\n\t\trealLimit = realNumberRows\n\t}\n\n\t// determine slice of datum to use\n\twhereToEnd := offset + realLimit\n\tif whereToEnd > realNumberRows 
{\n\t\twhereToEnd = realNumberRows\n\t}\n\n\tdatum = datum[offset:whereToEnd]\n\n\t// Create Books to return\n\treturnValue := UserBooks{\n\t\tOffset: offset,\n\t\tLimit: realLimit,\n\t\tTotal: realNumberRows,\n\t\tData: datum,\n\t}\n\n\treturn returnValue, nil\n}\n\n////////////////\n// Delete user book\n//\n// returns:\n// error\nfunc (theService userbookService) DeleteUserBook(userId int, userBookId int) error {\n\tif err := theService.mysqlDb.Ping(); err != nil {\n\t\ttheService.mysqlDb.Close()\n\t\treturn errors.New(\"unable to ping mysql when deleting userbook\")\n\t}\n\n\t// Make DELETE query\n\t_, err := theService.mysqlDb.Exec(\"DELETE FROM userbook WHERE user_id = ? AND user_book_id = ?\",\n\t\tuserId, userBookId)\n\n\treturn err\n}\n\n////////////////\n// CreateUserBook\n//\n// returns:\n// userbook\n// error\nfunc (theService userbookService) CreateUserBook(bearer string, userId int, bookId int, rating bool, incomingTags []string, review string) (UserBook, error) {\n\n\tfmt.Println(\"\")\n\tfmt.Println(\"-- CreateUserBook --\")\n\n\t////////////////////\n\t// verify mysql\n\t// mysql\n\tif err := theService.mysqlDb.Ping(); err != nil {\n\t\ttheService.mysqlDb.Close()\n\t\treturn UserBook{}, errors.New(\"unable to ping mysql\")\n\t}\n\n\t// Make prepared insert statement\n\tstmt, err := theService.mysqlDb.\n\t\tPrepare(\"INSERT INTO userbook SET \" +\n\t\t\t\"user_id=?, book_id=?, \" +\n\t\t\t\"rating=?, date_added=?, review=? \")\n\tdefer stmt.Close()\n\tif err != nil {\n\t\tfmt.Println(\"Error preparing DB: \", err)\n\t\treturn UserBook{}, errors.New(\"Unable to prepare a DB statement: \")\n\t}\n\n\t// now for 'date_added'\n\tnow := time.Now()\n\n\t///////////////////////\n\t// Execute Insert\n\tres, err := stmt.Exec(userId, bookId, rating, now, review)\n\n\tif err != nil {\n\t\tfmt.Println(\"Error inserting into userbook DB: \", err)\n\t\tif strings.Contains(err.Error(), \"Duplicate entry \") {\n\t\t\treturn UserBook{}, ErrAlreadyExists\n\t\t} else {\n\t\t\treturn UserBook{}, errors.New(\"Unable to run INSERT against userbook DB: \")\n\t\t}\n\t}\n\n\t// get the id\n\tuserBookId, _ := res.LastInsertId()\n\n\t///////////////////////\n\t// Update the tag mappings\n\ttagNamesAdded, err := theService.updateTagMappings(bearer, userId, int(userBookId), incomingTags)\n\tif err != nil {\n\t\tfmt.Println(\"Error updating tag mappings for new userbook. Bookid: \", bookId)\n\t\treturn UserBook{}, err\n\t}\n\n\tvar userBookToReturn UserBook\n\tuserBookToReturn = UserBook{\n\t\tBookId: bookId,\n\t\tRating: rating,\n\t\tTags: tagNamesAdded,\n\t\tUserId: userId,\n\t\tUserBookId: int(userBookId),\n\t\tReview: review,\n\t\tDateAdded: now,\n\t}\n\n\t//////////////////////////\n\t// Get book information\n\terr = getBookById(theService.cache, bearer, bookId, &userBookToReturn)\n\tif err != nil {\n\t\tfmt.Println(\"Error getting Book information for new userbook. 
book id: \", bookId)\n\t\treturn UserBook{}, err\n\t}\n\n\treturn userBookToReturn, nil\n}\n\n////////////////\n// UpdateUserBook\n//\n// returns:\n// error\nfunc (theService userbookService) UpdateUserBook(bearer string, userId int, userBookId int, bookId int, rating *bool, incomingTags *[]string, review *string) (UserBook, error) {\n\tfmt.Println(\"\")\n\tfmt.Println(\"-- UpdateUserBook --\")\n\n\t////////////////////\n\t// Get data from mysql\n\t// mysql\n\tif err := theService.mysqlDb.Ping(); err != nil {\n\t\ttheService.mysqlDb.Close()\n\t\treturn UserBook{}, errors.New(\"unable to ping mysql\")\n\t}\n\n\t// Make query\n\tstmt, err := theService.mysqlDb.\n\t\tPrepare(\"UPDATE userbook SET \" +\n\t\t\t\"book_id=COALESCE(NULLIF(?,''),book_id) \" +\n\t\t\t\"WHERE user_id=? AND user_book_id=?\")\n\tdefer stmt.Close()\n\tif err != nil {\n\t\tfmt.Println(\"Error preparing DB when updating userbook: \", err)\n\t\treturn UserBook{}, errors.New(\"Unable to prepare a DB statement when updating userbook: \")\n\t}\n\n\t_, err = stmt.Exec(bookId, userId, userBookId)\n\n\tif err != nil {\n\t\tfmt.Println(\"Error updating DB for userbook: \", err)\n\t\treturn UserBook{}, errors.New(\"Unable to run update against DB for userbook: \")\n\t}\n\n\t// Update rating\n\tif rating != nil {\n\t\tstmt, err := theService.mysqlDb.\n\t\t\tPrepare(\"UPDATE userbook SET \" +\n\t\t\t\t\"rating=? \" +\n\t\t\t\t\"WHERE user_id=? AND user_book_id=?\")\n\t\tdefer stmt.Close()\n\t\tif err != nil {\n\t\t\tfmt.Println(\"Error preparing DB when updating userbook: \", err)\n\t\t\treturn UserBook{}, errors.New(\"Unable to prepare a DB statement when updating userbook: \")\n\t\t}\n\n\t\t_, err = stmt.Exec(*rating, userId, userBookId)\n\n\t\tif err != nil {\n\t\t\tfmt.Println(\"Error updating DB.rating for userbook: \", err)\n\t\t\treturn UserBook{}, errors.New(\"Unable to run update against DB for userbook: \")\n\t\t}\n\t}\n\n\t// Update review\n\tif review != nil {\n\t\tstmt, err := theService.mysqlDb.\n\t\t\tPrepare(\"UPDATE userbook SET \" +\n\t\t\t\t\"review=? \" +\n\t\t\t\t\"WHERE user_id=? AND user_book_id=?\")\n\t\tdefer stmt.Close()\n\t\tif err != nil {\n\t\t\tfmt.Println(\"Error preparing DB when updating userbook: \", err)\n\t\t\treturn UserBook{}, errors.New(\"Unable to prepare a DB statement when updating userbook: \")\n\t\t}\n\n\t\t_, err = stmt.Exec(*review, userId, userBookId)\n\n\t\tif err != nil {\n\t\t\tfmt.Println(\"Error updating DB.review for userbook: \", err)\n\t\t\treturn UserBook{}, errors.New(\"Unable to run update against DB for userbook: \")\n\t\t}\n\n\t}\n\n\t//////////////////////\n\t// Clear old tag mappings and add new ones\n\n\tif incomingTags != nil {\n\t\terr = theService.deleteTagMappings(userId, userBookId)\n\t\tif err != nil {\n\t\t\tfmt.Println(\"Error updating tag mappings for updated userbook. UserBookId: \", userBookId)\n\t\t\treturn UserBook{}, err\n\t\t}\n\n\t\t_, err = theService.updateTagMappings(bearer, userId, int(userBookId), *incomingTags)\n\t\tif err != nil {\n\t\t\tfmt.Println(\"Error updating tag mappings for updated userbook . 
userBookid: \", userBookId)\n\t\t\treturn UserBook{}, err\n\t\t}\n\t}\n\n\t///////////////\n\t// Get full userbook\n\tfmt.Println(\"Get updated user book for user:\", userId, \" and book id:\", userBookId)\n\tuserBookToReturn, err := theService.GetUserBook(bearer, userId, userBookId)\n\n\treturn userBookToReturn, nil\n}\n\n/////////////////\n// Get tag names for a userbook\n//\n// returns:\n// array of tag names for the incoming userbook\nfunc (theService userbookService) getTagMappings(userId int, userBookId int) ([]string, error) {\n\tif err := theService.mysqlDb.Ping(); err != nil {\n\t\ttheService.mysqlDb.Close()\n\t\treturn nil, errors.New(\"unable to ping mysql\")\n\t}\n\n\tvar tagsToReturn []string\n\ttagsToReturn = make([]string, 0)\n\n\t// make query\n\tresults, err := theService.mysqlDb.\n\t\tQuery(\"SELECT T.name \"+\n\t\t\t\"FROM tag AS T, tagmapping AS M \"+\n\t\t\t\"WHERE M.user_id=? AND M.user_book_id=? AND \"+\n\t\t\t\"M.tag_id = T.tag_id\",\n\t\t\tuserId, userBookId)\n\n\t// Parse results\n\tfor results.Next() {\n\t\tvar currentTitle string\n\n\t\t// For each row, scan the result into our book composite object:\n\t\terr = results.Scan(¤tTitle)\n\n\t\tif err != nil {\n\t\t\tfmt.Println(\"Got error from mysql when getting tags for userbook: \" + err.Error())\n\t\t\treturn tagsToReturn, errors.New(\"Unable to scan mysql for all user Book tags.\")\n\t\t}\n\n\t\ttagsToReturn = append(tagsToReturn, currentTitle)\n\t}\n\n\treturn tagsToReturn, nil\n}\n\n/////////////\n// Update the tag mappings\n//\n// returns:\n// array of tag names that were added\nfunc (theService userbookService) updateTagMappings(bearer string, userId int, userBookId int, incomingTags []string) ([]string, error) {\n\t// all tags in database, queried from /tag endpoint\n\tallTags := getAllTags(theService.cache, bearer)\n\ttagsInDatabase := convertTagsJsonToArray(allTags)\n\t// map of tags to add for this user book mapping\n\ttagsToAddToMapping := make(map[string]Tag)\n\n\t// loop through names of 'tags to add' and put them\n\t// into the tagsToAddMapping.\n\tfor _, tagToAdd := range incomingTags {\n\t\t// See if tagToAdd is a valid tag, and if so,\n\t\t// add a mapping with that id\n\t\tif tag, ok := tagsInDatabase[tagToAdd]; ok {\n\t\t\t// tag is in DB\n\t\t\ttagsToAddToMapping[tag.Name] = tag\n\t\t} else {\n\t\t\tfmt.Println(\"Not adding tag \", tagToAdd, \" as it doesn't exist in DB\")\n\t\t}\n\t}\n\n\t// keep list of tag names that were actually added\n\ttagNamesAdded := []string{}\n\n\t// Loop through tags to add\n\tfor tagName, currentTag := range tagsToAddToMapping {\n\t\ttagId := currentTag.ID\n\n\t\tstmt, err := theService.mysqlDb.\n\t\t\tPrepare(\"INSERT INTO tagmapping SET \" +\n\t\t\t\t\"user_book_id=?, user_id=?, tag_id=?\")\n\t\tdefer stmt.Close()\n\t\tif err != nil {\n\t\t\tfmt.Println(\"Error preparing DB for adding tag mappings: \", err)\n\t\t\treturn nil, errors.New(\"Unable to prepare a DB statement: \")\n\t\t}\n\n\t\t// Execute statement\n\t\t_, err = stmt.Exec(userBookId, userId, tagId)\n\n\t\tif err != nil {\n\t\t\tfmt.Println(\"Error inserting into tagmapping DB: \", err)\n\t\t\tif strings.Contains(err.Error(), \"Duplicate entry \") {\n\t\t\t\treturn nil, ErrAlreadyExists\n\t\t\t} else {\n\t\t\t\treturn nil, errors.New(\"Unable to run INSERT against tagmapping DB\")\n\t\t\t}\n\t\t}\n\n\t\tfmt.Println(\"Added tagmapping for tag: \", tagName, \":\", tagId)\n\t\ttagNamesAdded = append(tagNamesAdded, tagName)\n\t}\n\n\treturn tagNamesAdded, nil\n}\n\n/////////////\n// Delete the tag 
mappings for a user book\n//\nfunc (theService userbookService) deleteTagMappings(userId int, userBookId int) error {\n\tif err := theService.mysqlDb.Ping(); err != nil {\n\t\ttheService.mysqlDb.Close()\n\t\treturn errors.New(\"unable to ping mysql\")\n\t}\n\n\t// Make DELETE query\n\t_, err := theService.mysqlDb.Exec(\"DELETE FROM tagmapping WHERE user_id=? and user_book_id=?\", userId, userBookId)\n\n\treturn err\n}\n"
},
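The `UPDATE userbook SET book_id=COALESCE(NULLIF(?,''),book_id)` statement in the service above is the idiom that makes the update partial: binding an empty value leaves the stored column untouched. A minimal, self-contained sketch of the same pattern, assuming a placeholder `demo` table and DSN rather than the real schema:

```go
package main

import (
	"database/sql"
	"fmt"

	_ "github.com/go-sql-driver/mysql"
)

// updateTitle changes demo.title only when newTitle is non-empty.
// An empty bind value makes NULLIF(?, '') yield NULL, so COALESCE
// falls back to the existing column value and the row is unchanged.
func updateTitle(db *sql.DB, id int, newTitle string) error {
	stmt, err := db.Prepare(
		"UPDATE demo SET title=COALESCE(NULLIF(?,''),title) WHERE id=?")
	if err != nil {
		return err
	}
	defer stmt.Close()

	_, err = stmt.Exec(newTitle, id)
	return err
}

func main() {
	// Placeholder DSN; the real services build their own connection.
	db, err := sql.Open("mysql", "user:pass@tcp(localhost:3306)/testdb")
	if err != nil {
		panic(err)
	}
	defer db.Close()

	// Passing "" leaves demo.title exactly as it was.
	if err := updateTitle(db, 1, ""); err != nil {
		fmt.Println("update failed:", err)
	}
}
```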
{
"alpha_fraction": 0.7479734420776367,
"alphanum_fraction": 0.7568165063858032,
"avg_line_length": 40.06060791015625,
"blob_id": "aa8c6389a6b2dd2603f5e152d2513e16d7389e0f",
"content_id": "b0b7a775ddfdb4bb6f1802aa6dbab0754ea102ef",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1357,
"license_type": "no_license",
"max_line_length": 324,
"num_lines": 33,
"path": "/images.java/tag/README.md",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "# Tag Microservice\n\n## Introduction\n\n**/tag** is a microservice for creating, listing, updating and deleting Tags from the database. A tag can have any name and are visable among all users. The *delete* and *put* operations are only available to the *admin* user.\n\n\n## Supported calls\nThe list of supported calls and their documentation are available via the swagger endpoint. This runs on localhost:8080/swagger/ when the application is up.\n\n## Fields for a Tag\nA Tag entry has the following fields:\n\nField | Purpose\n--- | ---\nid | Unique ID of the Tag. This is used by the *user books* to map tags to a users individual bok.\nname | Name of the tag. Examples: e-book, sci-fi, mystery, to-read.\n\n\n## Authorization\nIt is necessary to authorize all REST calls to this endpoint. This is done by obtaining an authorization token from the */authorize* endpoint and adding it to the HTTP headees with the key *AUTHORIZATION*. See [/authorize](https://github.com/hipposareevil/books/blob/master/images/authorize/README.md) for more information.\n\n\n## Dropwizard Application\nThe application listens on port 8080.\n\n## Docker \nThe Docker container will expose port 8080 to other containers on the *booknet* Docker network.\n\n## Libraries used\n\n* [dropwizard](http://www.dropwizard.io/) for microservice framework.\n* [maven](https://maven.apache.org/) for building.\n\n\n"
},
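As this README notes, every call to /tag must carry the token from */authorize* in the request headers. A small sketch of listing tags that way, assuming the default host/port and a placeholder token; the response is printed raw since the reply schema isn't reproduced here:

```go
package main

import (
	"fmt"
	"io/ioutil"
	"net/http"
)

func main() {
	// Placeholder token; a real one comes from the /authorize endpoint.
	bearer := "Bearer qwerty-1234-asdf-9876"

	req, err := http.NewRequest("GET", "http://localhost:8080/tag", nil)
	if err != nil {
		panic(err)
	}
	// The authorization token goes into the HTTP headers.
	req.Header.Set("Authorization", bearer)

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := ioutil.ReadAll(resp.Body)
	fmt.Println(resp.Status, string(body))
}
```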
{
"alpha_fraction": 0.7548449635505676,
"alphanum_fraction": 0.7625969052314758,
"avg_line_length": 38.69230651855469,
"blob_id": "52a1ddbac4fc2aaa67858f317bdca31da136f520",
"content_id": "9e55a3dcfc986b58f84306ae29ea72db786df203",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1032,
"license_type": "no_license",
"max_line_length": 161,
"num_lines": 26,
"path": "/images.java/query/README.md",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "# Query Microservice\n\n## Introduction\n\n**/query** is a microservice for looking up author names and book titles. This endpoint is not authenticated and thus doesn't need an Authorization HTTP header.\n\nThis uses the [openlibrary api](https://openlibrary.org/developers/api) to query for authors and title information.\n\n## Spring Boot Application\nThe application listens on port 8080.\n\n## Docker \nThe Docker container will expose port 8080 to other containers on the *booknet* Docker network.\n\n## Libraries USED\n\n* [spring boot](https://projects.spring.io/spring-boot/) for REST framework.\n* [gradle](https://gradle.org) for building.\n* [spring fox](https://springfox.github.io/springfox/docs/current/) for Swagger documentation.\n\n### Old google querying\nOnce you've obtained an API key, set it in a *.env* file next to the root *docker-compose.yml* with the format:\n```\nGOOGLE_API_KEY=A...yourkeyhere\n```\nThis will set an environment variable *googleapikey* via *docker-compose* that then is injected by Spring into the controller.\n"
},
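Because /query skips authentication, a client can call it with a plain GET and no Authorization header. A sketch of querying for an author; the `author` query parameter name is taken from how the frontend in this repo calls the endpoint:

```go
package main

import (
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
)

func main() {
	// Build /query/author?author=... ; no Authorization header needed.
	params := url.Values{}
	params.Set("author", "Isaac Asimov")

	resp, err := http.Get("http://localhost:8080/query/author?" + params.Encode())
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := ioutil.ReadAll(resp.Body)
	fmt.Println(resp.Status, string(body))
}
```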
{
"alpha_fraction": 0.6174189448356628,
"alphanum_fraction": 0.6206005215644836,
"avg_line_length": 27.573863983154297,
"blob_id": "379b17466f3d1a2a38fab44c04f523b6ed706a04",
"content_id": "1cc2422ff759e2e833e228a4abbfa8cd863854d3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 5029,
"license_type": "no_license",
"max_line_length": 113,
"num_lines": 176,
"path": "/images/frontend/content/mybooks/src/auth/index.js",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "/**\n * Authentication related variables and functions.\n *\n * Used to obtain a Bearer auth token from webservice.\n *\n * This stores the following keys in localStorage:\n * - 'id_token' -> The Bearer token\n * - 'user_id' -> ID of user in the database\n * - 'user_name' -> Name of user\n * - 'user_grup' -> Group of user, e.g. admin\n */\n\n// URL of API\nconst HOSTNAME = location.hostname\nconst API_URL = 'http://' + HOSTNAME + ':8080/'\n// URL of token generation\nconst LOGIN_URL = API_URL + 'authorize/token'\n// URL of validate token\nconst VALIDATE_URL = API_URL + 'authorize/validate'\n\nexport default {\n\n /**\n * User's ID, group, name, and authenticated flag\n *\n */\n user: {\n authenticated: false,\n id: '',\n name: '',\n group: ''\n },\n /**\n * Check if this user is authenticated.\n * Optionally takes 'groupToTest' to validate the group of the user.\n */\n isAuthenticated (groupToTest) {\n if (groupToTest) {\n // Check the incoming group against the users' group\n if (groupToTest === this.user.group) {\n return true\n } else {\n // Doesn't match group, return false\n return false\n }\n } else {\n // no group to check, reply with authenticated flag\n return this.user.authenticated\n }\n },\n /**\n * Validate the locally stored tokens against the server.\n * This should be used on startup of the application as the local creds\n * could be stored, but the server version could be revoked\n */\n validateTokens (context) {\n const authString = this.getAuthHeader()\n let self = this\n context.$axios.get(VALIDATE_URL, { headers: { Authorization: authString } })\n .then((response) => {\n console.log('User is logged in.')\n })\n .catch(function () {\n console.log('Auth.validatetokens: Unable to validate user authtoken. Logging our selves out internally.')\n self.logout(context)\n })\n },\n /**\n * Log into the application.\n *\n * params:\n * context: Calling object, used to obtain the $axios and $router variables.\n * creds: Credentials object, must container 'name' and 'password'\n * redirect: Location to redirect to after login. 
Uses the $router.\n */\n login (context, creds, redirect) {\n context.$axios.post(LOGIN_URL, {\n name: creds.name,\n password: creds.password\n })\n .then((response) => {\n // Set local storage for later\n localStorage.setItem('id_token', response.data.token)\n localStorage.setItem('user_id', response.data.userId)\n localStorage.setItem('user_name', creds.name)\n localStorage.setItem('user_group', response.data.groupName)\n\n // Save user information in our object\n this.user.authenticated = true\n this.user.id = response.data.userId\n this.user.name = creds.name\n this.user.group = response.data.groupName\n\n console.log('Auth.login: finished processing login information')\n Event.$emit('loggedIn')\n\n // Redirect to some page/tab\n if (redirect) {\n console.log('Auth.login: route to location: ' + redirect)\n context.$router.push(redirect)\n }\n })\n // Process errors\n .catch(function (error) {\n if (error.response) {\n // got a response from server\n\n console.log('login: ' + error.response.status)\n\n if (error.response.status === 500) {\n // Send message to Event\n context.emitMessage('got500')\n }\n } else if (error.request) {\n console.log('no response, sending message')\n // no response from server\n context.emitMessage('got500', 'Unable to contact the server (500).')\n } else {\n console.log('Auth.login: unknown error - ' + error)\n }\n })\n },\n /**\n * Zero out the local storage and 'user'\n */\n zeroOut () {\n // zero out local storage\n localStorage.removeItem('id_token')\n localStorage.removeItem('user_id')\n localStorage.removeItem('user_name')\n localStorage.removeItem('user_group')\n\n // zero out the user\n this.user.authenticated = false\n this.user.group = ''\n this.user.id = ''\n this.user.name = ''\n },\n /**\n * Logout\n *\n * Remove the local storage and set authenticated to false\n */\n logout (context) {\n this.zeroOut()\n\n // clear out the vue-x storage\n // context.$store.commit('clearEverything')\n Event.$emit('clearEverything')\n Event.$emit('loggedOut')\n\n // Go back to main page\n context.$router.push('/')\n },\n /**\n * Check local storage for the user information.\n * If it exists, the token and user id will be set.\n */\n checkAuth () {\n var jwt = localStorage.getItem('id_token')\n if (jwt) {\n this.user.authenticated = true\n this.user.id = localStorage.getItem('user_id')\n this.user.name = localStorage.getItem('user_name')\n this.user.group = localStorage.getItem('user_group')\n } else {\n this.zeroOut()\n }\n },\n /**\n * Return the header used in authentication\n */\n getAuthHeader () {\n return localStorage.getItem('id_token')\n }\n}\n"
},
{
"alpha_fraction": 0.7020407915115356,
"alphanum_fraction": 0.7020407915115356,
"avg_line_length": 20.77777862548828,
"blob_id": "5977e9c9beac4fae21d45d2093c7592ba753cc08",
"content_id": "250dd88805f1fa921d96f712f79577c25eb6f393",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 1960,
"license_type": "no_license",
"max_line_length": 65,
"num_lines": 90,
"path": "/images/user_book/src/github.com/hipposareevil/user_book/endpoints.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\n// Base for all responses\ntype errorer interface {\n\terror() error\n}\n\n// interface for holding data\ntype dataHolder interface {\n\t// Get the data\n\tgetData() interface{}\n}\n\n////////////////\n// Responses are passed to 'transport.encodeResponse'\n\n////////////////////\n/// UserBooks (all)\n// response for userBooks (vs. single userBook)\ntype userBooksResponse struct {\n\tData UserBooks `json:\"all,omitempty\"`\n\tErr error `json:\"err,omitempty\"`\n}\n\nfunc (theResponse userBooksResponse) error() error {\n\treturn theResponse.Err\n}\n\nfunc (theResponse userBooksResponse) getData() interface{} {\n\treturn theResponse.Data\n}\n\n////////////////////\n/// USERBOOK (single)\n// response for userBook (single)\ntype userBookResponse struct {\n\tData UserBook `json:\"all,omitempty\"`\n\tErr error `json:\"err,omitempty\"`\n}\n\nfunc (theResponse userBookResponse) error() error {\n\treturn theResponse.Err\n}\n\nfunc (theResponse userBookResponse) getData() interface{} {\n\treturn theResponse.Data\n}\n\n////////////////////\n/// DELETE USERBOOK (single)\n// response for userBook (single)\ntype deleteUserBookResponse struct {\n\tErr error `json:\"err,omitempty\"`\n}\n\nfunc (theResponse deleteUserBookResponse) error() error {\n\treturn theResponse.Err\n}\n\n////////////////////\n/// Create USERBOOK\n// response for create userBook\ntype createUserBookResponse struct {\n\tData UserBook `json:\"all,omitempty\"`\n\tErr error `json:\"err,omitempty\"`\n}\n\nfunc (theResponse createUserBookResponse) error() error {\n\treturn theResponse.Err\n}\n\nfunc (theResponse createUserBookResponse) getData() interface{} {\n\treturn theResponse.Data\n}\n\n////////////////////\n/// Update USERBOOK\n// response for update userBook\ntype updateUserBookResponse struct {\n\tErr error `json:\"err,omitempty\"`\n\tData UserBook `json:\"all,omitempty\"`\n}\n\nfunc (theResponse updateUserBookResponse) error() error {\n\treturn theResponse.Err\n}\n\nfunc (theResponse updateUserBookResponse) getData() interface{} {\n\treturn theResponse.Data\n}\n"
},
{
"alpha_fraction": 0.5166576504707336,
"alphanum_fraction": 0.5253391265869141,
"avg_line_length": 21.63882064819336,
"blob_id": "ff7c681c2eb37ddece657eb48aa8872bc59d82e7",
"content_id": "d079b1a7a6f21b0eef785f9024fccbda4b98ad22",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 9215,
"license_type": "no_license",
"max_line_length": 75,
"num_lines": 407,
"path": "/test/user.sh",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "\n######################\n#\n# user related calls\n#\n#\n######################\n\n\n##########\n# get user by id\n#\n##########\nget_user_by_id() {\n user_id=\"$1\"\n\n result=$(curl -s -w 'CODE%{http_code}CODE' \\\n -X GET ${ROOT_URL}/user/${user_id} \\\n -H \"authorization: $BEARER\" \\\n -H 'cache-control: no-cache' \\\n -H 'content-type: application/json')\n if [ $? -ne 0 ]; then\n error \"Error making GET to /user for user '$userName'\"\n fi\n\n # get code\n code=$(echo \"$result\" | awk -FCODE '{print $2}' | xargs)\n if [[ \"$code\" -ne \"200\" ]]; then\n error \"Error getting user '$userName'. code: $code: $error\"\n fi\n\n # strip code\n result=$(echo \"$result\" | sed \"s/CODE${code}CODE//g\")\n echo \"$result\"\n}\n\n\n##########\n# Get all users\n#\n##########\nget_all_users() {\n result=$(curl -s -w 'CODE%{http_code}CODE' \\\n -X GET \"${ROOT_URL}/user\" \\\n -H \"authorization: $BEARER\" \\\n -H 'cache-control: no-cache' \\\n -H 'content-type: application/json')\n if [ $? -ne 0 ]; then\n error \"Error making GET to /user for all users\"\n fi\n\n # get code\n code=$(echo \"$result\" | awk -FCODE '{print $2}' | xargs)\n if [[ \"$code\" -ne \"200\" ]]; then\n error \"Error getting all users: $code: $error. http_code: $result.\"\n fi\n\n # strip code\n result=$(echo \"$result\" | sed \"s/CODE${code}CODE//g\")\n echo \"$result\"\n}\n\n\n###############\n# Get all users w/ offset & limit\n# \n#\n###############\nget_all_users_with_offset_limit() {\n offset=$1\n limit=$2\n\n result=$(curl -s -w 'CODE%{http_code}CODE' \\\n -X GET \"${ROOT_URL}/user?offset=${offset}&limit=${limit}\" \\\n -H \"authorization: $BEARER\" \\\n -H 'cache-control: no-cache' \\\n -H 'content-type: application/json')\n if [ $? -ne 0 ]; then\n error \"Error making GET to /user for all users\"\n fi\n\n # get code\n code=$(echo \"$result\" | awk -FCODE '{print $2}' | xargs)\n if [[ \"$code\" -ne \"200\" ]]; then\n error \"Error getting all users: $code: $error. http_code: $result.\"\n fi\n\n # strip code\n result=$(echo \"$result\" | sed \"s/CODE${code}CODE//g\")\n echo \"$result\"\n}\n \n\n\n\n##########\n# delete user\n#\n# params:\n# user id\n##########\ndelete_user() {\n user_id=\"$1\"\n\n if [ $user_id -eq 1 ]; then\n logit \"Not deleting user 1 as that's the admin user.\"\n return\n fi\n\n result=$(curl -s -w 'CODE%{http_code}CODE' \\\n -X DELETE ${ROOT_URL}/user/${user_id} \\\n -H \"authorization: $BEARER\" \\\n -H 'cache-control: no-cache' \\\n -H 'content-type: application/json')\n if [ $? -ne 0 ]; then\n error \"Error making DELETE to /user for user $user_id\"\n fi\n\n # get code\n code=$(echo \"$result\" | awk -FCODE '{print $2}' | xargs)\n if [[ \"$code\" -ne \"200\" ]]; then\n echo \"Error deleting user '$user_id'. code: $code: $error\"\n fi\n\n# logit \"got error code from delete: $code\"\n\n # strip code\n result=$(echo \"$result\" | sed \"s/CODE${code}CODE//g\")\n}\n\n\n\n##########\n# update user\n#\n#########\n_update_user() {\n user_id=\"$1\"\n post_data=\"$2\"\n\n ##########\n # create in database now\n result=$(curl -s -w 'CODE%{http_code}CODE' \\\n -X PUT \"${ROOT_URL}/user/${user_id}\" \\\n -H 'content-type: application/json' \\\n -H \"authorization: $BEARER\" \\\n -d \"$post_data\" \\\n -H 'cache-control: no-cache')\n if [ $? -ne 0 ]; then\n error \"Error making PUT for test user\"\n fi\n\n # get code\n code=$(echo \"$result\" | awk -FCODE '{print $2}' | xargs)\n if [[ \"$code\" != \"200\" ]]; then\n error \"Error making PUT for test user. 
code: $code: $result\"\n fi\n\n user_result=$(echo \"$result\" | sed \"s/CODE${code}CODE//g\")\n logit \"user updated\"\n}\n\n##########\n# Update user\n##########\nupdate_user() {\n user_id=$1\n\nread -r -d '' user_data <<EOF\n{\n\"data\" : \"new data\"\n}\nEOF\n\n _update_user $user_id \"$user_data\"\n}\n\n\n##########\n# Create user in db\n# \n##########\n_create_user() {\n post_data=\"$1\"\n\n ##########\n # create in database now\n result=$(curl -s -w 'CODE%{http_code}CODE' \\\n -X POST \"${ROOT_URL}/user\" \\\n -H 'content-type: application/json' \\\n -H \"authorization: $BEARER\" \\\n -d \"$post_data\" \\\n -H 'cache-control: no-cache')\n if [ $? -ne 0 ]; then\n error \"Error making POST for test user\"\n fi\n\n # get code\n code=$(echo \"$result\" | awk -FCODE '{print $2}' | xargs)\n if [[ \"$code\" != \"200\" ]]; then\n if [[ \"$code\" == \"409\" ]]; then\n error \"User already exists!\"\n else\n error \"Error making POST for test user. code: $code: $result\"\n fi\n fi\n\n user_result=$(echo \"$result\" | sed \"s/CODE${code}CODE//g\")\n# logit \"user created\"\n echo \"$user_result\"\n}\n\n\n#########\n# Create user\n#\n# param:\n# 1- name of user\n#########\nuser::create_user() {\n name=\"$1\"\nread -r -d '' user_data <<EOF\n{\n\"name\":\"${name}\",\n\"userGroup\":\"othergroup\",\n\"data\":\"n/a\",\n\"password\":\"${SECOND_USER_PASSWORD}\"\n}\nEOF\n\n # create user\n _create_user \"${user_data}\"\n}\n\n######\n# print info for user\n# \n######\nprint_user_info() {\n user_info=\"$1\"\n name=$(echo \"$user_info\" | jq -r .name)\n id=$(echo \"$user_info\" | jq -r .id)\n group=$(echo \"$user_info\" | jq -r .userGroup)\n\n echo \"User: '$name', ID: '$id', Group: '$group'\"\n}\n\n###############\n# clean users, besides the admin user\n#\n###############\nuser::clean() {\n # offset of 1 to skip the admin user\n users=$(get_all_users_with_offset_limit 1 1000)\n ids=$(echo \"${users}\" | jq -r \".data[].id\" )\n num=$(echo \"${users}\" | jq -r \".data | length\" )\n\n echo \"\"\n echo \"Delete all ($num) users.\"\n\n for id in $ids\n do\n $(delete_user $id)\n done\n}\n\n\n\n###############\n#\n# main test for user\n#\n###############\nuser::main_test() {\n echo \"Get all users\"\n all_users=$(get_all_users)\n\n echo \"\"\n echo \"Create user: 'fooz'\"\n user=$(user::create_user \"fooz\")\n user_id=$(echo \"$user\" | jq -r .id)\n print_user_info \"$user\"\n\n echo \"\"\n echo \"Get single user\"\n user=$(get_user_by_id $user_id)\n username=$(echo \"$user\" | jq -r .name)\n assert_string_equals \"fooz\" $username \"Username\"\n\n echo \"\"\n echo \"Update single user\"\n user=$(update_user $user_id)\n\n echo \"\"\n echo \"Verifying updated user...\"\n user=$(get_user_by_id $user_id)\n userdata=$(echo \"$user\" | jq -r .data)\n assert_string_equals \"new data\" \"$userdata\" \"Updated users data\"\n\n user::clean\n}\n\n\n###############\n#\n# Test the limits and offsets for large datasets\n#\n###############\nuser::test_limit_offset() {\n echo \"\"\n echo \"[[ User Limit/Offset test]]\"\n\n # num users to create\n COUNT=40\n\n echo \"Creating $COUNT users\"\n\n idx=1\n while [ $idx -le $COUNT ]\n do\n idx=$(( $idx + 1 ))\n username=\"user_${idx}\"\n result=$(user::create_user $username)\n done\n\n COUNT=$(( $COUNT +1 ))\n\n #######\n # Default returns\n # get users and see how many\n echo \"\"\n echo \"Testing default limit (20)\"\n\n all_users=$(get_all_users)\n total=$(echo \"$all_users\" | jq -r .total)\n offset=$(echo \"$all_users\" | jq -r .offset)\n limit=$(echo \"$all_users\" | jq -r 
.limit)\n\n echo \"Checking limit/offset/total\"\n assert_equals 0 ${offset} \"offset in users returned\"\n assert_equals $EXPECTED_DEFAULT_LIMIT $limit \"limit number users\"\n assert_equals $COUNT $total \"total number users\"\n\n #######\n # new limit\n echo \"\"\n echo \"Testing new limit\"\n all_users=$(get_all_users_with_offset_limit 0 500)\n total=$(echo \"$all_users\" | jq -r .total)\n offset=$(echo \"$all_users\" | jq -r .offset)\n limit=$(echo \"$all_users\" | jq -r .limit)\n\n echo \"Checking limit/offset/total\"\n assert_equals 0 ${offset} \"offset in users returned\"\n assert_equals $COUNT $limit \"limit number users\"\n assert_equals $COUNT $total \"total number users\"\n\n\n #######\n # new offset\n echo \"\"\n echo \"Testing new offset\"\n all_users=$(get_all_users_with_offset_limit 10 10)\n total=$(echo \"$all_users\" | jq -r .total)\n offset=$(echo \"$all_users\" | jq -r .offset)\n limit=$(echo \"$all_users\" | jq -r .limit)\n\n echo \"Checking limit/offset/total\"\n assert_equals 10 ${offset} \"offset in users returned\"\n assert_equals 10 $limit \"limit number users\"\n assert_equals $COUNT $total \"total number users\"\n\n #######\n # new offset\n echo \"\"\n echo \"Testing 2nd new offset\"\n all_users=$(get_all_users_with_offset_limit 13 2)\n total=$(echo \"$all_users\" | jq -r .total)\n offset=$(echo \"$all_users\" | jq -r .offset)\n limit=$(echo \"$all_users\" | jq -r .limit)\n\n echo \"Checking limit/offset/total\"\n assert_equals 13 ${offset} \"offset in users returned\"\n assert_equals 2 $limit \"limit number users\"\n assert_equals $COUNT $total \"total number users\"\n\n\n user::clean\n echo \"[[DONE User Limit/Offset test]]\"\n}\n\n\n\n###############\n#\n# Test user endpoint\n#\n###############\ntest_user() {\n echo \"\"\n echo \"[ User test ]\"\n\n user::main_test\n\n user::test_limit_offset \n\n echo \"[ Done User test ]\"\n}\n"
},
{
"alpha_fraction": 0.7020540833473206,
"alphanum_fraction": 0.7059459686279297,
"avg_line_length": 27.030303955078125,
"blob_id": "c1fe185bb9a681eefa4279a96b4e4885b2c7a85d",
"content_id": "faa4f22426732e174aae0812d32d38d74f860451",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 4625,
"license_type": "no_license",
"max_line_length": 82,
"num_lines": 165,
"path": "/images/book/src/github.com/hipposareevil/book/main.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\n// Main application\n//\n// This will create the databases, router, static files\n// and wire everything together\n\nimport (\n\t\"fmt\"\n\t\"net/http\"\n\t\"os\"\n\n\t// mysql\n\t\"database/sql\"\n\t_ \"github.com/go-sql-driver/mysql\"\n\n\t// redis\n\t\"github.com/mediocregopher/radix.v2/pool\"\n\n\t// gorilla routing\n\t\"github.com/gorilla/mux\"\n\n\t\"time\"\n\n\t\"github.com/go-kit/kit/log\"\n\thttptransport \"github.com/go-kit/kit/transport/http\"\n)\n\n// Main\nfunc main() {\n\tlogger := log.NewLogfmtLogger(os.Stderr)\n\n\t/////////////////\n\t// Make redis pool\n\tredisPool, err := pool.New(\"tcp\", \"books.token_db:6379\", 10)\n\tif err != nil {\n\t\tfmt.Println(\"Got error when making connection to redis: \", err)\n\t}\n\n\t/////////////////\n\t// Make Mysql db connection\n\tdb, err := sql.Open(\"mysql\", \"booksuser:books@tcp(books.db:3306)/booksdatabase\")\n\n\t// if there is an error opening the connection, handle it\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\tdefer db.Close()\n\tdb.SetMaxIdleConns(0)\n\tdb.SetConnMaxLifetime(time.Second * 10)\n\n\t///////////////////\n\t// create services and endpoints\n\n\t/////////\n\t// ROUTER\n\trouter := mux.NewRouter()\n\t// Make gorilla be router for everything\n\thttp.Handle(\"/\", router)\n\n\t/////////////////\n\t// Swagger static html file\n\thtmlDir := \"/html\"\n\n\t// Create server for swagger file\n\tfs := http.FileServer(http.Dir(htmlDir))\n\trouter.PathPrefix(\"/swagger.yaml\").Handler(http.StripPrefix(\"/\", fs))\n\n\t///////////////\n\t// cache layer\n\tvar cache CacheLayer\n\tcache = cacheLayer{redisPool}\n\n\t// clear the cache on startup\n\tcache.ClearAll(BOOK_CACHE)\n\n\t///////////////\n\t// 'book' service\n\tvar bookSvc BookService\n\tbookSvc = bookService{db, cache}\n\n\t// Set up the endpoints on our service\n\t//\n\t// Note: the Authentication middleware is done on each endpoint\n\t// individually so we can tightly control each one as some\n\t// care about only accepting 'admin' group.\n\n\t////////////////\n\t// Endpoints\n\n\t//////\n\t// GET /book (all books)\n\tbooksEndpoint := makeGetBooksEndpoint(bookSvc)\n\tbaseBooksHandler := httptransport.NewServer(\n\t\tbooksEndpoint,\n\t\tdecodeGetAllBooksRequest,\n\t\tencodeResponse,\n\t)\n\t// Add middleware to authenticate the endpoint.\n\t// first parameter denotes if only 'admin' group can access the endpoint.\n\tbooksHandler := Authenticate(false, redisPool, baseBooksHandler)\n\trouter.Methods(\"GET\").Path(\"/book\").Handler(booksHandler)\n\n\t//////\n\t// GET /book/<book_id>\n\tbookEndpoint := makeGetBookEndpoint(bookSvc)\n\tbaseBookHandler := httptransport.NewServer(\n\t\tbookEndpoint,\n\t\tdecodeGetBookRequest,\n\t\tencodeResponse,\n\t)\n\t// Add middleware to authenticate the endpoint.\n\t// first parameter denotes if only 'admin' group can access the endpoint.\n\tbookHandler := Authenticate(false, redisPool, baseBookHandler)\n\t// 'book_id' is used in transport.go to grab the variable 'book_id' from the path\n\trouter.Methods(\"GET\").Path(\"/book/{book_id}\").Handler(bookHandler)\n\n\t//////\n\t// DELETE /book/<book_id>\n\tdeleteBookEndpoint := makeDeleteBookEndpoint(bookSvc)\n\tbaseDeleteBookHandler := httptransport.NewServer(\n\t\tdeleteBookEndpoint,\n\t\tdecodeDeleteBookRequest,\n\t\tencodeResponse,\n\t)\n\t// Add middleware to authenticate the endpoint.\n\t// first parameter denotes if only 'admin' group can access the endpoint.\n\tdeleteBookHandler := Authenticate(true, redisPool, baseDeleteBookHandler)\n\t// 'book_id' 
is used in transport.go to grab the variable 'book_id' from the path\n\trouter.Methods(\"DELETE\").Path(\"/book/{book_id}\").Handler(deleteBookHandler)\n\n\t//////\n\t// POST /book\n\tcreateBookEndpoint := makeCreateBookEndpoint(bookSvc)\n\tbaseCreateBookHandler := httptransport.NewServer(\n\t\tcreateBookEndpoint,\n\t\tdecodeCreateBookRequest,\n\t\tencodeResponse,\n\t)\n\t// Add middleware to authenticate the endpoint.\n\t// first parameter denotes if only 'admin' group can access the endpoint.\n\tcreateBookHandler := Authenticate(true, redisPool, baseCreateBookHandler)\n\trouter.Methods(\"POST\").Path(\"/book\").Handler(createBookHandler)\n\n\t//////\n\t// PUT /book/<book_id>\n\tupdateBookEndpoint := makeUpdateBookEndpoint(bookSvc)\n\tbaseUpdateBookHandler := httptransport.NewServer(\n\t\tupdateBookEndpoint,\n\t\tdecodeUpdateBookRequest,\n\t\tencodeResponse,\n\t)\n\t// Add middleware to authenticate the endpoint.\n\t// first parameter denotes if only 'admin' group can access the endpoint.\n\tupdateBookHandler := Authenticate(true, redisPool, baseUpdateBookHandler)\n\t// 'book_id' is used in transport.go to grab the variable 'book_id' from the path\n\trouter.Methods(\"PUT\").Path(\"/book/{book_id}\").Handler(updateBookHandler)\n\n\t//////////////\n\t// Start server\n\taddr := \":8080\"\n\tlogger.Log(\"msg\", \"HTTP\", \"addr\", addr)\n\tfmt.Println(\"book service up on \" + addr)\n\tlogger.Log(\"err\", http.ListenAndServe(addr, nil))\n}\n"
},
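Every route in this main.go is wrapped by `Authenticate(adminOnly, redisPool, handler)` before registration. The middleware's body is not part of this excerpt, so the following is only a hypothetical sketch of the wrapping pattern it implements; the real version checks the bearer token against redis and, when the first argument is true, restricts the call to the *admin* group:

```go
package main

import "net/http"

// authenticateSketch is a hypothetical stand-in for the Authenticate
// middleware used above. It rejects requests without an Authorization
// header and otherwise delegates to the wrapped handler; the real
// implementation also validates the token (and group) against redis.
func authenticateSketch(adminOnly bool, next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		if r.Header.Get("Authorization") == "" {
			http.Error(w, "missing Authorization header", http.StatusUnauthorized)
			return
		}
		// Token lookup / admin-group check would happen here.
		next.ServeHTTP(w, r)
	})
}

func main() {
	ok := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Write([]byte("ok"))
	})
	http.ListenAndServe(":8081", authenticateSketch(false, ok))
}
```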
{
"alpha_fraction": 0.7202072739601135,
"alphanum_fraction": 0.7374784350395203,
"avg_line_length": 33.05882263183594,
"blob_id": "e914af2c086c1cd6e9d95a4f38b59b986cdf63f0",
"content_id": "54f308c05341b56a24d22a453aa487feee6116c4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1737,
"license_type": "no_license",
"max_line_length": 199,
"num_lines": 51,
"path": "/images/authorization/README.md",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "# Authorize Microservice\n\n## Introduction\n\n**/authorize** is a microservice for creating authorization tokens. Each token expires in 24 hours. The returned token should be placed into the HTTP Headers with key 'Authorization'.\n\nExample usage:\n\n* *admin* user creates user Bob with password \"s3cret\".\n* Actual user Bob makes a REST call to */authorize* with:\n```\n{ \"name\":\"bob\",\n \"password\":\"s3cret\" }\n```\n* Bob receives a response with:\n```\nBearer qwerty-1234-asdf-9876\n```\n* Bob inserts the following into the HTTP Headers for calls to any endpoint.\n```\nAuthorization : Bearer qwerty-1234-asdf-9876\n```\n\nExample of *authorize* call via curl. This is slightly different than what the *swagger* UI shows:\n```\n$> curl -X POST http://localhost:8080/authorize/token --header 'Content-Type: application/json' -d '{\"name\":\"bob, \"password\":\"s3cret}'\n\n```\n\n## Supported calls\nThe list of supported calls and their documentation are available via the swagger endpoint. This runs on localhost:8080/swagger/ when the application is up.\n\n\n## Encryption\n\nPassword are encrypted with [bcrypt](https://godoc.org/golang.org/x/crypto/bcrypt).\n\n\n## Go-kit Application\nThis uses go-kit for the framework and dep for the management of the dependencies (kindof like maven). A *vendor* directory will be created by dep in the *src/github.com/hipposareevil* sub-directory.\n\n\n## Docker \nThe Docker container will expose port 8080 to other containers on the *booknet* Docker network.\n\n## Libraries used\n\n* [go](https://golang.org/)\n* [go-kit](https://github.com/go-kit/kit) - microservice framework.\n* [dep](https://github.com/golang/dep) - depdendency management tool.\n* [bcrypt](https://godoc.org/golang.org/x/crypto/bcrypt) - encryption library\n"
},
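The same token flow is easy to drive programmatically. A sketch of requesting a token from */authorize/token* and capturing the reply; the response field names (`token`, `userId`, `groupName`) are assumed from how the frontend reads this endpoint elsewhere in the repo, so treat the struct as illustrative:

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

// tokenResponse mirrors the fields the frontend pulls out of the
// /authorize/token reply; the exact shape is an assumption here.
type tokenResponse struct {
	Token     string `json:"token"`
	UserId    int    `json:"userId"`
	GroupName string `json:"groupName"`
}

func main() {
	payload, _ := json.Marshal(map[string]string{
		"name":     "bob",
		"password": "s3cret",
	})

	resp, err := http.Post("http://localhost:8080/authorize/token",
		"application/json", bytes.NewReader(payload))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var tr tokenResponse
	if err := json.NewDecoder(resp.Body).Decode(&tr); err != nil {
		panic(err)
	}

	// The token (e.g. "Bearer qwerty-...") becomes the Authorization
	// header value on every subsequent call.
	fmt.Println("Authorization:", tr.Token)
}
```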
{
"alpha_fraction": 0.7410160899162292,
"alphanum_fraction": 0.7410160899162292,
"avg_line_length": 20.236841201782227,
"blob_id": "87d50493342775f04dabf9fe5c5871aa3753489d",
"content_id": "d13c90f2a85f72fc19d5b4d3eab514133883d0d2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 807,
"license_type": "no_license",
"max_line_length": 62,
"num_lines": 38,
"path": "/images/authorization/src/github.com/hipposareevil/authorization/endpoints.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\n// Interface denoting a Response can contain an error\ntype errorer interface {\n\terror() error\n}\n\n// Interface denoting a Response can contain data\ntype dataHolder interface {\n\t// Get the data\n\tgetData() interface{}\n}\n\n////////////////\n// Responses are passed to 'transport.encodeResponse'\n\n// Authorization Response\ntype authorizationResponse struct {\n\tErr error `json:\"err,omitempty\"`\n}\n\nfunc (theResponse authorizationResponse) error() error {\n\treturn theResponse.Err\n}\n\n// Create Token Response\ntype createTokenResponse struct {\n\tData Authorization `json:\"all,omitempty\"`\n\tErr error `json:\"err,omitempty\"`\n}\n\nfunc (theResponse createTokenResponse) getData() interface{} {\n\treturn theResponse.Data\n}\n\nfunc (theResponse createTokenResponse) error() error {\n\treturn theResponse.Err\n}\n"
},
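Each response struct here implements the small `errorer` and/or `dataHolder` interfaces so a single shared encoder can serialize any of them. That encoder lives in transport.go and isn't shown in this excerpt; the sketch below is a hypothetical illustration of how an encoder typically consumes these two interfaces:

```go
package main

import (
	"encoding/json"
	"errors"
	"fmt"
)

type errorer interface{ error() error }
type dataHolder interface{ getData() interface{} }

// encodeSketch surfaces the error when one is set, otherwise
// marshals just the wrapped payload instead of the whole struct.
func encodeSketch(response interface{}) ([]byte, error) {
	if e, ok := response.(errorer); ok && e.error() != nil {
		return nil, e.error()
	}
	if d, ok := response.(dataHolder); ok {
		return json.Marshal(d.getData())
	}
	return json.Marshal(response)
}

// demoResponse plays the role of the createTokenResponse-style structs.
type demoResponse struct {
	Data string
	Err  error
}

func (r demoResponse) error() error         { return r.Err }
func (r demoResponse) getData() interface{} { return r.Data }

func main() {
	good, _ := encodeSketch(demoResponse{Data: "hello"})
	fmt.Println(string(good)) // "hello"

	_, err := encodeSketch(demoResponse{Err: errors.New("boom")})
	fmt.Println(err) // boom
}
```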
{
"alpha_fraction": 0.7464752793312073,
"alphanum_fraction": 0.7469879388809204,
"avg_line_length": 30.20800018310547,
"blob_id": "456230d42f8a080bb2f30f9dab60e7cbeecc8cdf",
"content_id": "ffdd6998b8e0c4affef9b79f1cc018c09478e391",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Java",
"length_bytes": 3901,
"license_type": "no_license",
"max_line_length": 112,
"num_lines": 125,
"path": "/images.java/author/src/main/java/com/wpff/AuthorApplication.java",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package com.wpff;\n\nimport javax.ws.rs.container.DynamicFeature;\n\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n// Jedis\nimport com.bendb.dropwizard.redis.JedisBundle;\nimport com.bendb.dropwizard.redis.JedisFactory;\nimport com.wpff.common.cache.Cache;\nimport com.wpff.common.cache.CacheFactory;\nimport com.wpff.core.Author;\n// Resources\nimport com.wpff.db.AuthorDAO;\nimport com.wpff.resources.AuthorHelper;\nimport com.wpff.resources.AuthorResource;\n\nimport io.dropwizard.Application;\nimport io.dropwizard.db.DataSourceFactory;\nimport io.dropwizard.hibernate.HibernateBundle;\nimport io.dropwizard.hibernate.UnitOfWorkAwareProxyFactory;\nimport io.dropwizard.migrations.MigrationsBundle;\nimport io.dropwizard.setup.Bootstrap;\nimport io.dropwizard.setup.Environment;\nimport io.federecio.dropwizard.swagger.SwaggerBundle;\nimport io.federecio.dropwizard.swagger.SwaggerBundleConfiguration;\nimport redis.clients.jedis.JedisPool;\n\n/**\n * Application for managing authors.\n *\n */\npublic class AuthorApplication extends Application<AuthorConfiguration> {\n\n private static final Logger LOGGER = LoggerFactory.getLogger(AuthorApplication.class);\n\n /**\n * Start application\n *\n * @param args\n * Args for application\n * @throws Exception\n * if error in application\n */\n public static void main(final String[] args) throws Exception {\n new AuthorApplication().run(args);\n }\n\n // Create hibernate bundle\n private final HibernateBundle<AuthorConfiguration> hibernateBundle = new HibernateBundle<AuthorConfiguration>(\n Author.class) {\n @Override\n public DataSourceFactory getDataSourceFactory(AuthorConfiguration configuration) {\n return configuration.getDataSourceFactory();\n }\n };\n\n @Override\n public String getName() {\n return \"author\";\n }\n\n /**\n * Initialize the application with configurations\n */\n @Override\n public void initialize(final Bootstrap<AuthorConfiguration> bootstrap) {\n // Hibernate\n bootstrap.addBundle(hibernateBundle);\n\n // configuration for migration of databse\n bootstrap.addBundle(new MigrationsBundle<AuthorConfiguration>() {\n @Override\n public DataSourceFactory getDataSourceFactory(AuthorConfiguration configuration) {\n return configuration.getDataSourceFactory();\n }\n });\n\n // Jedis for Redis\n bootstrap.addBundle(new JedisBundle<AuthorConfiguration>() {\n @Override\n public JedisFactory getJedisFactory(AuthorConfiguration configuration) {\n return configuration.getJedisFactory();\n }\n });\n\n // Swagger\n bootstrap.addBundle(new SwaggerBundle<AuthorConfiguration>() {\n @Override\n protected SwaggerBundleConfiguration getSwaggerBundleConfiguration(AuthorConfiguration configuration) {\n return configuration.swaggerBundleConfiguration;\n }\n });\n }\n\n /**\n * Start the jersey endpoint for /author\n */\n @Override\n public void run(final AuthorConfiguration configuration, final Environment environment) {\n // Set up Jedis. 
Currently JedisFactory doesn't inject into a filter, just\n // Resources.\n JedisPool jedisPool = configuration.getJedisFactory().build(environment);\n\n // Cache\n\t\tCache cache = CacheFactory.createCache(jedisPool);\n \n // author rest endpoint\n final AuthorDAO authorDao = new AuthorDAO(hibernateBundle.getSessionFactory());\n\n // Helper for UnitOfWork\n AuthorHelper authorHelper = new UnitOfWorkAwareProxyFactory(hibernateBundle).create(\n AuthorHelper.class, \n new Class[] { AuthorDAO.class, Cache.class }, \n new Object[] { authorDao, cache } );\n\n environment.jersey().register(new AuthorResource(authorHelper));\n\n // Add a container request filter for securing webservice endpoints.\n DynamicFeature tokenRequired = new com.wpff.common.auth.TokenRequiredFeature(jedisPool);\n environment.jersey().register(tokenRequired);\n }\n\n}\n"
},
{
"alpha_fraction": 0.7283950448036194,
"alphanum_fraction": 0.7283950448036194,
"avg_line_length": 25.372093200683594,
"blob_id": "b155c8a47e6abb5a9ca0366080970e59e23fa416",
"content_id": "014406034e7891834c9b319ddd04c27681e7475d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Java",
"length_bytes": 1134,
"license_type": "no_license",
"max_line_length": 99,
"num_lines": 43,
"path": "/images.java/mybooks_common/src/main/java/com/wpff/common/auth/TokenRequiredFeature.java",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package com.wpff.common.auth;\n\n\nimport javax.ws.rs.container.DynamicFeature;\nimport javax.ws.rs.container.ResourceInfo;\nimport javax.ws.rs.core.FeatureContext;\nimport javax.ws.rs.ext.Provider;\n\nimport redis.clients.jedis.JedisPool;\n\n\n/**\n * A DynamicFeature that triggers the TokenFilter filter to be applied to certain Resource methods.\n * In this case, any Resource method that has the 'TokenRequired' annotation.\n * \n * This is hooked into the system inside UserApplication.\n */\n@Provider\npublic class TokenRequiredFeature implements DynamicFeature {\n\n /**\n * Jedis (redis) entrypoint\n */\n private JedisPool jedisPool;\n\n\n /**\n * Create new dynamic feature\n *\n * @param jedis Jedis instance used by the TokenFilter \n */\n public TokenRequiredFeature(JedisPool jedisPool) {\n this.jedisPool = jedisPool;\n }\n\n @Override\n public void configure(ResourceInfo resourceInfo, FeatureContext context) {\n if (resourceInfo.getResourceMethod().getAnnotation(TokenRequired.class) != null) {\n TokenFilter newFilter = new TokenFilter(this.jedisPool);\n context.register(newFilter);\n }\n }\n}\n"
},
{
"alpha_fraction": 0.63671875,
"alphanum_fraction": 0.63671875,
"avg_line_length": 29.117647171020508,
"blob_id": "139df00688121a3156cc215572a4c75691781abd",
"content_id": "758798c80ef567933f8a616b156c4a71a2746e2d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 1536,
"license_type": "no_license",
"max_line_length": 54,
"num_lines": 51,
"path": "/images/query/src/github.com/hipposareevil/query/openlibrary_structures.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\n//////////////////////////\n// Responses from from openlibrary API\n\n// This contains the individual author structs\ntype OpenLibraryAuthors struct {\n\tStart int `json:\"start\"`\n\tNumFound int `json:\"numFound\"`\n\tData []OpenLibraryAuthor `json:\"docs\"`\n}\n\n// Individual Author response\ntype OpenLibraryAuthor struct {\n\tSubjects []string `json:\"top_subjects\"`\n\tName string `json:\"name\"`\n\tOlKey string `json:\"key\"`\n\tBirthDate string `json:\"birth_date\"`\n}\n\n// This contains the individual title structs\ntype OpenLibraryTitles struct {\n\tStart int `json:\"start\"`\n\tNumFound int `json:\"numFound\"`\n\tData []OpenLibraryTitle `json:\"docs\"`\n}\n\n// Individual Title response\ntype OpenLibraryTitle struct {\n\tTitle string `json:\"title_suggest\"`\n\tCoverImage int `json:\"cover_i\"`\n\tFirstPublishYear int `json:\"first_publish_year\"`\n\tWorksKey string `json:\"key\"`\n\tAuthorKeys []string `json:\"author_key\"`\n\tAuthorNames []string `json:\"author_name\"`\n\tSubjects []string `json:\"subject\"`\n\tIsbns []string `json:\"isbn\"`\n\tPublishYears []int `json:\"publish_year\"`\n\tEditionKeys []string `json:\"edition_key\"`\n}\n\n// Description JSON returned for a given works url\ntype MetaDescription struct {\n\tType string `json:\"Type\"`\n\tValue string `json:\"value\"`\n}\n\ntype OpenLibraryDescription struct {\n\tDescription MetaDescription `json:\"description\"`\n\tSubject []string `json:\"subjects\"`\n}\n"
},
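Given those struct tags, decoding an openlibrary search reply is a single `json.Unmarshal`. A self-contained sketch with a hand-written sample document (the author structs are repeated so the snippet compiles on its own, and the sample values are illustrative):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Same shape as OpenLibraryAuthors/OpenLibraryAuthor above, repeated
// here so this sketch stands alone.
type OpenLibraryAuthors struct {
	Start    int                 `json:"start"`
	NumFound int                 `json:"numFound"`
	Data     []OpenLibraryAuthor `json:"docs"`
}

type OpenLibraryAuthor struct {
	Subjects  []string `json:"top_subjects"`
	Name      string   `json:"name"`
	OlKey     string   `json:"key"`
	BirthDate string   `json:"birth_date"`
}

func main() {
	sample := []byte(`{
	  "start": 0,
	  "numFound": 1,
	  "docs": [ {"name": "Isaac Asimov", "key": "OL34221A", "birth_date": "2 January 1920"} ]
	}`)

	var authors OpenLibraryAuthors
	if err := json.Unmarshal(sample, &authors); err != nil {
		panic(err)
	}
	fmt.Println(authors.NumFound, authors.Data[0].Name, authors.Data[0].OlKey)
}
```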
{
"alpha_fraction": 0.5683333277702332,
"alphanum_fraction": 0.5766666531562805,
"avg_line_length": 16.14285659790039,
"blob_id": "a007f7dd126e70c07111a48cefb92d7b0fa176cf",
"content_id": "5aa48be365b71b8fc0e90e95cdaa127c1ddfb12a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Java",
"length_bytes": 600,
"license_type": "no_license",
"max_line_length": 51,
"num_lines": 35,
"path": "/images.java/mybooks_common/src/main/java/com/wpff/common/util/TimeIt.java",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package com.wpff.common.util;\n\nimport java.util.Date;\n\n/**\n * Util to do timing. \n * \n * Example:\n * <code>\n * TimeIt timer = TimeIt.mark();\n * // do some work\n * System.out.println(timer.done(\"done with work\");\n * // results in \"done with work 135ms.\"\n * </code>\n */\npublic class TimeIt {\n \n private long start = 0L;\n private long end = 0l;\n \n private TimeIt() {\n start = new Date().getTime();\n }\n \n public static TimeIt mark() {\n return new TimeIt();\n }\n \n public String done(String msg) {\n end = new Date().getTime();\n \n return msg+ \" \" + (end - start) + \"ms.\";\n }\n\n}\n"
},
{
"alpha_fraction": 0.7621951103210449,
"alphanum_fraction": 0.7691637873649597,
"avg_line_length": 44.91999816894531,
"blob_id": "7fcaef30a4c1a7911fef62104b2392d18c44001b",
"content_id": "578c7cbe5a473235ca851aed937953ab903d6444",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1148,
"license_type": "no_license",
"max_line_length": 324,
"num_lines": 25,
"path": "/images/review/README.md",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "# Review Microservice\n\n## Introduction\n\n**/review** is a microservice for listing users' reviews for books.\n\n\n## Supported calls\nThe list of supported calls and their documentation are available via the swagger endpoint. This runs on localhost:8080/swagger/ when the application is up.\n\n\n## Authorization\nIt is necessary to authorize all REST calls to this endpoint. This is done by obtaining an authorization token from the */authorize* endpoint and adding it to the HTTP headees with the key *AUTHORIZATION*. See [/authorize](https://github.com/hipposareevil/books/blob/master/images/authorize/README.md) for more information.\n\n## Go-kit Application\nThis uses go-kit for the framework and dep for the management of the dependencies (kindof like maven). A *vendor* directory will be created by dep in the *src/github.com/hipposareevil* sub-directory.\n\n## Docker \nThe Docker container will expose port 8080 to other containers on the *booknet* Docker network.\n\n## Libraries used\n\n* [go](https://golang.org/)\n* [go-kit](https://github.com/go-kit/kit) - microservice framework.\n* [dep](https://github.com/golang/dep) - depdendency management tool.\n"
},
{
"alpha_fraction": 0.6802217960357666,
"alphanum_fraction": 0.6826863884925842,
"avg_line_length": 20.639999389648438,
"blob_id": "9a0e89ddce4b2666fdc29596a9b1044763d02e73",
"content_id": "9fec1d3a937a05e1344ef0252edd36dbef74f842",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 1623,
"license_type": "no_license",
"max_line_length": 72,
"num_lines": 75,
"path": "/images/query/src/github.com/hipposareevil/query/main.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\n// Main application\n//\n// This will create the router, static files\n// and wire everything together\n\nimport (\n\t\"net/http\"\n\t\"os\"\n\n\t// gorilla routing\n\t\"github.com/gorilla/mux\"\n\n\t\"github.com/go-kit/kit/log\"\n\thttptransport \"github.com/go-kit/kit/transport/http\"\n)\n\n// Main\nfunc main() {\n\tlogger := log.NewLogfmtLogger(os.Stderr)\n\n\t///////////////////\n\t// create services and endpoints\n\n\t/////////\n\t// ROUTER\n\trouter := mux.NewRouter()\n\t// Make gorilla be router for everything\n\thttp.Handle(\"/\", router)\n\n\t/////////////////\n\t// Swagger static html file\n\thtmlDir := \"/html\"\n\n\t// Create server for swagger file\n\tfs := http.FileServer(http.Dir(htmlDir))\n\trouter.PathPrefix(\"/swagger.yaml\").Handler(http.StripPrefix(\"/\", fs))\n\n\t///////////////\n\t// 'query' service\n\tvar querySvc QueryService\n\tquerySvc = queryService{}\n\n\t////////////////\n\t// Endpoints\n\n\t// GET /author\n\n\tqueryAuthorEndpoint := makeQueryAuthorEndpoint(querySvc)\n\tqueryAuthorHandler := httptransport.NewServer(\n\t\tqueryAuthorEndpoint,\n\t\tdecodeQueryAuthorRequest,\n\t\tencodeResponse,\n\t)\n\t// no authentication like the other services\n\trouter.Methods(\"GET\").Path(\"/query/author\").Handler(queryAuthorHandler)\n\n\t// GET /title\n\n\tqueryTitleEndpoint := makeQueryTitleEndpoint(querySvc)\n\tqueryTitleHandler := httptransport.NewServer(\n\t\tqueryTitleEndpoint,\n\t\tdecodeQueryTitleRequest,\n\t\tencodeResponse,\n\t)\n\t// no authentication like the other services\n\trouter.Methods(\"GET\").Path(\"/query/book\").Handler(queryTitleHandler)\n\n\t//////////////\n\t// Start server\n\taddr := \":8080\"\n\tlogger.Log(\"msg\", \"HTTP\", \"addr\", addr)\n\tlogger.Log(\"err\", http.ListenAndServe(addr, nil))\n}\n"
},
{
"alpha_fraction": 0.6014330387115479,
"alphanum_fraction": 0.6048663854598999,
"avg_line_length": 33.00507736206055,
"blob_id": "01972d4c399757adf6081125107100ebb9a62a0f",
"content_id": "f95b93cacd571fb4666fc698ef8d964bff2db91a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 6699,
"license_type": "no_license",
"max_line_length": 112,
"num_lines": 197,
"path": "/images/frontend/content/mybooks/src/updatedb/index.js",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "/**\n * Methods to help add Authors and Books to database\n *\n */\nimport Auth from '../auth'\n\n// URL of API\nconst HOSTNAME = location.hostname\nconst API_URL = 'http://' + HOSTNAME + ':8080'\n// const API_URL = 'http://localhost:8080'\n\nexport default {\n /**\n * Try and add an author to the database.\n * If the author already exists, do nothing\n *\n * params:\n * context- Vue component that contains $axios\n * author- Author bean\n */\n tryAddAuthor (context, author) {\n console.log('add author ' + author.name)\n let self = this\n\n // Check for author existing\n const authString = Auth.getAuthHeader()\n const url = API_URL + '/author?name=' + author.name\n\n // Make GET call to /author?name=foo to see if author exists\n context.$axios.get(url, { headers: { Authorization: authString } })\n .then((response) => {\n let listOfAuthors = response.data.data\n if (listOfAuthors.length <= 0) {\n self.addAuthor(context, author)\n } else {\n window.Event.$emit('updatedb_authorcreated', 'Author ' + author.name + ' already exists. Continuing.')\n }\n })\n .catch(function (error) {\n console.log(error)\n })\n },\n /**\n * Add the author to the database\n *\n * params:\n * context- Vue component that contains $axios\n * author- Author bean\n */\n addAuthor (context, author) {\n console.log('really add author: ' + author.name)\n\n // Add author\n const authString = Auth.getAuthHeader()\n const url = API_URL + '/author'\n\n // Make POST call to /author\n context.$axios.post(url, author, { headers: { Authorization: authString } })\n .then((response) => {\n window.Event.$emit('updatedb_authorcreated', 'Author ' + author.name + ' created!')\n })\n .catch(function (error) {\n window.Event.$emit('updatedb_error', 'Unable to create author \"' + author.name + '\": ' + error)\n console.log(error)\n })\n },\n /**\n * Add a book to the database.\n *\n * params:\n * context- Vue component that contains $axios\n * book- Book bean\n * bookInformation- Metadata on book, add to userbook flag and tags\n */\n addBook (context, book, bookInformation) {\n console.log('add book ' + book.title)\n console.log('add book ' + book.authorName)\n\n let self = this\n const authString = Auth.getAuthHeader()\n\n // Make GET call to /author?name=foo to see if author exists\n const url = API_URL + '/author?name=' + book.authorName\n console.log('making call to ' + url)\n context.$axios.get(url, { headers: { Authorization: authString } })\n .then((response) => {\n let listOfAuthors = response.data.data\n if (listOfAuthors.length <= 0) {\n // no author\n self.addBook_noauthor(context, book, bookInformation)\n } else {\n // author exists\n // Get the first author from array.\n let authorId = listOfAuthors[0].id\n console.log('author already exists: ' + authorId)\n self.addBook_authorcreated(context, book, authorId, bookInformation)\n }\n })\n .catch(function (error) {\n console.log(error)\n })\n },\n /**\n * No author exists so add one. Then create the book.\n * To create an author, we make a call to /query/author\n * and then post that to /author to create.\n *\n */\n addBook_noauthor (context, book, bookInformation) {\n let self = this\n let authString = Auth.getAuthHeader()\n\n // Get Author information from /query into 'authorJson'\n let url = API_URL + '/query/author?author=' + book.authorName\n console.log('addBook_noauthor. 
do a get on: ' + url)\n    context.$axios.get(url, { headers: { Authorization: authString } })\n      .then((response) => {\n        // We expect the response to be an array of 1\n        let authorJson = response.data.data[0]\n        console.log(authorJson)\n\n        // Add new author with 'authorJson'\n        const posturl = API_URL + '/author'\n        console.log('addBook. do a POST to ' + posturl)\n        // Make ajax call to /author\n        context.$axios.post(posturl, authorJson, { headers: { Authorization: authString } })\n          .then((response) => {\n            let authorId = response.data.id\n            console.log('AUTHOR: ' + response.data)\n            console.log('Created author: ' + authorId)\n            self.addBook_authorcreated(context, book, authorId, bookInformation)\n          })\n          .catch(function (error) {\n            // Couldn't create author. error out\n            window.Event.$emit('updatedb_error', 'Unable to create author \"' + book.authorName + '\": ' + error)\n            console.log(error)\n          })\n      })\n      .catch(function (error) {\n        // Unable to get author info from /query endpoint\n        console.log(error)\n      })\n  },\n  /**\n   * Create the actual book\n   *\n   * params:\n   *   context - Vue component\n   *   book- Book bean\n   *   authorId- ID of author\n   *   bookInformation- Bean with information about the book from the user\n   *     (add to user books flag and tags)\n   */\n  addBook_authorcreated (context, book, authorid, bookInformation) {\n    console.log('create book for existing author ' + authorid)\n\n    const authString = Auth.getAuthHeader()\n\n    // Create a subset of the openlibrary data for the POST\n    let data = {\n      imageLarge: book.imageLarge,\n      imageMedium: book.imageMedium,\n      imageSmall: book.imageSmall,\n      authorId: parseInt(authorid, 10),\n      description: book.description,\n      firstPublishedYear: parseInt(book.firstPublishedYear, 10),\n      openLibraryWorkUrl: book.openLibraryWorkUrl,\n      goodreadsUrl: book.goodreadsUrl,\n      subjects: book.subjects,\n      isbns: book.isbns,\n      title: book.title\n    }\n\n    // Make POST call to /book\n    const url = API_URL + '/book'\n    console.log('create book at url: ' + url)\n    context.$axios.post(url, data, { headers: { Authorization: authString } })\n      .then((response) => {\n        bookInformation.bookId = response.data.id\n        bookInformation.title = response.data.title\n        window.Event.$emit('updatedb_bookcreated', 'Book ' + book.title + ' saved!', bookInformation)\n      })\n      .catch(function (error) {\n        if (error.response) {\n          if (error.response.status === 409) {\n            // duplicate book\n            window.Event.$emit('updatedb_book_409', 'Book \"' + book.title + '\" already exists')\n          } else {\n            window.Event.$emit('updatedb_error', 'Unable to create book \"' + book.title + '\": ' + error)\n          }\n        } else {\n          window.Event.$emit('updatedb_error', 'Unable to create book \"' + book.title + '\": ' + error)\n          console.log(error)\n        }\n      })\n  }\n}\n"
},
{
"alpha_fraction": 0.5447944402694702,
"alphanum_fraction": 0.5577495694160461,
"avg_line_length": 24.52400016784668,
"blob_id": "9306c1cce06057948faab866c0aba275376aa244",
"content_id": "3ea98fdc29c527fc605b23887e18d50e9aeb924c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 19143,
"license_type": "no_license",
"max_line_length": 86,
"num_lines": 750,
"path": "/test/book.sh",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "######################\n#\n# book related calls\n#\n#\n######################\n\n\n\n##########\n# Create book in db\n# \n##########\n_create_book() {\n post_data=\"$1\"\n\n ##########\n # create in database now\n result=$(curl -s -w 'CODE%{http_code}CODE' \\\n -X POST \"${ROOT_URL}/book\" \\\n -H 'content-type: application/json' \\\n -H \"authorization: $BEARER\" \\\n -d \"$post_data\" \\\n -H 'cache-control: no-cache')\n if [ $? -ne 0 ]; then\n error \"Error making POST for test book\"\n fi\n\n # get code\n code=$(echo \"$result\" | awk -FCODE '{print $2}' | xargs)\n if [[ \"$code\" != \"200\" ]]; then\n if [[ \"$code\" == \"409\" ]]; then\n error \"Book already exists!\"\n else\n error \"Error making POST for test book. code: $code: $result\"\n fi\n fi\n\n book_result=$(echo \"$result\" | sed \"s/CODE${code}CODE//g\")\n echo \"$book_result\"\n}\n\n\n#########\n# Create book by asimov\n#\n#########\nbook::create_book_asimov() {\n asimov=$(create_author_asimov)\n asimov_id=$(echo \"$asimov\" | jq -r .id)\n\nread -r -d '' book_data <<EOF\n{\n \"authorId\": $asimov_id,\n \"firstPublishedYear\": 1952,\n \"title\": \"The Currents Of Space\",\n \"isbns\": [\n \"9789867001122\", \"0449238296\"\n ],\n \"subjects\": [\n \"Science Fiction\"\n ],\n \"description\": \"High above the planet Florinia.\",\n \"openlibraryWorkUrl\": \"https://openlibrary.org/works/OL46385W\",\n \"goodreadsUrl\": \"string\",\n \"imageSmall\": \"https://covers.openlibrary.org/b/id/6297485-S.jpg\",\n \"imageMedium\": \"https://covers.openlibrary.org/b/id/6297485-M.jpg\",\n \"imageLarge\": null\n}\nEOF\n\n # create book\n _create_book \"${book_data}\"\n}\n\n#########\n# Create 2nd book\n#\n#########\nbook::create_book_second() {\n author=$(create_author_lengle)\n author_id=$(echo \"$author\" | jq -r .id)\n\nread -r -d '' book_data <<EOF\n{\n \"authorId\": $author_id,\n \"firstPublishedYear\": 1975,\n \"title\": \"Currents are for eating\",\n \"description\": \"Low below the planet Florinia.\",\n \"openlibraryWorkUrl\": \"https://openlibrary.org/works/OL46385W\",\n \"goodreadsUrl\": \"fake string\",\n \"imageSmall\": \"small image location\",\n \"imageMedium\": \"https://covers.openlibrary.org/b/id/6297485-S.jpg\",\n \"imageLarge\": null\n}\nEOF\n\n # create book\n _create_book \"${book_data}\"\n}\n\n###############\n# Generic create book\n#\n# params:\n# 1- name of book\n# 2- author id\n###############\nbook::create_book() {\n# title=$(url_encode \"$1\")\n title=\"$1\"\n author_id=\"$2\"\n\n\nread -r -d '' book_data <<EOF\n{\n \"authorId\": $author_id,\n \"firstPublishedYear\": 1999,\n \"title\": \"$title\",\n \"description\": \"Book for $title.\",\n \"openlibraryWorkUrl\": \"https://openlibrary.org/works/$title\",\n \"goodreadsUrl\": \"fake string\",\n \"imageSmall\": \"small image location for ${title}\",\n \"imageMedium\": \"https://covers.openlibrary.org/b/id/${title}.jpg\",\n \"imageLarge\": \"large image location for ${title}\"\n}\nEOF\n\n # create book\n _create_book \"${book_data}\"\n}\n\n\n\n##########\n# get books by name\n#\n##########\nget_books_by_title() {\n title=\"$1\"\n query=$(url_encode \"$title\")\n\n # create url\n url=\"${ROOT_URL}/book?title=$query\"\n\n if [ $# -eq 3 ]; then\n offset=\"$2\"\n limit=\"$3\"\n\n url=\"${url}&offset=${offset}&limit=${limit}\"\n fi\n\n result=$(curl -s -w 'CODE%{http_code}CODE' \\\n -X GET \"${url}\" \\\n -H \"authorization: $BEARER\" \\\n -H 'cache-control: no-cache' \\\n -H 'content-type: application/json')\n if [ $? 
-ne 0 ]; then\n error \"Error making GET to /book for book '$bookName'\"\n fi\n\n # get code\n code=$(echo \"$result\" | awk -FCODE '{print $2}' | xargs)\n if [[ \"$code\" -ne \"200\" ]]; then\n error \"Error getting book '$bookName'. code: $code: $error\"\n fi\n\n # strip code\n result=$(echo \"$result\" | sed \"s/CODE${code}CODE//g\")\n echo \"$result\"\n}\n\n##########\n# get books by multiple ids\n#\n##########\nget_books_by_multiple_ids() {\n ids=\"$1\"\n query=$(url_encode \"$ids\")\n\n result=$(curl -s -w 'CODE%{http_code}CODE' \\\n -X GET \"${ROOT_URL}/book?book_id=$query\" \\\n -H \"authorization: $BEARER\" \\\n -H 'cache-control: no-cache' \\\n -H 'content-type: application/json')\n if [ $? -ne 0 ]; then\n error \"Error making GET to /book for books '$ids'\"\n fi\n\n # get code\n code=$(echo \"$result\" | awk -FCODE '{print $2}' | xargs)\n if [[ \"$code\" -ne \"200\" ]]; then\n error \"Error getting books by ids '$ids'. code: $code: $error\"\n fi\n\n # strip code\n result=$(echo \"$result\" | sed \"s/CODE${code}CODE//g\")\n echo \"$result\"\n}\n\n\n##########\n# get book by author ID\n#\n##########\nget_books_by_author_id() {\n author_id=\"$1\"\n query=$(url_encode \"$author_id\")\n\n result=$(curl -s -w 'CODE%{http_code}CODE' \\\n -X GET \"${ROOT_URL}/book?author_id=$query\" \\\n -H \"authorization: $BEARER\" \\\n -H 'cache-control: no-cache' \\\n -H 'content-type: application/json')\n if [ $? -ne 0 ]; then\n error \"Error making GET to /book for author '$author_id'\"\n fi\n\n # get code\n code=$(echo \"$result\" | awk -FCODE '{print $2}' | xargs)\n if [[ \"$code\" -ne \"200\" ]]; then\n error \"Error getting book. code: $code: $error\"\n fi\n\n # strip code\n result=$(echo \"$result\" | sed \"s/CODE${code}CODE//g\")\n echo \"$result\"\n}\n\n\n##########\n# get book by author name\n#\n##########\nget_books_by_author_name() {\n author_name=\"$1\"\n query=$(url_encode \"$author_name\")\n\n result=$(curl -s -w 'CODE%{http_code}CODE' \\\n -X GET \"${ROOT_URL}/book?author_name=$query\" \\\n -H \"authorization: $BEARER\" \\\n -H 'cache-control: no-cache' \\\n -H 'content-type: application/json')\n if [ $? -ne 0 ]; then\n error \"Error making GET to /book for book '$author_name'\"\n fi\n\n # get code\n code=$(echo \"$result\" | awk -FCODE '{print $2}' | xargs)\n if [[ \"$code\" -ne \"200\" ]]; then\n error \"Error getting book. code: $code: $error\"\n fi\n\n # strip code\n result=$(echo \"$result\" | sed \"s/CODE${code}CODE//g\")\n echo \"$result\"\n}\n\n##########\n# get book by id\n#\n##########\nget_book_by_id() {\n book_id=\"$1\"\n\n result=$(curl -s -w 'CODE%{http_code}CODE' \\\n -X GET ${ROOT_URL}/book/${book_id} \\\n -H \"authorization: $BEARER\" \\\n -H 'cache-control: no-cache' \\\n -H 'content-type: application/json')\n if [ $? -ne 0 ]; then\n error \"Error making GET to /book for book '$bookName'\"\n fi\n\n # get code\n code=$(echo \"$result\" | awk -FCODE '{print $2}' | xargs)\n if [[ \"$code\" -ne \"200\" ]]; then\n error \"Error getting book '$bookName'. code: $code: $error\"\n fi\n\n # strip code\n result=$(echo \"$result\" | sed \"s/CODE${code}CODE//g\")\n echo \"$result\"\n}\n\n\n##########\n# Get all books\n#\n##########\nget_all_books() {\n result=$(curl -s -w 'CODE%{http_code}CODE' \\\n -X GET ${ROOT_URL}/book\\\n -H \"authorization: $BEARER\" \\\n -H 'cache-control: no-cache' \\\n -H 'content-type: application/json')\n if [ $? 
-ne 0 ]; then\n error \"Error making GET to /book for all books\"\n fi\n\n # get code\n code=$(echo \"$result\" | awk -FCODE '{print $2}' | xargs)\n if [[ \"$code\" -ne \"200\" ]]; then\n error \"Error getting all books. code: $code: $result\"\n fi\n\n # strip code\n result=$(echo \"$result\" | sed \"s/CODE${code}CODE//g\")\n echo \"$result\"\n}\n\n##########\n# Get all books with offset and limit\n#\n##########\nget_all_books_with_offset_limit() {\n offset=$1\n limit=$2\n\n result=$(curl -s -w 'CODE%{http_code}CODE' \\\n -X GET \"${ROOT_URL}/book?limit=${limit}&offset=${offset}\" \\\n -H \"authorization: $BEARER\" \\\n -H 'cache-control: no-cache' \\\n -H 'content-type: application/json')\n if [ $? -ne 0 ]; then\n error \"Error making GET to /book for all books\"\n fi\n\n # get code\n code=$(echo \"$result\" | awk -FCODE '{print $2}' | xargs)\n if [[ \"$code\" -ne \"200\" ]]; then\n error \"Error getting all books. code: $code: $result\"\n fi\n\n # strip code\n result=$(echo \"$result\" | sed \"s/CODE${code}CODE//g\")\n echo \"$result\"\n}\n\n\n########\n# delete all books\n#\n# params: none\n########\ndelete_all_books() {\n # get 1000 books\n books=$(get_all_books_with_offset_limit 0 1000 )\n ids=$(echo \"${books}\" | jq -r \".data[].id\" )\n num=$(echo \"${books}\" | jq -r \".data | length\" )\n\n echo \"Delete all ($num) books.\"\n\n for id in $ids\n do\n delete_book $id\n done\n}\n\n\n\n##########\n# delete book\n#\n# params:\n# book id\n##########\ndelete_book() {\n book_id=\"$1\"\n\n result=$(curl -s -w 'CODE%{http_code}CODE' \\\n -X DELETE ${ROOT_URL}/book/${book_id} \\\n -H \"authorization: $BEARER\" \\\n -H 'cache-control: no-cache' \\\n -H 'content-type: application/json')\n if [ $? -ne 0 ]; then\n error \"Error making DELETE to /book for book $book_id\"\n fi\n\n # get code\n code=$(echo \"$result\" | awk -FCODE '{print $2}' | xargs)\n if [[ \"$code\" -ne \"200\" ]]; then\n echo \"Error deleting book '$book_id'. code: $code: $result\"\n fi\n\n # strip code\n result=$(echo \"$result\" | sed \"s/CODE${code}CODE//g\")\n}\n\n\n\n##########\n# update book\n#\n#########\n_update_book() {\n book_id=\"$1\"\n post_data=\"$2\"\n\nlogit \"Book id: $book_id\"\n\n ##########\n # update in database now\n result=$(curl -s -w 'CODE%{http_code}CODE' \\\n -X PUT \"${ROOT_URL}/book/${book_id}\" \\\n -H 'content-type: application/json' \\\n -H \"authorization: $BEARER\" \\\n -d \"$post_data\" \\\n -H 'cache-control: no-cache')\n if [ $? -ne 0 ]; then\n error \"Error making PUT for test book\"\n fi\n\n # get code\n code=$(echo \"$result\" | awk -FCODE '{print $2}' | xargs)\n if [[ \"$code\" != \"200\" ]]; then\n error \"Error making PUT for test book. 
code: $code: $result\"\n fi\n\n book_result=$(echo \"$result\" | sed \"s/CODE${code}CODE//g\")\n logit \"book updated\"\n}\n\n##########\n# Update book\n#\n# param: book id\n##########\nupdate_book() {\n book_id=$1\n\nread -r -d '' book_data <<EOF\n{\n\"description\": \"new description\",\n\"imageSmall\": \"small image\"\n}\nEOF\n\n _update_book $book_id \"$book_data\"\n}\n\n\n######\n# print info for book\n# \n######\nprint_book_info() {\n book_info=\"$1\"\n title=$(echo \"$book_info\" | jq -r .title)\n id=$(echo \"$book_info\" | jq -r .id)\n author_name=$(echo \"$book_info\" | jq -r .authorName)\n\n echo \"Book: '$title', ID: '$id', author: '$author_name'\"\n}\n\n\n###############\n#\n# Clean books\n#\n###############\nbook::clean() {\n echo \"\"\n delete_all_books\n author::clean\n}\n\n###############\n#\n# Test the limits and offsets for large datasets\n#\n###############\nbook::test_limit_offset() {\n echo \"\"\n echo \"[[ Book Limit/Offset test ]]\"\n\n # create author for the generic books\n author=$(create_author_lengle)\n author_id=$(echo \"$author\" | jq -r .id)\n\n # num books to create\n COUNT=40\n\n echo \"Creating $COUNT books\"\n\n idx=1\n while [ $idx -le $COUNT ]\n do\n idx=$(( $idx + 1 ))\n bookname=\"book_${idx}\"\n result=$(book::create_book $bookname $author_id)\n done\n\n #######\n # Default returns\n # get books and see how many\n echo \"\"\n echo \"Testing default limit (20)\"\n\n all_books=$(get_all_books)\n total=$(echo \"$all_books\" | jq -r .total)\n offset=$(echo \"$all_books\" | jq -r .offset)\n limit=$(echo \"$all_books\" | jq -r .limit)\n\n echo \"Checking limit/offset/total\"\n assert_equals 0 ${offset} \"offset in books returned\"\n assert_equals $EXPECTED_DEFAULT_LIMIT $limit \"limit number books\"\n assert_equals $COUNT $total \"total number books\"\n \n #######\n # new limit\n echo \"\"\n echo \"Testing new limit (500)\"\n all_books=$(get_all_books_with_offset_limit 0 500)\n total=$(echo \"$all_books\" | jq -r .total)\n offset=$(echo \"$all_books\" | jq -r .offset)\n limit=$(echo \"$all_books\" | jq -r .limit)\n\n echo \"Checking limit/offset/total\"\n assert_equals 0 ${offset} \"offset in books returned\"\n assert_equals $COUNT $limit \"limit number books\"\n assert_equals $COUNT $total \"total number books\"\n\n\n #######\n # new offset\n echo \"\"\n echo \"Testing new offset (10)\"\n all_books=$(get_all_books_with_offset_limit 10 10)\n total=$(echo \"$all_books\" | jq -r .total)\n offset=$(echo \"$all_books\" | jq -r .offset)\n limit=$(echo \"$all_books\" | jq -r .limit)\n\n echo \"Checking limit/offset/total\"\n assert_equals 10 ${offset} \"offset in books returned\"\n assert_equals 10 $limit \"limit number books\"\n assert_equals $COUNT $total \"total number books\"\n\n #######\n # new offset\n echo \"\"\n echo \"Testing 2nd new offset (13)\"\n all_books=$(get_all_books_with_offset_limit 13 2)\n total=$(echo \"$all_books\" | jq -r .total)\n offset=$(echo \"$all_books\" | jq -r .offset)\n limit=$(echo \"$all_books\" | jq -r .limit)\n\n echo \"Checking limit/offset/total\"\n assert_equals 13 ${offset} \"offset in books returned\"\n assert_equals 2 $limit \"limit number books\"\n assert_equals $COUNT $total \"total number books\"\n\n ##########\n # Test with book name in query\n echo \"\"\n echo \"Testing with book title in query\"\n all_books=$(get_books_by_title \"book_1\" 2 3)\n\n total=$(echo \"$all_books\" | jq -r .total)\n offset=$(echo \"$all_books\" | jq -r .offset)\n limit=$(echo \"$all_books\" | jq -r .limit)\n\n echo \"Checking 
limit/offset/total\"\n assert_equals 2 ${offset} \"offset in books returned\"\n assert_equals 3 $limit \"limit number books\"\n assert_equals 10 $total \"total number books\"\n\n book::clean\n\n echo \"[[ Done Book Limit/Offset test ]]\"\n}\n\n###############\n# \n# Main book test\n#\n###############\nbook::main_test() {\n echo \"Create first book\"\n book=$(book::create_book_asimov)\n# print_book_info \"$book\"\n book_id=$(echo \"$book\" | jq -r .id)\n author_asimov_id=$(echo \"$book\" | jq -r .authorId)\n book_year=$(echo \"$book\" | jq -r .firstPublishedYear)\n assert_string_equals \"1952\" \"$book_year\" \"Book's year\"\n \n # get single book\n echo \"\"\n echo \"Get single book\"\n book=$(get_book_by_id $book_id)\n first_book_id=$(echo \"$book\" | jq -r .id)\n\n assert_equals \"$book_id\" \"$first_book_id\" \"Get single book by id\"\n\n # check isbns\n echo \"\"\n echo \"Check isbns\"\n num_isbns=$(echo \"$book\" | jq -r '.isbns | length')\n isbns=$(echo \"$book\" | jq -r '.isbns')\n assert_equals 2 $num_isbns \"Number of isbns\"\n assert_contains \"$isbns\" \"9789867001122\" \"Isbn\"\n assert_contains \"$isbns\" \"0449238296\" \"Isbn\"\n\n # check subjects\n echo \"\"\n echo \"Check subjects\"\n num_subjects=$(echo \"$book\" | jq -r '.subjects | length')\n assert_equals 1 $num_subjects \"Number of subjects in book\"\n\n\n # update book\n echo \"\"\n echo \"Update book\"\n update_book $book_id\n book=$(get_book_by_id $book_id)\n\n # verify description\n description=$(echo \"$book\" | jq -r .description )\n assert_string_equals \"new description\" \"$description\" \"Updated book's description\"\n\n imageSmall=$(echo \"$book\" | jq -r .imageSmall )\n assert_string_equals \"small image\" \"$imageSmall\" \"Updated book's small image\"\n\n # 2nd book\n echo \"Create second book\"\n book=$(book::create_book_second)\n# print_book_info \"$book\"\n second_book_id=$(echo \"$book\" | jq -r \".id\")\n\n echo \"\"\n echo \"Check isbns on 2nd book\"\n\n num_isbns=$(echo \"$book\" | jq -r '.isbns | length')\n isbns=$(echo \"$book\" | jq -r '.isbns')\n assert_equals 0 $num_isbns \"Number of isbns\"\n\n num_subjects=$(echo \"$book\" | jq -r '.subjects | length')\n assert_equals 0 $num_subjects \"Number of subjects\"\n\n # check the null vs empty array\n ignore=$(echo \"$book\" | jq -r '.isbns | join(\" \")')\n assert_equals $? 0 \"jq failed on isbn query for 2nd book\"\n\n ignore=$(echo \"$book\" | jq -r '.subjects | join(\" \")')\n assert_equals $? 0 \"jq failed on subject query for 2nd book\"\n\n echo \"\"\n echo \"Check all books\"\n all_books=$(get_all_books)\n limit=$(echo \"$all_books\" | jq -r .limit)\n total=$(echo \"$all_books\" | jq -r .total)\n offset=$(echo \"$all_books\" | jq -r .offset)\n\n assert_equals 2 $limit \"limit number books\"\n assert_equals 2 $total \"total number books\"\n assert_equals 0 ${offset} \"offset in books returned\"\n\n # various queries\n echo \"\"\n echo \"Test book queries\"\n\n echo \"\"\n echo \"By id, multiple\"\n books=$(get_books_by_multiple_ids \"$first_book_id,$second_book_id\")\n # should be 2 books\n books=$(echo \"$books\" | jq -r .data)\n numBooks=$(echo $books | jq -r '. | length')\n assert_equals 2 $numBooks \"Number books returned by ids\"\n\n echo \"\"\n echo \"By title, multiple\"\n books=$(get_books_by_title \"Currents\")\n # should be 2 books\n books=$(echo \"$books\" | jq -r .data)\n numBooks=$(echo $books | jq -r '. 
| length')\n assert_equals 2 $numBooks \"Number books returned by title\"\n\n # get single book\n echo \"\"\n echo \"Search by title, single book\"\n books=$(get_books_by_title \"Currents of space\")\n\n echo \"\"\n echo \"Check limit & offset for single book\"\n\n offset=$(echo \"$books\" | jq -r .offset)\n limit=$(echo \"$books\" | jq -r .limit)\n total=$(echo \"$books\" | jq -r .total)\n assert_equals 1 $limit \"limit number of books\"\n assert_equals 1 $total \"total number of books\"\n assert_equals 0 $offset \"Offset into books\"\n\n title=$(echo \"$books\" | jq -r .data[0].title)\n assert_string_equals \"The Currents Of Space\" \"$title\" \"Search by title\"\n\n authors_name=$(echo \"$books\" | jq -r .data[0].authorName)\n assert_string_equals \"Isaac Asimov\" \"$authors_name\" \"Search by title's author name\"\n\n # by author id\n echo \"\"\n echo \"By author id\"\n books=$(get_books_by_author_id \"$author_asimov_id\")\n title=$(echo \"$books\" | jq -r .data[0].title)\n\n assert_string_equals \"The Currents Of Space\" \"$title\" \"Query by author id's title\"\n\n echo \"\"\n echo \"Check limit & offset for query by id\"\n offset=$(echo \"$books\" | jq -r .offset)\n limit=$(echo \"$books\" | jq -r .limit)\n total=$(echo \"$books\" | jq -r .total)\n assert_equals 1 $limit \"limit number of books\"\n assert_equals 1 $total \"total number of books\"\n assert_equals 0 $offset \"Offset into books\"\n\n # by author name\n echo \"\"\n echo \"Check limit & offset for query by name\"\n books=$(get_books_by_author_name \"Isaac Asimo\")\n\n offset=$(echo \"$books\" | jq -r .offset)\n limit=$(echo \"$books\" | jq -r .limit)\n total=$(echo \"$books\" | jq -r .total)\n assert_equals 1 $limit \"limit number of books\"\n assert_equals 1 $total \"total number of books\"\n assert_equals 0 $offset \"Offset into books\"\n\n echo \"\"\n echo \"By author name\"\n title=$(echo \"$books\" | jq -r .data[0].title)\n assert_string_equals \"The Currents Of Space\" \"$title\" \"Query by author name\"\n\n book::clean\n}\n\n###############\n#\n# Test book endpoint\n#\n###############\ntest_book() {\n echo \"\"\n echo \"[ Book Test ]\"\n\n book::main_test\n book::test_limit_offset\n\n echo \"[ Done Book Test ]\"\n\n}\n"
},
{
"alpha_fraction": 0.586280345916748,
"alphanum_fraction": 0.6041755676269531,
"avg_line_length": 24.21505355834961,
"blob_id": "da4a2eb9b15c51ed18211bc7584d2ffd9af4ba0b",
"content_id": "9c0802829d67403291599394d207a9a46c19a28a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 2347,
"license_type": "no_license",
"max_line_length": 110,
"num_lines": 93,
"path": "/images.java/author/waitforit.sh",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "#!/bin/bash\n\n# curl --connect-timeout 0.5 -sL localhost:888 -o /dev/null\n\n# This program polls the incoming URL for a connection.\n# When URL is available this script will call the incoming command.\n# If URL is not available before the timeout, will exit with 1 error code.\n#\n# params:\n# 1- url\n# 2- timeout\n# 3- command to run\n#\n# will run command if successful connection, 1 otherwise\n\n\n################\n# process params\nif [ $# -lt 3 ]; then\n echo \"usage: $0 <URL> <timeout in seconds>. e.g. $0 localhost:8888 3\"\n exit 1\nfi\n\nurl_to_check=$1\ntimeout=$2\nshift\nshift\ncommand=\"$@\"\n\n\n###############\n# local vars\nelapsedTime=0\n# how long to wait on a curl connection\nwaitTime=1\n\n\n# could we connect. \n# -1 mean valid connection but bad URL\n# 0 means no connection\n# 1 means valid connection and URL\nconnected=0\n\nwhile [[ $elapsedTime -lt $timeout && $connected -eq 0 ]]\ndo\n\n # If url_to_check starts with http, then verify the full URL is accessible,\n # otherwise, just do a GET\n if [[ $url_to_check == http* ]]; then\n # URL is HTTP\n resultString=$(curl -s -I -w %{http_code} --connect-timeout $waitTime -sL $url_to_check -o /dev/null)\n result=$?\n\n # verify connection and result\n if [ $result -eq 0 ]; then\n # connected to server. Let's verify the resource exists\n if [ \"$resultString\" = \"200\" ] ; then\n connected=1\n else\n connected=-1\n fi\n fi\n else\n # URL is non HTTP, do GET\n resultString=$(curl -X GET --connect-timeout $waitTime -sL $url_to_check -o /dev/null)\n result=$?\n\n # verify connection and result\n if [ $result -eq 0 ]; then\n connected=1\n fi\n fi\n\n if [ $connected -eq 0 ]; then\n # No connection to server, sleep for a bit\n sleep $waitTime\n elapsedTime=$(($elapsedTime + $waitTime))\n fi\ndone\n\nif [ $connected -eq 1 ]; then\n # connect was valid\n echo \"$url_to_check is up.\"\n exec $command\nelif [ $connected -eq -1 ]; then\n # able to connect, but bad HTTP code\n echo \"Able to connect to $url_to_check, but got HTTP code $resultString\"\n exit 1\nelif [ $connected -eq 0 ]; then\n # unable to connect to server\n echo \"Unable to connect to '$url_to_check' in $timeout seconds. Exiting.\"\n exit 1\nfi\n\n\n"
},
{
"alpha_fraction": 0.71529620885849,
"alphanum_fraction": 0.7188329100608826,
"avg_line_length": 26.560976028442383,
"blob_id": "6ee7a838eead4ef6dab5380ab8ec8163cb2c4065",
"content_id": "6a97704bf8791ea7f7697cc1116965c2f670b0c0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Dockerfile",
"length_bytes": 1131,
"license_type": "no_license",
"max_line_length": 112,
"num_lines": 41,
"path": "/images/query/Dockerfile",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "########\n# Build stage\nFROM golang:alpine AS build-env\n\n# golang defaults to directory '/go' for building\nWORKDIR /go\n\n# add 'src' and 'pkg' into the /go directory\nCOPY src/ /go/src/\n\n# Builds the 'query' service and places it into /go\n# Note the extra flags passed to 'go build' so this can run on a scratch image instead of alpine.\nRUN CGO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo github.com/hipposareevil/query\n\n########\n# final stage\n#FROM scratch\nFROM alpine:3.8\n\n# Add in certs for SSL connections\nRUN apk add --no-cache ca-certificates\n\n# Args passed in via 'docker build'\n# Used by the LABELs\nARG BUILD_TIME\nARG VERSION\n\n# copy binary\nCOPY --from=build-env /go/query /\n\n# copy static html files\nCOPY html/* /html/\n\n# Putting LABEL last so we can re-use the preceding caching layers\nLABEL org.label-schema.build-date=\"$BUILD_TIME\" \\\n org.label-schema.vendor=\"github.com/hipposareevil\" \\\n org.label-schema.version=\"$VERSION\" \\\n org.label-schema.description=\"Microservice for querying 3rd party APIs for author and book information.\" \\\n org.label-schema.name=\"books.query.go\" \n\nCMD [\"/query\"]\n\n"
},
{
"alpha_fraction": 0.614656388759613,
"alphanum_fraction": 0.614656388759613,
"avg_line_length": 31.044944763183594,
"blob_id": "5c7532e50b998b39b56ec9417cb0c045e60af399",
"content_id": "f730c6afe7af5aac8f63f6373d071582f9acac85",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 2852,
"license_type": "no_license",
"max_line_length": 56,
"num_lines": 89,
"path": "/images/book/src/github.com/hipposareevil/book/structures.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\n///////////////////\n// Structures\n\n// GET request for book, contains:\n// - offset\n// - limit\ntype getAllBooksRequest struct {\n\tBearer string `json:\"bearer\"`\n\tOffset int `json:\"offset\"`\n\tLimit int `json:\"limit\"`\n\tTitle string `json:\"title\"`\n\tAuthorName string `json:\"author_name\"`\n\tAuthorId []int `json:\"author_id\"`\n\tBookId []int `json:\"book_id\"`\n}\n\n// GET request for single book\n// - book_id\ntype getBookRequest struct {\n\tBearer string `json:\"bearer\"`\n\tBookId int `json:\"book_id\"`\n}\n\n// DELETE request for single book\n// - book_id\ntype deleteBookRequest struct {\n\tBookId int `json:\"book_id\"`\n}\n\n// POST request to create book\ntype createBookRequest struct {\n\tBearer string `json:\"bearer\"`\n\tAuthorId int `json:\"authorId\"`\n\tDescription string `json:\"description\"`\n\tFirstPublishedYear int `json:\"firstPublishedYear\"`\n\tGoodReadsUrl string `json:\"goodreadsUrl\"`\n\tImageLarge string `json:\"imageLarge\"`\n\tImageMedium string `json:\"imageMedium\"`\n\tImageSmall string `json:\"imageSmall\"`\n\tIsbns []string `json:\"isbns\"`\n\tOpenlibraryWorkUrl string `json:\"openlibraryWorkUrl\"`\n\tSubjects []string `json:\"subjects\"`\n\tTitle string `json:\"title\"`\n}\n\n// PUT request to update book\n// struct passed into service\ntype updateBookRequest struct {\n\tBearer string `json:\"bearer\"`\n\tBookId int `json:\"id\"`\n\tAuthorId int `json:\"authorId\"`\n\tDescription string `json:\"description\"`\n\tFirstPublishedYear int `json:\"firstPublishedYear\"`\n\tGoodReadsUrl string `json:\"goodreadsUrl\"`\n\tImageLarge string `json:\"imageLarge\"`\n\tImageMedium string `json:\"imageMedium\"`\n\tImageSmall string `json:\"imageSmall\"`\n\tIsbns []string `json:\"isbns\"`\n\tOpenlibraryWorkUrl string `json:\"openlibraryWorkUrl\"`\n\tSubjects []string `json:\"subjects\"`\n\tTitle string `json:\"title\"`\n}\n\n//// Response structures\n\ntype Book struct {\n\tAuthorId int `json:\"authorId\"`\n\tAuthorName string `json:\"authorName\"`\n\tDescription string `json:\"description\"`\n\tFirstPublishedYear int `json:\"firstPublishedYear\"`\n\tGoodReadsUrl string `json:\"goodreadsUrl\"`\n\tId int `json:\"id\"`\n\tImageLarge string `json:\"imageLarge\"`\n\tImageMedium string `json:\"imageMedium\"`\n\tImageSmall string `json:\"imageSmall\"`\n\tIsbns []string `json:\"isbns\"`\n\tOpenlibraryWorkUrl string `json:\"openlibraryWorkUrl\"`\n\tSubjects []string `json:\"subjects\"`\n\tTitle string `json:\"title\"`\n}\n\ntype Books struct {\n\tOffset int `json:\"offset\"`\n\tLimit int `json:\"limit\"`\n\tTotal int `json:\"total\"`\n\tData []Book `json:\"data\"`\n}\n"
},
{
"alpha_fraction": 0.7191600799560547,
"alphanum_fraction": 0.7230970859527588,
"avg_line_length": 29.399999618530273,
"blob_id": "15b09bb510d15cb1ab76b752cfe2de321927f67d",
"content_id": "6a9c9938c6cf5febcfd9b9777a3591702df3eb6b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Dockerfile",
"length_bytes": 762,
"license_type": "no_license",
"max_line_length": 86,
"num_lines": 25,
"path": "/images/frontend/Dockerfile",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "FROM node:8.4.0-alpine\n\nRUN apk update; apk upgrade; apk add git; apk add bash; apk add bash-completion\nRUN sed -i s/ash/bash/ /etc/passwd\nCOPY bashrc /root/.bashrc\n\nRUN npm install -g vue-cli\n\n# caller should mount into /scratch to run the dev server\nWORKDIR /scratch/\n\n# Args passed in via 'docker build'\n# Used by the LABELs\nARG BUILD_TIME\nARG VERSION\n\n# Putting LABEL last so we can re-use the preceding caching layers\nLABEL org.label-schema.build-date=\"$BUILD_TIME\" \\\n org.label-schema.vendor=\"github.com/hipposareevil\" \\\n org.label-schema.version=\"$VERSION\" \\\n org.label-schema.description=\"Dev Frontend SPA webpage for books application.\" \\\n org.label-schema.name=\"books.frontend\" \n\n#ENTRYPOINT [\"npm\", \"run\", \"dev\"]\nCMD npm run dev\n\n\n"
},
{
"alpha_fraction": 0.6557046175003052,
"alphanum_fraction": 0.6580478549003601,
"avg_line_length": 31.313316345214844,
"blob_id": "1b91ef62bf9986831a7b5ca6a43047f04c150cd1",
"content_id": "f129c0a142fa0ee6b755db0cc74ae54122bcfd46",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Java",
"length_bytes": 12376,
"license_type": "no_license",
"max_line_length": 196,
"num_lines": 383,
"path": "/images.java/author/src/main/java/com/wpff/resources/AuthorResource.java",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package com.wpff.resources;\n\nimport java.lang.reflect.InvocationTargetException;\nimport java.util.Arrays;\nimport java.util.List;\nimport java.util.stream.Collectors;\n\nimport javax.ws.rs.DELETE;\nimport javax.ws.rs.GET;\nimport javax.ws.rs.HeaderParam;\nimport javax.ws.rs.NotFoundException;\nimport javax.ws.rs.POST;\nimport javax.ws.rs.PUT;\nimport javax.ws.rs.Path;\nimport javax.ws.rs.PathParam;\nimport javax.ws.rs.Produces;\nimport javax.ws.rs.QueryParam;\nimport javax.ws.rs.WebApplicationException;\nimport javax.ws.rs.core.Context;\nimport javax.ws.rs.core.MediaType;\nimport javax.ws.rs.core.Response;\nimport javax.ws.rs.core.SecurityContext;\nimport javax.ws.rs.core.UriInfo;\n\n// utils\nimport org.apache.commons.beanutils.BeanUtils;\n\nimport com.codahale.metrics.annotation.Timed;\nimport com.wpff.common.auth.TokenRequired;\nimport com.wpff.common.result.ResultWrapper;\nimport com.wpff.common.result.ResultWrapperUtil;\nimport com.wpff.common.result.Segment;\nimport com.wpff.core.Author;\nimport com.wpff.query.AuthorQuery;\nimport com.wpff.result.AuthorResult;\n\nimport io.dropwizard.jersey.params.IntParam;\n// Swagger\nimport io.swagger.annotations.Api;\nimport io.swagger.annotations.ApiOperation;\nimport io.swagger.annotations.ApiParam;\nimport io.swagger.annotations.ApiResponse;\nimport io.swagger.annotations.ApiResponses;\n\n\n/**\n * Resource for /author url. Manages authors.\n */\n@Api( value=\"/author\",\n tags= \"author\",\n description=\"Manages authors\")\n@Path(\"/author\")\n@Produces(MediaType.APPLICATION_JSON)\npublic class AuthorResource {\n\n private final AuthorHelper authorHelper;\n\n public AuthorResource(AuthorHelper authorHelper) {\n this.authorHelper = authorHelper;\n }\n\n /**\n * Return a single author, by id.\n *\n * @param authorId\n * ID of author\n * @param authDummy\n * Dummy authorization string that is solely used for Swagger\n * description.\n * @return Author\n */\n @ApiOperation(\n value=\"Get author by ID.\",\n notes=\"Get author information. Requires authentication token in header with key AUTHORIZATION. \"\n + \"Example: AUTHORIZATION: Bearer qwerty-1234-asdf-9876.\"\n )\n @GET\n @Path(\"/{author_id}\")\n @com.wpff.common.auth.TokenRequired\n @Timed(absolute=true, name=\"getSingle\")\n public AuthorResult getAuthor(\n @ApiParam(value = \"ID of author to retrieve.\", required = false)\n @PathParam(\"author_id\") \n IntParam authorId,\n @ApiParam(value=\"Bearer authorization\", required=true)\n @HeaderParam(value=\"Authorization\")\n String authDummy\n ) {\n Author authorInDb = this.authorHelper.findById(authorId.get());\n return convertToBean(authorInDb);\n }\n\n /**\n * Get list authors.\n *\n * @param offset\n * Start index of data segment\n * @param limit\n * Size of data segment\n * @param authorNameQuery\n * Name of author, or partial name, that is used to match against the\n * database.\n * @param authDummy\n * Dummy authorization string that is solely used for Swagger\n * description.\n * \n * @return list of matching Author(s). When query is empty, this will be all\n * author\n */\n @ApiOperation(\n value=\"Get authors via optional 'name' query param.\",\n notes=\"Returns list of authors. When 'name' is specified only matching authors are returned.\" \n + \" Requires authentication token in header with key AUTHORIZATION. 
Example: AUTHORIZATION: Bearer qwerty-1234-asdf-9876.\" \n )\n @GET\n @TokenRequired\n @Timed(absolute=true, name=\"getAll\") \n public ResultWrapper<AuthorResult> getAuthors(\n @ApiParam(value = \"Name or partial name of author to retrieve.\", required = false)\n @QueryParam(\"name\") String authorNameQuery,\n \n @ApiParam(value = \"Where to start the returned data segment from the full result.\", required = false) \n @QueryParam(\"offset\") \n Integer offset,\n\n @ApiParam(value = \"size of the returned data segment.\", required = false) \n\t\t\t@QueryParam(\"limit\") \n\t\t\tInteger limit,\n\n @ApiParam(value=\"Bearer authorization\", required=true)\n @HeaderParam(value=\"Authorization\") String authDummy\n ) {\n // Start\n Segment segment = new Segment(offset, limit);\n \n \n List<Author> authors = null;\n if (authorNameQuery != null) {\n authors = this.authorHelper.findByName(authorNameQuery, segment);\n segment.setTotalLength((long) authors.size());\n }\n else {\n authors = this.authorHelper.findAll(segment);\n segment.setTotalLength(this.authorHelper.getTotalNumberAuthors());\n }\n \n // Convert list of Authors (DB) to AuthorResults (bean)\n List<AuthorResult> authorList = authors.\n stream().\n sorted().\n map( x -> this.convertToBean(x)).\n collect(Collectors.toList());\n \n ResultWrapper<AuthorResult> result = ResultWrapperUtil.createWrapper(authorList, segment);\n return result;\n }\n\n\n /**\n * Create a new author in the DB.\n *\n * @param authorBean\n * Author to add\n * @param context\n * security context (INJECTED via TokenFilter)\n * @param uriInfo\n * Information about this URI\n * @param authDummy\n * Dummy authorization string that is solely used for Swagger\n * description.\n * @return newly created Author\n */\n @ApiOperation(\n value=\"Create author.\",\n notes=\"Create new author in the database. The 'id' field will be ignored. Requires authentication token in header with key AUTHORIZATION. 
Example: AUTHORIZATION: Bearer qwerty-1234-asdf-9876.\"\n )\n @POST\n @ApiResponses( value = {\n @ApiResponse(code = 409, message = \"Author already exists.\"),\n @ApiResponse(code = 200, \n message = \"Author created.\")\n })\n @TokenRequired\n @Timed(absolute = true, name = \"create\")\n public AuthorResult createAuthor(\n @ApiParam(value = \"Author information.\", required = false)\n AuthorQuery authorBean,\n \n @Context SecurityContext context,\n \n @Context UriInfo uriInfo,\n \n @ApiParam(value=\"Bearer authorization\", required=true)\n @HeaderParam(value=\"Authorization\") String authDummy\n ) {\n if (authorBean == null) {\n throw new WebApplicationException(\"No data payload received for creating Author.\", Response.Status.BAD_REQUEST);\n }\n \n // START\n verifyAdminUser(context);\n \n try {\n // Make new Author from authorBean\n Author authorInDatabase = new Author();\n \n // copy(destination, source)\n BeanUtils.copyProperties(authorInDatabase, authorBean);\n \n // Make subjects in DB a CSV string\n authorInDatabase.setSubjectsAsCsv(AuthorHelper.convertListToCsv(authorBean.getSubjects()));\n\n // Create the author in the database, \n // then convert it to a normal bean and return that\n Author created = this.authorHelper.createAuthor(authorInDatabase);\n return this.convertToBean(created);\n }\n catch (org.hibernate.exception.ConstraintViolationException e) {\n String errorMessage = e.getMessage();\n // check cause/parent\n if (e.getCause() != null) {\n errorMessage = e.getCause().getMessage();\n }\n\n throw new WebApplicationException(errorMessage, 409);\n }\n catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException bean) {\n throw new WebApplicationException(\"Error in updating database when creating author \" + authorBean + \".\", Response.Status.INTERNAL_SERVER_ERROR);\n }\n }\n\n /**\n * Update an author in the DB.\n * \n * @param authorBean\n * Author to update\n * @param context\n * security context (INJECTED via TokenFilter)\n * @param authorId ID of author, from the path\n * @param authDummy\n * Dummy authorization string that is solely used for Swagger\n * description.\n * @return udpatedAuthor\n */\n @ApiOperation(\n value=\"Update an existing author.\",\n notes=\"Update the author in the database. The 'id' field will be ignored. Requires authentication token in header with key AUTHORIZATION. 
Example: AUTHORIZATION: Bearer qwerty-1234-asdf-9876.\"\n )\n @PUT\n @ApiResponse(code = 409, message = \"Duplicate value\")\n @TokenRequired\n @Path(\"/{author_id}\")\n @Timed(absolute = true, name = \"update\")\n public AuthorResult updateAuthor(\n @ApiParam(value = \"Author information.\", required = false)\n AuthorQuery authorBean,\n \n @Context SecurityContext context,\n \n @ApiParam(value = \"ID of author.\", required = false) \n @PathParam(\"author_id\") \n IntParam authorId,\n\n @ApiParam(value=\"Bearer authorization\", required=true)\n @HeaderParam(value=\"Authorization\") String authDummy\n ) {\n if (authorBean == null) {\n throw new WebApplicationException(\"No data payload received for updating Author.\", Response.Status.BAD_REQUEST);\n }\n \n // START\n verifyAdminUser(context);\n \n try {\n Author updated = this.authorHelper.updateAuthor(authorBean, authorId.get());\n\n return this.convertToBean(updated);\n }\n catch (org.hibernate.exception.ConstraintViolationException e) {\n String errorMessage = e.getMessage();\n // check cause/parent\n if (e.getCause() != null) {\n errorMessage = e.getCause().getMessage();\n }\n\n throw new WebApplicationException(errorMessage, 409);\n }\n catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException bean) {\n throw new WebApplicationException(\"Error in updating database when creating author \" + authorBean + \".\", Response.Status.INTERNAL_SERVER_ERROR);\n }\n }\n \n\n\n /**\n * Deletes a author by ID\n *\n * @param authorId\n * ID of author\n * @param context\n * security context (INJECTED via TokenFilter)\n * @param authDummy\n * Dummy authorization string that is solely used for Swagger\n * description.\n * \n * @return Response denoting if the operation was successful (202) or failed\n * (404)\n */\n @ApiOperation(\n value=\"Delete author by ID.\",\n notes=\"Delete author from database. Requires authentication token in header with key AUTHORIZATION. \"\n + \"Example: AUTHORIZATION: Bearer qwerty-1234-asdf-9876. \"\n + \"User must be in the 'admin' group.\"\n )\n @DELETE\n @Path(\"/{author_id}\")\n @TokenRequired\n @Timed(absolute = true, name = \"delete\") \n public Response deleteAuthor(\n @ApiParam(value = \"ID of author to retrieve.\", required = true)\n @PathParam(\"author_id\") IntParam authorId,\n @Context SecurityContext context,\n @ApiParam(value=\"Bearer authorization\", required=true)\n @HeaderParam(value=\"Authorization\") String authDummy\n ) {\n try {\n // Start\n verifyAdminUser(context);\n\n this.authorHelper.deleteAuthor(authorId.get());\n }\n catch (org.hibernate.HibernateException he) {\n System.out.println(he);\n throw new NotFoundException(\"No author by id '\" + authorId + \"'\");\n }\n return Response.ok().build();\n }\n\n\n\n\n /************************************************************************/\n /** Helper methods **/\n /************************************************************************/\n \n\n \n /**\n * Convert an Author from the DB into a AuthorResult for return to the caller\n * \n * @param dbAuthor\n * Author in DB\n * @return Author bean\n */\n private AuthorResult convertToBean(Author dbAuthor) {\n AuthorResult result = new AuthorResult();\n\n try {\n BeanUtils.copyProperties(result, dbAuthor);\n\n // dbAuthor's 'subjects' is a csv. 
Convert to a list\n List<String> subjects = Arrays.asList(dbAuthor.getSubjectsAsCsv().split(\"\\\\s*,\\\\s*\"));\n BeanUtils.copyProperty(result, \"subjects\", subjects);\n } catch (IllegalAccessException | InvocationTargetException e) {\n e.printStackTrace();\n }\n\n return result;\n }\n \n\n /**\n * Verifies the incoming user is 'admin'.\n * Throws exception if user is not admin.\n */\n static void verifyAdminUser(SecurityContext context) throws WebApplicationException {\n if (! context.isUserInRole(\"admin\")) {\n throw new WebApplicationException(\"Must be logged in as a member of the 'admin' user group.\", Response.Status.UNAUTHORIZED);\n }\n }\n\n\n}\n"
},
{
"alpha_fraction": 0.6126760840415955,
"alphanum_fraction": 0.6281690001487732,
"avg_line_length": 15.928571701049805,
"blob_id": "917125b2463fd4472bdf6f013d4aea5005f57cde",
"content_id": "3cc29cc22613bf17732853c4483db2a9383f6b83",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Gradle",
"length_bytes": 710,
"license_type": "no_license",
"max_line_length": 69,
"num_lines": 42,
"path": "/images.java/mybooks_common/build.gradle",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "apply plugin: 'java'\napply plugin: 'eclipse'\n\nsourceCompatibility = 1.7\nversion = '1.0'\njar {\n manifest {\n attributes 'Implementation-Title': 'mybooks.common',\n 'Implementation-Version': version,\n 'groupId': 10\n }\n}\n\n\nclean.doFirst {\n delete \"${rootDir}/repos\"\n}\n\n\nuploadArchives {\n repositories {\n flatDir {\n dirs 'repos'\n }\n }\n}\n\n\nrepositories {\n mavenCentral()\n}\n\ndependencies {\n // https://mvnrepository.com/artifact/redis.clients/jedis\n compile group: 'redis.clients', name: 'jedis', version: '1.5.0'\n\n// https://mvnrepository.com/artifact/javax.ws.rs/javax.ws.rs-api\ncompile group: 'javax.ws.rs', name: 'javax.ws.rs-api', version: '2.0'\n\n\n\n}"
},
{
"alpha_fraction": 0.6483566164970398,
"alphanum_fraction": 0.6497073173522949,
"avg_line_length": 19.190908432006836,
"blob_id": "6ed7e1829a491d41581eb3fe0ca0635123e22021",
"content_id": "10496daf0dc6ef706b4ca7f964b9ed969c553c82",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 2221,
"license_type": "no_license",
"max_line_length": 111,
"num_lines": 110,
"path": "/images/review/src/github.com/hipposareevil/review/service.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\n// Book service\n\nimport (\n\t_ \"encoding/json\"\n\t\"fmt\"\n\t_ \"io/ioutil\"\n\t_ \"net/http\"\n\t_ \"strconv\"\n)\n\n// Service interface exposed to clients\ntype ReviewService interface {\n\tGetReviews(string, int, int, int) (Reviews, error)\n}\n\n////////////////////////\n// Actual service\n// This takes the following:\n// - mysqlDb DB for MySQL\n// - cache layer\ntype reviewService struct {\n}\n\n//////////\n// METHODS on userbookService\n\n////////////////\n// Get Review\n//\n// returns:\n// reviews\n// error\nfunc (theService reviewService) GetReviews(bearer string, offset int, limit int, bookId int) (Reviews, error) {\n\tfmt.Println(\"\")\n\tfmt.Println(\"-- GetReviews --\")\n\n\tvar datum []Review\n\n\t// Query /user for list of users\n\tvar users Users\n\tusers, err := getUsers(bearer)\n\tif err != nil {\n\t\tfmt.Println(\"Unable to get users for reviews: \", err)\n\t\treturn Reviews{}, ErrServerError\n\t}\n\n\t// For each user, query /user_book for reviews for the book\n\tfor _, currentUser := range users.Data {\n\t\tfmt.Println(\"Found user: \", currentUser.Id)\n\t\tfmt.Println(\"Found user: \", currentUser.Name)\n\n\t\tuserBook, err := getUserBook(bearer, currentUser.Id, bookId)\n\n\t\tif err == nil {\n\t\t\t// make a Review object for this user book\n\t\t\tnewReview := Review{\n\t\t\t\tBookId: bookId,\n\t\t\t\tRating: userBook.Rating,\n\t\t\t\tTags: userBook.Tags,\n\t\t\t\tUserName: currentUser.Name,\n\t\t\t\tUserId: currentUser.Id,\n\t\t\t\tReview: userBook.Review,\n\t\t\t}\n\t\t\tdatum = append(datum, newReview)\n\t\t} else {\n\t\t\tfmt.Println(\"Got error trying to get userbook for user: \", currentUser.Id, \" :\", err)\n\t\t}\n\t}\n\n\t///////////////\n\t// Update return data\n\n\t// Get the total number of rows\n\trealNumberRows := len(datum)\n\trealLimit := limit\n\n\t// fix offset\n\tif (offset > realNumberRows) || (offset < 0) {\n\t\toffset = 0\n\t}\n\n\t// fix limit\n\tif realLimit < 0 {\n\t\trealLimit = len(datum)\n\t}\n\n\tif realLimit > realNumberRows {\n\t\trealLimit = realNumberRows\n\t}\n\n\t// determine slice of datum to use\n\twhereToEnd := offset + realLimit\n\tif whereToEnd > realNumberRows {\n\t\twhereToEnd = realNumberRows\n\t}\n\n\tdatum = datum[offset:whereToEnd]\n\n\t// Create Reviews to return\n\treturnValue := Reviews{\n\t\tOffset: offset,\n\t\tLimit: realLimit,\n\t\tTotal: realNumberRows,\n\t\tData: datum,\n\t}\n\n\treturn returnValue, nil\n}\n"
},
{
"alpha_fraction": 0.6492146849632263,
"alphanum_fraction": 0.6494526267051697,
"avg_line_length": 29.671533584594727,
"blob_id": "7945a3fc8eac375acf1554a98f02767413ed0694",
"content_id": "8508983471dd41c0a2a77e2922a6c26fc1d41b7f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Java",
"length_bytes": 4202,
"license_type": "no_license",
"max_line_length": 85,
"num_lines": 137,
"path": "/images.java/mybooks_common/src/main/java/com/wpff/common/auth/TokenFilter.java",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package com.wpff.common.auth;\n\n\nimport java.io.IOException;\nimport java.security.Principal;\n\nimport javax.ws.rs.WebApplicationException;\nimport javax.ws.rs.container.ContainerRequestContext;\nimport javax.ws.rs.container.ContainerRequestFilter;\nimport javax.ws.rs.core.HttpHeaders;\nimport javax.ws.rs.core.Response;\nimport javax.ws.rs.core.SecurityContext;\n\nimport redis.clients.jedis.Jedis;\nimport redis.clients.jedis.JedisPool;\n\n/**\n * This is a container request filter that checks for an Authorization header\n * that should container a token that was generated by the AuthResource resource\n * (/authorize/token). This filter is only applied to resource methods that have been\n * annotated with TokenRequired (com.wpff.filter.TokenRequired).\n *\n * If a token that matches key/value in our database, we update the security\n * context with a UserPrincipal that has the name of the User.\n *\n */\npublic class TokenFilter implements ContainerRequestFilter {\n\n // Static Bearer text\n private static String BEARER = \"Bearer\";\n\n /**\n * JedisPool used in the filter method to see if the token matches a user.\n */\n private JedisPool jedisPool;\n\n /**\n * Create new request filter. Currently takes a jedis pool.\n * \n * @param jedisPool\n * Jedis pool\n */\n public TokenFilter(JedisPool jedisPool) {\n this.jedisPool = jedisPool;\n }\n\n /**\n * Filter an incoming request. Looks for the authorization header (starting with\n * 'Bearer') and if it matches a key/value in our DB, we update the context so\n * the resource method being called has that information\n * \n * This checks the Redis DB to get the users name and group.\n *\n * @param requestContext\n * Context that contains headers and will potentially be modified to\n * have a new UserPrincipal.\n * @throws IOException\n * If an error occurs of the caller is unauthorized.\n */\n @Override\n public void filter(ContainerRequestContext requestContext) throws IOException {\n\n String authHeader = requestContext.getHeaderString(HttpHeaders.AUTHORIZATION);\n\n if ((authHeader == null) || (!authHeader.startsWith(BEARER))) {\n throw new WebApplicationException(\n \"Must supply valid Authorization header. Authenticate at /auth/token\",\n Response.Status.UNAUTHORIZED);\n }\n\n // Grab token text from Header\n String token = authHeader.substring(BEARER.length() + 1);\n token = token.trim();\n\n // Get username and group from Jedis.\n String redisHashName = \"user:\" + token;\n \n // Get jedis from pool\n Jedis jedis = null;\n try {\n jedis = this.jedisPool.getResource();\n final String username = jedis.hget(redisHashName, \"name\");\n final String group = jedis.hget(redisHashName, \"group\");\n\n if ((username == null) || (username.isEmpty())) {\n throw new WebApplicationException(\n \"Must supply valid Authorization header. 
Authenticate at /auth/token\",\n Response.Status.UNAUTHORIZED);\n }\n\n // Override the security context by giving it a new UserPrincipal\n // that will contain the username we got from our DB\n requestContext.setSecurityContext(new SecurityContext() {\n @Override\n public Principal getUserPrincipal() {\n return new Principal() {\n @Override\n public String getName() {\n return username;\n }\n };\n }\n\n /**\n * Check if the user is in the role or group.\n * This is called to verify if the user is in the 'admin' group.\n * \n * @param role Role to check for.\n * @return true if the role is equal to the user's group\n */\n @Override\n public boolean isUserInRole(String role) {\n if (role.equals(group)) {\n return true;\n } else {\n return false;\n }\n }\n\n @Override\n public boolean isSecure() {\n return false;\n }\n\n @Override\n public String getAuthenticationScheme() {\n return null;\n }\n });\n } finally {\n if (jedis != null) {\n this.jedisPool.returnResource(jedis);\n }\n }\n }\n\n}\n"
},
{
"alpha_fraction": 0.6870229244232178,
"alphanum_fraction": 0.6870229244232178,
"avg_line_length": 19.377777099609375,
"blob_id": "968a8743049d6e47ddc6e7e49b4471abef6c8220",
"content_id": "49b2662efdc56df7a02f1901b1da7f594b2fd14d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 1834,
"license_type": "no_license",
"max_line_length": 61,
"num_lines": 90,
"path": "/images/book/src/github.com/hipposareevil/book/endpoints.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\n// Base for all responses\ntype errorer interface {\n\terror() error\n}\n\n// interface for holding data\ntype dataHolder interface {\n\t// Get the data\n\tgetData() interface{}\n}\n\n////////////////\n// Responses are passed to 'transport.encodeResponse'\n\n////////////////////\n/// Books (all)\n// response for books (vs. single book)\ntype booksResponse struct {\n\tData Books `json:\"all,omitempty\"`\n\tErr error `json:\"err,omitempty\"`\n}\n\nfunc (theResponse booksResponse) error() error {\n\treturn theResponse.Err\n}\n\nfunc (theResponse booksResponse) getData() interface{} {\n\treturn theResponse.Data\n}\n\n////////////////////\n/// BOOK (single)\n// response for book (single)\ntype bookResponse struct {\n\tData Book `json:\"all,omitempty\"`\n\tErr error `json:\"err,omitempty\"`\n}\n\nfunc (theResponse bookResponse) error() error {\n\treturn theResponse.Err\n}\n\nfunc (theResponse bookResponse) getData() interface{} {\n\treturn theResponse.Data\n}\n\n////////////////////\n/// DELETE BOOK (single)\n// response for book (single)\ntype deleteBookResponse struct {\n\tErr error `json:\"err,omitempty\"`\n}\n\nfunc (theResponse deleteBookResponse) error() error {\n\treturn theResponse.Err\n}\n\n////////////////////\n/// Create BOOK\n// response for create book\ntype createBookResponse struct {\n\tData Book `json:\"all,omitempty\"`\n\tErr error `json:\"err,omitempty\"`\n}\n\nfunc (theResponse createBookResponse) error() error {\n\treturn theResponse.Err\n}\n\nfunc (theResponse createBookResponse) getData() interface{} {\n\treturn theResponse.Data\n}\n\n////////////////////\n/// Update BOOK\n// response for update book\ntype updateBookResponse struct {\n\tErr error `json:\"err,omitempty\"`\n\tData Book `json:\"all,omitempty\"`\n}\n\nfunc (theResponse updateBookResponse) error() error {\n\treturn theResponse.Err\n}\n\nfunc (theResponse updateBookResponse) getData() interface{} {\n\treturn theResponse.Data\n}\n"
},
{
"alpha_fraction": 0.7530648112297058,
"alphanum_fraction": 0.7577349543571472,
"avg_line_length": 42.92307662963867,
"blob_id": "47504e8265e91339a846522612fbb60db02a10f2",
"content_id": "2a6903e467f5218e7cc3b053a8d5ac827d2d7e8e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1713,
"license_type": "no_license",
"max_line_length": 324,
"num_lines": 39,
"path": "/images/author/README.md",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "# Microservice\n\n## Introduction\n\n**/author** is a microservice for querying, listing, adding and deleting Author entries from the database. Authors are visable to all users but are only mutable by the *admin* user. \n\n## Encryption\n\nPassword are encrypted with [bcrypt](https://godoc.org/golang.org/x/crypto/bcrypt).\n\n## Supported calls\nThe list of supported calls and their documentation are available via the swagger endpoint. This runs on localhost:8080/swagger/ when the application is up.\n\n## Fields for an Author\nAn Author entry has the following fields:\n\nField | Purpose\n--- | ---\nid | Unique ID of the author. This is used to manage the author and for reference in a Book.\nname | Full name of the author. Example: \"Isaac Asimov\".\nimageUrl | URL of image for the author. \n\n\n## Authorization\nIt is necessary to authorize all REST calls to this endpoint. This is done by obtaining an authorization token from the */authorize* endpoint and adding it to the HTTP headees with the key *AUTHORIZATION*. See [/authorize](https://github.com/hipposareevil/books/blob/master/images/authorize/README.md) for more information.\n\n## Go-kit Application\nThis uses go-kit for the framework and dep for the management of the dependencies (kindof like maven). A *vendor* directory will be created by dep in the *src/github.com/hipposareevil* sub-directory.\n\n\n## Docker \nThe Docker container will expose port 8080 to other containers on the *booknet* Docker network.\n\n## Libraries used\n\n* [go](https://golang.org/)\n* [go-kit](https://github.com/go-kit/kit) - microservice framework.\n* [dep](https://github.com/golang/dep) - depdendency management tool.\n* [bcrypt](https://godoc.org/golang.org/x/crypto/bcrypt) - encryption library\n"
},
{
"alpha_fraction": 0.6708694100379944,
"alphanum_fraction": 0.6714175939559937,
"avg_line_length": 23.920764923095703,
"blob_id": "0529d89122dea2d2c5ddf67e6049d434c864d257",
"content_id": "9e1375f820264c6bac79402103535558575cd53b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 9121,
"license_type": "no_license",
"max_line_length": 125,
"num_lines": 366,
"path": "/images/author/src/github.com/hipposareevil/author/service.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\n// Author service\n\nimport (\n\t\"fmt\"\n\t\"strings\"\n\n\t\"errors\"\n\n\t\"database/sql\"\n\t_ \"github.com/go-sql-driver/mysql\"\n)\n\nconst AUTHOR_CACHE = \"author.name\"\n\n// Service interface exposed to clients\ntype AuthorService interface {\n\t// GetAuthors: offset, limit\n\tGetAuthors(int, int, string) (Authors, error)\n\n\t// GetAuthor: id\n\tGetAuthor(int) (Author, error)\n\n\t// DeleteAuthor: id\n\tDeleteAuthor(int) error\n\n\t// CreateAuthor\n\t// name, authorgroup, data, password\n\tCreateAuthor(string, string, string, string, string, string, string, []string) (Author, error)\n\n\t// UpdateAuthor\n\t// id, name, authorgroup, data, password\n\tUpdateAuthor(int, string, string, string, string, string, string, string, []string) error\n}\n\n////////////////////////\n// Actual service\n// This takes the following:\n// - mysqlDb DB for MySQL\ntype authorService struct {\n\tmysqlDb *sql.DB\n\tcache CacheLayer\n}\n\n//////////\n// METHODS on authorService\n\n////////////////\n// Get Author\n//\n// params:\n// bearer: Authorization bearer\n// authorId : ID of author to get\n//\n// returns:\n// author\n// error\nfunc (theService authorService) GetAuthor(authorId int) (Author, error) {\n\t////////////////////\n\t// Get data from mysql\n\t// mysql\n\tif err := theService.mysqlDb.Ping(); err != nil {\n\t\ttheService.mysqlDb.Close()\n\t\treturn Author{}, errors.New(\"unable to ping mysql\")\n\t}\n\n\t// Make query\n\tvar author Author\n\tvar subjectAsCsv string\n\n\t// Scan the DB info into 'author' composite variable\n\terr := theService.mysqlDb.\n\t\tQueryRow(\"SELECT author_id, name, birth_date, \"+\n\t\t\t\"image_small, image_medium, image_large, \"+\n\t\t\t\"ol_key, goodreads_url, subjects \"+\n\t\t\t\"FROM author WHERE author_id = ?\", authorId).\n\t\tScan(&author.Id, &author.Name, &author.BirthDate,\n\t\t\t&author.ImageSmall, &author.ImageMedium, &author.ImageLarge,\n\t\t\t&author.OlKey, &author.GoodReadsUrl, &subjectAsCsv)\n\n\tswitch {\n\tcase err == sql.ErrNoRows:\n\t\treturn Author{}, ErrNotFound\n\tcase err != nil:\n\t\tfmt.Println(\"Got error from select: \", err)\n\t\treturn Author{}, ErrServerError\n\tdefault:\n\t\tfmt.Println(\"got author\")\n\t}\n\n\t// Convert subjects from CSV to string array\n\tauthor.Subjects = splitCsvStringToArray(subjectAsCsv)\n\n\t// Cache author name by id\n\tgo theService.cache.Set(AUTHOR_CACHE, authorId, author.Name)\n\n\treturn author, nil\n}\n\n////////////////\n// Get authors\n//\n// params:\n// offset : offset into list\n// limit : number of items to get from list\n// name : partial name of author\n//\n// returns:\n// authors\n// error\nfunc (theService authorService) GetAuthors(offset int, limit int, name string) (Authors, error) {\n\tfmt.Println(\"\")\n\tfmt.Println(\"-- GetAuthors --\")\n\n\t////////////////////\n\t// Get data from mysql\n\t// mysql\n\tif err := theService.mysqlDb.Ping(); err != nil {\n\t\ttheService.mysqlDb.Close()\n\t\treturn Authors{}, errors.New(\"unable to ping mysql\")\n\t}\n\n\t// Get total number of rows\n\tvar totalNumberOfRows int\n\tcountQuery := \"SELECT COUNT(*) FROM author \"\n\t_ = theService.mysqlDb.QueryRow(countQuery).\n\t\tScan(&totalNumberOfRows)\n\n\tif limit > totalNumberOfRows {\n\t\tlimit = totalNumberOfRows\n\t}\n\n\tfmt.Println(\"Looking for author with limit \", limit)\n\tfmt.Println(\"Looking for author with offset \", offset)\n\n\t// Create SELECT string\n\tselectString := \"SELECT author_id, name, birth_date, \" +\n\t\t\"image_small, image_medium, image_large, \" 
+\n\t\t\"ol_key, goodreads_url, subjects \" +\n\t\t\"FROM author \"\n\n\t\t// Make query\n\tif len(name) > 0 {\n\t\t// Update query to add 'name'\n\t\tappendString := \" WHERE name LIKE '%\" + name + \"%' \"\n\t\tfmt.Println(\"Looking for author with name like '\" + name + \"'\")\n\n\t\tselectString += appendString\n\t\tcountQuery += appendString\n\t}\n\n\t// Redo the total number of rows\n\t_ = theService.mysqlDb.QueryRow(countQuery).Scan(&totalNumberOfRows)\n\n\t// Make query\n\tresults, err := theService.mysqlDb.Query(\n\t\tselectString+\"LIMIT ?,?\",\n\t\toffset, limit)\n\n\tif err != nil {\n\t\tfmt.Println(\"Got error from mysql when querying for all authors: \" + err.Error())\n\t\treturn Authors{}, errors.New(\"unable to create query in mysql\")\n\t}\n\n\t// slice of Author entities\n\tdatum := make([]Author, 0, 0)\n\n\t// Make hashmap of author ID to author name for the cache\n\tkvMap := make(map[int]string)\n\n\t// Parse results\n\tfor results.Next() {\n\t\tvar subjectAsCsv string\n\t\tvar author Author\n\n\t\t// For each row, scan the result into our author composite object:\n\t\terr = results.Scan(&author.Id, &author.Name, &author.BirthDate,\n\t\t\t&author.ImageSmall, &author.ImageMedium, &author.ImageLarge,\n\t\t\t&author.OlKey, &author.GoodReadsUrl, &subjectAsCsv)\n\n\t\tif err != nil {\n\t\t\tfmt.Println(\"Got error from mysql: \" + err.Error())\n\t\t\treturn Authors{}, errors.New(\"Unable to scan the query mysql\")\n\t\t}\n\n\t\t// Convert subjects from CSV to string array\n\t\tauthor.Subjects = splitCsvStringToArray(subjectAsCsv)\n\n\t\t// Save the authors name indexed by id\n\t\tkvMap[author.Id] = author.Name\n\n\t\tdatum = append(datum, author)\n\t}\n\n\t// Cache author name by id for all authors found\n\tgo theService.cache.SetMultiple(AUTHOR_CACHE, kvMap)\n\n\t// reset the limit (number of things being returned)\n\tlimit = len(datum)\n\n\t// Create Authors to return\n\treturnValue := Authors{\n\t\tOffset: offset,\n\t\tLimit: limit,\n\t\tTotal: totalNumberOfRows,\n\t\tData: datum,\n\t}\n\n\treturn returnValue, nil\n}\n\n////////////////\n// Delete author\n//\n// params:\n// authorId : ID of author to delete\n//\n// returns:\n// error\nfunc (theService authorService) DeleteAuthor(authorId int) error {\n\t////////////////////\n\t// Get data from mysql\n\t// mysql\n\tif err := theService.mysqlDb.Ping(); err != nil {\n\t\ttheService.mysqlDb.Close()\n\t\treturn errors.New(\"unable to ping mysql\")\n\t}\n\n\t// Verify the author exists, if not, throw ErrNotFound\n\t_, getErr := theService.GetAuthor(authorId)\n\tif getErr != nil {\n\t\treturn getErr\n\t}\n\n\t// Make DELETE query\n\t_, err := theService.mysqlDb.Exec(\"DELETE FROM author WHERE author_id = ?\", authorId)\n\n\t// Clear cache for this author\n\ttheService.cache.Clear(AUTHOR_CACHE, authorId)\n\n\treturn err\n}\n\n////////////////\n// CreateAuthor\n//\n// returns:\n// author\n// error\nfunc (theService authorService) CreateAuthor(authorName string,\n\tbirthDate string,\n\tolKey string,\n\tgoodreadsUrl string,\n\timageSmall string,\n\timageMedium string,\n\timageLarge string,\n\tsubjects []string) (Author, error) {\n\n\t////////////////////\n\t// verify mysql\n\t// mysql\n\tif err := theService.mysqlDb.Ping(); err != nil {\n\t\ttheService.mysqlDb.Close()\n\t\treturn Author{}, errors.New(\"unable to ping mysql\")\n\t}\n\n\t// Convert []string to string as csv for database\n\tsubjectsAsCsv := strings.Join(subjects[:], \",\")\n\n\t// Make insert\n\tstmt, err := theService.mysqlDb.\n\t\tPrepare(\"INSERT INTO author SET \" 
+\n\t\t\t\"name=?, birth_date=?, subjects=?, image_small=?, image_medium=?, image_large=?, ol_key=?, goodreads_url=?\")\n\n\tdefer stmt.Close()\n\tif err != nil {\n\t\tfmt.Println(\"Error preparing DB: \", err)\n\t\treturn Author{}, errors.New(\"Unable to prepare a DB statement: \")\n\t}\n\n\tres, err := stmt.Exec(authorName, birthDate, subjectsAsCsv, imageSmall, imageMedium, imageLarge, olKey, goodreadsUrl)\n\tif err != nil {\n\t\tfmt.Println(\"Error inserting into DB: \", err)\n\t\tif strings.Contains(err.Error(), \"Duplicate entry \") {\n\t\t\treturn Author{}, ErrAlreadyExists\n\t\t} else {\n\t\t\treturn Author{}, errors.New(\"Unable to run INSERT against DB: \")\n\t\t}\n\t}\n\n\t// get the id\n\tid, _ := res.LastInsertId()\n\n\tauthor, err := theService.GetAuthor(int(id))\n\n\treturn author, err\n}\n\n////////////////\n// UpdateAuthor\n//\n// returns:\n// error\nfunc (theService authorService) UpdateAuthor(authorId int,\n\tauthorName string,\n\tbirthDate string,\n\tolKey string,\n\tgoodreadsUrl string,\n\timageSmall string,\n\timageMedium string,\n\timageLarge string,\n\tsubjects []string) error {\n\n\t////////////////////\n\t// Get data from mysql\n\t// mysql\n\tif err := theService.mysqlDb.Ping(); err != nil {\n\t\ttheService.mysqlDb.Close()\n\t\treturn errors.New(\"unable to ping mysql\")\n\t}\n\n\tfmt.Println(\"Updating author by iD: \", authorId)\n\n\t// Make query\n\tstmt, err := theService.mysqlDb.\n\t\tPrepare(\"UPDATE author SET \" +\n\t\t\t\"name=COALESCE(NULLIF(?,''),name), \" +\n\t\t\t\"birth_date=COALESCE(NULLIF(?,''),birth_date), \" +\n\t\t\t\"subjects=COALESCE(NULLIF(?,''),subjects), \" +\n\t\t\t\"image_small=COALESCE(NULLIF(?,''),image_small), \" +\n\t\t\t\"image_medium=COALESCE(NULLIF(?,''),image_medium), \" +\n\t\t\t\"image_large=COALESCE(NULLIF(?,''),image_large), \" +\n\t\t\t\"ol_key=COALESCE(NULLIF(?,''),ol_key), \" +\n\t\t\t\"goodreads_url=COALESCE(NULLIF(?,''),goodreads_url) \" +\n\t\t\t\"WHERE author_id = ?\")\n\tdefer stmt.Close()\n\tif err != nil {\n\t\tfmt.Println(\"Error preparing DB: \", err)\n\t\treturn errors.New(\"Unable to prepare a DB statement: \")\n\t}\n\n\t// Convert []string to string as csv for database\n\tsubjectsAsCsv := strings.Join(subjects[:], \",\")\n\n\t_, err = stmt.Exec(authorName, birthDate, subjectsAsCsv, imageSmall, imageMedium, imageLarge, olKey, goodreadsUrl, authorId)\n\tif err != nil {\n\t\tfmt.Println(\"Error updatingDB: \", err)\n\t\treturn errors.New(\"Unable to run update against DB: \")\n\t}\n\n\t// Clear cache for this author\n\ttheService.cache.Clear(AUTHOR_CACHE, authorId)\n\n\treturn nil\n}\n\n////////////\n// Split a CSV string into array\nfunc splitCsvStringToArray(subjectCsv string) []string {\n\tif len(subjectCsv) > 0 {\n\t\treturn strings.Split(subjectCsv, \",\")\n\t} else {\n\t\treturn make([]string, 0)\n\t}\n}\n"
},
{
"alpha_fraction": 0.7518759369850159,
"alphanum_fraction": 0.7558779120445251,
"avg_line_length": 44.431819915771484,
"blob_id": "f807806e5154049d06d7468b20aa6d895977e5fc",
"content_id": "62aef1525dd27abb27b039bfd62a29aba4080d6a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1999,
"license_type": "no_license",
"max_line_length": 324,
"num_lines": 44,
"path": "/images/user/README.md",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "# User Microservice\n\n## Introduction\n\n**/user** is a microservice for creating, listing, updating and deleting Users from the database. The *admin* user is available to manage subsequent users. (It's recommended to change that *admin* users password.)\n\nExpected usage:\n* *admin* user creates user Bob with password \"s3cret\".\n* Actual user Bob makes REST call to */authorize* with {\"name\":\"bob\",\"password\",\"s3cret\"} and recieves authorization token.\n* That token is inserted into the HTTP Headers for calls to the other endpoints.\n\n## Encryption\n\nPassword are encrypted with [bcrypt](https://godoc.org/golang.org/x/crypto/bcrypt).\n\n\n## Supported calls\nThe list of supported calls and their documentation are available via the swagger endpoint. This runs on localhost:8080/swagger/ when the application is up.\n\n## Fields for a User\nA User entry has the following fields:\n\nField | Purpose\n--- | ---\nid | Unique ID of the user. This is used to manage the user, obtain authorization, and link books to the user.\nname | Name of the user. Example: \"Bob D.\"\n\n\n## Authorization\nIt is necessary to authorize all REST calls to this endpoint. This is done by obtaining an authorization token from the */authorize* endpoint and adding it to the HTTP headees with the key *AUTHORIZATION*. See [/authorize](https://github.com/hipposareevil/books/blob/master/images/authorize/README.md) for more information.\n\n## Go-kit Application\nThis uses go-kit for the framework and dep for the management of the dependencies (kindof like maven). A *vendor* directory will be created by dep in the *src/github.com/hipposareevil* sub-directory.\n\n\n## Docker \nThe Docker container will expose port 8080 to other containers on the *booknet* Docker network.\n\n## Libraries used\n\n* [go](https://golang.org/)\n* [go-kit](https://github.com/go-kit/kit) - microservice framework.\n* [dep](https://github.com/golang/dep) - depdendency management tool.\n* [bcrypt](https://godoc.org/golang.org/x/crypto/bcrypt) - encryption library\n"
},
{
"alpha_fraction": 0.562988817691803,
"alphanum_fraction": 0.5705414414405823,
"avg_line_length": 26.63450813293457,
"blob_id": "0915677f3d76527c7891bd568406b5916c707054",
"content_id": "db6125fc0b9f85b786c831ddb16b05053a99d197",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 29791,
"license_type": "no_license",
"max_line_length": 113,
"num_lines": 1078,
"path": "/test/user_book.sh",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "\n######################\n#\n# user book related calls\n#\n#\n######################\n\n##########\n# Create user book in db\n# \n##########\n_create_user_book() {\n post_data=\"$1\"\n\n ##########\n # create in database now\n result=$(curl -s -w 'CODE%{http_code}CODE' \\\n -X POST \"${ROOT_URL}/user_book/${USER_ID}\" \\\n -H 'content-type: application/json' \\\n -H \"authorization: $BEARER\" \\\n -d \"$post_data\" \\\n -H 'cache-control: no-cache')\n if [ $? -ne 0 ]; then\n error \"Error making POST for user_book\"\n fi\n\n # get code\n code=$(echo \"$result\" | awk -FCODE '{print $2}' | xargs)\n if [[ \"$code\" != \"200\" ]]; then\n if [[ \"$code\" == \"409\" ]]; then\n error \"Book already exists!\"\n else\n error \"Error making POST for user_book. code: $code, result: $result\"\n fi\n fi\n\n book_result=$(echo \"$result\" | sed \"s/CODE${code}CODE//g\")\n# logit \"user_book created\"\n echo \"$book_result\"\n}\n\n\n#########\n# Create generic user_book\n#\n# params:\n# 1- real book id\n#########\ncreate_generic_user_book() {\n book_id=\"$1\"\n\n # fake tag will not be added\nread -r -d '' book_data <<EOF\n{\n \"bookId\": $book_id,\n \"review\" : \"super review for generic book\",\n \"tags\": [\n \"testit\"\n ]\n}\nEOF\n\n # create book\n _create_user_book \"${book_data}\"\n}\n\n\n\n#########\n# Create user_book for asimov\n#\n# params:\n# 1- real book id\n#########\ncreate_user_book_asimov() {\n book_id=\"$1\"\n\nread -r -d '' book_data <<EOF\n{\n \"bookId\": $book_id,\n \"rating\": true,\n \"review\" : \"review for asimov book\",\n \"tags\": [\n \"ebook\", \"sci-fi\", \"best\"\n ]\n}\nEOF\n\n # create book\n _create_user_book \"${book_data}\"\n}\n\n#########\n# Create user_book for second book\n#\n# params:\n# 1- real book id\n#########\ncreate_user_book_second() {\n book_id=\"$1\"\n\n # fake tag will not be added\nread -r -d '' book_data <<EOF\n{\n \"bookId\": $book_id,\n \"rating\": true,\n \"review\" : \"review for second book\",\n \"tags\": [\n \"ebook\", \"fake\", \"super\"\n ]\n}\nEOF\n\n # create book\n _create_user_book \"${book_data}\"\n}\n\n\n\n\n##########\n# update user book\n#\n#########\n_update_user_book() {\n user_book_id=\"$1\"\n post_data=\"$2\"\n\nlogit \"UserBook id: $user_book_id\"\n\n ##########\n # update in database now\n result=$(curl -s -w 'CODE%{http_code}CODE' \\\n -X PUT \"${ROOT_URL}/user_book/${USER_ID}/${user_book_id}\" \\\n -H 'content-type: application/json' \\\n -H \"authorization: $BEARER\" \\\n -d \"$post_data\" \\\n -H 'cache-control: no-cache')\n if [ $? -ne 0 ]; then\n error \"Error making PUT for user book: $result\"\n fi\n\n # get code\n code=$(echo \"$result\" | awk -FCODE '{print $2}' | xargs)\n if [[ \"$code\" != \"200\" ]]; then\n error \"Error making PUT for user book. 
code: $code: $result\"\n fi\n\n book_result=$(echo \"$result\" | sed \"s/CODE${code}CODE//g\")\n logit \"user book updated\"\n echo \"$book_result\"\n}\n\n##########\n# Update user book\n#\n# param: user book id\n##########\nupdate_user_book() {\n user_book_id=$1\n\nread -r -d '' book_data <<EOF\n{\n\"tags\": [\n \"superbook\", \"fantasy\", \"best\"\n]\n}\nEOF\n\n _update_user_book $user_book_id \"$book_data\"\n}\n\n##########\n# Update user book w/ a new review\n#\n# param: user book id\n##########\nupdate_user_book_new_review() {\n user_book_id=$1\n\nread -r -d '' book_data <<EOF\n{\n\"review\": \"new review\"\n}\nEOF\n\n _update_user_book $user_book_id \"$book_data\"\n}\n\n\n########\n# delete all user books\n#\n# param: json with all user books\n########\ndelete_all_user_books() {\n # get 1000 books\n user_books=$(get_all_user_books_with_offset_limit 0 1000 )\n ids=$(echo \"${user_books}\" | jq -r \".data[].userBookId\" )\n num=$(echo \"${user_books}\" | jq -r \".data | length\" )\n\n echo \"Delete all ($num) user_books.\"\n\n for id in $ids\n do\n $(delete_user_book $id)\n done\n}\n\n\n##########\n# delete userbook\n#\n# params:\n# userbook id\n##########\ndelete_user_book() {\n book_id=\"$1\"\n\n# logit \"delete userbook $book_id\"\n\n result=$(curl -s -w 'CODE%{http_code}CODE' \\\n -X DELETE ${ROOT_URL}/user_book/${USER_ID}/${book_id} \\\n -H \"authorization: $BEARER\" \\\n -H 'cache-control: no-cache' \\\n -H 'content-type: application/json')\n if [ $? -ne 0 ]; then\n error \"Error making DELETE to /userbook for userbook $book_id\"\n fi\n\n # get code\n code=$(echo \"$result\" | awk -FCODE '{print $2}' | xargs)\n if [[ \"$code\" -ne \"200\" ]]; then\n echo \"Error deleting userbook '$book_id'. code: $code: $error\"\n fi\n\n # strip code\n result=$(echo \"$result\" | sed \"s/CODE${code}CODE//g\")\n}\n\n\n##########\n# Get all user_books for this user\n#\n############\nget_all_user_books() {\n result=$(curl -s -w 'CODE%{http_code}CODE' \\\n -X GET ${ROOT_URL}/user_book/${USER_ID} \\\n -H \"authorization: $BEARER\" \\\n -H 'cache-control: no-cache' \\\n -H 'content-type: application/json')\n if [ $? -ne 0 ]; then\n error \"Error making GET to /userbook for all user books\"\n fi\n\n # get code\n code=$(echo \"$result\" | awk -FCODE '{print $2}' | xargs)\n if [[ \"$code\" -ne \"200\" ]]; then\n error \"Error getting all user books: $code: $error. http_code: $result.\"\n fi\n\n # strip code\n result=$(echo \"$result\" | sed \"s/CODE${code}CODE//g\")\n echo \"$result\"\n}\n\n##########\n# Get all user_books for this user with offset and limit\n#\n############\nget_all_user_books_with_offset_limit() {\n offset=$1\n limit=$2\n\n result=$(curl -s -w 'CODE%{http_code}CODE' \\\n -X GET \"${ROOT_URL}/user_book/${USER_ID}?limit=${limit}&offset=${offset}\" \\\n -H \"authorization: $BEARER\" \\\n -H 'cache-control: no-cache' \\\n -H 'content-type: application/json')\n if [ $? -ne 0 ]; then\n error \"Error making GET to /userbook for all user books\"\n fi\n\n # get code\n code=$(echo \"$result\" | awk -FCODE '{print $2}' | xargs)\n if [[ \"$code\" -ne \"200\" ]]; then\n error \"Error getting all user books with limit/offset: $code: $error. 
http_code: $result.\"\n fi\n\n # strip code\n result=$(echo \"$result\" | sed \"s/CODE${code}CODE//g\")\n echo \"$result\"\n}\n\n\n##########\n# get user_book by id\n#\n# params:\n# 1- user_book id\n##########\nget_user_book_by_id() {\n id=\"$1\"\n\n result=$(curl -s -w 'CODE%{http_code}CODE' \\\n -X GET \"${ROOT_URL}/user_book/${USER_ID}/${id} \"\\\n -H \"authorization: $BEARER\" \\\n -H 'cache-control: no-cache' \\\n -H 'content-type: application/json')\n if [ $? -ne 0 ]; then\n error \"Error making GET to /user_book for userbook '$id'\"\n fi\n\n # get code\n code=$(echo \"$result\" | awk -FCODE '{print $2}' | xargs)\n if [[ \"$code\" -ne \"200\" ]]; then\n error \"Error getting user_book '$id'. code: $code: $error\"\n fi\n\n # strip code\n result=$(echo \"$result\" | sed \"s/CODE${code}CODE//g\")\n echo \"$result\"\n}\n\n##########\n# get user_book by title\n#\n# params:\n# 1- title\n#\n# optionally takes offset/limit\n##########\nget_user_books_by_title() {\n title=\"$1\"\n title=$(url_encode \"$title\")\n\n # create url\n url=\"${ROOT_URL}/user_book/${USER_ID}?title=$title\"\n\n # if offset/limit were specified\n if [ $# -eq 3 ]; then\n offset=\"$2\"\n limit=\"$3\"\n url=\"${url}&offset=${offset}&limit=${limit}\"\n fi\n\n result=$(curl -s -w 'CODE%{http_code}CODE' \\\n -X GET \"${url}\" \\\n -H \"authorization: $BEARER\" \\\n -H 'cache-control: no-cache' \\\n -H 'content-type: application/json')\n if [ $? -ne 0 ]; then\n error \"Error making GET to /user_book for title '$title'\"\n fi\n\n # get code\n code=$(echo \"$result\" | awk -FCODE '{print $2}' | xargs)\n if [[ \"$code\" -ne \"200\" ]]; then\n error \"Error getting user_book by title '$title'. code: $code: $error\"\n fi\n\n # strip code\n result=$(echo \"$result\" | sed \"s/CODE${code}CODE//g\")\n echo \"$result\"\n}\n\n##########\n# get user_book by title and tag\n#\n# params:\n# 1- title\n# 2- tag\n# \n# optionally takes offset/limit\n##########\nget_user_books_by_title_and_tag() {\n title=\"$1\"\n tag=\"$2\"\n\n title=$(url_encode \"$title\")\n tag=$(url_encode \"$tag\")\n\n url=\"${ROOT_URL}/user_book/${USER_ID}?title=${title}&tag=${tag}\"\n # if offset/limit were specified\n if [ $# -eq 4 ]; then\n offset=\"$3\"\n limit=\"$4\"\n url=\"${url}&offset=${offset}&limit=${limit}\"\n fi\n\nlogit \"URL URL: $url\"\n\n result=$(curl -s -w 'CODE%{http_code}CODE' \\\n -X GET \"$url\"\\\n -H \"authorization: $BEARER\" \\\n -H 'cache-control: no-cache' \\\n -H 'content-type: application/json')\n if [ $? -ne 0 ]; then\n error \"Error making GET to /user_book for title '$title' and tag '$tag'\"\n fi\n\n # get code\n code=$(echo \"$result\" | awk -FCODE '{print $2}' | xargs)\n if [[ \"$code\" -ne \"200\" ]]; then\n error \"Error getting user_book by title '$title' and tag '$tag'. code: $code: $error\"\n fi\n\n # strip code\n result=$(echo \"$result\" | sed \"s/CODE${code}CODE//g\")\n echo \"$result\"\n}\n\n\n##########\n# get user_book by tag\n#\n# params:\n# 1- tag\n##########\nget_user_books_by_tag() {\n tag=\"$1\"\n\n result=$(curl -s -w 'CODE%{http_code}CODE' \\\n -X GET \"${ROOT_URL}/user_book/${USER_ID}?tag=${tag} \"\\\n -H \"authorization: $BEARER\" \\\n -H 'cache-control: no-cache' \\\n -H 'content-type: application/json')\n if [ $? -ne 0 ]; then\n error \"Error making GET to /user_book for tag '$tag'\"\n fi\n\n # get code\n code=$(echo \"$result\" | awk -FCODE '{print $2}' | xargs)\n if [[ \"$code\" -ne \"200\" ]]; then\n error \"Error getting user_book for tag '$tag'. 
code: $code: $error\"\n fi\n\n # strip code\n result=$(echo \"$result\" | sed \"s/CODE${code}CODE//g\")\n echo \"$result\"\n}\n##########\n# get user_book by book_id\n#\n# params:\n# 1- book_id\n##########\nget_user_book_by_bookid() {\n bookid=\"$1\"\n\n result=$(curl -s -w 'CODE%{http_code}CODE' \\\n -X GET \"${ROOT_URL}/user_book/${USER_ID}?book_id=${bookid} \"\\\n -H \"authorization: $BEARER\" \\\n -H 'cache-control: no-cache' \\\n -H 'content-type: application/json')\n if [ $? -ne 0 ]; then\n error \"Error making GET to /user_book for bookid '$bookid'\"\n fi\n\n # get code\n code=$(echo \"$result\" | awk -FCODE '{print $2}' | xargs)\n if [[ \"$code\" -ne \"200\" ]]; then\n error \"Error getting user_book for bookid '$bookid'. code: $code: $error\"\n fi\n\n # strip code\n result=$(echo \"$result\" | sed \"s/CODE${code}CODE//g\")\n echo \"$result\"\n}\n\n\n###############\n#\n# Clean user books\n#\n###############\nuser_book::clean() {\n echo \"\"\n delete_all_user_books\n\n book::clean\n tag::clean\n user::clean\n}\n\n\n######\n# print info for userbook\n# \n######\nprint_userbook_info() {\n book_info=\"$1\"\n title=$(echo \"$book_info\" | jq -r .title)\n id=$(echo \"$book_info\" | jq -r .userBookId)\n date=$(echo \"$book_info\" | jq -r .dateAdded)\n author_name=$(echo \"$book_info\" | jq -r .authorName)\n image_medium=$(echo \"$book_info\" | jq -r .imageMedium)\n\n echo \"UserBook: '$title', UserBookID: '$id', author: '$author_name', added: '$date', medium: '$image_medium'\"\n}\n\n\n#########\n# Validate the first/asimov userbook\n#\n# params:\n# 1- asimov book json\n# 2- userbook json\n########\nvalidate_userbook_asimov() {\n asimov_book=\"$1\"\n user_book=\"$2\"\n\n echo \"\"\n echo \"[Validating asimov user book]\"\n# jqit \"$user_book\"\n print_userbook_info \"$user_book\"\n\n # get book info to validate against what is in user_book json\n asimov_book_id=$(echo \"$asimov_book\" | jq -r .id)\n asimov_book_title=$(echo \"$asimov_book\" | jq -r .title)\n asimov_book_author_name=$(echo \"$asimov_book\" | jq -r .authorName)\n asimov_book_author_id=$(echo \"$asimov_book\" | jq -r .authorId)\n asimov_book_year=$(echo \"$asimov_book\" | jq -r .firstPublishedYear)\n asimov_book_image_medium=$(echo \"$asimov_book\" | jq -r .imageMedium)\n asimov_book_image_small=$(echo \"$asimov_book\" | jq -r .imageSmall)\n\n # book id\n book_id=$(echo \"$user_book\" | jq -r .bookId)\n assert_equals $book_id $asimov_book_id \"UserBook's mapped book ID\"\n\n # title\n title=$(echo \"$user_book\" | jq -r .title)\n assert_string_equals \"$asimov_book_title\" \"$title\" \"UserBook's mapped book title\"\n\n # author name\n authorname=$(echo \"$user_book\" | jq -r .authorName)\n assert_string_equals \"$asimov_book_author_name\" \"$authorname\" \"UserBook's mapped book author name\"\n\n # author id\n authorid=$(echo \"$user_book\" | jq -r .authorId)\n assert_equals $authorid $asimov_book_author_id \"UserBooks' mapped book author id\"\n\n # rating\n rating=$(echo \"$user_book\" | jq -r .rating)\n assert_string_equals \"true\" \"$rating\" \"UserBook's mapped book rating\"\n\n # review\n review=$(echo \"$user_book\" | jq -r .review)\n assert_string_equals \"review for asimov book\" \"$review\" \"UserBook's mapped review\"\n\n # book year\n year=$(echo \"$user_book\" | jq -r .firstPublishedYear)\n assert_equals $year $asimov_book_year \"UserBook's mapped book year\"\n\n # book images\n image=$(echo \"$user_book\" | jq -r .imageMedium)\n assert_string_equals \"$asimov_book_image_medium\" \"$image\" \"UserBook's mapped 
book medium image\"\n\n image=$(echo \"$user_book\" | jq -r .imageSmall)\n assert_string_equals \"$asimov_book_image_small\" \"$image\" \"UserBook's mapped book small image\"\n\n # tags\n tags=$(echo \"$user_book\" | jq -r '.tags | join(\", \")')\n assert_contains \"$tags\" \"ebook\" \"Userbook tags\"\n assert_contains \"$tags\" \"best\" \"Userbook tags\"\n assert_contains \"$tags\" \"sci-fi\" \"Userbook tags\"\n\n echo \"[Done validating asimov UserBook]\"\n echo \"\"\n\n}\n\n###############\n#\n# Test the limits and offsets for large datasets\n#\n###############\nuser_book::test_limit_offset() {\n echo \"\"\n echo \"[[ User Book Limit/Offset test ]]\"\n\n # create tag for userbook\n result=$(tag::create_tag \"testit\")\n\n # create author for the generic books\n author=$(create_author_lengle)\n author_id=$(echo \"$author\" | jq -r .id)\n\n # num user books to create\n COUNT=40\n\n echo \"Creating $COUNT books\"\n\n idx=1\n while [ $idx -le $COUNT ]\n do\n idx=$(( $idx + 1 ))\n\n # create generic book\n book_title=\"book__${idx}\"\n generic_book=$(book::create_book $book_title $author_id)\n book_id=$(echo $generic_book | jq -r .id)\n\n # create user_book now\n result=$(create_generic_user_book $book_id)\n done\n\n #####\n # by title\n echo \"\"\n echo \"Testing query by title\"\n\n all_user_books=$(get_user_books_by_title \"book__1\" 0 5)\n total=$(echo \"$all_user_books\" | jq -r .total)\n offset=$(echo \"$all_user_books\" | jq -r .offset)\n limit=$(echo \"$all_user_books\" | jq -r .limit)\n\n echo \"Checking limit/offset/total\"\n assert_equals 0 ${offset} \"offset in userbooks returned\"\n assert_equals 10 $total \"total number userbooks\"\n assert_equals 5 $limit \"limit number userbooks\"\n\n #####\n # by title and tag\n echo \"\"\n echo \"Testing query by title and tag\"\n\n all_user_books=$(get_user_books_by_title_and_tag \"book__1\" \"testit\" 0 5)\n total=$(echo \"$all_user_books\" | jq -r .total)\n offset=$(echo \"$all_user_books\" | jq -r .offset)\n limit=$(echo \"$all_user_books\" | jq -r .limit)\n\n echo \"Checking limit/offset/total\"\n assert_equals 0 ${offset} \"offset in userbooks returned\"\n assert_equals 5 $limit \"limit number userbooks\"\n assert_equals 10 $total \"total number userbooks\"\n\n #####\n # by title and tag #2\n echo \"\"\n echo \"Testing query by title and missing tag\"\n\n all_user_books=$(get_user_books_by_title_and_tag \"book__1\" \"testitx\" 0 5)\n total=$(echo \"$all_user_books\" | jq -r .total)\n offset=$(echo \"$all_user_books\" | jq -r .offset)\n limit=$(echo \"$all_user_books\" | jq -r .limit)\n\n echo \"Checking limit/offset/total\"\n assert_equals 0 ${offset} \"offset in userbooks returned\"\n assert_equals 5 $limit \"limit number userbooks\"\n assert_equals 10 $total \"total number userbooks\"\n\n\n\n #######\n # Default returns\n # get books and see how many\n echo \"\"\n echo \"Testing default limit (20)\"\n\n all_user_books=$(get_all_user_books)\n total=$(echo \"$all_user_books\" | jq -r .total)\n offset=$(echo \"$all_user_books\" | jq -r .offset)\n limit=$(echo \"$all_user_books\" | jq -r .limit)\n\n echo \"Checking limit/offset/total\"\n assert_equals 0 ${offset} \"offset in userbooks returned\"\n assert_equals $EXPECTED_DEFAULT_LIMIT $limit \"limit number userbooks\"\n assert_equals $COUNT $total \"total number userbooks\"\n\n \n #######\n # new limit\n echo \"\"\n echo \"Testing new limit (500)\"\n all_user_books=$(get_all_user_books_with_offset_limit 0 500)\n total=$(echo \"$all_user_books\" | jq -r .total)\n offset=$(echo 
\"$all_user_books\" | jq -r .offset)\n limit=$(echo \"$all_user_books\" | jq -r .limit)\n\n echo \"Checking limit/offset/total\"\n assert_equals 0 ${offset} \"offset in user_books returned\"\n assert_equals $COUNT $limit \"limit number user_books\"\n assert_equals $COUNT $total \"total number user_books\"\n\n\n #######\n # new offset\n echo \"\"\n echo \"Testing new offset (10)\"\n all_user_books=$(get_all_user_books_with_offset_limit 10 10)\n total=$(echo \"$all_user_books\" | jq -r .total)\n offset=$(echo \"$all_user_books\" | jq -r .offset)\n limit=$(echo \"$all_user_books\" | jq -r .limit)\n\n echo \"Checking limit/offset/total\"\n assert_equals 10 ${offset} \"offset in user_books returned\"\n assert_equals 10 $limit \"limit number user_books\"\n assert_equals $COUNT $total \"total number user_books\"\n\n #######\n # new offset\n echo \"\"\n echo \"Testing 2nd new offset (13)\"\n all_user_books=$(get_all_user_books_with_offset_limit 13 2)\n total=$(echo \"$all_user_books\" | jq -r .total)\n offset=$(echo \"$all_user_books\" | jq -r .offset)\n limit=$(echo \"$all_user_books\" | jq -r .limit)\n\n echo \"Checking limit/offset/total\"\n assert_equals 13 ${offset} \"offset in user_books returned\"\n assert_equals 2 $limit \"limit number user_books\"\n assert_equals $COUNT $total \"total number user_books\"\n\n user_book::clean\n\n echo \"[[ DONE User Book Limit/Offset test ]]\"\n}\n\n\n\n###############\n#\n# Main test for user_book\n#\n###############\nuser_book::main_test() {\n # Create tags\n echo \"\"\n echo \"Create tags\"\n result=$(tag::create_tag \"sci-fi\")\n result=$(tag::create_tag \"e-book\")\n result=$(tag::create_tag \"ebook\")\n result=$(tag::create_tag \"best\")\n result=$(tag::create_tag \"super\")\n result=$(tag::create_tag \"fantasy\")\n\n echo \"\"\n echo \"Create first book\"\n asimov_book=$(book::create_book_asimov)\n asimov_book_id=$(echo \"$asimov_book\" | jq -r .id)\n print_book_info \"$asimov_book\"\n\n echo \"\"\n echo \"Create user book for first book\"\n asimov_user_book=$(create_user_book_asimov $asimov_book_id)\n asimov_user_book_id=$(echo \"$asimov_user_book\" | jq -r .userBookId)\n print_userbook_info \"$asimov_user_book\"\n\n ##### \n # validate some things on first user book\n validate_userbook_asimov \"$asimov_book\" \"$asimov_user_book\"\n\n #######\n # validate user book after getting by id\n echo \"\"\n echo \"Test getting single user book # $asimov_user_book_id\"\n user_book=$(get_user_book_by_id \"$asimov_user_book_id\")\n validate_userbook_asimov \"$asimov_book\" \"$user_book\"\n\n\n ###########\n # create 2nd book\n echo \"Create 2nd book\"\n second_book=$(book::create_book_second)\n print_book_info \"$second_book\"\n\n echo \"\"\n echo \"Create user book for second book\"\n second_book_id=$(echo \"$second_book\" | jq -r .id)\n user_book=$(create_user_book_second $second_book_id)\n second_user_book_id=$(echo \"$user_book\" | jq -r .userBookId)\n\n # verify 'second' tag didn't make it in\n # There should only be 2 valid tag in the second book\n num_second_user_book_tags=$(echo \"$user_book\" | jq -r '.tags | length')\n assert_equals $num_second_user_book_tags 2 \"Number of tags for 2nd book.\"\n\n echo \"\"\n echo \"Test getting all userbooks\"\n all_user_books=$(get_all_user_books)\n numBooks=$(echo $all_user_books | jq -r '.data | length')\n\n assert_equals $numBooks 2 \"Number of all user books\"\n\n # Test some queries\n echo \"\"\n echo \"Test filter queries\"\n\n echo \"Test by title 'Current'\"\n books=$(get_user_books_by_title \"Current\")\n 
numBooks=$(echo $books | jq -r '.data | length')\n    assert_equals $numBooks 2 \"Number of user books by title\"\n\n    echo \"\"\n    echo \"Test by title and tag\"\n    books=$(get_user_books_by_title_and_tag \"Space\" \"Science Fiction\")\n# jqit \"$books\"\n    numBooks=$(echo $books | jq -r '.data | length')\n    assert_equals $numBooks 1 \"Number of user books by title and tag\"\n\n    echo \"\"\n    echo \"Check limit & offset\"\n    limit=$(echo \"$books\" | jq -r .limit)\n    total=$(echo \"$books\" | jq -r .total)\n    offset=$(echo \"$books\" | jq -r .offset)\n\n    assert_equals 1 $limit \"limit for books by title/tag\"\n    assert_equals 1 $total \"total for books by title/tag\"\n    assert_equals 0 $offset \"offset for books by title/tag\"\n\n    echo \"\"\n    echo \"Test by title and tag with same tag\"\n    books=$(get_user_books_by_title_and_tag \"Current\" \"ebook\")\n# jqit \"$books\" \n    numBooks=$(echo $books | jq -r '.data | length')\n    assert_equals 2 $numBooks \"Number of user books by title 'current' and tag 'ebook'\"\n\n    echo \"\"\n    echo \"Test by title and tag with same tag and only 1 matching title\"\n    books=$(get_user_books_by_title_and_tag \"space\" \"ebook\")\n# jqit \"$books\"\n    numBooks=$(echo $books | jq -r '.data | length')\n    assert_equals 1 $numBooks \"Number of user books by title 'space' and tag 'ebook'\"\n\n    echo \"\"\n    echo \"Test by title, take two\"\n    books=$(get_user_books_by_title \"Of Space\")\n    numBooks=$(echo $books | jq -r '.data | length')\n\n    assert_equals 1 $numBooks \"Number of user books by title 'Of Space'\"\n\n    echo \"\"\n    echo \"Check limit & offset\"\n    limit=$(echo \"$books\" | jq -r .limit)\n    total=$(echo \"$books\" | jq -r .total)\n    offset=$(echo \"$books\" | jq -r .offset)\n\n    assert_equals 1 $limit \"limit for books by title/tag #2\"\n    assert_equals 1 $total \"total for books by title/tag #2\"\n    assert_equals 0 $offset \"offset for books by title/tag #2\"\n\n\n    echo \"\"\n    echo \"Test by book_id\"\n    books_by_book_id=$(get_user_book_by_bookid \"$second_book_id\")\n    numBooks=$(echo $books_by_book_id | jq -r '.data | length')\n    assert_equals 1 $numBooks \"Number of user books by bookid '$second_book_id'\"\n\n    echo \"\"\n    echo \"Test by tag 'ebook'\"\n    books_by_tag=$(get_user_books_by_tag \"ebook\")\n    numBooks=$(echo $books_by_tag | jq -r '.data | length')\n    assert_equals 2 $numBooks \"Number of user books by tag 'ebook'\"\n\n    echo \"\"\n    echo \"Test by multiple tags\"\n    books_by_tag=$(get_user_books_by_tag \"super&tag=best\")\n    numBooks=$(echo $books_by_tag | jq -r '.data | length')\n\n    assert_equals 2 $numBooks \"Number of user books by tags 'super & best'\"\n\n    echo \"\"\n    echo \"Test by single tag 'sci-fi'\"\n    books_by_tag=$(get_user_books_by_tag \"sci-fi\")\n    numBooks=$(echo $books_by_tag | jq -r '.data | length')\n    assert_equals 1 $numBooks \"Number of user books by tag 'sci-fi'\"\n\n    echo \"\"\n    echo \"Delete 2nd user book\"\n    delete_user_book \"$second_user_book_id\"\n\n    all_user_books=$(get_all_user_books)\n    numBooks=$(echo $all_user_books | jq -r '.data | length')\n    assert_equals 1 $numBooks \"Number of user books after deleting 2nd user book\"\n\n    echo \"\"\n    echo \"Update first book. 
adding superbook & fantasy to tags\"\n updated_book=$(update_user_book \"$asimov_user_book_id\")\n\n echo \"\"\n echo \"check tags on updated book\"\n tags=$(echo \"$updated_book\" | jq -r '.tags | join(\", \")')\n assert_contains \"$tags\" \"fantasy\" \"Userbook tags\"\n assert_contains \"$tags\" \"best\" \"Userbook tags\"\n\n\n echo \"\"\n echo \"Update first book by changing review.\"\n updated_book=$(update_user_book_new_review \"$asimov_user_book_id\")\n\n echo \"check tags on updated book\"\n echo \"\"\n tags=$(echo \"$updated_book\" | jq -r '.tags | join(\", \")')\n assert_contains \"$tags\" \"fantasy\" \"Userbook tags\"\n assert_contains \"$tags\" \"best\" \"Userbook tags\"\n\n echo \"\"\n echo \"check review on updated book\"\n review=$(echo \"$updated_book\" | jq -r '.review')\n assert_string_equals \"new review\" \"$review\" \"Userbooks review\"\n\n user_book::clean\n}\n\n###############\n# Test the user permissions\n# \n# Create a new user, get authentication for it,\n# create a user_book for admin user and try to modify it\n# via the 2nd user. This should fail.\n# Gets should pass.\n# \n###############\n user_book::test_user_permissions() {\n echo \"\"\n echo \"[ User Book permission test ]\"\n\n # create userbook for admin/1\n # create new user\n # get bearer\n # try to update userbook\n \n ####\n # create tag, author, book, etc\n echo \"\"\n echo \"Create tag\"\n result=$(tag::create_tag \"testit\")\n echo \"Create author\"\n author=$(create_author_lengle)\n author_id=$(echo \"$author\" | jq -r .id)\n \n book_title=\"generic book for permission test\"\n echo \"Create book '$book_title' for author '$author_id'\"\n generic_book=$(book::create_book \"$book_title\" \"$author_id\")\n book_id=$(echo $generic_book | jq -r .id)\n\n admin_user_book=$(create_generic_user_book $book_id)\n admin_user_bookid=$(echo \"$admin_user_book\" | jq -r .userBookId)\n\n ####\n # new user\n second_user=$(user::create_user \"notadmin\")\n authorize_second_user \"notadmin\" \"${SECOND_USER_PASSWORD}\"\n\n ######\n # try to update the admin_user_book\n\n # update user book\nread -r -d '' post_data <<EOF\n{\n\"tags\": [\n \"bad\", \"tag\"\n]\n}\nEOF\necho \"\"\n echo \"Trying to update user book $admin_user_bookid, this should fail.\"\n result=$(curl -s -w 'CODE%{http_code}CODE' \\\n -X PUT \"${ROOT_URL}/user_book/${USER_ID}/${admin_user_bookid}\" \\\n -H 'content-type: application/json' \\\n -H \"authorization: $BEARER_second\" \\\n -d \"$post_data\" \\\n -H 'cache-control: no-cache')\n if [ $? -ne 0 ]; then\n error \"Error making PUT for user book: $result\"\n fi\n\n # get code\n code=$(echo \"$result\" | awk -FCODE '{print $2}' | xargs)\n\n # code should not be 200\n assert_not_equals 200 $code \"HTTP error code when updating admin's userbook\"\n\n #########\n # create user book\n echo \"\"\n echo \"Trying to create user book for admin, this should fail.\"\n result=$(curl -s -w 'CODE%{http_code}CODE' \\\n -X POST \"${ROOT_URL}/user_book/${USER_ID}\" \\\n -H 'content-type: application/json' \\\n -H \"authorization: $BEARER_second\" \\\n -d \"$post_data\" \\\n -H 'cache-control: no-cache')\n if [ $? 
-ne 0 ]; then\n error \"Error making POST for user book: $result\"\n fi\n\n # get code\n code=$(echo \"$result\" | awk -FCODE '{print $2}' | xargs)\n\n # code should not be 200\n assert_not_equals 200 $code \"HTTP error code when creating a userbook for admin user\"\n\n #########\n # delete user book\n echo \"\"\n echo \"Trying to delete user book $admin_user_bookid, this should fail.\"\n result=$(curl -s -w 'CODE%{http_code}CODE' \\\n -X DELETE \"${ROOT_URL}/user_book/${USER_ID}/${admin_user_bookid}\" \\\n -H 'content-type: application/json' \\\n -H \"authorization: $BEARER_second\" \\\n -H 'cache-control: no-cache')\n if [ $? -ne 0 ]; then\n error \"Error making DELETE for user book: $result\"\n fi\n\n # get code\n code=$(echo \"$result\" | awk -FCODE '{print $2}' | xargs)\n\n # code should not be 200\n assert_not_equals 200 $code \"HTTP error code when deleting admin's userbook\"\n\n #########\n # get user book\n echo \"\"\n echo \"Trying to GET user book ${admin_user_bookid}, should succeed.\"\n url=\"${ROOT_URL}/user_book/${USER_ID}/${admin_user_bookid}\"\n result=$(curl -s -w 'CODE%{http_code}CODE' \\\n -X GET \"$url\" \\\n -H 'content-type: application/json' \\\n -H \"authorization: $BEARER_second\" \\\n -H 'cache-control: no-cache')\n if [ $? -ne 0 ]; then\n error \"Error making GET for user book: $result\"\n fi\n\n # get code\n code=$(echo \"$result\" | awk -FCODE '{print $2}' | xargs)\n\n # code should be 200\n assert_equals 200 $code \"HTTP error code when GETing admin's single userbook\"\n\n #########\n # get all user book\n echo \"\"\n echo \"Trying to GET all user books for admin, should succeed.\"\n result=$(curl -s -w 'CODE%{http_code}CODE' \\\n -X GET \"${ROOT_URL}/user_book/${USER_ID}\" \\\n -H 'content-type: application/json' \\\n -H \"authorization: $BEARER_second\" \\\n -H 'cache-control: no-cache')\n if [ $? -ne 0 ]; then\n error \"Error making GET for all user book: $result\"\n fi\n\n # get code\n code=$(echo \"$result\" | awk -FCODE '{print $2}' | xargs)\n\n # code should be 200\n assert_equals 200 $code \"HTTP error code when GETing all admin's userbooks\"\n\n # clean up\n user_book::clean\n\n echo \"[ End User Book permission test ]\"\n }\n\n\n###############\n#\n# Test user_book endpoint\n#\n###############\ntest_user_books() {\n echo \"\"\n echo \"[ User Book test ]\"\n \n user_book::main_test\n\n user_book::test_limit_offset\n\n user_book::test_user_permissions\n\n\n echo \"[ Done User Book test ]\"\n}\n"
},
{
"alpha_fraction": 0.7178992033004761,
"alphanum_fraction": 0.7213748097419739,
"avg_line_length": 30.773006439208984,
"blob_id": "370646cd9903444f8336bc7a41bd050b0b4a56cc",
"content_id": "e76c70adb507799de280be795a436eb1170b7cc0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 5179,
"license_type": "no_license",
"max_line_length": 100,
"num_lines": 163,
"path": "/images/user_book/src/github.com/hipposareevil/user_book/main.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\n// Main application\n//\n// This will create the databases, router, static files\n// and wire everything together\n\nimport (\n\t\"fmt\"\n\t\"net/http\"\n\t\"os\"\n\n\t// mysql\n\t\"database/sql\"\n\t_ \"github.com/go-sql-driver/mysql\"\n\n\t// redis\n\t\"github.com/mediocregopher/radix.v2/pool\"\n\n\t// gorilla routing\n\t\"github.com/gorilla/mux\"\n\n\t\"time\"\n\n\t\"github.com/go-kit/kit/log\"\n\thttptransport \"github.com/go-kit/kit/transport/http\"\n)\n\n// Main\nfunc main() {\n\tlogger := log.NewLogfmtLogger(os.Stderr)\n\n\t/////////////////\n\t// Make redis pool\n\tredisPool, err := pool.New(\"tcp\", \"books.token_db:6379\", 10)\n\tif err != nil {\n\t\tfmt.Println(\"Got error when making connection to redis: \", err)\n\t}\n\n\t/////////////////\n\t// Make Mysql db connection\n\tdb, err := sql.Open(\"mysql\", \"booksuser:books@tcp(books.db:3306)/booksdatabase?parseTime=true\")\n\n\t// if there is an error opening the connection, handle it\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\tdefer db.Close()\n\tdb.SetMaxIdleConns(0)\n\tdb.SetConnMaxLifetime(time.Second * 10)\n\n\t///////////////////\n\t// create services and endpoints\n\n\t/////////\n\t// ROUTER\n\trouter := mux.NewRouter()\n\t// Make gorilla be router for everything\n\thttp.Handle(\"/\", router)\n\n\t/////////////////\n\t// Swagger static html file\n\thtmlDir := \"/html\"\n\n\t// Create server for swagger file\n\tfs := http.FileServer(http.Dir(htmlDir))\n\trouter.PathPrefix(\"/swagger.yaml\").Handler(http.StripPrefix(\"/\", fs))\n\n\t///////////////\n\t// cache layer\n\tvar cache CacheLayer\n\tcache = cacheLayer{redisPool}\n\n\t///////////////\n\t// 'userbook' service\n\tvar userbookSvc UserBookService\n\tuserbookSvc = userbookService{db, cache}\n\n\t// Set up the endpoints on our service\n\t//\n\t// Note: the Authentication middleware is done on each endpoint\n\t// individually so we can tightly control each one as some\n\t// care about only accepting 'admin' group.\n\n\t////////////////\n\t// Endpoints\n\n\t//////\n\t// GET /user_book/<user_id> (all books)\n\tuserBooksEndpoint := makeGetUserBooksEndpoint(userbookSvc)\n\tbaseUserBooksHandler := httptransport.NewServer(\n\t\tuserBooksEndpoint,\n\t\tdecodeGetAllUserBooksRequest,\n\t\tencodeResponse,\n\t)\n\t// Add middleware to authenticate the endpoint.\n\t// first parameter: When true, the authenticated user's ID must match the userid in the url\n\tuserBooksHandler := AuthenticateForUserBook(false, redisPool, baseUserBooksHandler)\n\t// The id is used in transport.go to grab the variable 'book_id' from the path\n\trouter.Methods(\"GET\").Path(\"/user_book/{user_id}\").Handler(userBooksHandler)\n\n\t//////\n\t// GET /user_book/<user_id>/<user_book_id>\n\tuserBookEndpoint := makeGetUserBookEndpoint(userbookSvc)\n\tbaseBookHandler := httptransport.NewServer(\n\t\tuserBookEndpoint,\n\t\tdecodeGetUserBookRequest,\n\t\tencodeResponse,\n\t)\n\t// Add middleware to authenticate the endpoint.\n\t// first parameter: When true, the authenticated user's ID must match the userid in the url\n\tuserBookHandler := AuthenticateForUserBook(false, redisPool, baseBookHandler)\n\t// The id's are used in transport.go to grab the variable 'book_id' from the path\n\trouter.Methods(\"GET\").Path(\"/user_book/{user_id}/{user_book_id}\").Handler(userBookHandler)\n\n\t//////\n\t// DELETE /user_book/<user_id>/<user_book_id>\n\tdeleteUserBookEndpoint := makeDeleteUserBookEndpoint(userbookSvc)\n\tbaseDeleteUserBookHandler := 
httptransport.NewServer(\n\t\tdeleteUserBookEndpoint,\n\t\tdecodeDeleteUserBookRequest,\n\t\tencodeResponse,\n\t)\n\t// Add middleware to authenticate the endpoint.\n\t// first parameter: When true, the authenticated user's ID must match the userid in the url\n\tdeleteUserBookHandler := AuthenticateForUserBook(true, redisPool, baseDeleteUserBookHandler)\n\t// The id's are used in transport.go to grab the variable 'book_id' from the path\n\trouter.Methods(\"DELETE\").Path(\"/user_book/{user_id}/{user_book_id}\").Handler(deleteUserBookHandler)\n\n\t//////\n\t// POST /user_book/<user_id>\n\tcreateUserBookEndpoint := makeCreateUserBookEndpoint(userbookSvc)\n\tbaseCreateUserBookHandler := httptransport.NewServer(\n\t\tcreateUserBookEndpoint,\n\t\tdecodeCreateUserBookRequest,\n\t\tencodeResponse,\n\t)\n\t// Add middleware to authenticate the endpoint.\n\t// first parameter: When true, the authenticated user's ID must match the userid in the url\n\tcreateUserBookHandler := AuthenticateForUserBook(true, redisPool, baseCreateUserBookHandler)\n\trouter.Methods(\"POST\").Path(\"/user_book/{user_id}\").Handler(createUserBookHandler)\n\n\t//////\n\t// PUT /user_book/<user_id>/<user_book_id>\n\tupdateUserBookEndpoint := makeUpdateUserBookEndpoint(userbookSvc)\n\tbaseUpdateUserBookHandler := httptransport.NewServer(\n\t\tupdateUserBookEndpoint,\n\t\tdecodeUpdateUserBookRequest,\n\t\tencodeResponse,\n\t)\n\t// Add middleware to authenticate the endpoint.\n\t// first parameter: When true, the authenticated user's ID must match the userid in the url\n\tupdateUserBookHandler := AuthenticateForUserBook(true, redisPool, baseUpdateUserBookHandler)\n\t// The id's are used in transport.go to grab the variable 'book_id' from the path\n\trouter.Methods(\"PUT\").Path(\"/user_book/{user_id}/{user_book_id}\").Handler(updateUserBookHandler)\n\n\t//////////////\n\t// Start server\n\taddr := \":8080\"\n\tlogger.Log(\"msg\", \"HTTP\", \"addr\", addr)\n\tfmt.Println(\"user_book service up on \" + addr)\n\tlogger.Log(\"err\", http.ListenAndServe(addr, nil))\n}
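\n\n// Example calls (a sketch, not part of the service; assumes the default port\n// above and an authorization token from the authorize service -- see the\n// repository README for the full flow):\n//\n//   curl -H \"authorization: <token>\" http://localhost:8080/user_book/<user_id>\n//   curl -H \"authorization: <token>\" http://localhost:8080/user_book/<user_id>/<user_book_id>\n"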
},
{
"alpha_fraction": 0.509761393070221,
"alphanum_fraction": 0.5127193927764893,
"avg_line_length": 20.763948440551758,
"blob_id": "c5fe293e67e848cac628cf304bb30c9cd13958b2",
"content_id": "c866cb8a4d3ca96b61f956bce8fd942ed0f719e6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 5071,
"license_type": "no_license",
"max_line_length": 89,
"num_lines": 233,
"path": "/buildfiles/build-service.sh",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "#!/bin/bash\n\n####################\n# Common buildfile that can build maven or gradle builds.\n# It will attempt to use the localhost mvn or gradle, but if they\n# are not present this will use docker.\n#\n# The variable 'our_directory' must be set by each script that\n# loads this build script.\n####################\n\n# Load in sub build files\nROOT_DIRECTORY=\"$( cd \"$( dirname \"${BASH_SOURCE[0]}\" )\" && pwd )\"\n. ${ROOT_DIRECTORY}/build_java.sh\n. ${ROOT_DIRECTORY}/build_golang.sh\n\n\n#############\n# Initalize variables\n#############\ninitialize_variables() {\n if [ -z \"$our_directory\" ]; then\n echo \"NOTE: Calling script must set 'our_directory' variable.\"\n exit 1\n fi\n\n # root for all images\n ROOT_NAME=\"books\"\n\n # Get name and version of project\n project=$(cat $our_directory/webservice.name | xargs)\n project_version=$(cat $our_directory/../webservice.version | xargs)\n\n image_base_name=\"${ROOT_NAME}.${project}\"\n image_name=\"${image_base_name}:${project_version}\"\n}\n\n#########\n# Usage\n#\n#########\nusage() {\n echo\n echo \"Usage: $0 [OPTION]\"\n echo \"\"\n echo \"Builds image '$image_name'\"\n echo \"\"\n echo \"Options:\"\n echo \" -h,--help : Print this message.\"\n echo \" build : Builds the application\"\n echo \" clean : Cleans the application\"\n if [ -e ${our_directory}/src/github.com/hipposareevil ]; then\n echo \" dep : Download dependencies\"\n fi\n echo \"\"\n\n # add extra info for golang builds\n if [ -e ${our_directory}/src/github.com/hipposareevil ]; then\n tmp=$(echo \"${project}\" | awk -F. '{print $1}')\n echo \"To manually build the '${project}' service in golang:\"\n echo \"$ export GOPATH=`pwd`\"\n echo \"$ go build github.com/hipposareevil/${tmp}\"\n echo \"\"\n fi\n\n\n exit 0;\n\n}\n\n#############\n# Build image\n#\n#############\nbuild_image() {\n local then=$(date +%s)\n echo \"[[Building Docker image '$image_name']]\"\n\n # These are image labels\n # set build time\n BUILD_TIME=$(date +%Y-%m-%dT%H:%M:%S%Z)\n VERSION_TAG=\"$project_version\"\n\n # run docker build\n (cd $our_directory;\n docker build -t ${image_name} \\\n --build-arg BUILD_TIME=${BUILD_TIME} \\\n --build-arg VERSION=${VERSION_TAG} \\\n \"$our_directory\" )\n build_result=$?\n\n local now=$(date +%s)\n local elapsed=$(expr $now - $then)\n\n rightnow=$(date)\n\n if [ $build_result -eq 0 ]; then\n echo \"\"\n echo \"[[Built image \\\"${image_name}\\\" in $elapsed seconds (done at: $rightnow)]]\"\n\n # tag as latest now\n output=$(docker tag ${image_name} ${image_base_name}:latest)\n tag_result=$?\n if [ $tag_result -eq 0 ]; then\n echo \"[[Tagged \\\"${image_name}\\\" as \\\"${image_base_name}:latest\\\"]]\"\n else\n echo \"[[Unable to tag image as latest!!!!]]\"\n fi\n else\n echo \"\"\n echo \"!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\"\n echo \"Unable to build Docker image for $image_name\"\n echo \"!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\"\n exit 1\n fi\n}\n\n#############\n# Build the project\n#\n#############\nbuild() {\n echo \"[[Building project '${project}']]\"\n\n # start of overall build\n local then=$(date +%s)\n\n ## Build project\n # Determine build type:\n # golang, maven, gradle\n if [ -e ${our_directory}/src/github.com/hipposareevil ]; then\n golang::build\n else\n java::build\n fi\n\n ## Build image\n build_image\n\n local now=$(date +%s)\n local elapsed=$(expr $now - $then)\n rightnow=$(date)\n\n echo \"[[Build for '$project' complete on '${rightnow}' in ${elapsed} seconds.]]\"\n\n}\n\n\n#############\n# Clean the 
project\n#\n#############\nclean() {\n echo \"[[Cleaning project '$project']]\"\n\n # determine build type:\n # golang, maven, gradle\n if [ -e ${our_directory}/src/github.com/hipposareevil ]; then\n golang::clean\n else\n java::clean\n fi\n\n echo \"[[Clean for '$project' complete]]\"\n}\n\n\n#############\n# Get dependencies\n#\n#############\ndependencies() {\n echo \"[[Getting dependencies project '$project']]\"\n\n # determine build type:\n # golang, maven, gradle\n if [ -e ${our_directory}/src/github.com/hipposareevil ]; then\n golang::clean\n golang::run_dep\n else\n echo \"[[no-op for java]]\"\n fi\n\n echo \"[[Dependencies for '$project' complete]]\"\n}\n\n############\n# main\n# \n############\nmain() {\n # init\n initialize_variables\n\n # default to \"build\"\n if [ $# -eq 0 ]\n then\n arg=\"build\"\n else\n arg=$1\n fi\n\n # check arg\n case $arg in\n \"-h\"|\"--help\")\n\t usage\n\t exit 0\n\t ;;\n \"build\")\n build\n exit 0\n ;;\n \"buildimage\")\n build_image\n exit 0\n ;;\n \"clean\")\n clean\n exit 0\n ;;\n \"dep\")\n dependencies\n exit 0\n ;;\n \\?) #unknown\n usage\n ;;\n esac\n}\n\n\n# Call main\nmain \"$@\"\n"
},
{
"alpha_fraction": 0.7784430980682373,
"alphanum_fraction": 0.78742516040802,
"avg_line_length": 59.727272033691406,
"blob_id": "751be5afbc75f0551471cedc587a39775a6e4c3d",
"content_id": "69a6f4a4847b5dbcf6ddbea762d10377837c2a8a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 668,
"license_type": "no_license",
"max_line_length": 234,
"num_lines": 11,
"path": "/images/gateway/README.md",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "# Introduction\n\nThis is the frontend proxy (API Gateway) for the set of microservices. It has a simple *nginx.conf* configuration file that sets up *upstream* servers for each microservice. There are two custom HTML pages; 502.html and swagger.yaml.\n\n# Included resources\n\n## 502.html\nThis page is returned when an upstream server is not ready. In general, this happens when the database is not up yet, which causes the dropwizard programs to error out.\n\n## swagger.yaml\nThis page is passed into the *swagger* container/endpoint, which combines microservice definitions into one usable end web page. The page containers overrides for *info* and *hostname* variables.\n"
},
{
"alpha_fraction": 0.650077760219574,
"alphanum_fraction": 0.6516329646110535,
"avg_line_length": 26.95652198791504,
"blob_id": "f287c90ffacecf437f65d954139faea829c81525",
"content_id": "3cfcffff01433c71880c2f8bb45ece2cccc9316c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 643,
"license_type": "no_license",
"max_line_length": 69,
"num_lines": 23,
"path": "/images/frontend/build_dev.sh",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env bash\n\n# our real directory (so this can be called from outside directories)\nour_directory=\"$( cd \"$( dirname \"${BASH_SOURCE[0]}\" )\" && pwd )\"\n\n# image we build\nproject=$(cat $our_directory/webservice.name | xargs)\nproject_version=$(cat $our_directory/../webservice.version | xargs)\n\nimage_name=\"books.${project}:${project_version}-dev\"\n\n# set docker image labels\nBUILD_TIME=$(date +%Y-%m-%dT%H:%M:%S%Z)\nVERSION_TAG=\"${project_version}-dev\"\n\ndocker build \\\n \"$our_directory\" \\\n --build-arg BUILD_TIME=${BUILD_TIME} \\\n --build-arg VERSION=${VERSION_TAG} \\\n -t $image_name\n\necho \"\"\necho \"Built $image_name\"\n"
},
{
"alpha_fraction": 0.6853741407394409,
"alphanum_fraction": 0.6853741407394409,
"avg_line_length": 17.967741012573242,
"blob_id": "d51930c8cf07e76f0dc31ad9e9482b70ece4cf36",
"content_id": "9140dbc9c2977fc6068b6e84aeffeb5bde92da2a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 588,
"license_type": "no_license",
"max_line_length": 57,
"num_lines": 31,
"path": "/images/review/src/github.com/hipposareevil/review/endpoints.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\n// Base for all responses\ntype errorer interface {\n\terror() error\n}\n\n// interface for holding data\ntype dataHolder interface {\n\t// Get the data\n\tgetData() interface{}\n}\n\n////////////////\n// Responses are passed to 'transport.encodeResponse'\n\n////////////////////\n/// Reviews (all)\n// response for reviews\ntype reviewResponse struct {\n\tData Reviews `json:\"all,omitempty\"`\n\tErr error `json:\"err,omitempty\"`\n}\n\nfunc (theResponse reviewResponse) error() error {\n\treturn theResponse.Err\n}\n\nfunc (theResponse reviewResponse) getData() interface{} {\n\treturn theResponse.Data\n}\n"
},
{
"alpha_fraction": 0.6035087704658508,
"alphanum_fraction": 0.6035087704658508,
"avg_line_length": 20.923076629638672,
"blob_id": "15977737ab0f857134219cd11c148fb8121c4029",
"content_id": "97d26fb0668f48ed0e99dbb2b00e31033832e386",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 1710,
"license_type": "no_license",
"max_line_length": 67,
"num_lines": 78,
"path": "/images/frontend/content/mybooks/src/router/index.js",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "import Vue from 'vue'\nimport Router from 'vue-router'\nimport HomePage from '@/components/HomePage'\nimport AddAuthor from '@/components/AddAuthor'\nimport AddBook from '@/components/AddBook'\nimport AddTag from '@/components/AddTag'\nimport Users from '@/components/Users'\nimport ViewShelf from '@/components/shelf/ViewShelf'\nimport ViewAuthors from '@/components/author/ViewAuthors'\nimport ViewSingleAuthor from '@/components/author/ViewSingleAuthor'\nimport ViewBooks from '@/components/book/ViewBooks'\nimport SearchBooks from '@/components/book/Search'\nimport ViewSingleBook from '@/components/book/ViewSingleBook'\n\nVue.use(Router)\n\nconst router = new Router({\n routes: [\n {\n path: '/',\n name: 'HomePage',\n component: HomePage\n },\n {\n path: '/shelves',\n name: 'ViewShelf',\n component: ViewShelf\n },\n {\n path: '/addauthor',\n name: 'AddAuthor',\n component: AddAuthor\n },\n {\n path: '/addbook',\n name: 'AddBook',\n component: AddBook\n },\n {\n path: '/users',\n name: 'Users',\n component: Users\n },\n {\n path: '/authors',\n name: 'ViewAuthors',\n component: ViewAuthors\n },\n {\n path: '/authors/:id',\n name: 'SingleAuthor',\n component: ViewSingleAuthor\n },\n {\n path: '/books',\n name: 'ViewBooks',\n component: ViewBooks\n },\n {\n path: '/search',\n name: 'SearchBooks',\n component: SearchBooks\n },\n {\n path: '/books/:id',\n name: 'SingleBook',\n component: ViewSingleBook\n },\n {\n path: '/addtag',\n name: 'AddTag',\n component: AddTag\n }\n ],\n linkActiveClass: 'is-active'\n})\n\nexport default router\n"
},
{
"alpha_fraction": 0.6553528904914856,
"alphanum_fraction": 0.6563374400138855,
"avg_line_length": 29.153125762939453,
"blob_id": "a8af80a3d42dd683baa76dcf3e1e8ae7da959adc",
"content_id": "8e3a527c6f10e782e5dcec79ad7f161ffe42d36b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Java",
"length_bytes": 19298,
"license_type": "no_license",
"max_line_length": 116,
"num_lines": 640,
"path": "/images.java/user_book/src/main/java/com/wpff/resources/UserBookHelper.java",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package com.wpff.resources;\n\nimport java.io.BufferedReader;\nimport java.io.IOException;\nimport java.io.InputStreamReader;\nimport java.lang.reflect.InvocationTargetException;\nimport java.net.URLEncoder;\nimport java.util.ArrayList;\nimport java.util.Date;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.stream.Collectors;\n\nimport javax.ws.rs.NotFoundException;\nimport javax.ws.rs.WebApplicationException;\nimport javax.ws.rs.core.Response;\nimport javax.ws.rs.core.SecurityContext;\n\n// utils\nimport org.apache.commons.beanutils.BeanUtils;\nimport org.apache.http.HttpResponse;\nimport org.apache.http.client.HttpClient;\nimport org.apache.http.client.methods.HttpGet;\nimport org.apache.http.impl.client.HttpClientBuilder;\n\nimport com.fasterxml.jackson.databind.ObjectMapper;\nimport com.wpff.common.cache.Cache;\nimport com.wpff.common.result.Segment;\nimport com.wpff.core.DatabaseUserBook;\nimport com.wpff.core.Tag;\nimport com.wpff.core.TagMapping;\nimport com.wpff.core.User;\nimport com.wpff.core.beans.FullUserBook;\nimport com.wpff.core.beans.PostUserBook;\nimport com.wpff.db.TagDAO;\nimport com.wpff.db.TagMappingDAO;\nimport com.wpff.db.UserBookDAO;\nimport com.wpff.db.UserDAO;\n\nimport io.dropwizard.hibernate.UnitOfWork;\nimport io.dropwizard.jersey.params.IntParam;\n\n/**\n * Helper for userbooks to deal with unitofwork issues\n */\npublic class UserBookHelper {\n\n /**\n * DAO used to get a UserBook.\n */\n private final UserBookDAO userBookDAO;\n\n /**\n * DAO for tags\n */\n private final TagDAO tagDAO;\n\n /**\n * DAO for users\n */\n private final UserDAO userDAO;\n\n /**\n * Cache\n */\n private final Cache cache;\n\n /**\n * DAO for tagmap\n */\n private final TagMappingDAO tagMappingDAO;\n\n public UserBookHelper(UserBookDAO userBookDAO, UserDAO userDAO, TagDAO tagDAO, TagMappingDAO tagMapDAO,\n Cache cache) {\n this.tagDAO = tagDAO;\n this.userBookDAO = userBookDAO;\n this.userDAO = userDAO;\n this.tagMappingDAO = tagMapDAO;\n this.cache = cache;\n }\n\n /**\n * Create a UserBook in the database. 
Tags are not created at this time.\n *\n * @param userBookBean\n * Incoming bean with UserBook information\n * @param userId\n * ID of user\n *\n * @return Newly created UserBook from database.\n */\n @UnitOfWork\n DatabaseUserBook createUserBook(PostUserBook userBookBean, IntParam userId) throws IllegalAccessException,\n IllegalArgumentException, InvocationTargetException {\n // Create transient UserBook\n DatabaseUserBook userBookToCreate = new DatabaseUserBook();\n\n // Copy over bean values - copy(destination, source)\n BeanUtils.copyProperties(userBookToCreate, userBookBean);\n userBookToCreate.setDateAdded(new Date());\n\n // Set the user_id from the URL to the 'userBook'\n userBookToCreate.setUserId(userId.get());\n\n // Create user book in DB\n return this.userBookDAO.create(userBookToCreate);\n }\n\n /**\n * Update a UserBook in the database\n * \n * @param userBookBean\n * Bean with new data\n * @param userId\n * ID of user\n * @param userBookId\n * ID of user book\n * @return\n * @throws IllegalAccessException\n * @throws IllegalArgumentException\n * @throws InvocationTargetException\n */\n @UnitOfWork\n DatabaseUserBook updateUserBook(PostUserBook userBookBean, int userId, int userBookId) \n throws IllegalAccessException, IllegalArgumentException, InvocationTargetException {\n // Grab existing user book\n DatabaseUserBook userBookToUpdate = this.userBookDAO.findByUserBookId(userId, userBookId);\n if (userBookToUpdate == null) {\n throw new NotFoundException(\"No UserBook by id '\" + userBookId + \"'\");\n }\n\n // Copy non null values over\n if (userBookBean.getRating() != null) {\n BeanUtils.copyProperty(userBookToUpdate, \"rating\", userBookBean.getRating());\n }\n\n // Update book in DB\n this.userBookDAO.update(userBookToUpdate);\n\n return userBookToUpdate;\n }\n\n /**\n * Get total number of userbooks\n * \n * @param userId\n * User to get number of books for\n * @return Number of userbooks for user\n */\n @UnitOfWork\n long getTotalNumberUserBooks(Integer userId) {\n return userBookDAO.getNumberOfUserBooks(userId);\n }\n\n /**\n * Get list of UserBooks for the requested User id\n * \n * @param authString\n * Authentication header which is necessary for a REST call to 'book'\n * web service\n * @param userId\n * ID of user to get books for\n * @param desiredSegment\n * offset and limit for the query\n * @return List of UserBooks\n */\n @UnitOfWork\n List<FullUserBook> getUserBooksForUser(String authString, Integer userId, Segment desiredSegment)\n throws IllegalAccessException, InvocationTargetException {\n List<FullUserBook> userBooks = new ArrayList<FullUserBook>();\n\n // Get list of books in db\n List<DatabaseUserBook> booksInDatabase = userBookDAO.findBooksByUserId(userId, desiredSegment);\n\n // convert each book into a FullUserBook\n for (DatabaseUserBook dbBook : booksInDatabase) {\n System.out.println(\"Converting dbbook: \" + dbBook.getBookId());\n userBooks.add(convert(dbBook, authString));\n }\n\n return userBooks;\n }\n\n /**\n * Get all UserBooks for the incoming UserId and tagId\n * \n * @param authString\n * Authentication header which is necessary for a REST call to 'book'\n * web service\n * @param userId\n * ID of user to get books for\n * @param tagId\n * ID of tag to get user book for\n * @return List of user books\n * @throws IllegalAccessException\n * @throws InvocationTargetException\n */\n @UnitOfWork\n List<FullUserBook> getUserBooksByUserAndTag(String authString, Integer userId, int tagId)\n throws IllegalAccessException, 
InvocationTargetException {\n    // Get TagMappings for this user and tag ID\n    List<TagMapping> mappings = this.tagMappingDAO.findTagMappingsByTagId(userId, tagId);\n\n    List<FullUserBook> userBooks = new ArrayList<FullUserBook>();\n    for (TagMapping mapping : mappings) {\n      int userBookId = mapping.getUserBookId();\n\n      // get book by the userbook ID\n      FullUserBook current = getUserBookByUserBookId(authString, userId, userBookId);\n      if (current != null) {\n        userBooks.add(current);\n      }\n    }\n\n    return userBooks;\n  }\n\n  /**\n   * Get books by the incoming title query.\n   * \n   * @param authString\n   *          Authentication header which is necessary for a REST call to 'book'\n   *          web service\n   * @param userId\n   *          ID of user\n   * @param titleQuery\n   *          Title query\n   * @return list of matching user books\n   * @throws IllegalAccessException\n   * @throws InvocationTargetException\n   */\n  @UnitOfWork\n  List<FullUserBook> getUserBooksByTitle(String authString, Integer userId, String titleQuery)\n      throws IllegalAccessException, InvocationTargetException {\n    List<FullUserBook> userBooks = new ArrayList<FullUserBook>();\n    System.out.println(\"getUserBooksByTitle: \" + titleQuery);\n    \n    // List of book IDs\n    List<Integer> ids = this.getBookIdsForTitleQuery(authString, titleQuery);\n\n    for (Integer bookId: ids) {\n      // get book by the book ID\n      FullUserBook current = getUserBookByBookId(authString, userId, bookId);\n\n      // The book may exist, but not be associated with this user\n      if (current != null) {\n        userBooks.add(current);\n      }\n    }\n\n    return userBooks;\n  }\n\n  /**\n   * Get UserBook from database.\n   * \n   * @param authString\n   *          Authentication header which is necessary for a REST call to 'book'\n   *          web service\n   * @param userId\n   *          ID of the user for this user book\n   * @param bookId\n   *          ID of Book to retrieve\n   * @return FullUserBook or null if none exists\n   * @throws InvocationTargetException\n   * @throws IllegalAccessException\n   */\n  @UnitOfWork\n  FullUserBook getUserBookByBookId(String authString, int userId, int bookId) throws IllegalAccessException,\n      InvocationTargetException {\n    // Get db book\n    DatabaseUserBook bookInDb = this.userBookDAO\n        .findByBookId(userId, bookId);\n\n    return convert(bookInDb, authString);\n  }\n  \n  /**\n   * Get UserBook from database. If the userBookId doesn't belong to the\n   * incoming user (by userId), then the book is not added.\n   * \n   * @param authString\n   *          Authentication header which is necessary for a REST call to 'book'\n   *          web service\n   * @param userId\n   *          ID of the user for this user book\n   * @param userBookId\n   *          ID of User Book to retrieve\n   * @return FullUserBook or null if none exists\n   * @throws InvocationTargetException\n   * @throws IllegalAccessException\n   */\n  @UnitOfWork\n  FullUserBook getUserBookByUserBookId(String authString, int userId, int userBookId) throws IllegalAccessException,\n      InvocationTargetException {\n    // Get db book\n    DatabaseUserBook bookInDb = this.userBookDAO\n        .findByUserBookId(userId, userBookId);\n\n    return convert(bookInDb, authString);\n  }\n\n  /**\n   * Delete a user book. 
It is assumed the caller of this function has already\n * verified the user IDs match up to the owner of this user book.\n * \n * @param userId\n * ID of the user for this user book\n * @param userBookId\n * ID of user_book to delete\n */\n @UnitOfWork\n void deleteUserBookById(int userId, int userBookId) {\n // Get book in db\n DatabaseUserBook bookInDb = this.userBookDAO.findByUserBookId(userId, userBookId);\n \n if (bookInDb == null) {\n throw new NotFoundException(\"No UserBook for user '\" + userId + \"' by id '\" + userBookId + \"'\");\n }\n\n this.userBookDAO.delete(bookInDb);\n\n // Delete tag mappings from tagmapping table\n this.tagMappingDAO.deleteTagMappingByUserBookId(userBookId);\n }\n\n /**\n * Get map of Tags from database\n *\n * @return Map of tags indexed by tag name\n */\n @UnitOfWork\n Map<String, Tag> getAllTags() {\n Map<String, Tag> tagsInDbMap = this.tagDAO.findAll();\n return tagsInDbMap;\n }\n\n /**\n * Create a single TagMapping in the database.\n *\n * @param tagMapping\n * New tag map\n * @return created TagMap\n */\n @UnitOfWork\n TagMapping createTagMapping(TagMapping tagMapping) {\n return this.tagMappingDAO.addTagMapingEntry(tagMapping);\n }\n\n /**\n * Delete all tags for a specific user book.\n * \n * @param userBookId\n * ID of user book\n */\n @UnitOfWork\n void deleteTagMappingsForUserBook(int userBookId) {\n this.tagMappingDAO.deleteTagMappingByUserBookId(userBookId);\n }\n\n /**\n * Get list of TagMappings\n *\n * @return list of TagMappings\n */\n @UnitOfWork\n List<TagMapping> getTagMap() {\n return this.tagMappingDAO.findAll();\n }\n\n /**\n * Create a new Tag in the database.\n * \n * @param tagName\n * Name of tag to create\n * @return Newly created Tag from database\n */\n @UnitOfWork\n Tag createTag(String tagName) {\n Tag t = new Tag();\n t.setName(tagName);\n Tag newtag = this.tagDAO.create(t);\n return newtag;\n }\n\n /**\n * Verify the userId in the path matches the user from the security context. 
Or\n * if the context user is in group 'admin'.\n *\n * @param context\n * SecurityContext to grab username from\n * @param userId\n * ID of user from the Path\n */\n @UnitOfWork\n void verifyUserIdHasAccess(SecurityContext context, int userId) throws WebApplicationException {\n // Get the username corresponding to the incoming userId and verify that it is the\n // same as the authenticated caller.\n String userNameFromSecurity = context.getUserPrincipal().getName();\n User userFromId = userDAO.findById(userId).orElseThrow(\n () -> new NotFoundException(\"No user with ID '\" + userId + \"' found.\"));\n\n String userNameFromId = userFromId.getName();\n\n // Check names.\n // If:\n // user in security is in the 'admin' group\n // or\n // userNameFromSecurity == name from id\n // we can proceed\n\n if ((context.isUserInRole(\"admin\")) || (userNameFromSecurity.equals(userNameFromId))) {\n // Is ok\n System.out.println(\"User logged in as \" + userNameFromId);\n } else {\n throw new WebApplicationException(\n \"Must be logged in as user '\" + userFromId.getName()\n + \"' or as a member of the 'admin' user group to access this resource.\",\n Response.Status.UNAUTHORIZED);\n }\n }\n\n ////////////////////////////////////////////////////////////////\n //\n // Helpers\n\n /**\n * Convert a DB book to a FullUserBook bean\n * \n * @param dbBook\n * Book to convert\n * @param authString\n * Authentication header which is necessary for a REST call to 'book'\n * web service\n * @return FullUserBook, or null if dbBook is null\n */\n private FullUserBook convert(DatabaseUserBook dbBook, String authString) throws IllegalAccessException,\n InvocationTargetException {\n if (dbBook == null) {\n return null;\n }\n \n FullUserBook bookToReturn = new FullUserBook();\n\n // Copy over bean values - copy(destination, source)\n BeanUtils.copyProperties(bookToReturn, dbBook);\n\n // Add tags from tagmapping table\n addTagsToUserBook(bookToReturn);\n\n // Get extra book info from /book\n BookBean bookInfo = getBookInfo(authString, dbBook.getBookId());\n if (bookInfo != null) {\n BeanUtils.copyProperties(bookToReturn, bookInfo);\n }\n return bookToReturn;\n }\n\n /**\n * Add tags from database to userbook\n *\n * @param userBook\n * UserBook to add tags into\n */\n private void addTagsToUserBook(FullUserBook userBook) {\n // Get tag mappings for user book\n List<TagMapping> tagMappings = this.tagMappingDAO.findTagMappings(userBook.getUserBookId());\n\n // Get tag IDs for the user book\n List<Integer> tagIds = tagMappings.stream().map(TagMapping::getTagId).collect(Collectors.toList());\n\n // Get all tags in database and convert into a map keyed by tagID\n Map<String, Tag> allTags = this.tagDAO.findAll();\n Map<Integer, Tag> tagsIndexById = allTags.values().stream().collect(Collectors.toMap(Tag::getId, p -> p));\n \n // Correlate tag ids from tagMappings into tag names\n List<String> tagNames = tagIds\n .stream()\n .map(e -> tagsIndexById.get(e).getName())\n .collect(Collectors.toList());\n\n userBook.setTags(tagNames);\n }\n\n /**\n * Retrieve the book information from the 'book' webservice for the incoming book id.\n * \n * @param authString\n * Authentication header which is necessary for a REST call to 'book'\n * web service\n * @param bookId\n * ID of book to get information for\n * @return BookBean, or null if the lookup fails\n */\n private BookBean getBookInfo(String authString, int bookId) {\n BookBean bookBean = null;\n\n try {\n // Try the cache\n String bookBeanString = this.cache.get(\"book.info\", bookId);\n\n if (bookBeanString == null) {\n /////////////////////\n // 
No cached value\n // Get from WS now\n\n // Going to the 'book' web service directly\n String url = \"http://book:8080/book/\" + bookId;\n\n HttpClient client = HttpClientBuilder.create().build();\n HttpGet request = new HttpGet(url);\n\n // add request header\n request.addHeader(\"User-Agent\", \"BookAgent\");\n request.addHeader(\"content-type\", \"application/json\");\n request.addHeader(\"Authorization\", authString);\n\n // Execute request\n HttpResponse response = client.execute(request);\n\n // Get code\n int responseCode = response.getStatusLine().getStatusCode();\n\n // Convert body of result\n BufferedReader rd = new BufferedReader(new InputStreamReader(response.getEntity().getContent()));\n StringBuffer result = new StringBuffer();\n String line = \"\";\n while ((line = rd.readLine()) != null) {\n result.append(line);\n }\n\n // Check result\n if (responseCode == 200) {\n bookBeanString = result.toString();\n } else {\n System.out.println(\"Unable to get book's information for id: \" + bookId);\n System.out.println(\"Error code: \" + responseCode);\n System.out.println(\"Error content: \" + result);\n return null;\n }\n }\n\n // Convert bookBeanString into bean\n ObjectMapper mapper = new ObjectMapper();\n try {\n bookBean = mapper.readValue(bookBeanString, BookBean.class);\n } catch (IOException ioe) {\n ioe.printStackTrace();\n }\n\n // Set cache\n this.cache.set(\"book.info\", bookId, bookBeanString);\n } catch (Exception e) {\n e.printStackTrace();\n return null;\n }\n\n return bookBean;\n }\n\n /**\n * Retrieve BookIds that match the incoming title query.\n * \n * This makes a call to /book?title=TITLE_QUERY\n * \n * @param authString\n * Authentication header which is necessary for a REST call to 'book'\n * web service\n * @param titleQuery\n * Title query to make\n * @return list of Book IDs for the incoming title\n */\n private List<Integer> getBookIdsForTitleQuery(String authString, String titleQuery) {\n List<Integer> bookIds = new ArrayList<Integer>();\n\n try {\n /////////////////////\n // Get from WS now\n\n // Going to the 'book' web service directly\n String queryString = URLEncoder.encode(titleQuery, \"UTF-8\");\n\n String url = \"http://book:8080/book?title=\" + queryString;\n\n System.out.println(\"making query to url: \" + url);\n\n HttpClient client = HttpClientBuilder.create().build();\n HttpGet request = new HttpGet(url);\n\n // add request header\n request.addHeader(\"User-Agent\", \"BookAgent\");\n request.addHeader(\"content-type\", \"application/json\");\n request.addHeader(\"Authorization\", authString);\n\n // Execute request\n HttpResponse response = client.execute(request);\n\n // Get code\n int responseCode = response.getStatusLine().getStatusCode();\n\n // Convert body of result\n BufferedReader rd = new BufferedReader(new InputStreamReader(response.getEntity().getContent()));\n StringBuffer result = new StringBuffer();\n String line = \"\";\n while ((line = rd.readLine()) != null) {\n result.append(line);\n }\n\n // Check result\n if (responseCode == 200) {\n // Convert into bean\n ObjectMapper mapper = new ObjectMapper();\n BookBeanList bookBeanList = null;\n try {\n bookBeanList = mapper.readValue(result.toString(), BookBeanList.class);\n } catch (IOException ioe) {\n // Parsing failed; bail out instead of hitting a NullPointerException below\n ioe.printStackTrace();\n return bookIds;\n }\n System.out.println(\"Got back \" + bookBeanList.getData().size() + \" beans for title: \" + titleQuery);\n\n for (BookBean bean : bookBeanList.getData()) {\n System.out.println(\"looking at bean: \" + bean);\n // Add book id\n bookIds.add(bean.getId());\n }\n } else {\n System.out.println(\"Error code: \" + responseCode);\n System.out.println(\"Error content: \" + result);\n }\n\n } catch (Exception e) {\n e.printStackTrace();\n }\n\n return bookIds;\n }\n}\n"
},
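The getBookInfo helper in the user_book resource above follows a cache-aside pattern: try the cache, fall back to the 'book' web service on a miss, then populate the cache for the next caller. Below is a minimal, runnable sketch of that flow; the HashMap-backed cache and the stubbed `fetchFromService` are hypothetical stand-ins for the service's Redis cache and its real HTTP GET to `http://book:8080/book/<id>`.

```java
import java.util.HashMap;
import java.util.Map;

// Cache-aside sketch. 'fetchFromService' is a stand-in for the service's
// real HTTP GET; the map-backed cache is a hypothetical substitute for
// the Redis-backed Cache the service actually uses.
public class CacheAsideSketch {
    private final Map<String, String> cache = new HashMap<>();

    String getBookInfo(int bookId) {
        String key = "book.info:" + bookId;

        // 1. Try the cache first
        String cached = cache.get(key);
        if (cached != null) {
            return cached;
        }

        // 2. Cache miss: fetch from the backing service
        String fetched = fetchFromService(bookId);
        if (fetched == null) {
            return null; // service error; nothing worth caching
        }

        // 3. Populate the cache for subsequent calls
        cache.put(key, fetched);
        return fetched;
    }

    // Stand-in for the REST call the real service makes
    private String fetchFromService(int bookId) {
        return "{\"id\": " + bookId + ", \"title\": \"stub\"}";
    }

    public static void main(String[] args) {
        CacheAsideSketch sketch = new CacheAsideSketch();
        System.out.println(sketch.getBookInfo(1)); // miss: fetch + cache
        System.out.println(sketch.getBookInfo(1)); // hit: served from cache
    }
}
```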
{
"alpha_fraction": 0.5612353682518005,
"alphanum_fraction": 0.5612353682518005,
"avg_line_length": 17.780000686645508,
"blob_id": "6daab509d147e63f8541ea1a39aeb9597fc29563",
"content_id": "f462f0ae331586a2531a7f94cc3bee894f874b26",
"detected_licenses": [],
"is_generated": false,
"is_vendor": true,
"language": "Java",
"length_bytes": 939,
"license_type": "no_license",
"max_line_length": 59,
"num_lines": 50,
"path": "/images.java/mybooks_common/src/main/java/com/wpff/common/cache/Cache.java",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package com.wpff.common.cache;\n\n/**\n * Interface to cache data. \n *\n */\npublic interface Cache {\n\n /**\n * Set a cache value\n * \n * @param namespace\n * Namespace for value\n * @param key\n * Key in the namespace\n * @param value\n * Value to set\n */\n public void set(String namespace, int key, String value);\n\n /**\n * Get a cache value\n * \n * @param namespace\n * Namespace\n * @param key\n * Key in namespace\n * @return The value or null if none exists\n */\n public String get(String namespace, int key);\n\n /**\n * Clear the cache for a namespace\n * \n * @param namespace\n * Namespace to clear\n */\n public void clear(String namespace);\n\n /**\n * Clear the cache for a namespace\n * \n * @param namespace\n * Namespace to clear\n * @param key\n * Key in namespace\n */\n public void clear(String namespace, int key);\n\n}\n"
},
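The project implements the Cache interface above against Redis (see `CacheFactory.createCache(jedisPool)` in BookApplication further down). For illustration only, here is a sketch that satisfies the same interface with an in-memory map; the `"namespace:key"` composition is an assumption of this sketch, not the project's actual Redis layout.

```java
package com.wpff.common.cache;

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

// Hypothetical in-memory implementation of the Cache interface above.
public class InMemoryCache implements Cache {
    private final Map<String, String> store = new ConcurrentHashMap<>();

    // Compose "<namespace>:<key>" -- an assumption made for this sketch
    private String compose(String namespace, int key) {
        return namespace + ":" + key;
    }

    @Override
    public void set(String namespace, int key, String value) {
        store.put(compose(namespace, key), value);
    }

    @Override
    public String get(String namespace, int key) {
        return store.get(compose(namespace, key));
    }

    @Override
    public void clear(String namespace) {
        // Drop every entry in the namespace
        store.keySet().removeIf(k -> k.startsWith(namespace + ":"));
    }

    @Override
    public void clear(String namespace, int key) {
        store.remove(compose(namespace, key));
    }
}
```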
{
"alpha_fraction": 0.6302816867828369,
"alphanum_fraction": 0.6338028311729431,
"avg_line_length": 22.5,
"blob_id": "e68a1702c9d128854d34af0b8fbc12e841747329",
"content_id": "e996925268717fb3e6cbdf9d6a049f31c4a45631",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 284,
"license_type": "no_license",
"max_line_length": 69,
"num_lines": 12,
"path": "/images.java/author/build.sh",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env bash\n\n#######\n# Build project \n#######\n\n# The real directory (so this can be called from outside directories)\nour_directory=\"$( cd \"$( dirname \"${BASH_SOURCE[0]}\" )\" && pwd )\"\nroot=$our_directory/../..\n\n# load in common build file\n. $root/buildfiles/build-service.sh\n\n\n"
},
{
"alpha_fraction": 0.7548580169677734,
"alphanum_fraction": 0.7638266086578369,
"avg_line_length": 40.78125,
"blob_id": "24ae15dbd9ef4cfccf2e4c64d1a9e6486cf94163",
"content_id": "9603147febc0c309d8bfab1751e7d895a96bb9a3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1338,
"license_type": "no_license",
"max_line_length": 324,
"num_lines": 32,
"path": "/images.java/author/README.md",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "# Author Microservice\n\n## Introduction\n\n**/author** is a microservice for querying, listing, adding and deleting Author entries from the database. Authors are visable to all users but are only mutable by the *admin* user. \n\n## Supported calls\nThe list of supported calls and their documentation are available via the swagger endpoint. This runs on localhost:8080/swagger/ when the application is up.\n\n## Fields for an Author\nAn Author entry has the following fields:\n\nField | Purpose\n--- | ---\nid | Unique ID of the author. This is used to manage the author and for reference in a Book.\nname | Full name of the author. Example: \"Isaac Asimov\".\nimageUrl | URL of image for the author. \n\n\n## Authorization\nIt is necessary to authorize all REST calls to this endpoint. This is done by obtaining an authorization token from the */authorize* endpoint and adding it to the HTTP headees with the key *AUTHORIZATION*. See [/authorize](https://github.com/hipposareevil/books/blob/master/images/authorize/README.md) for more information.\n\n## Dropwizard Application\nThe application listens on port 8080.\n\n## Docker \nThe Docker container will expose port 8080 to other containers on the *booknet* Docker network.\n\n## Libraries used\n\n* [dropwizard](http://www.dropwizard.io/) for microservice framework.\n* [maven](https://maven.apache.org/) for building.\n\n"
},
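To make the Authorization section above concrete, here is one way a client could attach the token when calling the author endpoint. This is a sketch using only the JDK's HttpURLConnection; the host, port, and token value are placeholders, and a real token must first be obtained from the /authorize endpoint.

```java
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

// Hypothetical client call to GET /author with the AUTHORIZATION header set.
public class AuthorClientSketch {
    public static void main(String[] args) throws Exception {
        URL url = new URL("http://localhost:8080/author");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("GET");
        // Token placeholder: fetch a real one from the /authorize endpoint
        conn.setRequestProperty("Authorization", "Bearer <token-from-authorize>");

        System.out.println("HTTP " + conn.getResponseCode());
        try (BufferedReader in = new BufferedReader(
                new InputStreamReader(conn.getInputStream()))) {
            String line;
            while ((line = in.readLine()) != null) {
                System.out.println(line);
            }
        }
    }
}
```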
{
"alpha_fraction": 0.7362731099128723,
"alphanum_fraction": 0.7368420958518982,
"avg_line_length": 28.788135528564453,
"blob_id": "1a77814e25ead5625c8a6f888b22678fc770e97d",
"content_id": "a6218a5536ebeb5bad2494c6a23041a9eb7749df",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Java",
"length_bytes": 3515,
"license_type": "no_license",
"max_line_length": 109,
"num_lines": 118,
"path": "/images.java/book/src/main/java/com/wpff/BookApplication.java",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package com.wpff;\n\nimport javax.ws.rs.container.DynamicFeature;\n\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n// Jedis\nimport com.bendb.dropwizard.redis.JedisBundle;\nimport com.bendb.dropwizard.redis.JedisFactory;\nimport com.wpff.common.cache.Cache;\nimport com.wpff.common.cache.CacheFactory;\nimport com.wpff.core.Book;\n// Resources\nimport com.wpff.db.BookDAO;\nimport com.wpff.resources.BookResource;\n\nimport io.dropwizard.Application;\nimport io.dropwizard.db.DataSourceFactory;\nimport io.dropwizard.hibernate.HibernateBundle;\nimport io.dropwizard.migrations.MigrationsBundle;\nimport io.dropwizard.setup.Bootstrap;\nimport io.dropwizard.setup.Environment;\nimport io.federecio.dropwizard.swagger.SwaggerBundle;\nimport io.federecio.dropwizard.swagger.SwaggerBundleConfiguration;\nimport redis.clients.jedis.JedisPool;\n\n/**\n * Application to serve the book web service\n *\n */\npublic class BookApplication extends Application<BookConfiguration> {\n\n private static final Logger LOGGER = LoggerFactory.getLogger(BookApplication.class);\n\n /**\n * Start application\n *\n * @param args Args for application\n * @throws Exception thrown if error in application\n */\n public static void main(final String[] args) throws Exception {\n new BookApplication().run(args);\n }\n\n // Create hibernate bundle\n private final HibernateBundle<BookConfiguration> hibernateBundle =\n new HibernateBundle<BookConfiguration>(Book.class) {\n @Override\n public DataSourceFactory getDataSourceFactory(BookConfiguration configuration) {\n return configuration.getDataSourceFactory();\n }\n };\n\n @Override\n public String getName() {\n return \"book\";\n }\n\n /**\n * Initialize the application with configurations\n */\n @Override\n public void initialize(final Bootstrap<BookConfiguration> bootstrap) {\n\n // Hibernate\n bootstrap.addBundle(hibernateBundle);\n\n // Jedis for Redis\n bootstrap.addBundle(new JedisBundle<BookConfiguration>() {\n @Override\n public JedisFactory getJedisFactory(BookConfiguration configuration) {\n return configuration.getJedisFactory();\n }\n });\n\n\n // configuration\n bootstrap.addBundle(new MigrationsBundle<BookConfiguration>() {\n @Override\n public DataSourceFactory getDataSourceFactory(BookConfiguration configuration) {\n return configuration.getDataSourceFactory();\n }\n }); \n\n // Swagger\n bootstrap.addBundle(new SwaggerBundle<BookConfiguration>() {\n @Override\n protected SwaggerBundleConfiguration getSwaggerBundleConfiguration(BookConfiguration configuration) {\n return configuration.swaggerBundleConfiguration;\n }\n });\n }\n\n /**\n * Start the jersey endpoint for /book\n */\n @Override\n public void run(final BookConfiguration configuration,\n final Environment environment) {\n // Set up Jedis. Currently JedisFactory doesn't inject into a filter, just Resources.\n JedisPool jedisPool = configuration.getJedisFactory().build(environment);\n\n \t// Cache\n\t\tCache cache = CacheFactory.createCache(jedisPool);\n\n\t\t// DAO\n final BookDAO dao = new BookDAO(hibernateBundle.getSessionFactory()); \n\t\t\n // Book endpoint\n environment.jersey().register(new BookResource(dao, cache));\n\n // Add a container request filter for securing webservice endpoints.\n DynamicFeature tokenRequired = new com.wpff.common.auth.TokenRequiredFeature(jedisPool) ;\n environment.jersey().register(tokenRequired);\n }\n\n}\n"
},
{
"alpha_fraction": 0.6358785033226013,
"alphanum_fraction": 0.6358785033226013,
"avg_line_length": 26.066667556762695,
"blob_id": "bd20bf6f6c4360fa531927b342769fed4e4c8cce",
"content_id": "0821a085cc3943a3e28b8a078f69e847d1e75e7b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 2436,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 90,
"path": "/images/frontend/content/mybooks/src/main.js",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "// The Vue build version to load with the `import` command\n// (runtime-only or standalone) has been set in webpack.base.conf with an alias.\nimport Vue from 'vue'\nimport App from './App'\nimport router from './router'\nimport axios from 'axios'\nimport Vuex from 'vuex'\n\nVue.use(Vuex)\nVue.prototype.$axios = axios\nVue.config.productionTip = false\n\n// Event queue for everyone\nwindow.Event = new Vue()\n\n// Storage\n// Access via \"this.$store.state.count\"\nconst store = new Vuex.Store({\n state: {\n allBooks: {},\n booksView: {},\n allAuthors: {},\n authorsView: {},\n userBooks: {},\n userBooksView: {},\n searchString: ''\n },\n getters: {\n search: state => state.searchString\n },\n mutations: {\n // Clear everything from this store\n clearEverything (state) {\n console.log('$store. clear everything')\n\n state.allBooks = {}\n state.booksView = {}\n state.allAuthors = {}\n state.authorsView = {}\n state.userBooks = {}\n state.userBooksView = {}\n state.searchString = ''\n },\n setSearchString (state, text) {\n state.searchString = text\n },\n // The currently loaded set of 'books'\n // Note this is an object containing the books JSON and\n // state of the query (start, length, total, end)\n setAllBooks (state, newBooks) {\n state.allBooks = newBooks\n },\n // Set the view options for the BookView\n // This includes grid vs list, etc\n setBooksView (state, view) {\n state.booksView = view\n },\n // The currently loaded set of 'authors'\n // Note this is an object containing the authors JSON and\n // state of the query (start, length, total, end)\n setAllAuthors (state, newAuthors) {\n state.allAuthors = newAuthors\n },\n // Set the view options for the AuthorView\n // This includes grid vs list, etc\n setAuthorsView (state, view) {\n state.authorsView = view\n },\n // The currently loaded set of 'user books'\n // Note this is an object containing the userbooks JSON and\n // state of the query (start, length, total, end)\n setUserBooks (state, newUserBooks) {\n state.userBooks = newUserBooks\n },\n // Set the view options for the UserBooksVue\n // This includes grid vs list, etc\n setUserBooksView (state, view) {\n state.userBooksView = view\n }\n }\n})\n\n/* eslint-disable no-new */\nnew Vue({\n el: '#app',\n router,\n store,\n template: '<App/>',\n components: { App }\n})\n"
},
{
"alpha_fraction": 0.7011004090309143,
"alphanum_fraction": 0.705142617225647,
"avg_line_length": 26.658384323120117,
"blob_id": "e41c6eda11ec1a2141f4b700d06eeea46ddc3646",
"content_id": "545d733bf209810823eb0b6f33df27236ea2f8d1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 4453,
"license_type": "no_license",
"max_line_length": 81,
"num_lines": 161,
"path": "/images/tag/src/github.com/hipposareevil/tag/main.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\n// Main application\n//\n// This will create the databases, router, static files\n// and wire everything together\n\nimport (\n\t\"fmt\"\n\t\"net/http\"\n\t\"os\"\n\t\"time\"\n\n\t// mysql\n\t\"database/sql\"\n\t_ \"github.com/go-sql-driver/mysql\"\n\n\t// redis\n\t\"github.com/mediocregopher/radix.v2/pool\"\n\n\t// gorilla routing\n\t\"github.com/gorilla/mux\"\n\n\t// metrics\n\n\t\"github.com/go-kit/kit/log\"\n\thttptransport \"github.com/go-kit/kit/transport/http\"\n)\n\n// Main\nfunc main() {\n\tlogger := log.NewLogfmtLogger(os.Stderr)\n\n\t/////////////////\n\t// Make redis pool\n\tredisPool, err := pool.New(\"tcp\", \"books.token_db:6379\", 10)\n\tif err != nil {\n\t\tfmt.Println(\"Got error when making connection to redis: \", err)\n\t}\n\n\t/////////////////\n\t// Make Mysql db connection\n\tdb, err := sql.Open(\"mysql\", \"booksuser:books@tcp(books.db:3306)/booksdatabase\")\n\n\t// if there is an error opening the connection, handle it\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\tdefer db.Close()\n\tdb.SetMaxIdleConns(0)\n\tdb.SetConnMaxLifetime(time.Second * 10)\n\n\t///////////////////\n\t// create services and endpoints\n\n\t/////////\n\t// ROUTER\n\trouter := mux.NewRouter()\n\t// Make gorilla be router for everything\n\thttp.Handle(\"/\", router)\n\n\t/////////////////\n\t// Swagger static html file\n\thtmlDir := \"/html\"\n\n\t// Create server for swagger file\n\tfs := http.FileServer(http.Dir(htmlDir))\n\trouter.PathPrefix(\"/swagger.yaml\").Handler(http.StripPrefix(\"/\", fs))\n\n\t///////////////\n\t// cache layer\n\tvar cache CacheLayer\n\tcache = cacheLayer{redisPool}\n\n\t// clear the cache as we are starting fresh\n\tcache.ClearAll(TAG_CACHE)\n\n\t///////////////\n\t// 'tag' service\n\tvar tagSvc TagService\n\ttagSvc = tagService{db, cache}\n\n\t// Add middleware here\n\n\t// Set up the endpoints on our service\n\t// Note the Authentication middleware is done on each endpoint\n\t// individually so we can tightly control each one.\n\n\t////////////////\n\t// Endpoints\n\n\t// GET /tag\n\ttagsEndpoint := makeTagsEndpoint(tagSvc)\n\tbaseTagsHandler := httptransport.NewServer(\n\t\ttagsEndpoint,\n\t\tdecodeTagsRequest,\n\t\tencodeResponse,\n\t)\n\t// Add middleware to authenticate the endpoint.\n\t// first parameter denotes if only 'admin' group can access the endpoint.\n\ttagsHandler := Authenticate(false, redisPool, baseTagsHandler)\n\trouter.Methods(\"GET\").Path(\"/tag\").Handler(tagsHandler)\n\n\t// GET /tag/<tag_id>\n\ttagEndpoint := makeTagEndpoint(tagSvc)\n\tbaseTagHandler := httptransport.NewServer(\n\t\ttagEndpoint,\n\t\tdecodeTagRequest,\n\t\tencodeResponse,\n\t)\n\t// Add middleware to authenticate the endpoint.\n\t// first parameter denotes if only 'admin' group can access the endpoint.\n\ttagHandler := Authenticate(false, redisPool, baseTagHandler)\n\t// 'tag_id' is used in transport.go to grab the variable 'tag_id' from the path\n\trouter.Methods(\"GET\").Path(\"/tag/{tag_id}\").Handler(tagHandler)\n\n\t// PUT /tag/<tag_id>\n\tputTagEndpoint := makePutTagEndpoint(tagSvc)\n\tbasePutTagHandler := httptransport.NewServer(\n\t\tputTagEndpoint,\n\t\tdecodePutTagRequest,\n\t\tencodeResponse,\n\t)\n\t// Add middleware to authenticate the endpoint.\n\t// first parameter denotes if only 'admin' group can access the endpoint.\n\tputTagHandler := Authenticate(true, redisPool, basePutTagHandler)\n\t// 'tag_id' is used in transport.go to grab the variable 'tag_id' from the 
path\n\trouter.Methods(\"PUT\").Path(\"/tag/{tag_id}\").Handler(putTagHandler)\n\n\t// POST /tag\n\tpostTagEndpoint := makePostTagEndpoint(tagSvc)\n\tbasePostTagHandler := httptransport.NewServer(\n\t\tpostTagEndpoint,\n\t\tdecodePostTagRequest,\n\t\tencodeResponse,\n\t)\n\t// Add middleware to authenticate the endpoint.\n\t// first parameter denotes if only 'admin' group can access the endpoint.\n\tpostTagHandler := Authenticate(true, redisPool, basePostTagHandler)\n\trouter.Methods(\"POST\").Path(\"/tag\").Handler(postTagHandler)\n\n\t// DELETE /tag\n\t// This reuses 'decodeTagRequest' as it is the same for GET and DELETE\n\tdeleteTagEndpoint := makeDeleteTagEndpoint(tagSvc)\n\tbaseDeleteTagHandler := httptransport.NewServer(\n\t\tdeleteTagEndpoint,\n\t\tdecodeTagRequest,\n\t\tencodeResponse,\n\t)\n\t// Add middleware to authenticate the endpoint.\n\t// first parameter denotes if only 'admin' group can access the endpoint.\n\tdeleteTagHandler := Authenticate(true, redisPool, baseDeleteTagHandler)\n\t// 'tag_id' is used in transport.go to grab the variable 'tag_id' from the path\n\trouter.Methods(\"DELETE\").Path(\"/tag/{tag_id}\").Handler(deleteTagHandler)\n\n\t//////////////\n\t// Start server\n\taddr := \":8080\"\n\tlogger.Log(\"msg\", \"HTTP\", \"addr\", addr)\n\tlogger.Log(\"err\", http.ListenAndServe(addr, nil))\n}\n"
},
{
"alpha_fraction": 0.6068376302719116,
"alphanum_fraction": 0.6068376302719116,
"avg_line_length": 17,
"blob_id": "b633116700077abfb7a0441c68394207ca5f3e64",
"content_id": "a5cb28272610662d69fcfe7ba1c11b5ae2a833eb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 468,
"license_type": "no_license",
"max_line_length": 31,
"num_lines": 26,
"path": "/images/tag/src/github.com/hipposareevil/tag/tag_structures.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\n////////////////////////////\n// Structures\n\n// Single tag\n// JSON Response sent to client\ntype Tag struct {\n\tID int `json:\"id\"`\n\tName string `json:\"name\"`\n}\n\n// List of Tags\n// JSON Response sent to client\ntype Tags struct {\n\tOffset int `json:\"offset\"`\n\tLimit int `json:\"limit\"`\n\tTotal int `json:\"total\"`\n\tData []Tag `json:\"data\"`\n}\n\n// Tag to be created or updated\n// Used in POST or PUT\ntype NewTag struct {\n\tName string `json:\"name\"`\n}\n"
},
{
"alpha_fraction": 0.673928439617157,
"alphanum_fraction": 0.6752457022666931,
"avg_line_length": 21.897911071777344,
"blob_id": "3faaf587b974877b109f2f1cd94715bab098d4f2",
"content_id": "2b4f36381adc0be8b3b2809c6ab4bbca7968c970",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 9869,
"license_type": "no_license",
"max_line_length": 96,
"num_lines": 431,
"path": "/images/book/src/github.com/hipposareevil/book/transport.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\n// Transport module\n//\n// Contains:\n// - endpoint creation\n// - encode responses to client\n// - decode client requests\n// - structures used. e.g. bookRequest, postBookRequest, etc\n\nimport (\n\t\"context\"\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"net/http\"\n\t\"strconv\"\n\t\"strings\"\n\n\t\"github.com/gorilla/mux\"\n\n\t\"github.com/go-kit/kit/endpoint\"\n)\n\n//////////////////////////////////////////////////////////\n//\n// Create endpoints\n\n// GET /book/\n// Make endpoint for getting books\nfunc makeGetBooksEndpoint(svc BookService) endpoint.Endpoint {\n\treturn func(ctx context.Context, request interface{}) (interface{}, error) {\n\t\t// convert request into a books specific request\n\t\treq := request.(getAllBooksRequest)\n\n\t\t// call actual service with data from the req\n\t\tbooks, err := svc.GetBooks(\n\t\t\treq.Bearer,\n\t\t\treq.Offset,\n\t\t\treq.Limit,\n\t\t\treq.Title,\n\t\t\treq.AuthorId,\n\t\t\treq.BookId,\n\t\t\treq.AuthorName)\n\t\treturn booksResponse{\n\t\t\tData: books,\n\t\t\tErr: err,\n\t\t}, nil\n\t}\n}\n\n// GET /book/<book_id>\n// Make endpoint for getting single Book\nfunc makeGetBookEndpoint(svc BookService) endpoint.Endpoint {\n\treturn func(ctx context.Context, request interface{}) (interface{}, error) {\n\t\t// convert request into a bookRequest\n\t\treq := request.(getBookRequest)\n\n\t\t// call actual service with data from the req\n\t\tbook, err := svc.GetBook(req.Bearer, req.BookId)\n\t\treturn bookResponse{\n\t\t\tData: book,\n\t\t\tErr: err,\n\t\t}, nil\n\t}\n}\n\n// DELETE /book/<book_id>\n// Make endpoint for deleting single Book\nfunc makeDeleteBookEndpoint(svc BookService) endpoint.Endpoint {\n\treturn func(ctx context.Context, request interface{}) (interface{}, error) {\n\t\t// convert request into a bookRequest\n\t\treq := request.(deleteBookRequest)\n\n\t\t// call actual service with data from the req\n\t\terr := svc.DeleteBook(req.BookId)\n\t\treturn deleteBookResponse{\n\t\t\tErr: err,\n\t\t}, nil\n\t}\n}\n\n// POST /book/\n// Make endpoint for creating (via post) a book\nfunc makeCreateBookEndpoint(svc BookService) endpoint.Endpoint {\n\treturn func(ctx context.Context, request interface{}) (interface{}, error) {\n\t\t// convert request into a createBookRequest\n\t\treq, ok := request.(createBookRequest)\n\t\tif !ok {\n\t\t\tfmt.Println(\"Error creating 'createBookRequest'\")\n\t\t}\n\n\t\t// call actual service with data from the req\n\t\tnewBook, err := svc.CreateBook(\n\t\t\treq.Bearer,\n\t\t\treq.AuthorId,\n\t\t\treq.Description,\n\t\t\treq.FirstPublishedYear,\n\t\t\treq.GoodReadsUrl,\n\t\t\treq.ImageLarge,\n\t\t\treq.ImageMedium,\n\t\t\treq.ImageSmall,\n\t\t\treq.Isbns,\n\t\t\treq.OpenlibraryWorkUrl,\n\t\t\treq.Subjects,\n\t\t\treq.Title)\n\n\t\tfmt.Println(\"[book.transport] got error: \", err)\n\n\t\treturn createBookResponse{\n\t\t\tData: newBook,\n\t\t\tErr: err,\n\t\t}, nil\n\t}\n}\n\n// PUT /book/<book_id>\n// Make endpoint for updating (via PUT) a book\nfunc makeUpdateBookEndpoint(svc BookService) endpoint.Endpoint {\n\treturn func(ctx context.Context, request interface{}) (interface{}, error) {\n\t\t// convert request into a updateBookRequest\n\t\treq := request.(updateBookRequest)\n\n\t\t// call actual service with data from the req (putBookRequest)\n\t\tbook, err := 
svc.UpdateBook(\n\t\t\treq.Bearer,\n\t\t\treq.BookId,\n\t\t\treq.AuthorId,\n\t\t\treq.Description,\n\t\t\treq.FirstPublishedYear,\n\t\t\treq.GoodReadsUrl,\n\t\t\treq.ImageLarge,\n\t\t\treq.ImageMedium,\n\t\t\treq.ImageSmall,\n\t\t\treq.Isbns,\n\t\t\treq.OpenlibraryWorkUrl,\n\t\t\treq.Subjects,\n\t\t\treq.Title)\n\n\t\treturn updateBookResponse{\n\t\t\tData: book,\n\t\t\tErr: err,\n\t\t}, nil\n\t}\n}\n\n//////////////////////////////////////////////////////////\n//\n// Decode\n\n// Create a getAllBooksRequest from the context and http.Request\n// /book/\n//\n// The getAllBooksRequest includes, among other fields:\n// - Offset Offset into the query\n// - Limit Number of values to return\nfunc decodeGetAllBooksRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\trealOffset, realLimit := parseOffsetAndLimit(r)\n\n\t///////////////////\n\t// Parse parameters\n\tr.ParseForm()\n\tvalues := r.Form\n\n\t// get Title\n\ttitle := values.Get(\"title\")\n\n\t// get author name\n\tauthorName := values.Get(\"author_name\")\n\n\t// get AuthorId list\n\ttempAuthorIds := values[\"author_id\"]\n\ttemp := strings.Join(tempAuthorIds, \",\")\n\tauthorIds := splitCsvStringAsInts(temp)\n\n\t// Get BookId list\n\ttempIds := values[\"book_id\"]\n\ttemp = strings.Join(tempIds, \",\")\n\tbookIds := splitCsvStringAsInts(temp)\n\n\t// Get bearer from headers\n\tbearer := parseBearer(r)\n\n\t// Make request for all books\n\tvar request getAllBooksRequest\n\trequest = getAllBooksRequest{\n\t\tBearer: bearer,\n\t\tOffset: realOffset,\n\t\tLimit: realLimit,\n\t\tTitle: title,\n\t\tAuthorId: authorIds,\n\t\tBookId: bookIds,\n\t\tAuthorName: authorName,\n\t}\n\n\treturn request, nil\n}\n\n// Create getBookRequest\n// /book/id\n//\nfunc decodeGetBookRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\t// Get book ID from gorilla handling of vars\n\tbookId, err := parseBookId(r)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Get bearer from headers\n\tbearer := parseBearer(r)\n\n\t// Make request for single book\n\tvar request getBookRequest\n\trequest = getBookRequest{\n\t\tBookId: bookId,\n\t\tBearer: bearer,\n\t}\n\n\treturn request, nil\n}\n\n// Create deleteBookRequest\n// DELETE /book/id\n//\n// The deleteBookRequest has 1 variable:\n// - BookId ID of book taken from the path\nfunc decodeDeleteBookRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\tbookId, err := parseBookId(r)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Make request to delete book\n\tvar request deleteBookRequest\n\trequest = deleteBookRequest{\n\t\tBookId: bookId,\n\t}\n\n\treturn request, nil\n}\n\n// Create createBookRequest\n// POST /book\nfunc decodeCreateBookRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\t// Get bearer from headers\n\tbearer := parseBearer(r)\n\n\t///////////////////\n\t// Parse body\n\tvar createBookRequest createBookRequest\n\tif err := json.NewDecoder(r.Body).Decode(&createBookRequest); err != nil {\n\t\tfmt.Println(\"Error decoding book request: \", err)\n\t\treturn nil, err\n\t}\n\n\tcreateBookRequest.Bearer = bearer\n\n\treturn createBookRequest, nil\n}\n\n// Create updateBookRequest\n// PUT /book/id\nfunc decodeUpdateBookRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\tbookId, err := parseBookId(r)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Get bearer from headers\n\tbearer := parseBearer(r)\n\n\t///////////////////\n\t// Parse body\n\tvar updateBook updateBookRequest\n\tif err := 
json.NewDecoder(r.Body).Decode(&updateBook); err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Set bookid on update request\n\tupdateBook.BookId = bookId\n\n\tupdateBook.Bearer = bearer\n\n\treturn updateBook, nil\n}\n\n// Returns the bearer id without \"Bearer \"\nfunc parseBearer(r *http.Request) string {\n\tvar realBearer string\n\tbearer := r.Header.Get(\"authorization\")\n\n\t// Strip the 'Bearer ' from header\n\tif strings.HasPrefix(bearer, \"Bearer \") {\n\t\trealBearer = strings.Replace(bearer, \"Bearer \", \"\", 1)\n\t}\n\n\treturn realBearer\n}\n\n// Decode the common parts of a request:\n// * offset\n// * limit\n//\n// Instead of erroring out, it will return defaults\n//\n// Returns the two values in order: offset & limit\nfunc parseOffsetAndLimit(r *http.Request) (int, int) {\n\t///////////////////\n\t// Parse parameters\n\tr.ParseForm()\n\tvalues := r.Form\n\n\t// Get values from the form, where 'offset' & 'limit' are parameters\n\tvar realOffset int\n\tvar realLimit int\n\n\t// Offset, use a default of 0\n\toffset := values.Get(\"offset\")\n\tif offset != \"\" {\n\t\trealOffset, _ = strconv.Atoi(offset)\n\t} else {\n\t\trealOffset = 0\n\t}\n\n\t// Limit, set a default if it doesn't exist\n\tlimit := values.Get(\"limit\")\n\tif limit != \"\" {\n\t\trealLimit, _ = strconv.Atoi(limit)\n\t} else {\n\t\t// default to get 20\n\t\trealLimit = 20\n\t}\n\n\treturn realOffset, realLimit\n}\n\n// Decode the 'book_id' from the request.\n//\n// Returns the book id\nfunc parseBookId(r *http.Request) (int, error) {\n\t// Demux the gorilla parsing\n\tvars := mux.Vars(r)\n\t// 'book_id' is set in the gorilla handling in main.go\n\tid, ok := vars[\"book_id\"]\n\tif !ok {\n\t\treturn 0, ErrBadRouting\n\t}\n\n\tvar bookId int\n\tif id != \"\" {\n\t\tbookId, _ = strconv.Atoi(id)\n\t}\n\n\treturn bookId, nil\n}\n\n////////////\n// Split a CSV string into array of ints\nfunc splitCsvStringAsInts(csv string) []int {\n\tvar newArray []int\n\n\tif len(csv) > 0 {\n\t\tstringArray := strings.Split(csv, \",\")\n\n\t\t// Convert each string to int\n\t\tfor _, element := range stringArray {\n\t\t\ttemp, _ := strconv.Atoi(element)\n\t\t\tnewArray = append(newArray, temp)\n\t\t}\n\t}\n\n\treturn newArray\n}\n\n//////////////////////////////////////////////////////////\n//\n// Encode responses to client\n\n// The response can/should be of type errorer and thus can be cast to check if there is an error\nfunc encodeResponse(ctx context.Context, w http.ResponseWriter, response interface{}) error {\n\t// Set JSON type\n\tw.Header().Set(\"Content-Type\", \"application/json; charset=utf-8\")\n\n\t// Check error\n\tif e, ok := response.(errorer); ok {\n\t\t// This is an errorer class, now check for error\n\t\tif err := e.error(); err != nil {\n\t\t\tencodeError(ctx, e.error(), w)\n\t\t\treturn nil\n\t\t}\n\t}\n\n\t// cast to dataHolder to get Data, otherwise just encode the response\n\tif holder, ok := response.(dataHolder); ok {\n\t\treturn json.NewEncoder(w).Encode(holder.getData())\n\t} else {\n\t\treturn json.NewEncoder(w).Encode(response)\n\t}\n}\n\n// Write the incoming err into the response writer\nfunc encodeError(_ context.Context, err error, w http.ResponseWriter) {\n\tif err == nil {\n\t\tpanic(\"encodeError with nil error\")\n\t}\n\n\tw.Header().Set(\"Content-Type\", \"application/json; charset=utf-8\")\n\n\t// Write actual error code\n\tcode := codeFrom(err)\n\tw.WriteHeader(code)\n\n\t// write out the error message\n\tjson.NewEncoder(w).Encode(map[string]interface{}{\n\t\t\"code\": 
code,\n\t\t\"message\": err.Error(),\n\t})\n}\n\n// Determine the HTTP error code from the incoming error 'err'\nfunc codeFrom(err error) int {\n\tswitch err {\n\tcase ErrNotFound:\n\t\treturn http.StatusNotFound\n\tcase ErrAlreadyExists:\n\t\treturn http.StatusConflict\n\tcase ErrUnauthorized:\n\t\treturn http.StatusUnauthorized\n\tdefault:\n\t\treturn http.StatusInternalServerError\n\t}\n}\n"
},
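The parseOffsetAndLimit helper above defaults missing query parameters instead of erroring: no offset means 0, no limit means 20. The same defaulting logic restated as a small, self-contained Java sketch; the method name mirrors the Go helper, and the null handling is an assumption of this sketch.

```java
// Defaulting sketch analogous to parseOffsetAndLimit in transport.go.
public class OffsetLimitSketch {

    // Returns {offset, limit}; missing/empty values fall back to 0 and 20
    static int[] parseOffsetAndLimit(String offset, String limit) {
        int realOffset = (offset == null || offset.isEmpty()) ? 0 : Integer.parseInt(offset);
        int realLimit = (limit == null || limit.isEmpty()) ? 20 : Integer.parseInt(limit);
        return new int[] { realOffset, realLimit };
    }

    public static void main(String[] args) {
        int[] defaults = parseOffsetAndLimit(null, null);
        System.out.println("offset=" + defaults[0] + " limit=" + defaults[1]); // offset=0 limit=20

        int[] explicit = parseOffsetAndLimit("40", "10");
        System.out.println("offset=" + explicit[0] + " limit=" + explicit[1]); // offset=40 limit=10
    }
}
```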
{
"alpha_fraction": 0.6773892641067505,
"alphanum_fraction": 0.6773892641067505,
"avg_line_length": 17.33333396911621,
"blob_id": "582835f8b94097b70a13621148644debdb63ba4d",
"content_id": "7857c98bf97de714b4267103225baccbae7cb2d5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Java",
"length_bytes": 2145,
"license_type": "no_license",
"max_line_length": 57,
"num_lines": 117,
"path": "/images.java/query/src/main/java/wpff/result/QueryAuthorResult.java",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package wpff.result;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\n/**\n * Bean representing an Author. \n * This is filled w/ data from the OpenLibraryAuthor\n *\n */\npublic class QueryAuthorResult {\n\n\tprivate String name;\n\tprivate String olKey;\n\tprivate List<String> subjects = new ArrayList<String>();\n\tprivate String birthDate;\n\tprivate String imageSmall;\n\tprivate String imageMedium;\n\tprivate String imageLarge;\n\t\n\tpublic String getName() {\n\t\treturn name;\n\t}\n\n\tpublic void setName(String name) {\n\t\tthis.name = name;\n\t}\n\n\tpublic String getOlKey() {\n\t\treturn olKey;\n\t}\n\n\tpublic void setOlKey(String key) {\n\t\tthis.olKey = key;\n\t}\n\n\tpublic List<String> getSubjects() {\n\t\treturn subjects;\n\t}\n\n\tpublic void setSubjects(List<String> subjects) {\n\t\tif (subjects != null)\n\t\tthis.subjects.addAll(subjects);\n\t}\n\n\tpublic String getBirthDate() {\n\t\treturn birthDate;\n\t}\n\n\tpublic void setBirthDate(String birthDate) {\n\t\tthis.birthDate = birthDate;\n\t}\n\n\t\n\t\n\t/* (non-Javadoc)\n\t * @see java.lang.Object#toString()\n\t */\n\t@Override\n\tpublic String toString() {\n\t\tStringBuilder builder = new StringBuilder();\n\t\tbuilder.append(\"AuthorResult [name=\");\n\t\tbuilder.append(name);\n\t\tbuilder.append(\", key=\");\n\t\tbuilder.append(olKey);\n\t\tbuilder.append(\", subjects=\");\n\t\tbuilder.append(subjects);\n\t\tbuilder.append(\", birthDate=\");\n\t\tbuilder.append(birthDate);\n\t\tbuilder.append(\"]\");\n\t\treturn builder.toString();\n\t}\n\n /**\n * @return the imageSmall\n */\n public String getImageSmall() {\n return imageSmall;\n }\n\n /**\n * @param imageSmall the imageSmall to set\n */\n public void setImageSmall(String imageSmall) {\n this.imageSmall = imageSmall;\n }\n\n /**\n * @return the imageMedium\n */\n public String getImageMedium() {\n return imageMedium;\n }\n\n /**\n * @param imageMedium the imageMedium to set\n */\n public void setImageMedium(String imageMedium) {\n this.imageMedium = imageMedium;\n }\n\n /**\n * @return the imageLarge\n */\n public String getImageLarge() {\n return imageLarge;\n }\n\n /**\n * @param imageLarge the imageLarge to set\n */\n public void setImageLarge(String imageLarge) {\n this.imageLarge = imageLarge;\n }\n\n\n}\n"
},
{
"alpha_fraction": 0.6647639870643616,
"alphanum_fraction": 0.6698133945465088,
"avg_line_length": 24.446928024291992,
"blob_id": "5ab85e3748f6abd561d54b4546fb69b276be3c68",
"content_id": "540e01a82ea084886865cb53f7806ff7522f6cc0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 4555,
"license_type": "no_license",
"max_line_length": 116,
"num_lines": 179,
"path": "/images/authorization/src/github.com/hipposareevil/authorization/service.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\n/////////////////////////\n// Authorize service\n// Supports:\n// - GET at /authorize to validate authorization header (200 or 401)\n// - POST at /authorize to create a new token\n//\n/////////////////////////\n\nimport (\n\t\"fmt\"\n\n\t// password encryption\n\t\"golang.org/x/crypto/bcrypt\"\n\n\t// UUID\n\t\"github.com/satori/go.uuid\"\n\n\t_ \"errors\"\n\n\t// redis\n\t\"github.com/mediocregopher/radix.v2/pool\"\n\n\t// mysql\n\t\"database/sql\"\n\t_ \"github.com/go-sql-driver/mysql\"\n)\n\nconst BEARER = \"Bearer\"\n\n// Service interface exposed to clients\ntype AuthorizeService interface {\n\t// ValidateToken: bearer\n\tValidateToken(string) error\n\n\t// CreateToken: name, password\n\tCreateToken(string, string) (Authorization, error)\n}\n\n////////////////////////\n// Actual service\n// This takes the following:\n// - mysqlDb DB for MySQL\n// - redisPool Redis pool\ntype authorizeService struct {\n\tmysqlDb *sql.DB\n\tredisPool *pool.Pool\n}\n\n//////////\n// METHODS on authorizationservice\n\n////////////////\n// ValidateToken\n//\n// what:\n// Validates if the incoming authorization/bearer is OK.\n// This will check the bearer string against what is in redis.\n//\n// params:\n// bearer: Authorization bearer\n//\n// returns:\n// error if bearer string is invalid\nfunc (theService authorizeService) ValidateToken(bearer string) error {\n\t/////////////////\n\t// Redis\n\tconn, err := theService.redisPool.Get()\n\tif err != nil {\n\t\tfmt.Println(\"Got error when calling redisPool.Get: \", err)\n\t\treturn ErrServerError\n\t}\n\tdefer theService.redisPool.Put(conn)\n\n\t// Check the bearer string credentials\n\n\t// Key to query in redis\n\tredisHashName := \"user:\" + bearer\n\n\t// Check redis. If it is null, authentication failed\n\t_, err = conn.Cmd(\"HGET\", redisHashName, \"name\").Str()\n\n\tif err != nil {\n\t\t// No authorization -> send a 401\n\t\treturn ErrUnauthorized\n\t}\n\n\t// Authorization is OK, send a 200\n\treturn nil\n}\n\n////////////////\n// CreateToken\n//\n// what:\n// Creates a new entry in redis and returns a token matching that entry.\n//\n// params:\n// userName: Name of user\n// password: Password\nfunc (theService authorizeService) CreateToken(userName string, password string) (Authorization, error) {\n\t/////////////////\n\t// Redis\n\tconn, err := theService.redisPool.Get()\n\tif err != nil {\n\t\tfmt.Println(\"Got error when calling redisPool.Get: \", err)\n\t\treturn Authorization{}, ErrServerError\n\t}\n\tdefer theService.redisPool.Put(conn)\n\n\t////////////////////\n\t// Mysql\n\tif err := theService.mysqlDb.Ping(); err != nil {\n\t\ttheService.mysqlDb.Close()\n\t\tfmt.Println(\"Got error when calling mysql.Ping: \", err)\n\t\treturn Authorization{}, ErrServerError\n\t}\n\n\t// Find user in database\n\tvar user User\n\t// Can the DB info into 'user' variable\n\terr = theService.mysqlDb.QueryRow(\"SELECT user_id, name, user_group, password FROM user WHERE name = ?\", userName).\n\t\tScan(&user.UserId, &user.Name, &user.UserGroup, &user.Password)\n\n\tswitch {\n\tcase err == sql.ErrNoRows:\n\t\tfmt.Println(\"No matching user in db\")\n\t\treturn Authorization{}, ErrNotFound\n\tcase err != nil:\n\t\tfmt.Println(\"Got error from select: \", err)\n\t\treturn Authorization{}, ErrServerError\n\tdefault:\n\t}\n\n\t// Verify the password now\n\tpasswordInDatabase := []byte(user.Password)\n\tpasswordFromUser := []byte(password)\n\n\tisEncryptedError := bcrypt.CompareHashAndPassword(passwordInDatabase, passwordFromUser)\n\tif 
isEncryptedError != nil {\n\t\t// Passwords are NOT the same\n\t\tfmt.Println(\"Incoming password for '\" + userName + \"' is incorrect.\")\n\t\treturn Authorization{}, ErrUnauthorized\n\t}\n\n\t// User is ok. create uuid and token\n\ttokenUuid := uuid.Must(uuid.NewV4(), nil)\n\ttoken := tokenUuid.String()\n\tfullToken := BEARER + \" \" + token\n\tredisKey := \"user:\" + token\n\n\t// Put data into redis\n\tif conn.Cmd(\"HSET\", redisKey, \"name\", userName).Err != nil {\n\t\tfmt.Println(\"Unable to set name '\" + userName + \"' in redis.\")\n\t\treturn Authorization{}, ErrServerError\n\t}\n\tif conn.Cmd(\"HSET\", redisKey, \"id\", user.UserId).Err != nil {\n\t\tfmt.Println(\"Unable to set id '\", user.UserId, \"' in redis.\")\n\t\treturn Authorization{}, ErrServerError\n\t}\n\tif conn.Cmd(\"HSET\", redisKey, \"group\", user.UserGroup).Err != nil {\n\t\tfmt.Println(\"Unable to set group in redis for '\" + userName + \"'.\")\n\t\treturn Authorization{}, ErrServerError\n\t}\n\t// expire data in 3 weeks\n\tif conn.Cmd(\"expire\", redisKey, 60*60*24*7 * 3).Err != nil {\n\t\tfmt.Println(\"Unable to set expiration time for '\" + userName + \"' in redis.\")\n\t\treturn Authorization{}, ErrServerError\n\t}\n\n\treturnValue := Authorization{\n\t\tToken: fullToken,\n\t\tUserId: user.UserId,\n\t\tGroupName: user.UserGroup,\n\t}\n\n\treturn returnValue, nil\n}\n"
},
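CreateToken above never compares passwords directly; it delegates to bcrypt.CompareHashAndPassword, which checks the incoming password against the salted hash stored in MySQL. For illustration, the equivalent check in Java, sketched with the jBCrypt library; this is an assumption made for the sketch, since the project itself performs this step in Go.

```java
import org.mindrot.jbcrypt.BCrypt;

// Hedged Java analogue of the service's bcrypt verification.
// The sample password and hash values are made up.
public class BcryptCheckSketch {
    public static void main(String[] args) {
        // In the real service this hash comes from the 'user' table
        String hashFromDatabase = BCrypt.hashpw("s3cret", BCrypt.gensalt());

        // Equivalent of: bcrypt.CompareHashAndPassword(dbHash, incoming)
        boolean ok = BCrypt.checkpw("s3cret", hashFromDatabase);
        System.out.println(ok ? "password accepted" : "password rejected");
    }
}
```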
{
"alpha_fraction": 0.7478421926498413,
"alphanum_fraction": 0.7527743577957153,
"avg_line_length": 44,
"blob_id": "e02633403d125369871a24dea51e0b75a2b3a93e",
"content_id": "e62fcf7b63580420f89b7db5e4a7a06bf8d14a55",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1622,
"license_type": "no_license",
"max_line_length": 324,
"num_lines": 36,
"path": "/images/tag/README.md",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "# Tag Microservice\n\n## Introduction\n\n**/tag** is a microservice for creating, listing, updating and deleting Tags from the database. A tag can have any name and are visable among all users. The *delete* and *put* operations are only available to the *admin* user.\n\n**Note:** This version is implemented in Go instead of Java.\n\n\n## Supported calls\nThe list of supported calls and their documentation are available via the swagger endpoint. This runs on localhost:8080/swagger/ when the application is up.\n\n## Fields for a Tag\nA Tag entry has the following fields:\n\nField | Purpose\n--- | ---\nid | Unique ID of the Tag. This is used by the *user books* to map tags to a users individual bok.\nname | Name of the tag. Examples: e-book, sci-fi, mystery, to-read.\n\n\n## Authorization\nIt is necessary to authorize all REST calls to this endpoint. This is done by obtaining an authorization token from the */authorize* endpoint and adding it to the HTTP headees with the key *AUTHORIZATION*. See [/authorize](https://github.com/hipposareevil/books/blob/master/images/authorize/README.md) for more information.\n\n\n## Go-kit Application\nThis uses go-kit for the framework and dep for the management of the dependencies (kindof like maven). A *vendor* directory will be created by dep in the *src/github.com/hipposareevil* sub-directory.\n\n## Docker \nThe Docker container will expose port 8080 to other containers on the *booknet* Docker network.\n\n## Libraries used\n\n* [go](https://golang.org/)\n* [go-kit](https://github.com/go-kit/kit) - microservice framework.\n* [dep](https://github.com/golang/dep) - depdendency management tool.\n\n\n"
},
{
"alpha_fraction": 0.6153212785720825,
"alphanum_fraction": 0.6161449551582336,
"avg_line_length": 16.509614944458008,
"blob_id": "89545626858f823a9d6e43e5bc5a91722a271d65",
"content_id": "189092a476fb7f9cbd8c34a0f380f11ab3456fea",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Java",
"length_bytes": 3642,
"license_type": "no_license",
"max_line_length": 61,
"num_lines": 208,
"path": "/images.java/user_book/src/main/java/com/wpff/resources/BookBean.java",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package com.wpff.resources;\n\nimport com.fasterxml.jackson.annotation.JsonIgnoreProperties;\n\n/**\n * Bean representing a Book from the 'book' webservice.\n * \n * Contains the following:\n * \n * <pre>\n{\n \"authorId\": 0,\n \"firstPublishedYear\": 0,\n \"title\": \"string\",\n \"isbns\": [\n \"string\"\n ],\n \"subjects\": [\n \"string\"\n ],\n \"description\": \"string\",\n \"openlibraryWorkUrl\": \"string\",\n \"imageSmall\": \"string\",\n \"imageMedium\": \"string\",\n \"imageLarge\": \"string\",\n \"id\": 0,\n \"authorName\": \"string\"\n}\n * </pre>\n */\n@JsonIgnoreProperties(ignoreUnknown = true)\npublic class BookBean {\n \n /**\n * Title of Book\n */\n private String title;\n \n /**\n * book_id\n */\n private int id;\n \n /**\n * Year of book\n */\n private int firstPublishedYear;\n \n /**\n * Author of book\n */\n private String authorName;\n \n /**\n * Author id\n */\n \tprivate int authorId;\n\n \tprivate String imageSmall;\n\n \tprivate String imageMedium;\n\n \tprivate String imageLarge;\n\n \t\n\n /**\n * @return the title\n */\n public String getTitle() {\n return title;\n }\n\n /**\n * @param title\n * the title to set\n */\n public void setTitle(String title) {\n this.title = title;\n }\n\n /**\n * @return the id\n */\n public int getId() {\n return id;\n }\n\n /**\n * @param id the id to set\n */\n public void setId(int id) {\n this.id = id;\n }\n\n\n\n /**\n * @return the author\n */\n public String getAuthorName() {\n return authorName;\n }\n\n /**\n * @param author the author to set\n */\n public void setAuthorName(String author) {\n this.authorName = author;\n }\n\n /**\n * @return the firstPublishedYear\n */\n public int getFirstPublishedYear() {\n return firstPublishedYear;\n }\n\n /**\n * @param firstPublishedYear the firstPublishedYear to set\n */\n public void setFirstPublishedYear(int firstPublishedYear) {\n this.firstPublishedYear = firstPublishedYear;\n }\n\n /**\n * @return the authorId\n */\n public int getAuthorId() {\n return authorId;\n }\n\n /**\n * @param authorId the authorId to set\n */\n public void setAuthorId(int authorId) {\n this.authorId = authorId;\n }\n\n /**\n * @return the imageSmall\n */\n public String getImageSmall() {\n return imageSmall;\n }\n\n /**\n * @param imageSmall the imageSmall to set\n */\n public void setImageSmall(String imageSmall) {\n this.imageSmall = imageSmall;\n }\n\n /**\n * @return the imageMedium\n */\n public String getImageMedium() {\n return imageMedium;\n }\n\n /**\n * @param imageMedium the imageMedium to set\n */\n public void setImageMedium(String imageMedium) {\n this.imageMedium = imageMedium;\n }\n\n /**\n * @return the imageLarge\n */\n public String getImageLarge() {\n return imageLarge;\n }\n\n /**\n * @param imageLarge the imageLarge to set\n */\n public void setImageLarge(String imageLarge) {\n this.imageLarge = imageLarge;\n }\n\n /* (non-Javadoc)\n * @see java.lang.Object#toString()\n */\n @Override\n public String toString() {\n StringBuilder builder = new StringBuilder();\n builder.append(\"BookBean [title=\");\n builder.append(title);\n builder.append(\", id=\");\n builder.append(id);\n builder.append(\", firstPublishedYear=\");\n builder.append(firstPublishedYear);\n builder.append(\", authorName=\");\n builder.append(authorName);\n builder.append(\", authorId=\");\n builder.append(authorId);\n builder.append(\", imageSmall=\");\n builder.append(imageSmall);\n builder.append(\", imageMedium=\");\n builder.append(imageMedium);\n builder.append(\", imageLarge=\");\n 
builder.append(imageLarge);\n builder.append(\"]\");\n return builder.toString();\n }\n\n}\n"
},
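BookBean is what the user_book service builds from the /book response: getBookInfo (earlier in this repo) feeds the raw JSON string to Jackson via mapper.readValue(bookBeanString, BookBean.class). A small sketch of that step follows; the JSON payload is a made-up sample, and @JsonIgnoreProperties(ignoreUnknown = true) on the bean means fields the bean doesn't model are silently skipped.

```java
import com.fasterxml.jackson.databind.ObjectMapper;

// Deserializing a /book-style payload into a BookBean, as the
// user_book service does. The sample JSON is hypothetical.
public class BookBeanDeserializeSketch {
    public static void main(String[] args) throws Exception {
        String json = "{\"id\": 42, \"title\": \"Foundation\","
                + " \"authorName\": \"Isaac Asimov\", \"unknownField\": true}";

        ObjectMapper mapper = new ObjectMapper();
        BookBean bean = mapper.readValue(json, BookBean.class);

        // toString() on the bean prints the copied fields
        System.out.println(bean);
    }
}
```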
{
"alpha_fraction": 0.654048502445221,
"alphanum_fraction": 0.655698835849762,
"avg_line_length": 22.531553268432617,
"blob_id": "a475a1b33880572d0456f19652117f64c6ea7330",
"content_id": "1197705b120a136ba22058f2a671a26883fd0a61",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 9695,
"license_type": "no_license",
"max_line_length": 96,
"num_lines": 412,
"path": "/images/tag/src/github.com/hipposareevil/tag/transport.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\n// Transport module\n//\n// Contains:\n// - endpoint creation\n// - encode responses to client\n// - decode client requests\n// - structures used. e.g. tagRequest, postTagRequest, etc\n\nimport (\n\t\"context\"\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"net/http\"\n\t\"strconv\"\n\t\"strings\"\n\n\t\"github.com/gorilla/mux\"\n\n\t\"github.com/go-kit/kit/endpoint\"\n)\n\n//////////////////////////////////////////////////////////\n//\n// Create endpoints\n\n// GET /tag/\n// Make endpoint for getting TAGS\nfunc makeTagsEndpoint(svc TagService) endpoint.Endpoint {\n\treturn func(ctx context.Context, request interface{}) (interface{}, error) {\n\t\t// convert request into a tagsRequest\n\t\treq := request.(tagsRequest)\n\n\t\t// call actual service with data from the req (tagsRequest)\n\t\ttags, err := svc.GetTags(req.Bearer, req.Offset, req.Limit)\n\t\treturn tagsResponse{\n\t\t\tData: tags,\n\t\t\tErr: err,\n\t\t}, nil\n\t}\n}\n\n// POST /tag/\n// Make endpoint for creating (via post) a tag\nfunc makePostTagEndpoint(svc TagService) endpoint.Endpoint {\n\treturn func(ctx context.Context, request interface{}) (interface{}, error) {\n\t\t// convert request into a tagsRequest\n\t\treq := request.(postTagRequest)\n\n\t\t// call actual service with data from the req (postTagRequest)\n\t\ttag, err := svc.CreateTag(req.Bearer, req.NewTagName)\n\t\treturn tagResponse{\n\t\t\tData: tag,\n\t\t\tErr: err,\n\t\t}, nil\n\t}\n}\n\n// PUT /tag/\n// Make endpoint for updating (via put) a tag\nfunc makePutTagEndpoint(svc TagService) endpoint.Endpoint {\n\treturn func(ctx context.Context, request interface{}) (interface{}, error) {\n\t\t// convert request into a putTagRequest\n\t\treq := request.(putTagRequest)\n\n\t\t// call actual service with data from the req (putTagRequest)\n\t\terr := svc.UpdateTag(req.Bearer, req.NewTagName, req.TagId)\n\t\treturn putTagResponse{\n\t\t\tErr: err,\n\t\t}, nil\n\t}\n}\n\n// GET /tag/<tag_id>\n// Make endpoint for getting single TAG\nfunc makeTagEndpoint(svc TagService) endpoint.Endpoint {\n\treturn func(ctx context.Context, request interface{}) (interface{}, error) {\n\t\t// convert request into a tagRequest\n\t\treq := request.(tagRequest)\n\n\t\t// call actual service with data from the req (tagsRequest)\n\t\ttag, err := svc.GetTag(req.Bearer, req.TagId)\n\t\treturn tagResponse{\n\t\t\tData: tag,\n\t\t\tErr: err,\n\t\t}, nil\n\t}\n}\n\n// DELETE /tag/<tag_id>\n// Make endpoint for deleting single TAG\nfunc makeDeleteTagEndpoint(svc TagService) endpoint.Endpoint {\n\treturn func(ctx context.Context, request interface{}) (interface{}, error) {\n\t\t// convert request into a tagRequest\n\t\t// This re-uses the normal tagRequest as the parameters are identical\n\t\treq := request.(tagRequest)\n\n\t\t// call actual service with data from the req (tagsRequest)\n\t\terr := svc.DeleteTag(req.Bearer, req.TagId)\n\t\treturn deleteTagResponse{\n\t\t\tErr: err,\n\t\t}, nil\n\t}\n}\n\n//////////////////////////////////////////////////////////\n//\n// Structures\n\n// GET request for single tag, contains:\n// - tag_id\n// - bearer\ntype tagRequest struct {\n\tTagId int `json:\"tag_id\"`\n\tBearer string `json:\"bearer\"`\n}\n\n// DELETE request for single tag, contains:\n// - tag_id\n// - bearer\ntype deleteTagRequest struct {\n\tTagId int `json:\"tag_id\"`\n\tBearer string `json:\"bearer\"`\n}\n\n// GET request for tags, contains:\n// - offset\n// - limit\n// - bearer\ntype tagsRequest struct {\n\tOffset int `json:\"offset\"`\n\tLimit int `json:\"limit\"`\n\tBearer 
string `json:\"bearer\"`\n}\n\n// This contains header information like:\n// - Bearer\n// - New tag name\ntype postTagRequest struct {\n\tBearer string `json:\"bearer\"`\n\tNewTagName string `json:\"String\"`\n}\n\n// PUT request, contains:\n// - Bearer\n// - Updated tag name\n// - tag id\ntype putTagRequest struct {\n\tTagId int `json:\"tag_id\"`\n\tBearer string `json:\"bearer\"`\n\tNewTagName string `json:\"String\"`\n}\n\n//////////////////////////////////////////////////////////\n//\n// Decode\n\n// Create a tagRequest\n// (used by getTag and deleteTag\n// /TAG/<tag_id>\n//\n// The tagRequest has 2 variables:\n// - TagId ID of tag taken from the path\n// - Bearer Authorization token\nfunc decodeTagRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\t///////////////////\n\t// Parse the URL\n\n\t// Demux the gorilla parsing\n\tvars := mux.Vars(r)\n\t// 'tag_id' is set in the gorilla handling in main.go\n\tid, ok := vars[\"tag_id\"]\n\tif !ok {\n\t\treturn nil, ErrBadRouting\n\t}\n\n\tvar tagId int\n\tif id != \"\" {\n\t\ttagId, _ = strconv.Atoi(id)\n\t}\n\n\t///////////////////\n\t// parse headers\n\tvar realBearer string\n\tbearer := r.Header.Get(\"authorization\")\n\n\t// Strip the 'Bearer ' from header\n\tif strings.HasPrefix(bearer, \"Bearer \") {\n\t\trealBearer = strings.Replace(bearer, \"Bearer \", \"\", 1)\n\t}\n\n\t// Make request for single tag\n\tvar request tagRequest\n\trequest = tagRequest{\n\t\tBearer: realBearer,\n\t\tTagId: tagId,\n\t}\n\n\treturn request, nil\n}\n\n// Create a postTagRequest\n// /TAG\n//\n// The postTagRequest has 2 variables:\n// - Name Name of tag to create\n// - Bearer Authorization token\nfunc decodePostTagRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\t///////////////////\n\t// Parse body\n\n\t// Decode the incoming JSON into a NewTag struct\n\n\tvar newTag NewTag\n\tif err := json.NewDecoder(r.Body).Decode(&newTag); err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Got the newtag info\n\tfmt.Println(\"Got new tag name: \", newTag.Name)\n\n\t///////////////////\n\t// Parse headers\n\tvar realBearer string\n\tbearer := r.Header.Get(\"authorization\")\n\n\t// Strip the 'Bearer ' from header\n\tif strings.HasPrefix(bearer, \"Bearer \") {\n\t\trealBearer = strings.Replace(bearer, \"Bearer \", \"\", 1)\n\t}\n\n\t// Make request for tags\n\tvar request postTagRequest\n\trequest = postTagRequest{\n\t\tBearer: realBearer,\n\t\tNewTagName: newTag.Name,\n\t}\n\n\treturn request, nil\n}\n\n// Create a PUT tagRequest\n// /TAG/<tag_id>\n//\n// The tagRequest has 2 variables:\n// - TagId ID of tag taken from the path\n// - Bearer Authorization token\n// - Name Name of tag to update\nfunc decodePutTagRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\t///////////////////\n\t// Parse the URL\n\n\t// Demux the gorilla parsing\n\tvars := mux.Vars(r)\n\t// 'tag_id' is set in the gorilla handling in main.go\n\tid, ok := vars[\"tag_id\"]\n\tif !ok {\n\t\treturn nil, ErrBadRouting\n\t}\n\n\t// Get tag id\n\tvar tagId int\n\tif id != \"\" {\n\t\ttagId, _ = strconv.Atoi(id)\n\t}\n\n\t///////////////////\n\t// Parse body\n\tvar updatedTag NewTag\n\tif err := json.NewDecoder(r.Body).Decode(&updatedTag); err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Got the newtag info\n\tfmt.Println(\"Got new updated name: \", updatedTag.Name)\n\n\t///////////////////\n\t// parse headers\n\tvar realBearer string\n\tbearer := r.Header.Get(\"authorization\")\n\n\t// Strip the 'Bearer ' from header\n\tif strings.HasPrefix(bearer, \"Bearer 
\") {\n\t\trealBearer = strings.Replace(bearer, \"Bearer \", \"\", 1)\n\t}\n\n\t// Make request for single tag\n\tvar request putTagRequest\n\trequest = putTagRequest{\n\t\tBearer: realBearer,\n\t\tTagId: tagId,\n\t\tNewTagName: updatedTag.Name,\n\t}\n\n\treturn request, nil\n}\n\n// Create a tagsRequest from the context and http.Request\n// /TAG/\n//\n// The tagsRequest has 3 variables:\n// - Offset Offset into the query\n// - Limit Number of values to return\n// - Bearer Authorization token\nfunc decodeTagsRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\t///////////////////\n\t// Parse parameters\n\tr.ParseForm()\n\tvalues := r.Form\n\n\t// Get values from the form, where 'offset' & 'limit' are parameters\n\tvar realOffset int\n\tvar realLimit int\n\n\t// Offset, use a default of 0\n\toffset := values.Get(\"offset\")\n\tif offset != \"\" {\n\t\trealOffset, _ = strconv.Atoi(offset)\n\t} else {\n\t\trealOffset = 0\n\t}\n\n\t// Limit, set a default if it doesn't exist\n\tlimit := values.Get(\"limit\")\n\tif limit != \"\" {\n\t\trealLimit, _ = strconv.Atoi(limit)\n\t} else {\n\t\t// default to get 20\n\t\trealLimit = 20\n\t}\n\n\t///////////////////\n\t// parse headers\n\tvar realBearer string\n\tbearer := r.Header.Get(\"authorization\")\n\n\t// Strip the 'Bearer ' from header\n\tif strings.HasPrefix(bearer, \"Bearer \") {\n\t\trealBearer = strings.Replace(bearer, \"Bearer \", \"\", 1)\n\t}\n\n\t// Make request for tags\n\tvar request tagsRequest\n\trequest = tagsRequest{\n\t\tOffset: realOffset,\n\t\tLimit: realLimit,\n\t\tBearer: realBearer,\n\t}\n\n\treturn request, nil\n}\n\n//////////////////////////////////////////////////////////\n//\n// Encode responses to client\n\n// The response can/should be of type errorer and thus can be cast to check if there is an error\nfunc encodeResponse(ctx context.Context, w http.ResponseWriter, response interface{}) error {\n\tfmt.Println(\"encoding tag response.\")\n\t// Set JSON type\n\tw.Header().Set(\"Content-Type\", \"application/json; charset=utf-8\")\n\n\t// Check error\n\tif e, ok := response.(errorer); ok {\n\t\t// This is a errorer class, now check for error\n\t\tif err := e.error(); err != nil {\n\t\t\tencodeError(ctx, e.error(), w)\n\t\t\treturn nil\n\t\t}\n\t}\n\n\t// cast to dataHolder to get Data, otherwise just encode the resposne\n\tif holder, ok := response.(dataHolder); ok {\n\t\treturn json.NewEncoder(w).Encode(holder.getData())\n\t} else {\n\t\treturn json.NewEncoder(w).Encode(response)\n\t}\n}\n\n// Write the incoming err into the response writer\nfunc encodeError(_ context.Context, err error, w http.ResponseWriter) {\n\tif err == nil {\n\t\tpanic(\"encodeError with nil error\")\n\t}\n\n\tw.Header().Set(\"Content-Type\", \"application/json; charset=utf-8\")\n\n\t// Write actual error code\n\tcode := codeFrom(err)\n\tw.WriteHeader(code)\n\n\t// write out the error message\n\tjson.NewEncoder(w).Encode(map[string]interface{}{\n\t\t\"code\": code,\n\t\t\"message\": err.Error(),\n\t})\n}\n\n// Determine the HTTP error code from the incoming error 'err'\nfunc codeFrom(err error) int {\n\tswitch err {\n\tcase ErrNotFound:\n\t\treturn http.StatusNotFound\n\tcase ErrAlreadyExists:\n\t\treturn http.StatusConflict\n\tcase ErrUnauthorized:\n\t\treturn http.StatusUnauthorized\n\tdefault:\n\t\treturn http.StatusInternalServerError\n\t}\n}\n"
},
{
"alpha_fraction": 0.6257787942886353,
"alphanum_fraction": 0.6296728849411011,
"avg_line_length": 28.18181800842285,
"blob_id": "fc82cd21ac8ac60eac127d8a840d84ac787812ea",
"content_id": "c104c2ea046355d2d362e670127b631357983d7e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 2568,
"license_type": "no_license",
"max_line_length": 94,
"num_lines": 88,
"path": "/images/frontend/build_prod.sh",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "#!/bin/bash\n\n####################\n#\n# Build the dev and production versions of frontend.\n#\n# 1- Create dev version.\n# This relies on the content/mybooks directory being mounted into /scratch in the container\n# \n# 2- Run npm in a container that is mounted to content/mybooks\n# This will install everything for NPM and run a build,\n# which results in the content/mybooks/dist directory\n# \n# 3- Create prod version\n# This copies the content/mybooks/dist directory into Nginx's html dir.\n# \n####################\n\n# our real directory (so this can be called from outside directories)\nour_directory=\"$( cd \"$( dirname \"${BASH_SOURCE[0]}\" )\" && pwd )\"\n\n# image we build\nproject=$(cat $our_directory/webservice.name | xargs)\nproject_version=$(cat $our_directory/../webservice.version | xargs)\nbase_image_name=\"books.${project}\"\nimage_name=\"${base_image_name}:${project_version}\"\n\ndev_image_name=\"${image_name}-dev\"\n\n# keep track of time\nthen=$(date +%s)\necho \"[[Building Docker image '$image_name']]\"\n\n## 1\n# build dev first\necho \"Building the ${dev_image_name} version first\"\n$our_directory/build_dev.sh\n\n## 2\n# Run npm run build in container\necho \"Now building $image_name\"\n# make directory that we own so it's not owned by root\nmkdir -p $our_directory/content/mybooks/dist\ndocker run --rm -it -v $our_directory/content/mybooks:/scratch ${dev_image_name} npm install \ndocker run --rm -it -v $our_directory/content/mybooks:/scratch ${dev_image_name} npm run build\n\n# Get docker to skip the cache:\nthedate=$(date)\necho \"$thedate\" > ${our_directory}/content/mybooks/dist/build.time\n\n## 3\n# set docker image labels\nBUILD_TIME=$(date +%Y-%m-%dT%H:%M:%S%Z)\nVERSION_TAG=${project_version}\n\n# build\ndocker build \\\n -f ${our_directory}/Dockerfile.prod \\\n -t ${image_name} \\\n --build-arg BUILD_TIME=${BUILD_TIME} \\\n --build-arg VERSION=${VERSION_TAG} \\\n \"$our_directory\" \nbuild_result=$?\n\n# check result\nnow=$(date +%s)\nelapsed=$(expr $now - $then)\n\nif [ $build_result -eq 0 ]; then\n echo \"\"\n echo \"[[Built $image_name in $elapsed second(s)]]\"\n\n # tag as latest\n output=$(docker tag ${image_name} ${base_image_name}:latest)\n tag_result=$?\n if [ $tag_result -eq 0 ]; then\n echo \"[[Tagged \\\"${image_name}\\\" as \\\"${base_image_name}:latest\\\"]]\"\n else\n echo \"[[Unable to tag image as latest!!!!]]\"\n fi\n\nelse\n echo \"\"\n echo \"!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\"\n echo \"Unable to build Docker image for $image_name\"\n echo \"!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\"\n exit 1\nfi\n"
},
{
"alpha_fraction": 0.5660247802734375,
"alphanum_fraction": 0.5660247802734375,
"avg_line_length": 21.369230270385742,
"blob_id": "763832af0bcaf3ee156cb1d8679ab058b44c39ba",
"content_id": "92f99d8b474fc242601949d541d7af293534862c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 1454,
"license_type": "no_license",
"max_line_length": 59,
"num_lines": 65,
"path": "/images/review/src/github.com/hipposareevil/review/structures.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\n///////////////////\n// Request Structures\n\n// GET request for all user book, contains:\ntype getReviewsRequest struct {\n\tBearer string `json:\"bearer\"`\n\tOffset int `json:\"offset\"`\n\tLimit int `json:\"limit\"`\n\tBookId int `json:\"book_id\"`\n}\n\n//////////////////////////////////\n// Response structures\n//////////////////////////////////\n\ntype Review struct {\n\tBookId int `json:\"bookId\"`\n\tRating bool `json:\"rating\"`\n\tTags []string `json:\"tags\"`\n\tReview string `json:\"review\"`\n\tUserName string `json:\"userName\"`\n\tUserId int `json:\"UserId\"`\n}\n\ntype Reviews struct {\n\tOffset int `json:\"offset\"`\n\tLimit int `json:\"limit\"`\n\tTotal int `json:\"total\"`\n\tData []Review `json:\"data\"`\n}\n\n///////////////////////\n// Structures from other services\n//\n// These will be truncated versions from the other services\n// as we don't want every field\n///////////////////////\n\ntype User struct {\n\tId int `json:\"id\"`\n\tName string `json:\"name\"`\n}\n\ntype Users struct {\n\tOffset int `json:\"offset\"`\n\tLimit int `json:\"limit\"`\n\tTotal int `json:\"total\"`\n\tData []User `json:\"data\"`\n}\n\ntype UserBook struct {\n\tBookId int `json:\"bookId\"`\n\tRating bool `json:\"rating\"`\n\tTags []string `json:\"tags\"`\n\tReview string `json:\"review\"`\n}\n\ntype UserBooks struct {\n\tOffset int `json:\"offset\"`\n\tLimit int `json:\"limit\"`\n\tTotal int `json:\"total\"`\n\tData []UserBook `json:\"data\"`\n}\n"
},
{
"alpha_fraction": 0.6753122210502625,
"alphanum_fraction": 0.6817163228988647,
"avg_line_length": 27.91666603088379,
"blob_id": "1f5236baa663e94fce00c33a497aed223da70d97",
"content_id": "105a1fd9667123449f2ac2f17d4f268d64002f5a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 3123,
"license_type": "no_license",
"max_line_length": 94,
"num_lines": 108,
"path": "/images/tag/src/github.com/hipposareevil/tag/authentication.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\n// Authentication module for authenticating incoming REST calls against the\n// redis DB using the 'authorization' Header that should contain 'Bearer: qerty-1234'\n\nimport (\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"net/http\"\n\t\"strings\"\n\n\t\"github.com/mediocregopher/radix.v2/pool\"\n\t_ \"github.com/mediocregopher/radix.v2/redis\"\n)\n\n// Authenticate a handler. This will return a 401 to the client if authentication has failed\n//\n// params:\n// onlyAdminGroup - true if only users belonging to the admin group can access the resource\n// false if any authenticated user can access the resource\n// redisPool - Pool to get redis connection\n// next - Handler to call if the authentication is successful\n//\n// return:\n// a http.Handler\nfunc Authenticate(onlyAdminGroup bool, redisPool *pool.Pool, next http.Handler) http.Handler {\n\t// return new handler that wraps 'next'\n\treturn http.HandlerFunc(func(writer http.ResponseWriter, request *http.Request) {\n\n\t\t/////////////////\n\t\t// Redis\n\t\tconn, err := redisPool.Get()\n\t\tif err != nil {\n\t\t\tfmt.Println(\"Got error when calling pool.Get: \", err)\n\t\t\twriter.WriteHeader(http.StatusInternalServerError)\n\t\t\treturn\n\t\t}\n\t\tdefer redisPool.Put(conn)\n\n\t\t///////////////////\n\t\t// Parse headers\n\t\tvar bearer string\n\t\tfullBearerString := request.Header.Get(\"authorization\")\n\n\t\t// Strip the 'Bearer ' from header\n\t\tif strings.HasPrefix(fullBearerString, \"Bearer \") {\n\t\t\tbearer = strings.Replace(fullBearerString, \"Bearer \", \"\", 1)\n\t\t}\n\n\t\t// Key to query in redis\n\t\tredisHashName := \"user:\" + bearer\n\n\t\t// Check redis. If it is null, authentication failed\n\t\tuserName, err := conn.Cmd(\"HGET\", redisHashName, \"name\").Str()\n\n\t\tfmt.Println(\"Got username: \", userName)\n\n\t\tif err != nil {\n\t\t\t// No authorization -> send a 401\n\t\t\twriteUnauthorizedError(writer)\n\t\t} else {\n\t\t\t// If this endpoint is protected by group check for 'admin'\n\t\t\tif onlyAdminGroup {\n\t\t\t\t// Check for 'admin' group\n\t\t\t\tgroupName, err := conn.Cmd(\"HGET\", redisHashName, \"group\").Str()\n\t\t\t\tfmt.Println(\"Got groupName: \", groupName)\n\n\t\t\t\tif err != nil {\n\t\t\t\t\t// No group authorization -> send a 401\n\t\t\t\t\twriteUnauthorizedError(writer)\n\t\t\t\t} else {\n\t\t\t\t\t// is group 'admin' ?\n\t\t\t\t\tif strings.Compare(groupName, \"admin\") == 0 {\n\t\t\t\t\t\t// admin group\n\t\t\t\t\t\tnext.ServeHTTP(writer, request)\n\t\t\t\t\t} else {\n\t\t\t\t\t\t// not admin group\n\t\t\t\t\t\t// Incorrect group authorization -> send a 401\n\t\t\t\t\t\twriteUnauthorizedError(writer)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\t// done checking admin group\n\t\t\t} else {\n\t\t\t\t// Endpoint doesn't need to verify 'admin' group\n\n\t\t\t\t// User is authorized, continue as normal\n\t\t\t\tnext.ServeHTTP(writer, request)\n\t\t\t}\n\t\t}\n\t})\n}\n\n// Write unauthorized error to the writer\nfunc writeUnauthorizedError(writer http.ResponseWriter) {\n\t// send 401\n\twriter.Header().Set(\"Content-Type\", \"application/json; charset=utf-8\")\n\twriter.WriteHeader(http.StatusUnauthorized)\n\n\t// Create JSON for the output\n\tjsonError := ErrorResponse{\n\t\tCode: http.StatusUnauthorized,\n\t\tMessage: \"Must supply valid Authorization header. Authenticate at /auth/token\",\n\t}\n\n\t// Write out the JSON error\n\tjson.NewEncoder(writer).Encode(jsonError)\n\n}\n"
},
{
"alpha_fraction": 0.5818865895271301,
"alphanum_fraction": 0.5818865895271301,
"avg_line_length": 27.09756088256836,
"blob_id": "c3cc671a14d5dad88d03209cd5318e58ce2c2adf",
"content_id": "c5cea8c207208e51473e430334234b39c9fbe58a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 3456,
"license_type": "no_license",
"max_line_length": 57,
"num_lines": 123,
"path": "/images/user_book/src/github.com/hipposareevil/user_book/structures.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\nimport (\n\t\"time\"\n)\n\n///////////////////\n// Structures\n\n// GET request for all user book, contains:\ntype getAllUserBooksRequest struct {\n\tBearer string `json:\"bearer\"`\n\tOffset int `json:\"offset\"`\n\tLimit int `json:\"limit\"`\n\tUserId int `json:\"user_id\"`\n\tTitle string `json:\"title\"`\n\tBookId int `json:\"book_id\"`\n\tTag []string `json:\"tag\"`\n}\n\n// GET request for single user book\ntype getUserBookRequest struct {\n\tBearer string `json:\"bearer\"`\n\tUserId int `json:\"user_id\"`\n\tUserBookId int `json:\"user_book_id\"`\n}\n\n// DELETE request for single user book\ntype deleteUserBookRequest struct {\n\tUserId int `json:\"user_id\"`\n\tUserBookId int `json:\"user_book_id\"`\n}\n\n// POST request to create user book\ntype createUserBookRequest struct {\n\tBearer string `json:\"bearer\"`\n\tUserId int `json:\"user_id\"`\n\tBookId int `json:\"bookId\"`\n\tRating bool `json:\"rating\"`\n\tTags []string `json:\"tags\"`\n\tReview string `json:\"review\"`\n}\n\n// PUT request to update user book\ntype updateUserBookRequest struct {\n\tBearer string `json:\"bearer\"`\n\tUserId int `json:\"user_id\"`\n\tUserBookId int `json:\"user_book_id\"`\n\tBookId int `json:\"bookId\"`\n\tRating *bool `json:\"rating\"`\n\tTags *[]string `json:\"tags\"`\n\tReview *string `json:\"review\"`\n}\n\n//////////////////////////////////\n// Response structures\n//////////////////////////////////\n\ntype UserBook struct {\n\tBookId int `json:\"bookId\"`\n\tRating bool `json:\"rating\"`\n\tTags []string `json:\"tags\"`\n\tUserId int `json:\"userId\"`\n\tUserBookId int `json:\"userBookId\"`\n\tDateAdded time.Time `json:\"dateAdded\"`\n\tTitle string `json:\"title\"`\n\tReview string `json:\"review\"`\n\tAuthorName string `json:\"authorName\"`\n\tAuthorId int `json:\"authorId\"`\n\tFirstPublishedYear int `json:\"firstPublishedYear\"`\n\tImageLarge string `json:\"imageLarge\"`\n\tImageMedium string `json:\"imageMedium\"`\n\tImageSmall string `json:\"imageSmall\"`\n}\n\ntype UserBooks struct {\n\tOffset int `json:\"offset\"`\n\tLimit int `json:\"limit\"`\n\tTotal int `json:\"total\"`\n\tData []UserBook `json:\"data\"`\n}\n\n//////////////////////////////////////\n// json structures from calling other services\n//\n\ntype Tag struct {\n\tID int `json:\"id\"`\n\tName string `json:\"name\"`\n}\n\n// List of Tags\n// JSON Response sent to client\ntype Tags struct {\n\tOffset int `json:\"offset\"`\n\tLimit int `json:\"limit\"`\n\tTotal int `json:\"total\"`\n\tData []Tag `json:\"data\"`\n}\n\n// Single book\ntype Book struct {\n\tAuthorId int `json:\"authorId\"`\n\tAuthorName string `json:\"authorName\"`\n\tDescription string `json:\"description\"`\n\tFirstPublishedYear int `json:\"firstPublishedYear\"`\n\tGoodReadsUrl string `json:\"goodreadsUrl\"`\n\tId int `json:\"id\"`\n\tImageLarge string `json:\"imageLarge\"`\n\tImageMedium string `json:\"imageMedium\"`\n\tImageSmall string `json:\"imageSmall\"`\n\tIsbns []string `json:\"isbns\"`\n\tOpenlibraryWorkUrl string `json:\"openlibraryWorkUrl\"`\n\tSubjects []string `json:\"subjects\"`\n\tTitle string `json:\"title\"`\n}\n\ntype Books struct {\n\tOffset int `json:\"offset\"`\n\tLimit int `json:\"limit\"`\n\tTotal int `json:\"total\"`\n\tData []Book `json:\"data\"`\n}\n"
},
{
"alpha_fraction": 0.5735878944396973,
"alphanum_fraction": 0.5783611536026001,
"avg_line_length": 14.911392211914062,
"blob_id": "59e4ea83601e649f18769317335410dcf6262a96",
"content_id": "b025763fedb409c5b50458521eab3903b169ba55",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Java",
"length_bytes": 1257,
"license_type": "no_license",
"max_line_length": 49,
"num_lines": 79,
"path": "/images.java/mybooks_common/src/main/java/com/wpff/common/result/Segment.java",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package com.wpff.common.result;\n\npublic class Segment {\n \n int DEFAULT_OFFSET = 0;\n int DEFAULT_LIMIT = 20;\n \n /**\n * Offset (start) of data segment\n */\n Integer offset;\n \n /**\n * Limit (length) of data segment\n */\n Integer limit;\n \n /**\n * Number of total datum\n */\n Long totalLength = 0L;\n\n public Segment(Integer offset, Integer limit) {\n super();\n if (offset == null || offset < 0) {\n offset = DEFAULT_OFFSET;\n }\n if (limit == null || limit < 0) {\n limit = DEFAULT_LIMIT;\n }\n\n this.offset = offset;\n this.limit = limit;\n }\n\n /**\n * @return the offset\n */\n public Integer getOffset() {\n return offset;\n }\n\n /**\n * @param offset the offset to set\n */\n public void setOffset(Integer offset) {\n this.offset = offset;\n }\n\n\n /**\n * @return the limit\n */\n public Integer getLimit() {\n return limit;\n }\n\n /**\n * @param limit the limit to set\n */\n public void setLimit(Integer limit) {\n this.limit = limit;\n }\n\n /**\n * @return the totalLength\n */\n public Long getTotalLength() {\n return totalLength;\n }\n\n /**\n * @param totalLength the totalLength to set\n */\n public void setTotalLength(Long totalLength) {\n this.totalLength = totalLength;\n }\n\n}\n"
},
{
"alpha_fraction": 0.5555555820465088,
"alphanum_fraction": 0.5555555820465088,
"avg_line_length": 13.351351737976074,
"blob_id": "2ce4dcb8ff477ea2da31429c55196aa3ada0b2ad",
"content_id": "5e036486b5f3ec937c7942cfebc1755ebdd90575",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Java",
"length_bytes": 531,
"license_type": "no_license",
"max_line_length": 38,
"num_lines": 37,
"path": "/images.java/query/src/main/java/wpff/openlibrary/beans/Description.java",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package wpff.openlibrary.beans;\n\npublic class Description {\n\n public String type;\n public String value;\n\n /**\n * @return the type\n */\n public String getType() {\n return type;\n }\n\n /**\n * @param type\n * the type to set\n */\n public void setType(String type) {\n this.type = type;\n }\n\n /**\n * @return the value\n */\n public String getValue() {\n return value;\n }\n\n /**\n * @param value\n * the value to set\n */\n public void setValue(String value) {\n this.value = value;\n }\n}\n"
},
{
"alpha_fraction": 0.6161026954650879,
"alphanum_fraction": 0.6161026954650879,
"avg_line_length": 17.630434036254883,
"blob_id": "d829c794e8c8d915aa55878753690f2ab9903dc9",
"content_id": "876060ab9b2de9a1ccf13a0d4f4566c3854a234a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Java",
"length_bytes": 857,
"license_type": "no_license",
"max_line_length": 75,
"num_lines": 46,
"path": "/images.java/author/src/main/java/com/wpff/result/AuthorResult.java",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package com.wpff.result;\n\nimport com.wpff.query.AuthorQuery;\n\n/**\n * Bean representing an author that is returned to the caller. This is used\n * instead of the core.Author class so we can control the structure of the\n * format more closely.\n *\n */\npublic class AuthorResult extends AuthorQuery {\n private int id;\n\n /**\n * @return the id\n */\n public int getId() {\n return id;\n }\n\n /**\n * @param id\n * the id to set\n */\n public void setId(int id) {\n this.id = id;\n }\n\n /*\n * (non-Javadoc)\n * \n * @see java.lang.Object#toString()\n */\n @Override\n public String toString() {\n StringBuilder builder = new StringBuilder();\n builder.append(\"AuthorResult [id=\");\n builder.append(id);\n builder.append(\", \");\n builder.append(super.toString());\n builder.append(\"]\");\n return builder.toString();\n }\n\n \n}\n"
},
{
"alpha_fraction": 0.5953141450881958,
"alphanum_fraction": 0.6001064777374268,
"avg_line_length": 25.45070457458496,
"blob_id": "02b1eb36719c53ca847b11ba5af06384f64df0c1",
"content_id": "4a5d1455d0ea26fa87806a3053adf11dec17c0d9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 1878,
"license_type": "no_license",
"max_line_length": 114,
"num_lines": 71,
"path": "/tagAndPushToRepo.sh",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env bash\n\n###################\n# Script to tag all 'books' images and push them to a repository\n# \n###################\n\n########\n# Set up variables\n#\n########\ninitialize_variables() {\n OUR_DIRECTORY=\"$( cd \"$( dirname \"${BASH_SOURCE[0]}\" )\" >/dev/null 2>&1 && pwd )\"\n\n # load in metadata from the .env file\n . ${OUR_DIRECTORY}/.env\n\n if [ -z $BOOK_REPOSITORY ]; then\n echo \"Must set the BOOK_REPOSITORY environment variable\"\n exit 1\n fi\n\n # Get version from webservice.version\n IMAGE_VERSION=$(cat ${OUR_DIRECTORY}/images/webservice.version)\n}\n\n\n############\n# Tag and push an incoming name. It will be prepended with the\n# ${BOOK_REPOSITORY} repository info.\n############\ntag_and_push() {\n what_to_tag=$1\n version_tag=$2\n\n image_id=$(docker images $what_to_tag | grep $version_tag | grep -v REPOSITORY | awk '{print $3}' | head -1)\n if [ -z \"$image_id\" ]; then\n echo \"Unable to find $what_to_tag image\"\n exit 1\n fi\n echo \"\"\n echo \"Tagging and pushing '${image_id}'/'${what_to_tag}' as '${BOOK_REPOSITORY}${what_to_tag}:${version_tag}'\"\n newtag=${BOOK_REPOSITORY}${what_to_tag}:${version_tag}\n docker tag ${image_id} ${newtag}\n docker push ${newtag}\n}\n\n############\n# main\n# \n############\nmain() {\n initialize_variables\n \n\n tag_and_push \"books.author\" $IMAGE_VERSION\n tag_and_push \"books.authorize\" $IMAGE_VERSION\n tag_and_push \"books.book\" $IMAGE_VERSION\n tag_and_push \"books.frontend\" $IMAGE_VERSION\n tag_and_push \"books.frontend\" ${IMAGE_VERSION}-dev\n tag_and_push \"books.gateway\" $IMAGE_VERSION\n tag_and_push \"books.query\" $IMAGE_VERSION\n tag_and_push \"books.review\" $IMAGE_VERSION\n tag_and_push \"books.tag\" $IMAGE_VERSION\n tag_and_push \"books.user\" $IMAGE_VERSION\n tag_and_push \"books.user_book\" $IMAGE_VERSION\n}\n\n\n# Call main\nmain \"$@\"\n"
},
{
"alpha_fraction": 0.6323631405830383,
"alphanum_fraction": 0.6389466524124146,
"avg_line_length": 21.372093200683594,
"blob_id": "6a903b4e51e7cde86ba9b9b1da7ad7aa6d2162e4",
"content_id": "847b57259d78c9d85bdd98e8992783e5e9059fc5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Java",
"length_bytes": 2886,
"license_type": "no_license",
"max_line_length": 82,
"num_lines": 129,
"path": "/images.java/user_book/src/main/java/com/wpff/core/TagMapping.java",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package com.wpff.core;\n\nimport java.util.Objects;\n\nimport javax.persistence.Column;\nimport javax.persistence.Entity;\nimport javax.persistence.Id;\nimport javax.persistence.NamedQueries;\nimport javax.persistence.NamedQuery;\nimport javax.persistence.Table;\n\n/**\n * Named query to select all tags\n */\n@Entity\n@Table(name = \"tagmapping\")\n@NamedQueries({ \n\t @NamedQuery(name = \"com.wpff.core.TagMapping.findAll\", \n\t \t\t query = \"SELECT u FROM TagMapping u\"),\n\t\t\n\t @NamedQuery(name = \"com.wpff.core.TagMapping.findByUserBookId\",\n\t\t\t\t query = \"SELECT u FROM TagMapping u \"\n\t\t\t\t + \"WHERE u.user_book_id = :user_book_id\"), \n\n\t @NamedQuery(name = \"com.wpff.core.TagMapping.deleteUserBook\",\n\t\t\t\t query = \"DELETE FROM TagMapping u \"\n\t\t\t\t + \"WHERE u.user_book_id = :user_book_id\"),\n\t\t\n\t @NamedQuery(name = \"com.wpff.core.TagMapping.getTagMappingsForUserAndTagIds\",\n\t\t\t\t query = \"SELECT u FROM TagMapping u \"\n\t\t\t\t + \"WHERE u.user_id = :user_id AND \"\n\t\t\t\t + \"u.tag_id = :tag_id\")\n\t })\n/**\n * Class to map a UserBook to a set of Tags. Uses the 'tagmapping' table.\n */\npublic class TagMapping implements java.io.Serializable {\n\n\tprivate static final long serialVersionUID = -9079509977863749820L;\n\n\t/**\n\t * Tag ID.\n\t */\n\t@Column(name = \"tag_id\", nullable = false)\n\t@Id\n\tprivate int tag_id;\n\n\t/**\n\t * User book ID.\n\t */\n\t@Column(name = \"user_book_id\", nullable = false)\n\t@Id\n\tprivate int user_book_id;\n\t\n\t/**\n\t * User ID\n\t */\n\t@Column(name = \"user_id\", nullable = false)\n\t@Id\n\tprivate int user_id;\n\n\t/**\n\t * Default constructor\n\t */\n\tpublic TagMapping() {\n\t}\n\n\tpublic TagMapping(int user_id, int user_book_id, int tag_id) {\n\t this.user_id = user_id;\n\t\tthis.user_book_id = user_book_id;\n\t\tthis.tag_id = tag_id;\n\t}\n\n\tpublic String toString() {\n\t\tStringBuilder string = new StringBuilder();\n\t\tstring.append(\"TagMapping[\");\n\t\tstring.append(\"userBookId='\" + user_book_id + \"',\");\n\t\tstring.append(\"userId='\" + user_id+ \"',\");\t\t\n\t\tstring.append(\"tagId='\" + tag_id + \"' \");\n\t\tstring.append(\"]\");\n\t\treturn string.toString();\n\t}\n\n\tpublic int getTagId() {\n\t\treturn this.tag_id;\n\t}\n\n\tpublic void setTagId(int id) {\n\t\tthis.tag_id = id;\n\t}\n\n\tpublic int getUserBookId() {\n\t\treturn this.user_book_id;\n\t}\n\n public void setUserBookId(int id) {\n this.user_book_id = id;\n }\n\n public int getUserId() {\n return this.user_id;\n }\n\n public void setUserId(int id) {\n this.user_id = id;\n }\n\n\t@Override\n\tpublic boolean equals(Object o) {\n\t\tif (this == o) {\n\t\t\treturn true;\n\t\t}\n\t\tif (!(o instanceof TagMapping)) {\n\t\t\treturn false;\n\t\t}\n\n\t\tfinal TagMapping that = (TagMapping) o;\n\n\t\treturn Objects.equals(this.user_id, that.user_id)\n\t\t && Objects.equals(this.user_book_id, that.user_book_id)\n\t\t && Objects.equals(this.tag_id, that.tag_id);\n\t}\n\n\t@Override\n\tpublic int hashCode() {\n\t\treturn Objects.hash(this.user_book_id, this.tag_id, this.user_id);\n\t}\n\n}\n"
},
{
"alpha_fraction": 0.6939461827278137,
"alphanum_fraction": 0.6939461827278137,
"avg_line_length": 30.85714340209961,
"blob_id": "40d835e4c65c58b265e62faa7dd6f58d48cfc134",
"content_id": "c15f05260078be61049cbeedc43947d45688ed45",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 892,
"license_type": "no_license",
"max_line_length": 188,
"num_lines": 28,
"path": "/images/frontend/README.md",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "# Frontend Web Application\n\n## Introduction\n\n**/** is a single-page application written in [vue](https://vuejs.org/) using [bulma](http://bulma.io/) for style.\n\n## Screen grab of front page\n\n\n\n## Docker Images\n\nThere are two docker images for the frontend; _prod_ and _dev_. The _prod_ is essentially a single down compiled javascript file. Where _dev_ is designed to be run out of a mounted volume.\n\nWhen running in _dev_ mode, the docker-compose.yml file should have this for the *frontend* segment:\n~~~~\n # Frontend webpage + js\n frontend:\n container_name: frontend\n image: ${BOOK_REPOSITORY}books.frontend:dev\n restart: always\n logging:\n driver: \"json-file\"\n volumes:\n - \"./images/frontend/content/mybooks/:/scratch/\"\n networks:\n - booknet\n~~~~\n"
},
{
"alpha_fraction": 0.7169042825698853,
"alphanum_fraction": 0.7169042825698853,
"avg_line_length": 24.842105865478516,
"blob_id": "9e88df260b4fa7e0230a289af73f8420f464193d",
"content_id": "dfb57b4b206132d939841d81738f578068d9f021",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 491,
"license_type": "no_license",
"max_line_length": 98,
"num_lines": 19,
"path": "/images/authorization/src/github.com/hipposareevil/authorization/errors.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\nimport (\n\t\"errors\"\n)\n\n// Error response as JSON with 'code' and 'message'\ntype ErrorResponse struct {\n\tCode int `json:\"code\"`\n\tMessage string `json:\"message\"`\n}\n\n// List of common errors, used to write back HTTP error codes\nvar (\n\tErrNotFound = errors.New(\"User not found\")\n\tErrUnauthorized = errors.New(\"Unauthorized\")\n\tErrBadRouting = errors.New(\"Inconsistent mapping between route and handler (programmer error)\")\n\tErrServerError = errors.New(\"Server error\")\n)\n"
},
{
"alpha_fraction": 0.7044943571090698,
"alphanum_fraction": 0.7089887857437134,
"avg_line_length": 18.77777862548828,
"blob_id": "100ba5d56059e836b623011789e2e9acbc4fedca",
"content_id": "5f62c02de537b40c7404fac39be666d36570fb99",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Java",
"length_bytes": 890,
"license_type": "no_license",
"max_line_length": 95,
"num_lines": 45,
"path": "/images.java/query/src/main/java/wpff/google/BookQueryBean.java",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package wpff.google;\n\nimport java.util.HashMap;\nimport java.util.Map;\n\n/**\n * Represents a book, with title and author.\n */\npublic class BookQueryBean {\n\n\t/**\n\t * Type of Identification for this book,\n\t */\n\tpublic static enum ID_TYPE {\n\t\tISBN_10, ISBN_13\n\t};\n\n\tprivate final String title;\n\tprivate final String author;\n\tprivate final String publicationDate;\n\tprivate final Map<ID_TYPE, String> ids = new HashMap<ID_TYPE, String>();\n\n\tBookQueryBean(String title, String author, String publicationDate, Map<ID_TYPE, String> ids) {\n\t\tthis.title = title;\n\t\tthis.author = author;\n\t\tthis.publicationDate = publicationDate;\n\t\tthis.ids.putAll(ids);\n\t}\n\n\tpublic String getAuthor() {\n\t\treturn this.author;\n\t}\n\n\tpublic String getTitle() {\n\t\treturn this.title;\n\t}\n\n\tpublic String getPublicationDate() {\n\t\treturn this.publicationDate;\n\t}\n\n\tpublic Map<ID_TYPE, String> getIds() {\n\t\treturn this.ids;\n\t}\n}\n"
},
{
"alpha_fraction": 0.5047821402549744,
"alphanum_fraction": 0.5115834474563599,
"avg_line_length": 21.511960983276367,
"blob_id": "28641ca62eaf41674d1af039780dd4d94a9105bb",
"content_id": "243cba5645c22285df184b051d0a900c6f884f6a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 4705,
"license_type": "no_license",
"max_line_length": 167,
"num_lines": 209,
"path": "/buildfiles/build_java.sh",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "#################################\n#\n# Build file for java projects.\n# \n# This is sourced from the main 'build-services.sh'\n# build file.\n#\n# \n#################################\n\n#############\n# Copy common jars into a local lib directory to be\n# accessible for a docker version of maven or gradle\n#\n#############\n_copy_common_jars() {\n rm -rf /tmp/lib\n mkdir -p /tmp/lib\n mkdir -p ${our_directory}/lib\n cp ${our_directory}/../../mybooks_common/repos/*.jar ${our_directory}/lib/\n cp ${our_directory}/../../mybooks_common/repos/*.jar /tmp/lib\n}\n\n\n#########\n# Run a maven command\n#\n# params:\n# 1- command to run\n#########\n_maven() {\n command=$@\n\n # See if mvn is already installed\n which mvn > /dev/null\n if [ $? -eq 0 ]; then\n echo \"[[Using local maven]]\"\n (cd $our_directory; mvn $command)\n else\n echo \"[[Running maven via docker]]\"\n ###\n # build project via maven using a docker container\n # volumes:\n # our_directory ==> /usr/src/mymaven (source)\n # .m2 ==> /opt/.m2 (maven repo cache)\n docker run --rm \\\n -it \\\n -v \"$our_directory\":/usr/src/mymaven \\\n -v \"${our_directory}/lib\":/tmp/lib \\\n -v \"$our_directory\"/../../.m2:/opt/.m2 \\\n -w /usr/src/mymaven \\\n maven:3.3.9-jdk-8-alpine \\\n mvn \\\n -Dmaven.repo.local=/opt/.m2/ \\\n $command\n build_result=$?\n fi\n # return result of build\n return $build_result\n}\n\n#############\n# Build maven\n#\n#############\nbuild_maven() {\n _maven \"package\"\n return $?\n}\n\n\n\n#############\n# Load our mybooks_common\n#\n#############\nload_common_jars_for_maven() {\n echo \"Loading mybooks-common into project\"\n# _maven install:install-file -Dfile=${our_directory}/lib/mybooks_common-1.0.jar -DgroupId=com.wpff.common -DartifactId=mybooks-common -Dversion=1.0 -Dpackaging=jar\n _maven install:install-file -Dfile=/tmp/lib/mybooks_common-1.0.jar -DgroupId=com.wpff.common -DartifactId=mybooks-common -Dversion=1.0 -Dpackaging=jar\n if [ $? -ne 0 ]; then\n echo \"Unable to load mybooks_common.jar into project '$project'\"\n exit 1\n fi \n}\n\n\n#############\n# Clean maven\n#\n#############\nclean_maven() {\n _maven \"clean\"\n return $?\n}\n\n\n############\n# Run a gradle command\n#\n# params:\n# 1- command to run\n############\n_gradle() {\n command=$1\n\n which gradle > /dev/null\n if [ $? -eq 0 ]; then\n echo \"[[Using local gradle]]\"\n (cd $our_directory; gradle \"$command\")\n build_result=$?\n else\n echo \"[[Running gradle via docker]]\"\n ###\n # build project via gradle using a docker container\n # volumes:\n # our_directory ==> /work (source)\n # .gradle ==> /GRADLE_CACHE (gradle repo cache)\n docker run --rm \\\n -it \\\n -v \"$our_directory\":/work \\\n -v \"$our_directory\"/../../.gradle:/GRADLE_CACHE \\\n hipposareevil/alpine-gradle \\\n \"\"$command\"\"\n build_result=$?\n fi\n # return result of command\n return $build_result\n}\n\n#############\n# Build gradle\n#\n#############\nbuild_gradle() {\n _gradle \"build\"\n return $?\n}\n\n#############\n# Clean gradle\n#\n#############\nclean_gradle() {\n _gradle \"clean\"\n return $?\n}\n\n###############\n# Cleans a java project\n# \n###############\njava::clean() {\n # build project\n if [ -e $our_directory/pom.xml ]; then\n clean_maven\n elif [ -e $our_directory/build.gradle ]; then\n clean_gradle\n else \n echo \"Unable to find maven or gradle artifacts to perform the clean.\"\n echo \"Exiting\"\n echo 1\n fi\n\n if [ $? -ne 0 ]; then\n echo \"Unable to clean the project. 
Exiting\"\n exit 1\n fi\n}\n\n\n###############\n# Builds a java project\n# \n###############\njava::build() {\n # copy in common\n _copy_common_jars\n\n # build project\n local then=$(date +%s)\n\n if [ -e $our_directory/pom.xml ]; then\n load_common_jars_for_maven\n build_maven\n elif [ -e $our_directory/build.gradle ]; then\n build_gradle\n else \n echo \"[[Unable to find maven or gradle artifacts to build.]]\"\n echo \"[[Exiting]]\"\n echo 1\n fi\n\n build_success=$?\n if [ $build_success -ne 0 ]; then\n echo \"[[Unable to build successfully.]]\"\n echo \"[[Exiting]]\"\n exit 1\n fi\n\n local now=$(date +%s)\n local elapsed=$(expr $now - $then)\n echo \"[[Built application in $elapsed seconds]]\"\n\n if [ $? -ne 0 ]; then\n echo \"[[Unable to build project. Exiting.]]\"\n exit 1\n fi\n}\n"
},
{
"alpha_fraction": 0.6487325429916382,
"alphanum_fraction": 0.6487325429916382,
"avg_line_length": 25.47945213317871,
"blob_id": "e774037adaf749fd5687ea49e21cde01e621af8f",
"content_id": "0b4d740c2256d417118859daa52b3a46ef0d0f66",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 1933,
"license_type": "no_license",
"max_line_length": 44,
"num_lines": 73,
"path": "/images/author/src/github.com/hipposareevil/author/structures.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\n///////////////////\n// Structures\n\n// GET request for author, contains:\n// - offset\n// - limit\n// - name\ntype getAllAuthorsRequest struct {\n\tOffset int `json:\"offset\"`\n\tLimit int `json:\"limit\"`\n\tName string `json:\"name\"`\n}\n\n// GET request for single author\n// - author_id\ntype getAuthorRequest struct {\n\tAuthorId int `json:\"author_id\"`\n}\n\n// DELETE request for single author\n// - author_id\ntype deleteAuthorRequest struct {\n\tAuthorId int `json:\"author_id\"`\n}\n\n// POST request to create author\ntype createAuthorRequest struct {\n\tName string `json:\"name\"`\n\tBirthDate string `json:\"birthDate\"`\n\tOlKey string `json:\"olKey\"`\n\tGoodReadsUrl string `json:\"goodreadsUrl\"`\n\tImageSmall string `json:\"imageSmall\"`\n\tImageMedium string `json:\"imageMedium\"`\n\tImageLarge string `json:\"imageLarge\"`\n\tSubjects []string `json:\"subjects\"`\n}\n\n// PUT request to update author\n// struct passed into service\ntype updateAuthorRequest struct {\n\tName string `json:\"name\"`\n\tId int `json:\"id,omitempty\"`\n\tBirthDate string `json:\"birthDate\"`\n\tOlKey string `json:\"olKey\"`\n\tGoodReadsUrl string `json:\"goodreadsUrl\"`\n\tImageSmall string `json:\"imageSmall\"`\n\tImageMedium string `json:\"imageMedium\"`\n\tImageLarge string `json:\"imageLarge\"`\n\tSubjects []string `json:\"subjects\"`\n}\n\n//// Response structures\n\ntype Author struct {\n\tId int `json:\"id\"`\n\tName string `json:\"name\"`\n\tBirthDate string `json:\"birthDate\"`\n\tOlKey string `json:\"olKey\"`\n\tGoodReadsUrl string `json:\"goodreadsUrl\"`\n\tImageSmall string `json:\"imageSmall\"`\n\tImageMedium string `json:\"imageMedium\"`\n\tImageLarge string `json:\"imageLarge\"`\n\tSubjects []string `json:\"subjects\"`\n}\n\ntype Authors struct {\n\tOffset int `json:\"offset\"`\n\tLimit int `json:\"limit\"`\n\tTotal int `json:\"total\"`\n\tData []Author `json:\"data\"`\n}\n"
},
{
"alpha_fraction": 0.7315102815628052,
"alphanum_fraction": 0.7385950088500977,
"avg_line_length": 46.049842834472656,
"blob_id": "1d523e00a7753ff2a721d10151e7869652b050e3",
"content_id": "662a8e33869adf456729a0d99d9221666dc6bb48",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 15103,
"license_type": "no_license",
"max_line_length": 446,
"num_lines": 321,
"path": "/README.md",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "Table of Contents\n=================\n\n * [Introduction](#introduction)\n * [Screenshot](#screenshot)\n * [tldr](#tldr)\n * [Build, Run, and Access](#build-run-and-access)\n * [Prerequisites](#prerequisites)\n * [Docker images used](#docker-images-used)\n * [Frameworks used](#frameworks-used)\n * [Managing application](#managing-application)\n * [Running application](#running-application)\n * [Stopping application](#stopping-application)\n * [Cleaning maven & gradle cache](#cleaning-maven--gradle-cache)\n * [Accessing the application](#accessing-the-application)\n * [Customizing application deployment](#customizing-application-deployment)\n * [Docker image location](#docker-image-location)\n * [Deployment Host name](#deployment-host-name)\n * [Implementation](#implementation)\n * [Endpoints](#endpoints)\n * [(frontend)](#frontend)\n * [query](#query)\n * [author](#author)\n * [book](#book)\n * [user](#user)\n * [user_book](#user_book)\n * [tag](#tag)\n * [review](#review)\n * [authorize](#authorize)\n * [swagger](#swagger)\n * [Databases](#databases)\n * [MySQL](#mysql)\n * [Redis](#redis)\n * [Authentication](#authentication)\n * [Cache](#cache)\n * [Notes and Thoughts](#notes-and-thoughts)\n * [Initial](#initial)\n * [Memory Usage](#memory-usage)\n * [Framework Changes](#framework-changes)\n * [Plans](#plans)\n# Introduction\nThis project has two goals; to create a book repository (like [goodreads.com](http://goodreads.com) or [librarything.com](http://librarything.com)), and to experiment with various microservice frameworks, golang, java, docker, swagger, and javascript.\n\nThe frontend web application is a [SPA](https://en.wikipedia.org/wiki/Single-page_application) written in Javascript ([vue.js](https://vuejs.org/)).\n\nThe backend is a set of micro-services written in Golang, backed by [mysql](https://www.mysql.com/) and [redis](https://redis.io/) databases. There are various (micro-service) endpoints for interfacing with and querying the data, see [endpoints](#endpoints) below. Most endpoints requires authorization, which is obtained at the */authorize* endpoint. See the */swagger* endpoint for full information. \n\nThere is an initial user of *admin* with same password. The *admin* user can create a new user, or update the *admin* user via the */user* endpoint or the main web application. \n\n# Screenshot\n\n\n\n# tldr\n* build: *./books.sh build*\n* run: *./books.sh start*\n* swagger url: *[http://localhost:8080/swagger/](http://localhost:8080/swagger/)*\n* frontend url: *[http://localhost:8080/](http://localhost:8080/)*\n* stop: *./books.sh stop*\n* clean: *./books.sh clean*\n\n# Build, Run, and Access\n\nThere is a top level script that builds, starts, and stops the application (and the corresponding microservices):\n```\n> ./books.sh build\n```\n\nEach microservice is built with either maven or gradle and uses a Docker container to do the complication. 
The containers use *.m2* and *.gradle* directories that cache the corresponding repositories.\n\n## Prerequisites\n\n* bash\n* Docker (17.09) ([install here](https://docs.docker.com/engine/installation/))\n* docker-compose _optional_\n\n\n## Docker images used\n\nThese will be pulled automatically from dockerhub.com.\n\nDocker image | Purpose\n--- | ---\ndocker/compose:1.8.1 | Used to start/stop application.\nhipposareevil/alpine-dep | For golang dependency management.\nhipposareevil/swagger-combine | To combine and expose swagger documentation from each microservice.\nhipposareevil/alpine-gradle | Used to build web services: *query*.\nmaven:3.3.9-jdk-8-alpine | Used to build web services: author, title.\nmysql:latest | MySQL database.\nredis:3.0-alpine | Redis K/V database.\nnginx:latest | API Gateway.\nopenjdk:8-jdk-alpine | Base image for web services. \n\n\n## Frameworks used\n\n* [go-kit](https://github.com/go-kit/kit)\n* [swagger](http://swagger.io)\n* [bulma css](http://bulma.io/)\n* [vue javascript](https://vuejs.org/)\n\n*Deprecated*\n* [graphite](https://graphiteapp.org)\n* [grafana](https://grafana.com)\n* [dropwizard](http://www.dropwizard.io/)\n* [spring boot](https://projects.spring.io/spring-boot/)\n\n\n## Managing application\n\nThe application is managed using docker-compose via a docker-compose.yml file. A convenience script is provided that runs docker-compose via a separate container.\n\n### Running application\n```\n> ./books.sh start\n```\nor\n```\n> docker-compose up -d\n```\n\n### Stopping application\n```\n> ./books.sh stop\n```\nor\n```\n> docker-compose down\n```\n\n### Cleaning maven & gradle cache\n```\n> ./books.sh clean\n```\n\n\n## Accessing the application\n\nThe web application runs at [localhost:8080/](http://localhost:8080/). It takes a few seconds for the MySQL database to come up.\n\nSwagger API description is at [localhost:8080/swagger/](http://localhost:8080/swagger/). This can be used to test each of the microservices.\n
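\nAs a quick illustration (a sketch only: the token below is the placeholder value used throughout this README, and the JSON field names for the credentials are assumptions; the swagger docs are authoritative), you can obtain a token and then call an endpoint with curl:\n\n```\n> curl -X POST localhost:8080/authorize/token \\\n    -d '{\"name\": \"admin\", \"password\": \"admin\"}'\n\n> curl localhost:8080/book -H \"Authorization: Bearer qwerty-1234-asdf-9876\"\n```\n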
\n## Customizing application deployment\nDocker compose utilizes an *.env* file where environment variables can be located. This project has the following variables.\n\n### Docker image location\nThis defaults to using the local Docker repository, not a private registry. If you want to tag and push images to a private repository (or dockerhub), you can run the docker compose with the environment variable *BOOK_REPOSITORY*.\n\nIf your repository were at *mysuperrepo.com:5432*, you would add the following to the *.env* file:\n```\nBOOK_REPOSITORY=mysuperrepo.com:5432\n```\nDocker compose will then utilize that for the root image locations. \n\nA utility script *tagAndPushToRepo.sh* is in the root directory and will tag all of the *books* images and push to the *BOOK_REPOSITORY* repository.\n\n### Deployment Host name\nThis application defaults to *localhost* for the Swagger UI definition. When set to *localhost*, the UI can have trouble running example REST calls. For instance, if you're running the application on your hosted box *foo.com* and you access the UI via browser from your laptop, the queries will not go through. If you update the *.env* file with your host box name, things will go more smoothly via Swagger.\n```\nDEPLOY_HOST_NAME=foo.com\n```\n\nSee the [swagger-combine](https://github.com/hipposareevil/swagger-combine) project for more information on this variable.\n\n\n# Implementation\n\nNotes on implementation.\n\n## Endpoints\nThe web application, each microservice endpoint, and backend database are contained in Docker containers. All containers are attached to the Docker network *books_booknet*.\n\nAll endpoints are exposed to the outside world via an API _gateway_ which runs in a separate container and listens on port 8080.\n\n\n\nThe following endpoints are proxied via the API Gateway on port 8080 (configurable via the *docker-compose.yml* file).\n\nEndpoint | Purpose\n--- | ---\n/ | Frontend single-page application.\n/query | Microservice to query openlibrary.org for book titles and authors.\n/author | Manage list of authors in database.\n/book | Manage list of books in database. \n/user | Manage users.\n/user_book | Manage books for a user. \n/tag | Manage tags.\n/review | List user reviews for a book. \n/authorize | Authorize access to endpoints.\n/swagger/ | [swagger](http://swagger.io) documentation describing the REST APIs.\n\n### (frontend) \nFrontend web application at **/**. This uses the microservices to manage the books, authors, users, user lists, and tags. Utilizes the [vue](https://vuejs.org/) framework and [bulma](https://bulma.io/) framework.\n\nSee [frontend](https://github.com/hipposareevil/books/blob/master/images/frontend/README.md) for more information. \n\n\n### query\nREST microservice that queries openlibrary for new authors and book titles. It is used by the frontend to add new entries to the application.\n\nSee [query](https://github.com/hipposareevil/books/blob/master/images/query/README.md) for more information. \n\n\n### author\nMicroservice to manage the complete set of Authors in the database. Operations include: add, list all, list single and delete.\n\nSee [author.go](https://github.com/hipposareevil/books/blob/master/images/author/README.md) for more information. \n\nSee *deprecated* [author](https://github.com/hipposareevil/books/blob/master/images.java/author/README.md) for more information. \n\n### book\nMicroservice to manage the complete set of Books in the database. Operations include: list, query, add, delete.\n\nSee [book.go](https://github.com/hipposareevil/books/blob/master/images/book.go/README.md) for more information. \n\nSee *deprecated* [book](https://github.com/hipposareevil/books/blob/master/images.java/book/README.md) for more information. \n\n### user\nMicroservice to manage users. A *user* is used to maintain a set of *user books*, which stores which books the user is cataloging, along with metadata, tags and a rating. In addition, a *user* is used to obtain an authorization token for authenticating against the various endpoints.\n\nSee [user](https://github.com/hipposareevil/books/blob/master/images/user.go/README.md) for more information. \n\nSee *deprecated* [user](https://github.com/hipposareevil/books/blob/master/images.java/user/README.md) for more information. \n\n### user_book\nMicroservice to manage a set of books for a user. Each user has a list of books they can catalog. Each *user book* has a link to the real Book and associated Author. In addition, a *user book* has user *data* and a set of *tags*. An illustrative request is sketched below.\n\nSee [user_book](https://github.com/hipposareevil/books/blob/master/images/user_book.go/README.md) for more information. \n\nSee *deprecated* [user_book](https://github.com/hipposareevil/books/blob/master/images.java/user_book/README.md) for more information. \n
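\nFor illustration only (the route shape and JSON field casing here are assumptions taken from the service's create-request structure; the swagger docs are authoritative), adding a book to user 1's list might look like:\n\n```\n> curl -X POST localhost:8080/user_book/1 \\\n    -H \"Authorization: Bearer qwerty-1234-asdf-9876\" \\\n    -d '{\"bookId\": 42, \"rating\": true, \"tags\": [\"sci-fi\", \"e-book\"], \"review\": \"Loved it.\"}'\n```\n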
\n### tag\nMicroservice to manage tags. Tags can be applied to a user's set of books via the *user_book* endpoint. Multiple tags may be applied to a single book, e.g. \"e-book\" and \"sci-fi\".\n\n*Note:* This is now implemented in Go instead of Java.\n\nSee [tag.go](https://github.com/hipposareevil/books/blob/master/images/tag.go/README.md) for more information. \n\nSee *deprecated* [tag](https://github.com/hipposareevil/books/blob/master/images.java/tag/README.md) for more information. \n\n\n### review\nMicroservice to list reviews for a book.\n\nSee [review.go](https://github.com/hipposareevil/books/blob/master/images/review.go/README.md) for more information. \n\n\n### authorize\nMicroservice to authenticate a user. This creates a token of the form 'Bearer qwerty-1234-asdf-9876', which is then added to the headers of any calls to the other endpoints.\n\nSee [authorize](https://github.com/hipposareevil/books/blob/master/images/authorization.go/README.md) for more information. \n\nSee *deprecated* [authorize](https://github.com/hipposareevil/books/blob/master/images.java/authorization/README.md) for more information. \n\n### swagger\nSwagger-ui that combines the swagger.yaml files from the REST endpoints. Uses the [swagger-combine](https://hub.docker.com/r/hipposareevil/swagger-combine/) image to grab the definitions.\n\nThis waits for the various endpoints to come up and then grabs the designated (in docker-compose.yml) yaml files, combines them and then serves up the endpoint via swagger-ui.\n\n## Databases\nThere are two databases used to manage the books and users. The data is stored in the *database* directory. \n\n### MySQL\n[MySQL](https://www.mysql.com/) is used to store books, authors, users, tags and user books lists.\n\nThe database schema is stored in *database/initial/mybooks.sql*\n\nSee [mysql](https://github.com/hipposareevil/books/blob/master/mysql/README.md) for more information and the method to update the *admin* user's password.\n\n### Redis\n\n[Redis](https://redis.io/) is used to store key/value pairs for the services.\n\n#### Authentication\nAuthentication tokens are created by the *authorize* service and stored in Redis. Redis is then used by all services to verify authentication.\n
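\nAs a sketch of what that looks like in Redis (the key layout comes from the services' authentication code, which reads the hash \"user:<token>\" for the *name* and *group* fields; the token and values below are placeholders):\n\n```\n> redis-cli HGETALL \"user:qwerty-1234-asdf-9876\"\n1) \"name\"\n2) \"admin\"\n3) \"group\"\n4) \"admin\"\n```\n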
\n#### Cache\nRedis is also used as a cache for the services. When a service makes a REST call to another service, the calling service stores the returned data in the cache. For example, the *book* service calls the *author* service to get the author's name for a book. The *book* service stores that in the cache for the next call. The *author* service will flush the cache when mutations to the author database have been made.\n\nMap of services performing caching and their consumers:\n\nCache Source | Namespace | Consumer | Notes\n--- | --- | --- | ---\nauthor.go | author.name | book.go | Author names indexed by Author ID\ntag.go | tag | user_book.go | All tags as JSON, indexed by '0' to denote all tags\nbook.go | book.info | user_book.go | Individual Book JSON indexed by book ID\n\n\n# Notes and Thoughts\n\n## Initial\nI created this project to experiment with writing multiple micro-services and wiring them together in a docker environment. I started using [dropwizard](http://www.dropwizard.io/) for a Java-based server. The ramp-up time was fairly quick and I was able to easily do most tasks with little pain. I played with [spring boot](https://projects.spring.io/spring-boot/) for the _query_ endpoint, but stuck with dropwizard for the rest of the services.\n\n## Memory Usage\nWith 6 services implemented, I ran everything on my [hosted environment](https://linode.com) and saw that I was running out of memory. I checked my _docker stats_ and saw that my **micro** services were each using 250MB.\n\nI lowered the Java memory usage (*Xmx64m*) for each service and the usage dropped to ~180MB. This was still an inordinate amount of RAM for a simple web service.\n\nI came across a [blog post](http://trustmeiamadeveloper.com/2016/03/18/where-is-my-memory-java/) detailing where the memory for Java (on Docker) was going. Unsurprisingly, those results correlated with this project's usage.\n\n## Framework Changes\nGiven the large memory usage, I started looking for other micro-service frameworks. I ended up using [go](https://golang.org/) for a test service and saw the memory usage was in the single MB range. I also looked at [node](https://nodejs.org/en/) but that seemed to use almost as much memory as Java and others.\n\nI ported the *tag* endpoint to golang using the [go-kit](https://github.com/go-kit/kit) framework and saw the following results:\n\nMetric | golang | java | go % of java\n--- | --- | --- | ---\nImage size | 7.56MB | 136MB | 5.5%\nMemory | 2MB | 187MB | 1.1%\n\nThe golang service's image size is 5.5% of the corresponding Java image.\n\nMore noteworthy is golang's memory usage, at roughly 1% of the size of the Java implementation.\n\n**Caveat**: Individual image size is not always an accurate overall measurement due to image layer sharing. All of the java images share the same base image, so the accumulated size is closer to 140MB than 900MB (7 * 130MB).\n\n## Plans\n\n* Add graphite metrics to golang services.\n* Investigate [istio.io](https://istio.io/) for service discovery and metrics.\n"
},
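A minimal sketch of the read-through cache pattern described in the Cache section above, assuming the same radix.v2 Redis pool the Go services already use. The `author.name` namespace comes from the table; the key layout and the `fetchAuthorName` helper are illustrative assumptions, not the repository's actual cache layer.

```go
package main

import (
	"fmt"

	"github.com/mediocregopher/radix.v2/pool"
)

// Namespace from the cache table above.
const AUTHOR_CACHE = "author.name"

// Illustrative stand-in for a REST call to the /author service.
func fetchAuthorName(authorId int) (string, error) {
	return "Some Author", nil
}

// Read-through lookup: try Redis first, fall back to the owning
// service, then populate the cache for the next caller.
func getAuthorName(redisPool *pool.Pool, authorId int) (string, error) {
	key := fmt.Sprintf("%s.%d", AUTHOR_CACHE, authorId)

	// Cache hit: return immediately.
	if name, err := redisPool.Cmd("GET", key).Str(); err == nil && name != "" {
		return name, nil
	}

	// Cache miss: ask the service that owns the data.
	name, err := fetchAuthorName(authorId)
	if err != nil {
		return "", err
	}

	// Store for the next caller; the author service flushes this
	// namespace whenever the author database is mutated.
	redisPool.Cmd("SET", key, name)
	return name, nil
}

func main() {
	redisPool, err := pool.New("tcp", "books.token_db:6379", 10)
	if err != nil {
		panic(err)
	}
	name, _ := getAuthorName(redisPool, 42)
	fmt.Println("author 42:", name)
}
```

The real services hide this pattern behind a small `CacheLayer` interface (visible in the Go services' `main.go`), which keeps the Redis plumbing out of the business logic.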
{
"alpha_fraction": 0.7616192102432251,
"alphanum_fraction": 0.7676162123680115,
"avg_line_length": 48.407405853271484,
"blob_id": "7f60a4ae5dc1cf9f2d4d0baed12c537f917af786",
"content_id": "c71be044f0809e0888a5acb05f0fde9f6dee28f2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1334,
"license_type": "no_license",
"max_line_length": 324,
"num_lines": 27,
"path": "/images/book/README.md",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "# Book Microservice\n\n## Introduction\n\n**/book** is a microservice for querying, listing, adding and deleting Book entries from the database. Books are visable to all users but are only mutable by the *admin* user. \n\n\n## Supported calls\nThe list of supported calls and their documentation are available via the swagger endpoint. This runs on localhost:8080/swagger/ when the application is up.\n\n\n## Authorization\nIt is necessary to authorize all REST calls to this endpoint. This is done by obtaining an authorization token from the */authorize* endpoint and adding it to the HTTP headees with the key *AUTHORIZATION*. See [/authorize](https://github.com/hipposareevil/books/blob/master/images/authorize/README.md) for more information.\n\n## Go-kit Application\nThis uses go-kit for the framework and dep for the management of the dependencies (kindof like maven). A *vendor* directory will be created by dep in the *src/github.com/hipposareevil* sub-directory.\n\n\n## Docker \nThe Docker container will expose port 8080 to other containers on the *booknet* Docker network.\n\n## Libraries used\n\n* [go](https://golang.org/)\n* [go-kit](https://github.com/go-kit/kit) - microservice framework.\n* [dep](https://github.com/golang/dep) - depdendency management tool.\n* [bcrypt](https://godoc.org/golang.org/x/crypto/bcrypt) - encryption library\n"
},
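A minimal Go client sketch of the authorization flow this README requires: create a token via */authorize/token*, then send it on each */book* call. The localhost address and *admin* credentials are assumptions for a local deployment; per the top-level README, the returned token already carries the 'Bearer ' prefix.

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io/ioutil"
	"net/http"
)

func main() {
	// 1) Obtain a token (credentials assumed for a fresh local install).
	creds := bytes.NewBufferString(`{"name":"admin","password":"admin"}`)
	resp, err := http.Post("http://localhost:8080/authorize/token",
		"application/json", creds)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var auth struct {
		Token string `json:"token"`
	}
	json.NewDecoder(resp.Body).Decode(&auth)

	// 2) Call /book with the token in the AUTHORIZATION header.
	// The token is documented as already including the 'Bearer ' prefix.
	req, _ := http.NewRequest(http.MethodGet, "http://localhost:8080/book", nil)
	req.Header.Set("Authorization", auth.Token)

	res, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer res.Body.Close()

	body, _ := ioutil.ReadAll(res.Body)
	fmt.Println(res.Status, string(body))
}
```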
{
"alpha_fraction": 0.680865466594696,
"alphanum_fraction": 0.6819923520088196,
"avg_line_length": 24.5,
"blob_id": "e5338fcdd762b7e818d9a469d7246e0de875a5f4",
"content_id": "b7a1d41d669d57af769b306dfa27b777089ac4bb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 4437,
"license_type": "no_license",
"max_line_length": 93,
"num_lines": 174,
"path": "/images/authorization/src/github.com/hipposareevil/authorization/transport.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\n// Transport module\n//\n// Contains:\n// - endpoint creation\n// - encode responses to client\n// - decode client requests\n\nimport (\n\t\"context\"\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"net/http\"\n\t_ \"strconv\"\n\t\"strings\"\n\n\t\"github.com/go-kit/kit/endpoint\"\n)\n\n//////////////////////////////////////////////////////////\n//\n// Create endpoints\n\n// Get /authorize/validate\n// Make endpoint for validating the authorization header\nfunc makeGetValidationEndpoint(svc AuthorizeService) endpoint.Endpoint {\n\treturn func(ctx context.Context, request interface{}) (interface{}, error) {\n\t\t// convert request into an authorizationRequest\n\t\treq := request.(authorizationRequest)\n\n\t\t// call actual service with data from the req\n\t\terr := svc.ValidateToken(req.Bearer)\n\t\treturn authorizationResponse{\n\t\t\tErr: err,\n\t\t}, nil\n\t}\n}\n\n// POST /authorize/token\n// Make endpoint to create token\nfunc makeCreateTokenEndpoint(svc AuthorizeService) endpoint.Endpoint {\n\treturn func(ctx context.Context, request interface{}) (interface{}, error) {\n\t\t// convert request into an createTokenRequest\n\t\treq := request.(createTokenRequest)\n\n\t\t// call actual service with data from the req\n\t\tauthorization, err := svc.CreateToken(req.Name, req.Password)\n\t\treturn createTokenResponse{\n\t\t\tData: authorization,\n\t\t\tErr: err,\n\t\t}, nil\n\t}\n}\n\n//////////////////////////////////////////////////////////\n//\n// Decode from client\n\n// Create an authorizationRequest\n// (used by service.ValidateToken)\n// /authorize/validate\n//\n// The request has 1 variable:\n// - Bearer Authorization token\nfunc decodeValidationRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\t///////////////////\n\t// parse headers\n\tvar realBearer string\n\tbearer := r.Header.Get(\"authorization\")\n\n\t// Strip the 'Bearer ' from header\n\tif strings.HasPrefix(bearer, \"Bearer \") {\n\t\trealBearer = strings.Replace(bearer, \"Bearer \", \"\", 1)\n\t}\n\n\t// Make request for authorization\n\tvar request authorizationRequest\n\trequest = authorizationRequest{\n\t\tBearer: realBearer,\n\t}\n\n\treturn request, nil\n}\n\n// Create a createTokenRequest\n// (used by service.CreateToken)\n//\n// The request has 2 variables:\n// - User\n// - Password\nfunc decodeCreateTokenRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\t///////////////////\n\t// Parse body\n\n\t// Decode the incoming JSON into a Credentials struct\n\tvar credentials Credentials\n\tif err := json.NewDecoder(r.Body).Decode(&credentials); err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Make request\n\tvar request createTokenRequest\n\trequest = createTokenRequest{\n\t\tcredentials.Name,\n\t\tcredentials.Password,\n\t}\n\n\treturn request, nil\n}\n\n//////////////////////////////////////////////////////////\n//\n// Encode responses to client\n\n// The response should be of type errorer and thus can be cast to check if there is an error\nfunc encodeResponse(ctx context.Context, w http.ResponseWriter, response interface{}) error {\n\t// Set JSON type\n\tw.Header().Set(\"Content-Type\", \"application/json; charset=utf-8\")\n\n\t// Check error.\n\t// All Response objects in endpoints.go should implement \"error()\" so\n\t// we can see if there was a proper error to handle\n\tif e, ok := response.(errorer); ok {\n\t\t// This is a errorer class, now check for error\n\t\tif err := e.error(); err != nil {\n\t\t\tfmt.Println(e.error())\n\t\t\tencodeError(ctx, e.error(), 
w)\n\t\t\treturn nil\n\t\t}\n\t}\n\n\t// Write out normal response. See if the response is a DataHolder\n\t// Cast to dataHolder to get Data, otherwise just encode the resposne\n\tif holder, ok := response.(dataHolder); ok {\n\t\t// This is a 'dataHolder'\n\t\tfmt.Println(\"dataholder response\")\n\t\treturn json.NewEncoder(w).Encode(holder.getData())\n\t} else {\n\t\tfmt.Println(\"normal response\")\n\t\treturn json.NewEncoder(w).Encode(response)\n\t}\n}\n\n// Write the incoming err into the response writer\nfunc encodeError(_ context.Context, err error, w http.ResponseWriter) {\n\tif err == nil {\n\t\tpanic(\"encodeError with nil error\")\n\t}\n\n\tw.Header().Set(\"Content-Type\", \"application/json; charset=utf-8\")\n\n\t// Write actual error code\n\tcode := codeFrom(err)\n\tw.WriteHeader(code)\n\n\t// write out the error message\n\tjson.NewEncoder(w).Encode(map[string]interface{}{\n\t\t\"code\": code,\n\t\t\"message\": err.Error(),\n\t})\n}\n\n// Determine the HTTP error code from the incoming error 'err'\nfunc codeFrom(err error) int {\n\tswitch err {\n\tcase ErrUnauthorized:\n\t\treturn http.StatusUnauthorized\n\tcase ErrNotFound:\n\t\treturn http.StatusNotFound\n\tdefault:\n\t\treturn http.StatusInternalServerError\n\t}\n}\n"
},
{
"alpha_fraction": 0.6826698184013367,
"alphanum_fraction": 0.6826698184013367,
"avg_line_length": 19.094118118286133,
"blob_id": "5b20fc326d36bb46b7827def00578c5819708057",
"content_id": "4bd4e5a9023451be9f264fc8e673ed313e53f4e7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 1708,
"license_type": "no_license",
"max_line_length": 61,
"num_lines": 85,
"path": "/images/user/src/github.com/hipposareevil/user/endpoints.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\n// Base for all responses\ntype errorer interface {\n\terror() error\n}\n\n// interface for holding data\ntype dataHolder interface {\n\t// Get the data\n\tgetData() interface{}\n}\n\n////////////////\n// Responses are passed to 'transport.encodeResponse'\n\n////////////////////\n/// Users (all)\n// response for users (vs. single user)\ntype usersResponse struct {\n\tData Users `json:\"all,omitempty\"`\n\tErr error `json:\"err,omitempty\"`\n}\n\nfunc (theResponse usersResponse) error() error {\n\treturn theResponse.Err\n}\n\nfunc (theResponse usersResponse) getData() interface{} {\n\treturn theResponse.Data\n}\n\n////////////////////\n/// USER (single)\n// response for user (single)\ntype userResponse struct {\n\tData User `json:\"all,omitempty\"`\n\tErr error `json:\"err,omitempty\"`\n}\n\nfunc (theResponse userResponse) error() error {\n\treturn theResponse.Err\n}\n\nfunc (theResponse userResponse) getData() interface{} {\n\treturn theResponse.Data\n}\n\n////////////////////\n/// DELETE USER (single)\n// response for user (single)\ntype deleteUserResponse struct {\n\tErr error `json:\"err,omitempty\"`\n}\n\nfunc (theResponse deleteUserResponse) error() error {\n\treturn theResponse.Err\n}\n\n////////////////////\n/// Create USER\n// response for create user\ntype createUserResponse struct {\n\tData User `json:\"all,omitempty\"`\n\tErr error `json:\"err,omitempty\"`\n}\n\nfunc (theResponse createUserResponse) error() error {\n\treturn theResponse.Err\n}\n\nfunc (theResponse createUserResponse) getData() interface{} {\n\treturn theResponse.Data\n}\n\n////////////////////\n/// Update USER\n// response for update user\ntype updateUserResponse struct {\n\tErr error `json:\"err,omitempty\"`\n}\n\nfunc (theResponse updateUserResponse) error() error {\n\treturn theResponse.Err\n}\n"
},
{
"alpha_fraction": 0.6386468410491943,
"alphanum_fraction": 0.6386468410491943,
"avg_line_length": 19.97849464416504,
"blob_id": "da771c487e827c8f4484baa3683ac2b31c2cd83f",
"content_id": "2264faae32cbfa98b3047256dfdcbe7e44fa5bf7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Java",
"length_bytes": 1951,
"license_type": "no_license",
"max_line_length": 77,
"num_lines": 93,
"path": "/images.java/user_book/src/main/java/com/wpff/db/TagDAO.java",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package com.wpff.db;\n\nimport com.wpff.core.Tag;\n\nimport io.dropwizard.hibernate.AbstractDAO;\nimport org.hibernate.SessionFactory;\nimport org.hibernate.criterion.Restrictions;\nimport org.hibernate.criterion.MatchMode;\n\nimport java.util.*;\nimport java.util.stream.Collectors;\n\n\n/**\n * Data access object for a Tag\n */\npublic class TagDAO extends AbstractDAO<Tag> {\n\n public TagDAO(SessionFactory factory) {\n super(factory);\n }\n\n /**\n * Look tag up by name\n *\n * @param tagName Name \n * @return List of Tags. May be empty\n */\n public List<Tag> findByName(String tagName) {\n return currentSession()\n .createCriteria(Tag.class)\n .add(Restrictions.like(\"name\", tagName, MatchMode.EXACT))\n .list();\n }\n\n /**\n * Look up an Tag by id. \n *\n * @param id Tag ID\n * @return Optional Tag\n */\n public Optional<Tag> findById(Integer id) {\n System.out.println(\"tagdao.findbyid:\" + id);\n return Optional.ofNullable(get(id));\n }\n\n\n /**\n * Persists a new Tag into the backing DB.\n *\n * @param tag Tag to be created. Comes in via TagResource.\n * @return Tag that was just persisted\n */\n public Tag create(Tag tag) {\n Tag newtag = persist(tag);\n System.out.println(\"TagDAO: Create new tag: \" + newtag);\n return newtag;\n }\n\n /**\n * Update an existing tag\n *\n * @param tag to update\n */\n public void update(Tag tag) {\n currentSession().saveOrUpdate(tag);\n }\n\n /**\n * Delete tag from database.\n *\n * @param tag to delete\n */\n public void delete(Tag tag) {\n currentSession().delete(tag);\n }\n\n /**\n * Find all tags in the database. Uses the named query in com.wpff.core.Tag\n *\n * @return List of Tags, may be empty\n */\n public Map<String, Tag> findAll() {\n List<Tag> tags = list(namedQuery(\"com.wpff.core.Tag.findAll\"));\n\n Map<String, Tag> tagsMap = tags.stream().collect(\n Collectors.toMap(Tag::getName, p -> p));\n return tagsMap;\n }\n\n\n \n}\n"
},
{
"alpha_fraction": 0.6736946105957031,
"alphanum_fraction": 0.6788008809089661,
"avg_line_length": 23.479839324951172,
"blob_id": "69b90a6dcb64a8a5748bd7ad4ee95dfc0803b732",
"content_id": "44f6b51b34427ba3aec051be45d45a87e66f120a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 6071,
"license_type": "no_license",
"max_line_length": 111,
"num_lines": 248,
"path": "/images/user_book/src/github.com/hipposareevil/user_book/other_services.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\n///////////////////\n// Set of functions to make calls to other services\n\nimport (\n\t\"encoding/json\"\n\t\"fmt\"\n\t_ \"github.com/go-sql-driver/mysql\"\n\t\"io/ioutil\"\n\t\"net/http\"\n\t\"net/url\"\n\t\"strconv\"\n\t\"strings\"\n\t\"time\"\n)\n\n// namespace for looking up tags.\nconst TAG_CACHE = \"tag\"\n\n// namespace for looking up books by book id.\nconst BOOK_CACHE = \"book.info\"\n\n////////////\n// Query the /tag endpoint for all tags\n//\n// cache: Cache to utilize\n// bearer: BEARER string used to connect to other web service\n//\n// returns Tags, which has array of Tag objects\nfunc getAllTags(cache CacheLayer, bearer string) Tags {\n\t// Check the cache\n\ttagsAsBytes := cache.GetBytes(TAG_CACHE, 0)\n\tif len(tagsAsBytes) > 0 {\n\t\tallTags := Tags{}\n\t\terr := json.Unmarshal(tagsAsBytes, &allTags)\n\t\tif err == nil {\n\t\t\t// Check that there are actual tags in this data\n\t\t\tnumTags := len(allTags.Data)\n\t\t\tif numTags > 0 {\n\t\t\t\tfmt.Println(\"Got \", numTags, \" 'tags' from cache\")\n\t\t\t\treturn allTags\n\t\t\t} else {\n\t\t\t\tfmt.Println(\"Not using 'tags' from cache as it is empty\")\n\t\t\t}\n\t\t}\n\t}\n\n\t///////////\n\t// Get from tag service\n\n\t// make client\n\tsuperClient := http.Client{\n\t\tTimeout: time.Second * 2, // Maximum of 2 secs\n\t}\n\n\t// make request object\n\tfullUrl := \"http://tag:8080/tag\"\n\n\treq, err := http.NewRequest(http.MethodGet, fullUrl, nil)\n\tif err != nil {\n\t\tfmt.Println(\"Unable to make new request to /tag\")\n\t\treturn Tags{}\n\t}\n\n\t// set headers\n\treq.Header.Set(\"User-Agent\", \"user-book-service-client\")\n\treq.Header.Set(\"authorization\", \"Bearer \"+bearer)\n\n\t// send request\n\tres, getErr := superClient.Do(req)\n\tif getErr != nil {\n\t\tfmt.Println(\"Unable to send request to /tag\")\n\t\treturn Tags{}\n\t}\n\n\t// Check status code\n\tif !strings.Contains(res.Status, \"200\") {\n\t\tfmt.Println(\"Unable to connect to '\" + fullUrl + \"' to get names. 
HTTP code: \" + res.Status)\n\t\treturn Tags{}\n\t}\n\n\t// parse body\n\tbody, readErr := ioutil.ReadAll(res.Body)\n\tif readErr != nil {\n\t\tfmt.Println(\"Unable to parse response from /tag\")\n\t\treturn Tags{}\n\t}\n\n\t// get tags\n\tallTags := Tags{}\n\tjsonErr := json.Unmarshal(body, &allTags)\n\tif jsonErr != nil {\n\t\tfmt.Println(\"Unable to unmarshall response from /tag\")\n\t\treturn Tags{}\n\t}\n\n\treturn allTags\n}\n\n///////////\n// Query the /book endpoint for a single book\n//\n// params:\n// cache: Cache to utilize\n// bearer: BEARER string used to connect to other web service\n// bookId: ID of book to get\n// userBook: UserBook to fill in with book data retrieved from /book service\n//\nfunc getBookById(cache CacheLayer, bearer string, bookId int, userBook *UserBook) error {\n\t// Check the cache\n\tbookAsBytes := cache.GetBytes(BOOK_CACHE, bookId)\n\tif len(bookAsBytes) > 0 {\n\t\tbook := Book{}\n\n\t\terr := json.Unmarshal(bookAsBytes, &book)\n\t\tif err == nil {\n\t\t\t// Fill in book info\n\t\t\tuserBook.Title = book.Title\n\t\t\tuserBook.AuthorName = book.AuthorName\n\t\t\tuserBook.AuthorId = book.AuthorId\n\t\t\tuserBook.FirstPublishedYear = book.FirstPublishedYear\n\t\t\tuserBook.ImageSmall = book.ImageSmall\n\t\t\tuserBook.ImageMedium = book.ImageMedium\n\t\t\tuserBook.ImageLarge = book.ImageLarge\n\n\t\t\treturn nil\n\t\t}\n\t}\n\n\t///////////\n\t// Get from book service\n\n\t// book id as string\n\tbookIdAsString := strconv.Itoa(bookId)\n\n\t// make client\n\tsuperClient := http.Client{\n\t\tTimeout: time.Second * 2, // Maximum of 2 secs\n\t}\n\n\t// make request object\n\tfullUrl := \"http://book:8080/book/\" + bookIdAsString\n\n\treq, err := http.NewRequest(http.MethodGet, fullUrl, nil)\n\tif err != nil {\n\t\tfmt.Println(\"Unable to make new request to /book\")\n\t\treturn ErrServerError\n\t}\n\n\t// set headers\n\treq.Header.Set(\"User-Agent\", \"user-book-service-client\")\n\treq.Header.Set(\"authorization\", \"Bearer \"+bearer)\n\n\t// send request\n\tres, getErr := superClient.Do(req)\n\tif getErr != nil {\n\t\tfmt.Println(\"Unable to send request to /book\")\n\t\treturn ErrServerError\n\t}\n\n\t// Check status code\n\tif !strings.Contains(res.Status, \"200\") {\n\t\tfmt.Println(\"getBook: Unable to connect to '\" + fullUrl + \"' to get names. 
HTTP code: \" + res.Status)\n\t\treturn nil\n\t}\n\n\t// parse body\n\tbody, readErr := ioutil.ReadAll(res.Body)\n\tif readErr != nil {\n\t\tfmt.Println(\"Unable to parse response from /book\")\n\t\treturn nil\n\t}\n\n\t// get books\n\tbook := Book{}\n\tjsonErr := json.Unmarshal(body, &book)\n\tif jsonErr != nil {\n\t\tfmt.Println(\"Unable to unmarshall response from /book\")\n\t\treturn nil\n\t}\n\n\t// Set values on incoming UserBook\n\tuserBook.Title = book.Title\n\tuserBook.AuthorName = book.AuthorName\n\tuserBook.AuthorId = book.AuthorId\n\tuserBook.FirstPublishedYear = book.FirstPublishedYear\n\tuserBook.ImageSmall = book.ImageSmall\n\tuserBook.ImageMedium = book.ImageMedium\n\tuserBook.ImageLarge = book.ImageLarge\n\n\treturn nil\n}\n\n///////////\n// Query the /book endpoint for all books that match the incoming title\nfunc getBooksByTitle(bearer string, title string) (Books, error) {\n\ttitle = url.QueryEscape(title)\n\n\tfullUrl := \"http://book:8080/book?title=\" + title\n\tfmt.Println(\"Making title query with url '\", fullUrl, \"'\")\n\n\t// make client\n\tsuperClient := http.Client{\n\t\tTimeout: time.Second * 2, // Maximum of 2 secs\n\t}\n\n\t// make request object\n\treq, err := http.NewRequest(http.MethodGet, fullUrl, nil)\n\tif err != nil {\n\t\tfmt.Println(\"Unable to make new request to /book\")\n\t\treturn Books{}, ErrServerError\n\t}\n\n\t// set headers\n\treq.Header.Set(\"User-Agent\", \"user-book-service-client\")\n\treq.Header.Set(\"authorization\", \"Bearer \"+bearer)\n\n\t// send request\n\tres, getErr := superClient.Do(req)\n\tif getErr != nil {\n\t\tfmt.Println(\"Unable to send request to /book\")\n\t\treturn Books{}, ErrServerError\n\t}\n\n\t// Check status code\n\tif !strings.Contains(res.Status, \"200\") {\n\t\tfmt.Println(\"getBooksByTitle: Unable to connect to '\" + fullUrl + \"' to get names. HTTP code: \" + res.Status)\n\t\treturn Books{}, ErrServerError\n\t}\n\n\t// parse body\n\tbody, readErr := ioutil.ReadAll(res.Body)\n\tif readErr != nil {\n\t\tfmt.Println(\"Unable to parse response from /book\")\n\t\treturn Books{}, ErrServerError\n\t}\n\n\t// get books\n\tbooks := Books{}\n\tjsonErr := json.Unmarshal(body, &books)\n\tif jsonErr != nil {\n\t\tfmt.Println(\"Unable to unmarshall response from /book\")\n\t\treturn Books{}, ErrServerError\n\t}\n\n\treturn books, nil\n}\n"
},
{
"alpha_fraction": 0.6555555462837219,
"alphanum_fraction": 0.6555555462837219,
"avg_line_length": 17.947368621826172,
"blob_id": "6708376bac28db1984e66442b6f1d0d6272497f0",
"content_id": "bc948c56a3635af931852b04356a89c998d3e1d1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Java",
"length_bytes": 720,
"license_type": "no_license",
"max_line_length": 61,
"num_lines": 38,
"path": "/images.java/query/src/main/java/wpff/openlibrary/beans/WorksBean.java",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package wpff.openlibrary.beans;\n\nimport com.fasterxml.jackson.annotation.JsonIgnoreProperties;\n\n/**\n * JSON data from openlibrary from a /works/ID query\n */\n@JsonIgnoreProperties(ignoreUnknown=true)\npublic class WorksBean {\n \n public String getTextDescription() {\n if (this.description != null) {\n return this.description.getValue();\n }\n else {\n return \"\";\n }\n }\n \n public Description description;\n \n /**\n * @return the description\n */\n public Description getDescription() {\n return description;\n }\n\n /**\n * @param description the description to set\n */\n public void setDescription(Description description) {\n this.description = description;\n }\n\n \n \n}\n"
},
{
"alpha_fraction": 0.6614946126937866,
"alphanum_fraction": 0.6662444472312927,
"avg_line_length": 23.671875,
"blob_id": "f17f36bdc82d1d6e26abd856c75e29dfce9e5a42",
"content_id": "a9e4050a7b5e55ee276c8ae3ce97b25ee4e0b446",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 3158,
"license_type": "no_license",
"max_line_length": 100,
"num_lines": 128,
"path": "/images/review/src/github.com/hipposareevil/review/other_services.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\n///////////////////\n// Set of functions to make calls to other services\n\nimport (\n\t\"encoding/json\"\n\t\"errors\"\n\t\"fmt\"\n\t\"io/ioutil\"\n\t\"net/http\"\n\t\"strconv\"\n\t\"strings\"\n\t\"time\"\n)\n\n///////////\n// Query the /user endpoint for all user names and IDs\n//\nfunc getUsers(bearer string) (Users, error) {\n\tfullUrl := \"http://books.user:8080/user\"\n\tfmt.Println(\"Making user query with url '\", fullUrl, \"'\")\n\n\t// Make request to other service\n\tbody, err := makeRequest(bearer, fullUrl)\n\tif err != nil {\n\t\tfmt.Println(\"Unable to make request to /users: \", err)\n\t\treturn Users{}, err\n\t}\n\n\t// parse Users response\n\tusers := Users{}\n\tjsonErr := json.Unmarshal(body, &users)\n\tif jsonErr != nil {\n\t\tfmt.Println(\"Unable to unmarshall response from /users:\", jsonErr)\n\t\treturn Users{}, err\n\t}\n\n\treturn users, nil\n}\n\n/////////\n// Query the /user_book endpoint to get a userbook for a\n// specified user and book\n//\nfunc getUserBook(bearer string, userId int, bookId int) (UserBook, error) {\n\tuserIdAsString := strconv.Itoa(userId)\n\tbookIdAsString := strconv.Itoa(bookId)\n\n\tfullUrl := \"http://books.user_book:8080/user_book/\" + userIdAsString + \"?book_id=\" + bookIdAsString\n\tfmt.Println(\"Making user query with url '\", fullUrl, \"'\")\n\n\t// Make request to other service\n\tbody, err := makeRequest(bearer, fullUrl)\n\tif err != nil {\n\t\tfmt.Println(\"Unable to make request to /user_book: \", err)\n\t\treturn UserBook{}, err\n\t}\n\n\t// parse UserBooks response\n\tuserBooks := UserBooks{}\n\tjsonErr := json.Unmarshal(body, &userBooks)\n\tif jsonErr != nil {\n\t\tfmt.Println(\"Unable to unmarshall response from /user_books:\", jsonErr)\n\t\treturn UserBook{}, err\n\t}\n\n\tnumData := len(userBooks.Data)\n\tuserBook := UserBook{}\n\n\tif numData > 0 {\n\t\tuserBook = userBooks.Data[0]\n\t\treturn userBook, nil\n\t} else {\n\t\tfmt.Println(\"No userbooks for user \", userId, \" for book \", bookId)\n\t\treturn userBook, errors.New(\"No userbook for user \")\n\t}\n}\n\n// Perform the boilerplate portion of making an http request\n//\n// param:\n// bearer\n// URL to query\n//\nfunc makeRequest(bearer string, queryUrl string) ([]byte, error) {\n\t///////////////\n\t// make client\n\tsuperClient := http.Client{\n\t\tTimeout: time.Second * 2, // Maximum of 2 secs\n\t}\n\n\t// make request object\n\treq, err := http.NewRequest(http.MethodGet, queryUrl, nil)\n\tif err != nil {\n\t\tfmt.Println(\"Unable to make new request to url: \", queryUrl, \" error: \", err)\n\t\treturn nil, err\n\t}\n\n\t// set headers\n\treq.Header.Set(\"User-Agent\", \"review-service-client\")\n\treq.Header.Set(\"accept\", \"application/json\")\n\treq.Header.Set(\"content-type\", \"application/json\")\n\treq.Header.Set(\"authorization\", \"Bearer \"+bearer)\n\n\t// send request\n\tres, getErr := superClient.Do(req)\n\tif getErr != nil {\n\t\tfmt.Println(\"Unable to make send request to url: \", queryUrl, \" error: \", getErr)\n\t\treturn nil, err\n\t}\n\n\t// Check status code\n\tif !strings.Contains(res.Status, \"200\") {\n\t\tfmt.Println(\"Unable to connect to url: \", queryUrl, \" HTTP status: \", res.Status)\n\t\treturn nil, err\n\t}\n\n\t// parse body\n\tbody, readErr := ioutil.ReadAll(res.Body)\n\tif readErr != nil {\n\t\tfmt.Println(\"Unable to parse response from url: \", queryUrl, \" :\", readErr)\n\t\treturn nil, err\n\t}\n\n\treturn body, nil\n\n}\n"
},
{
"alpha_fraction": 0.6641318202018738,
"alphanum_fraction": 0.6641318202018738,
"avg_line_length": 18.725000381469727,
"blob_id": "8bb225ae6ac89bf12187403a3a44e9d9b334cd43",
"content_id": "166de7d08df3d0c8ecf637843647864cffad3b1d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Java",
"length_bytes": 3156,
"license_type": "no_license",
"max_line_length": 103,
"num_lines": 160,
"path": "/images.java/user_book/src/main/java/com/wpff/core/DatabaseUserBook.java",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package com.wpff.core;\n\nimport java.util.Date;\nimport java.util.Objects;\n\nimport javax.persistence.Column;\nimport javax.persistence.Entity;\nimport javax.persistence.GeneratedValue;\nimport javax.persistence.GenerationType;\nimport javax.persistence.Id;\nimport javax.persistence.Table;\n\n/**\n * DatabaseUserBook represents a book for a user in the database. It contains:\n *\n * Tags are not stored in this class/table.\n */\n@Entity\n@Table(name = \"userbook\")\npublic class DatabaseUserBook {\n\n\t/**\n\t * UserBook ID.\n\t */\n\t@Id\n\t@GeneratedValue(strategy = GenerationType.IDENTITY)\n\t@Column(name = \"user_book_id\", unique = true, nullable = false)\n\tprivate int userBookId;\n\n\t/**\n\t * ID of user\n\t */\n\t@Column(name = \"user_id\", unique = false, nullable = false)\n\tprivate int user_id;\n\n\t/**\n\t * ID of book\n\t */\n\t@Column(name = \"book_id\", unique = false, nullable = false)\n\tprivate int book_id;\n\n\t/**\n\t * Rating of book. false=down/true=up\n\t */\n\t@Column(name = \"rating\", unique = false, nullable = false)\n\tprivate boolean rating;\n\n\t/**\n\t * Date user book was added\n\t */\n\t@Column(name = \"date_added\", nullable = true)\n\tprivate Date date_added;\n\n\t/**\n\t * Default constructor\n\t */\n\tpublic DatabaseUserBook() {\n\t}\n\n\tpublic DatabaseUserBook(int id, int user_id, int bookId, boolean rating) {\n\t\tthis.userBookId = id;\n\t\tthis.user_id = user_id;\n\t\tthis.book_id = bookId;\n\t\tthis.rating = rating;\n\t}\n\n\tpublic String toString() {\n\t\tStringBuilder string = new StringBuilder();\n\t\tstring.append(\"UserBook[\");\n\t\tstring.append(\"id=\" + userBookId);\n\t\tstring.append(\", user_id=\" + user_id);\n\t\tstring.append(\", bookId=\" + book_id);\n\t\tstring.append(\", rating=\" + rating);\n\t\tstring.append(\"]\");\n\n\t\treturn string.toString();\n\t}\n\n\tpublic void setUserBookId(int id) {\n\t\tthis.userBookId = id;\n\t}\n\n\tpublic int getUserBookId() {\n\t\treturn this.userBookId;\n\t}\n\n\tpublic void setBookId(int bookId) {\n\t\tthis.book_id = bookId;\n\t}\n\n\tpublic int getBookId() {\n\t\treturn this.book_id;\n\t}\n\n\tpublic void setUserId(int user_id) {\n\t\tthis.user_id = user_id;\n\t}\n\n\tpublic int getUserId() {\n\t\treturn this.user_id;\n\t}\n\n\t\n\n\tpublic void setRating(boolean rating) {\n\t\tthis.rating = rating;\n\t}\n\n\tpublic boolean getRating() {\n\t\treturn rating;\n\t}\n\n\t@Override\n\tpublic boolean equals(Object o) {\n\t\tif (this == o) {\n\t\t\treturn true;\n\t\t}\n\t\tif (!(o instanceof DatabaseUserBook)) {\n\t\t\treturn false;\n\t\t}\n\n\t\tfinal DatabaseUserBook that = (DatabaseUserBook) o;\n\n\t\treturn Objects.equals(this.userBookId, that.userBookId) && Objects.equals(this.user_id, that.user_id)\n\t\t\t\t&& Objects.equals(this.book_id, that.book_id) && Objects.equals(this.rating, that.rating);\n\t}\n\n\t@Override\n\tpublic int hashCode() {\n\t\treturn Objects.hash(this.userBookId, this.book_id, this.user_id, this.rating);\n\t}\n\n /**\n * @return the user_id\n */\n public int getUser_id() {\n return user_id;\n }\n\n /**\n * @param user_id the user_id to set\n */\n public void setUser_id(int user_id) {\n this.user_id = user_id;\n }\n\n /**\n * @return the dateAdded\n */\n public Date getDateAdded() {\n return date_added;\n }\n\n /**\n * @param dateAdded the dateAdded to set\n */\n public void setDateAdded(Date dateAdded) {\n this.date_added = dateAdded;\n }\n}\n"
},
{
"alpha_fraction": 0.7516985535621643,
"alphanum_fraction": 0.7591105699539185,
"avg_line_length": 42.702701568603516,
"blob_id": "57063adf94892b411c586ddf9431f07196189216",
"content_id": "b8ae5cde3b3042facabf697c601c79e4f4b68653",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1619,
"license_type": "no_license",
"max_line_length": 324,
"num_lines": 37,
"path": "/images.java/user/README.md",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "# User Microservice\n\n## Introduction\n\n**/user** is a microservice for creating, listing, updating and deleting Users from the database. The *admin* user is available to manage subsequent users. (It's recommended to change that *admin* users password.)\n\nExpected usage:\n* *admin* user creates user Bob with password \"s3cret\".\n* Actual user Bob makes REST call to */authorize* with {\"name\":\"bob\",\"password\",\"s3cret\"} and recieves authorization token.\n* That token is inserted into the HTTP Headers for calls to the other endpoints.\n\n## Supported calls\nThe list of supported calls and their documentation are available via the swagger endpoint. This runs on localhost:8080/swagger/ when the application is up.\n\n## Fields for a User\nA User entry has the following fields:\n\nField | Purpose\n--- | ---\nid | Unique ID of the user. This is used to manage the user, obtain authorization, and link books to the user.\nname | Name of the user. Example: \"Bob D.\"\n\n\n## Authorization\nIt is necessary to authorize all REST calls to this endpoint. This is done by obtaining an authorization token from the */authorize* endpoint and adding it to the HTTP headees with the key *AUTHORIZATION*. See [/authorize](https://github.com/hipposareevil/books/blob/master/images/authorize/README.md) for more information.\n\n\n## Dropwizard Application\nThe application listens on port 8080.\n\n## Docker \nThe Docker container will expose port 8080 to other containers on the *booknet* Docker network.\n\n## Libraries used\n\n* [dropwizard](http://www.dropwizard.io/) for microservice framework.\n* [maven](https://maven.apache.org/) for building.\n\n\n"
},
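A sketch of the first step in the expected usage above — the *admin* user creating user Bob. The JSON fields mirror the user service's create-user request (name, password, userGroup); the host, the example token, and the `userGroup` value are illustrative assumptions.

```go
package main

import (
	"bytes"
	"fmt"
	"net/http"
)

func main() {
	// Assumed to come from /authorize/token; 'Bearer qwerty-1234-asdf-9876'
	// is the placeholder form used elsewhere in these READMEs.
	adminToken := "Bearer qwerty-1234-asdf-9876"

	// Field names mirror the user service's create request.
	payload := bytes.NewBufferString(
		`{"name":"bob","password":"s3cret","userGroup":"user"}`)

	req, _ := http.NewRequest(http.MethodPost,
		"http://localhost:8080/user", payload)
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", adminToken)

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println("create user:", resp.Status)
}
```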
{
"alpha_fraction": 0.7777777910232544,
"alphanum_fraction": 0.7777777910232544,
"avg_line_length": 35,
"blob_id": "3d776e1bfd901f3bb132f5825aded617c81c6767",
"content_id": "62350c754b4e0c1799ddfaf2b6dc6eee144a1786",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Gradle",
"length_bytes": 36,
"license_type": "no_license",
"max_line_length": 35,
"num_lines": 1,
"path": "/images.java/mybooks_common/settings.gradle",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "rootProject.name = 'mybooks_common'\n"
},
{
"alpha_fraction": 0.6650290489196777,
"alphanum_fraction": 0.6668155193328857,
"avg_line_length": 21.846939086914062,
"blob_id": "3d513810a7276a57d6c1d5f58b1c03f0ac9662f7",
"content_id": "cba1fb429c7f36917da442934823753622d15500",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 4478,
"license_type": "no_license",
"max_line_length": 96,
"num_lines": 196,
"path": "/images/query/src/github.com/hipposareevil/query/transport.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\n// Transport module\n//\n// Contains:\n// - endpoint creation\n// - encode responses to client\n// - decode client requests\n\nimport (\n\t\"context\"\n\t\"encoding/json\"\n\t\"net/http\"\n\t\"strconv\"\n\n\t\"github.com/go-kit/kit/endpoint\"\n)\n\n//////////////////////////////////////////////////////////\n//\n// Create endpoints\n\n// GET /query/author\nfunc makeQueryAuthorEndpoint(svc QueryService) endpoint.Endpoint {\n\n\treturn func(ctx context.Context, request interface{}) (interface{}, error) {\n\t\t// convert request into a queryAuthorRequest\n\t\treq := request.(queryAuthorRequest)\n\n\t\t// call actual service with data from the request\n\t\tauthors, err := svc.QueryAuthor(req.Author, req.Offset, req.Limit)\n\t\treturn authorsResponse{\n\t\t\tData: authors,\n\t\t\tErr: err,\n\t\t}, nil\n\t}\n}\n\n// GET /query/title\nfunc makeQueryTitleEndpoint(svc QueryService) endpoint.Endpoint {\n\n\treturn func(ctx context.Context, request interface{}) (interface{}, error) {\n\t\t// convert request into a queryTitleRequest\n\t\treq := request.(queryTitleRequest)\n\n\t\t// call actual service with data from the request\n\t\ttitles, err := svc.QueryTitle(req.Author,\n\t\t\treq.Title,\n\t\t\treq.Isbn,\n\t\t\treq.Offset, req.Limit)\n\t\treturn titlesResponse{\n\t\t\tData: titles,\n\t\t\tErr: err,\n\t\t}, nil\n\t}\n}\n\n//////////////////////////////////////////////////////////\n//\n// Decode\n\n// Create a queryAuthorRequest\nfunc decodeQueryAuthorRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\trealOffset, realLimit := parseOffsetAndLimit(r)\n\n\t// Get author name\n\tr.ParseForm()\n\tvalues := r.Form\n\n\tauthorName := values.Get(\"author\")\n\n\tvar request queryAuthorRequest\n\trequest = queryAuthorRequest{\n\t\tOffset: realOffset,\n\t\tLimit: realLimit,\n\t\tAuthor: authorName,\n\t}\n\n\treturn request, nil\n}\n\n// Create a queryTitleRequest\nfunc decodeQueryTitleRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\trealOffset, realLimit := parseOffsetAndLimit(r)\n\n\t// Get params\n\tr.ParseForm()\n\tvalues := r.Form\n\n\tauthorName := values.Get(\"author\")\n\ttitle := values.Get(\"title\")\n\tisbn := values.Get(\"isbn\")\n\n\tvar request queryTitleRequest\n\trequest = queryTitleRequest{\n\t\tOffset: realOffset,\n\t\tLimit: realLimit,\n\t\tAuthor: authorName,\n\t\tIsbn: isbn,\n\t\tTitle: title,\n\t}\n\n\treturn request, nil\n}\n\n// Decode the common parts of a request:\n// * offset\n// * limit\n//\n// Instead of erroring out, it will return defaults\n//\n// Returns the two values in order: offset & limit\nfunc parseOffsetAndLimit(r *http.Request) (int, int) {\n\t///////////////////\n\t// Parse parameters\n\tr.ParseForm()\n\tvalues := r.Form\n\n\t// Get values from the form, where 'offset' & 'limit' are parameters\n\tvar realOffset int\n\tvar realLimit int\n\n\t// Offset, use a default of 0\n\toffset := values.Get(\"offset\")\n\tif offset != \"\" {\n\t\trealOffset, _ = strconv.Atoi(offset)\n\t} else {\n\t\trealOffset = 0\n\t}\n\n\t// Limit, set a default if it doesn't exist\n\tlimit := values.Get(\"limit\")\n\tif limit != \"\" {\n\t\trealLimit, _ = strconv.Atoi(limit)\n\t} else {\n\t\t// default to get 20\n\t\trealLimit = 20\n\t}\n\n\treturn realOffset, realLimit\n}\n\n//////////////////////////////////////////////////////////\n//\n// Encode responses to client\n\n// The response can/should be of type errorer and thus can be cast to check if there is an error\nfunc encodeResponse(ctx context.Context, w http.ResponseWriter, 
response interface{}) error {\n\t// Set JSON type\n\tw.Header().Set(\"Content-Type\", \"application/json; charset=utf-8\")\n\n\t// Check error\n\tif e, ok := response.(errorer); ok {\n\t\t// This is a errorer class, now check for error\n\t\tif err := e.error(); err != nil {\n\t\t\tencodeError(ctx, e.error(), w)\n\t\t\treturn nil\n\t\t}\n\t}\n\n\t// cast to dataHolder to get Data, otherwise just encode the resposne\n\tif holder, ok := response.(dataHolder); ok {\n\t\treturn json.NewEncoder(w).Encode(holder.getData())\n\t} else {\n\t\treturn json.NewEncoder(w).Encode(response)\n\t}\n}\n\n// Write the incoming err into the response writer\nfunc encodeError(_ context.Context, err error, w http.ResponseWriter) {\n\tif err == nil {\n\t\tpanic(\"encodeError with nil error\")\n\t}\n\n\tw.Header().Set(\"Content-Type\", \"application/json; charset=utf-8\")\n\n\t// Write actual error code\n\tcode := codeFrom(err)\n\tw.WriteHeader(code)\n\n\t// write out the error message\n\tjson.NewEncoder(w).Encode(map[string]interface{}{\n\t\t\"code\": code,\n\t\t\"message\": err.Error(),\n\t})\n}\n\n// Determine the HTTP error code from the incoming error 'err'\nfunc codeFrom(err error) int {\n\tswitch err {\n\tcase ErrUnauthorized:\n\t\treturn http.StatusUnauthorized\n\tdefault:\n\t\treturn http.StatusInternalServerError\n\t}\n}\n"
},
{
"alpha_fraction": 0.7541766166687012,
"alphanum_fraction": 0.758949875831604,
"avg_line_length": 40.79999923706055,
"blob_id": "ab4fa38bba1e453a052dd879cdd101a3c42db615",
"content_id": "86b26ff03c0541ef7d92bc9b42f44a6e81d177ee",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 838,
"license_type": "no_license",
"max_line_length": 199,
"num_lines": 20,
"path": "/images/query/README.md",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "# Query Microservice\n\n## Introduction\n\n**/query** is a microservice for looking up author names and book titles. This endpoint is not authenticated and thus doesn't need an Authorization HTTP header.\n\nThis uses the [openlibrary api](https://openlibrary.org/developers/api) to query for authors and title information.\n\n\n## Go-kit Application\nThis uses go-kit for the framework and dep for the management of the dependencies (kindof like maven). A *vendor* directory will be created by dep in the *src/github.com/hipposareevil* sub-directory.\n\n## Docker \nThe Docker container will expose port 8080 to other containers on the *booknet* Docker network.\n\n## Libraries used\n\n* [go](https://golang.org/)\n* [go-kit](https://github.com/go-kit/kit) - microservice framework.\n* [dep](https://github.com/golang/dep) - depdendency management tool.\n\n\n"
},
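Since */query* is unauthenticated, it can be exercised with a plain GET. A minimal sketch, assuming a local deployment on port 8080; the `author`, `offset` and `limit` parameters match the query service's transport code elsewhere in this repository.

```go
package main

import (
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
)

func main() {
	// Build /query/author?author=...&offset=...&limit=...
	// No Authorization header is needed for this endpoint.
	params := url.Values{}
	params.Set("author", "Neal Stephenson")
	params.Set("offset", "0")
	params.Set("limit", "5")

	resp, err := http.Get("http://localhost:8080/query/author?" + params.Encode())
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := ioutil.ReadAll(resp.Body)
	fmt.Println(resp.Status, string(body))
}
```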
{
"alpha_fraction": 0.6648044586181641,
"alphanum_fraction": 0.6703910827636719,
"avg_line_length": 28.83333396911621,
"blob_id": "521d306afd4645c881ba3b4c283989d8370c42c5",
"content_id": "00771b4615b9d6479b82c0e3f2def5b2c7f39aa7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 179,
"license_type": "no_license",
"max_line_length": 69,
"num_lines": 6,
"path": "/images/frontend/build.sh",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "#!/bin/bash\n\n# our real directory (so this can be called from outside directories)\nour_directory=\"$( cd \"$( dirname \"${BASH_SOURCE[0]}\" )\" && pwd )\"\n\n$our_directory/build_prod.sh\n"
},
{
"alpha_fraction": 0.6901840567588806,
"alphanum_fraction": 0.7009202241897583,
"avg_line_length": 28.636363983154297,
"blob_id": "d8da8fc9b52429540f0b9c3b0a9d65440c6023fd",
"content_id": "fa227f895f9762ba38d5a23d4d98306f8a713b2e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Dockerfile",
"length_bytes": 652,
"license_type": "no_license",
"max_line_length": 97,
"num_lines": 22,
"path": "/images.java/query/Dockerfile",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "FROM openjdk:8-jdk-alpine\n\n# Args passed in via 'docker build'\n# Used by the LABELs\nARG BUILD_TIME\nARG VERSION\n\n# setup env\nRUN alias ll='ls -la' && \\\n mkdir -p /opt/docker\n\nCOPY build/libs/query-1.0.jar /opt/docker\n\n# Putting LABEL last so we can re-use the preceding caching layers\nLABEL org.label-schema.build-date=\"$BUILD_TIME\" \\\n org.label-schema.vendor=\"github.com/hipposareevil\" \\\n org.label-schema.version=\"$VERSION\" \\\n org.label-schema.description=\"Microservice for querying openlibrary for books & authors.\" \\\n org.label-schema.name=\"books.query\" \n\n\nENTRYPOINT [\"java\", \"-Xmx64m\", \"-jar\", \"/opt/docker/query-1.0.jar\"]\n"
},
{
"alpha_fraction": 0.6429961323738098,
"alphanum_fraction": 0.6429961323738098,
"avg_line_length": 19.979591369628906,
"blob_id": "3f8b1ff2583dc3ca4250918801c69ea4259c3900",
"content_id": "04e9d140ac5aad6277bf8080e86bbeca75c47793",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 1028,
"license_type": "no_license",
"max_line_length": 85,
"num_lines": 49,
"path": "/images/authorization/src/github.com/hipposareevil/authorization/structures.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\n///////////////////////\n// Structures\n///////////////////////\n\n////////////////\n// DB structure\ntype User struct {\n\tUserId int\n\tName string\n\tUserGroup string\n\tPassword string\n}\n\n////////////////\n// Structures used to send data from client to our server\n\n// Values used to create a token\n// Used in POST\ntype Credentials struct {\n\tName string `json:\"name\"`\n\tPassword string `json:\"password\"`\n}\n\n////////////////\n// Structures sent to client\n\n// Authorization JSON structure sent back to client\ntype Authorization struct {\n\tToken string `json:\"token\"`\n\tUserId int `json:\"userId\"`\n\tGroupName string `json:\"groupName\"`\n}\n\n/////////////\n// Requests used internally\n// They are created by transport.decode and passed along to transport.makeXXXEndpoint\n\n// GET request to verify header is OK\ntype authorizationRequest struct {\n\tBearer string `json:\"bearer\"`\n}\n\n// POST request to create token\ntype createTokenRequest struct {\n\tName string `json:\"name\"`\n\tPassword string `json:\"password\"`\n}\n"
},
{
"alpha_fraction": 0.6657129526138306,
"alphanum_fraction": 0.6709585189819336,
"avg_line_length": 23.67058753967285,
"blob_id": "3928c9b375e26af248c08ebad172d163f1c06158",
"content_id": "f41e823e91bb8ced9fc1b17c62cd5400756a99d8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 2097,
"license_type": "no_license",
"max_line_length": 92,
"num_lines": 85,
"path": "/images/review/src/github.com/hipposareevil/review/main.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\n// Main application\n//\n// This will create the databases, router, static files\n// and wire everything together\n\nimport (\n\t\"fmt\"\n\t\"net/http\"\n\t\"os\"\n\n\t// redis\n\t\"github.com/mediocregopher/radix.v2/pool\"\n\n\t// gorilla routing\n\t\"github.com/gorilla/mux\"\n\n\t\"github.com/go-kit/kit/log\"\n\thttptransport \"github.com/go-kit/kit/transport/http\"\n)\n\n// Main\nfunc main() {\n\tlogger := log.NewLogfmtLogger(os.Stderr)\n\n\t/////////////////\n\t// Make redis pool\n\tredisPool, err := pool.New(\"tcp\", \"books.token_db:6379\", 10)\n\tif err != nil {\n\t\tfmt.Println(\"Got error when making connection to redis: \", err)\n\t}\n\n\t///////////////////\n\t// create services and endpoints\n\n\t/////////\n\t// ROUTER\n\trouter := mux.NewRouter()\n\t// Make gorilla be router for everything\n\thttp.Handle(\"/\", router)\n\n\t/////////////////\n\t// Swagger static html file\n\thtmlDir := \"/html\"\n\n\t// Create server for swagger file\n\tfs := http.FileServer(http.Dir(htmlDir))\n\trouter.PathPrefix(\"/swagger.yaml\").Handler(http.StripPrefix(\"/\", fs))\n\n\t///////////////\n\t// 'review' service\n\tvar reviewSvc ReviewService\n\treviewSvc = reviewService{}\n\n\t// Set up the endpoints on our service\n\t//\n\t// Note: the Authentication middleware is done on each endpoint\n\t// individually so we can tightly control each one as some\n\t// care about only accepting 'admin' group.\n\n\t////////////////\n\t// Endpoints\n\n\t//////\n\t// GET /review/<book_id>\n\treviewsEndpoint := makeGetReviewsEndpoint(reviewSvc)\n\tbaseReviewsHandler := httptransport.NewServer(\n\t\treviewsEndpoint,\n\t\tdecodeGetReviewsRequest,\n\t\tencodeResponse,\n\t)\n\t// Add middleware to authenticate the endpoint.\n\t// first parameter: When true, the authenticated user's ID must match the userid in the url\n\treviewsHandler := Authenticate(false, redisPool, baseReviewsHandler)\n\t// The id is used in transport.go to grab the variable 'book_id' from the path\n\trouter.Methods(\"GET\").Path(\"/review/{book_id}\").Handler(reviewsHandler)\n\n\t//////////////\n\t// Start server\n\taddr := \":8080\"\n\tlogger.Log(\"msg\", \"HTTP\", \"addr\", addr)\n\tfmt.Println(\"reviews service up on \" + addr)\n\tlogger.Log(\"err\", http.ListenAndServe(addr, nil))\n}\n"
},
{
"alpha_fraction": 0.6811594367027283,
"alphanum_fraction": 0.6811594367027283,
"avg_line_length": 18.58108139038086,
"blob_id": "3dd692c3af54002cd340c8736367f3b2df6e6b80",
"content_id": "5a319a524ae2bf71cf4c1199558011b91c9c427d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 1449,
"license_type": "no_license",
"max_line_length": 55,
"num_lines": 74,
"path": "/images/tag/src/github.com/hipposareevil/tag/endpoints.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\n// Base for all responses\ntype errorer interface {\n\terror() error\n}\n\n// interface for holding data\ntype dataHolder interface {\n\t// Get the data\n\tgetData() interface{}\n}\n\n////////////////\n// Responses are passed to 'transport.encodeResponse'\n\n////////////////////\n/// TAGS\n// response for tags (vs. single tag)\ntype tagsResponse struct {\n\tData Tags `json:\"all,omitempty\"`\n\tErr error `json:\"err,omitempty\"`\n}\n\n// tagsResponse.error\nfunc (theResponse tagsResponse) error() error {\n\treturn theResponse.Err\n}\n\n// tagsResponse.getData\nfunc (theResponse tagsResponse) getData() interface{} {\n\treturn theResponse.Data\n}\n\n////////////////////\n/// TAG (single)\n// response for tag (single)\ntype tagResponse struct {\n\tData Tag `json:\"all,omitempty\"`\n\tErr error `json:\"err,omitempty\"`\n}\n\n// tagResponse.error\nfunc (theResponse tagResponse) error() error {\n\treturn theResponse.Err\n}\n\n// tagResponse.getData\nfunc (theResponse tagResponse) getData() interface{} {\n\treturn theResponse.Data\n}\n\n////////////////////\n/// PUT TAG\ntype putTagResponse struct {\n\tErr error `json:\"err,omitempty\"`\n}\n\n// deleteTagResponse.error\nfunc (theResponse putTagResponse) error() error {\n\treturn theResponse.Err\n}\n\n////////////////////\n/// DELETE TAG (single)\n// response for tag (single)\ntype deleteTagResponse struct {\n\tErr error `json:\"err,omitempty\"`\n}\n\n// deleteTagResponse.error\nfunc (theResponse deleteTagResponse) error() error {\n\treturn theResponse.Err\n}\n"
},
{
"alpha_fraction": 0.48320311307907104,
"alphanum_fraction": 0.4945312440395355,
"avg_line_length": 16.902097702026367,
"blob_id": "a728a1febe85625b64eb543e787993a5ee897409",
"content_id": "59a16ac7954864eb8ae22d78b4ccbee22cdb700d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 2560,
"license_type": "no_license",
"max_line_length": 63,
"num_lines": 143,
"path": "/test/timer_test.sh",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "#!/bin/bash\n\n##################\n#\n# Test for multiple calls for time\n#\n#\n#################\n\nroot_dir=\"$(cd -P \"$(dirname \"${BASH_SOURCE[0]}\")\" && pwd)\"\n\ntrap \"exit 1\" TERM\nexport TOP_PID=$$\n\n\n. ${root_dir}/author.sh\n. ${root_dir}/user.sh\n. ${root_dir}/util.sh\n. ${root_dir}/user_book.sh\n. ${root_dir}/tags.sh\n. ${root_dir}/book.sh\n\n\n\n########\n# Set up variables\n#\n########\ninitialize_variables() {\n TOKEN_FILE=auth.token\n\n rm -f $TOKEN_FILE\n\n ROOT_URL=\"http://localhost:8080\"\n ADMIN_USER=\"admin\"\n ADMIN_PASSWORD=\"admin\"\n}\n\n######\n# Error out\n######\n######\nerror() {\n >&2 echo \"!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\"\n >&2 echo \"$@\"\n >&2 echo \"!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\"\n kill -s TERM $TOP_PID\n}\n\nlogit() {\n >&2 echo \"[$@]\"\n}\n\n\n##########\n# URL encode a string\n#\n##########\nurl_encode() {\n what=$1\n result=$(python ./encode.py \"$what\")\n echo \"$result\"\n}\n\n######\n# Authorize\n#\n######\nauthorize() {\n if [ ! -e $TOKEN_FILE ]; then\nread -r -d '' data <<EOF\n{\n\"name\": \"${ADMIN_USER}\",\n\"password\":\"${ADMIN_PASSWORD}\"\n}\nEOF\n\n token=$(curl -s -X POST \\\n ${ROOT_URL}/authorize/token \\\n -H 'cache-control: no-cache' \\\n -H 'content-type: application/json' \\\n -d \"${data}\"\n )\n\n echo \"$token\" > $TOKEN_FILE\n fi\n\n\n # get variables\n BEARER=$(<$TOKEN_FILE jq -r '.token')\n if [ -z \"$BEARER\" ] || [[ \"$BEARER\" == \"null\" ]]; then\n error \"Couldn't get bearer from authentication token.\"\n fi\n USER_ID=$(<$TOKEN_FILE jq -r '.userId')\n if [ -z \"$USER_ID\" ] || [[ \"$USER_ID\" == \"null\" ]]; then\n error \"Couldn't get user ID from authentication token.\"\n fi\n}\n\n# Test book query\ntest_books() {\n all_books=$(get_all_books_with_offset_limit 0 1000)\n numBooks=$(echo $all_books | jq -r '.data | length')\n echo \"Num books: $numBooks\"\n}\n\n# User books\ntest_user_books() {\n USER_ID=2\n all_books=$(get_all_user_books_with_offset_limit 0 1000)\n numBooks=$(echo $all_books | jq -r '.data | length')\n echo \"Num User books: $numBooks\"\n}\n\n# Test author query\ntest_authors() {\n all_authors=$(get_all_authors_with_offset_limit 0 1000)\n num=$(echo $all_authors | jq -r '.data | length')\n echo \"Num Authors: $num\"\n}\n\n\n#################\n# Main function.\n# \n#################\nmain() {\n # Initialize\n initialize_variables\n\n # authorize\n authorize\n\n for i in `seq 1 3`;\n do\n# test_books\n test_user_books\n done \n \n}\n\n# main\nmain \"$@\"\n"
},
{
"alpha_fraction": 0.6460905075073242,
"alphanum_fraction": 0.6460905075073242,
"avg_line_length": 19.94827651977539,
"blob_id": "e783417f7e30d9793b2da9c9999a9b5c68232187",
"content_id": "a8058f15d3ddc7b1c2c7c70fcd15c48eeaf238db",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 1215,
"license_type": "no_license",
"max_line_length": 39,
"num_lines": 58,
"path": "/images/user/src/github.com/hipposareevil/user/structures.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\n///////////////////\n// Structures\n\n// GET request for users, contains:\n// - offset\n// - limit\ntype getAllUsersRequest struct {\n\tOffset int `json:\"offset\"`\n\tLimit int `json:\"limit\"`\n}\n\n// GET request for single user\n// - user_id\ntype getUserRequest struct {\n\tUserId int `json:\"user_id\"`\n}\n\n// DELETE request for single user\n// - user_id\ntype deleteUserRequest struct {\n\tUserId int `json:\"user_id\"`\n}\n\n// POST request to create user\ntype createUserRequest struct {\n\tName string `json:\"name\"`\n\tUserGroup string `json:\"userGroup\"`\n\tData string `json:\"data\"`\n\tPassword string `json:\"password\"`\n}\n\n// PUT request to update user\n// struct passed into service\ntype updateUserRequest struct {\n\tName string `json:\"name\"`\n\tUserGroup string `json:\"userGroup\"`\n\tData string `json:\"data\"`\n\tPassword string `json:\"password\"`\n\tId int `json:\"id,omitempty\"`\n}\n\n//// Response structures\n\ntype User struct {\n\tId int `json:\"id\"`\n\tName string `json:\"name\"`\n\tData string `json:\"data\"`\n\tUserGroup string `json:\"userGroup\"`\n}\n\ntype Users struct {\n\tOffset int `json:\"offset\"`\n\tLimit int `json:\"limit\"`\n\tTotal int `json:\"total\"`\n\tData []User `json:\"data\"`\n}\n"
},
{
"alpha_fraction": 0.6449963450431824,
"alphanum_fraction": 0.6457267999649048,
"avg_line_length": 17.5,
"blob_id": "b3b25567a5c630ee89cef3851252c8c16e068b96",
"content_id": "e0e19b4de0a51485b51bc2d5ad53175b04210d44",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 1369,
"license_type": "no_license",
"max_line_length": 65,
"num_lines": 74,
"path": "/images/user_book/src/github.com/hipposareevil/user_book/utils.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\nimport (\n\t\"strconv\"\n\t\"strings\"\n)\n\n////////\n// Return a untion of the two incoming arrays\n//\n//\nfunc Union(left []int, right []int) []int {\n\tvar result []int\n\n\tfor _, leftValue := range left {\n\t\t// See if leftValue is in right array\n\t\tif inArray(leftValue, right) {\n\t\t\tresult = append(result, leftValue)\n\t\t}\n\t}\n\n\treturn result\n}\n\nfunc inArray(valueToCheck int, array []int) bool {\n\tfor _, currentValue := range array {\n\t\tif currentValue == valueToCheck {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}\n\n////////////\n// Split a CSV string into array\nfunc splitCsvStringToArray(csv string) []string {\n\tif len(csv) == 0 {\n\t\treturn []string{}\n\t} else {\n\t\tarray := strings.Split(csv, \",\")\n\t\treturn array\n\t}\n}\n\n//////////\n// Convert string array to CSV string\nfunc convertArrayToCsv(array []string) string {\n\treturn strings.Join(array, \",\")\n}\n\n////////////\n// Convert incoming int array to CSV string\nfunc convertIntArrayToCsv(intArray []int) string {\n\ttempArray := make([]string, len(intArray))\n\tfor i, v := range intArray {\n\t\ttempArray[i] = strconv.Itoa(v)\n\t}\n\n\treturn strings.Join(tempArray, \",\")\n}\n\n////////////\n// Convert Tags structure into map of Tag objects indexed by name\n//\nfunc convertTagsJsonToArray(tags Tags) map[string]Tag {\n\tnewMap := make(map[string]Tag)\n\n\tfor _, item := range tags.Data {\n\t\tname := item.Name\n\t\tnewMap[name] = item\n\t}\n\n\treturn newMap\n}\n"
},
{
"alpha_fraction": 0.7114136219024658,
"alphanum_fraction": 0.7151832580566406,
"avg_line_length": 27.939393997192383,
"blob_id": "333c8822f427e99c2b8f0de134a622aa96672dd7",
"content_id": "eb302197becc397045fccda781fa65c434821650",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 4775,
"license_type": "no_license",
"max_line_length": 86,
"num_lines": 165,
"path": "/images/author/src/github.com/hipposareevil/author/main.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\n// Main application\n//\n// This will create the databases, router, static files\n// and wire everything together\n\nimport (\n\t\"fmt\"\n\t\"net/http\"\n\t\"os\"\n\n\t// mysql\n\t\"database/sql\"\n\t_ \"github.com/go-sql-driver/mysql\"\n\n\t// redis\n\t\"github.com/mediocregopher/radix.v2/pool\"\n\n\t// gorilla routing\n\t\"github.com/gorilla/mux\"\n\n\t\"time\"\n\n\t\"github.com/go-kit/kit/log\"\n\thttptransport \"github.com/go-kit/kit/transport/http\"\n)\n\n// Main\nfunc main() {\n\tlogger := log.NewLogfmtLogger(os.Stderr)\n\n\t/////////////////\n\t// Make redis pool\n\tredisPool, err := pool.New(\"tcp\", \"books.token_db:6379\", 10)\n\tif err != nil {\n\t\tfmt.Println(\"Got error when making connection to redis: \", err)\n\t}\n\n\t/////////////////\n\t// Make Mysql db connection\n\tdb, err := sql.Open(\"mysql\", \"booksuser:books@tcp(books.db:3306)/booksdatabase\")\n\n\t// if there is an error opening the connection, handle it\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\tdefer db.Close()\n\tdb.SetMaxIdleConns(0)\n\tdb.SetConnMaxLifetime(time.Second * 10)\n\n\t///////////////////\n\t// create services and endpoints\n\n\t/////////\n\t// ROUTER\n\trouter := mux.NewRouter()\n\t// Make gorilla be router for everything\n\thttp.Handle(\"/\", router)\n\n\t/////////////////\n\t// Swagger static html file\n\thtmlDir := \"/html\"\n\n\t// Create server for swagger file\n\tfs := http.FileServer(http.Dir(htmlDir))\n\trouter.PathPrefix(\"/swagger.yaml\").Handler(http.StripPrefix(\"/\", fs))\n\n\t///////////////\n\t// cache layer\n\tvar cache CacheLayer\n\tcache = cacheLayer{redisPool}\n\n\t// Clear the cache on startup\n\tcache.ClearAll(AUTHOR_CACHE)\n\n\t///////////////\n\t// 'author' service\n\tvar authorSvc AuthorService\n\tauthorSvc = authorService{db, cache}\n\n\t// Set up the endpoints on our service\n\t//\n\t// Note: the Authentication middleware is done on each endpoint\n\t// individually so we can tightly control each one as some\n\t// care about only accepting 'admin' group.\n\n\t////////////////\n\t// Endpoints\n\n\t//////\n\t// GET /author (all authors)\n\tauthorsEndpoint := makeGetAuthorsEndpoint(authorSvc)\n\tbaseAuthorsHandler := httptransport.NewServer(\n\t\tauthorsEndpoint,\n\t\tdecodeGetAllAuthorsRequest,\n\t\tencodeResponse,\n\t)\n\t// Add middleware to authenticate the endpoint.\n\t// first parameter denotes if only 'admin' group can access the endpoint.\n\tauthorsHandler := Authenticate(false, redisPool, baseAuthorsHandler)\n\trouter.Methods(\"GET\").Path(\"/author\").Handler(authorsHandler)\n\n\t//////\n\t// GET /author/<author_id>\n\tauthorEndpoint := makeGetAuthorEndpoint(authorSvc)\n\tbaseAuthorHandler := httptransport.NewServer(\n\t\tauthorEndpoint,\n\t\tdecodeGetAuthorRequest,\n\t\tencodeResponse,\n\t)\n\t// Add middleware to authenticate the endpoint.\n\t// first parameter denotes if only 'admin' group can access the endpoint.\n\tauthorHandler := Authenticate(false, redisPool, baseAuthorHandler)\n\t// 'author_id' is used in transport.go to grab the variable 'author_id' from the path\n\trouter.Methods(\"GET\").Path(\"/author/{author_id}\").Handler(authorHandler)\n\n\t//////\n\t// DELETE /author/<author_id>\n\tdeleteAuthorEndpoint := makeDeleteAuthorEndpoint(authorSvc)\n\tbaseDeleteAuthorHandler := httptransport.NewServer(\n\t\tdeleteAuthorEndpoint,\n\t\tdecodeDeleteAuthorRequest,\n\t\tencodeResponse,\n\t)\n\t// Add middleware to authenticate the endpoint.\n\t// first parameter denotes if only 'admin' group can access the 
endpoint.\n\tdeleteAuthorHandler := Authenticate(true, redisPool, baseDeleteAuthorHandler)\n\t// 'author_id' is used in transport.go to grab the variable 'author_id' from the path\n\trouter.Methods(\"DELETE\").Path(\"/author/{author_id}\").Handler(deleteAuthorHandler)\n\n\t//////\n\t// POST /author\n\tcreateAuthorEndpoint := makeCreateAuthorEndpoint(authorSvc)\n\tbaseCreateAuthorHandler := httptransport.NewServer(\n\t\tcreateAuthorEndpoint,\n\t\tdecodeCreateAuthorRequest,\n\t\tencodeResponse,\n\t)\n\t// Add middleware to authenticate the endpoint.\n\t// first parameter denotes if only 'admin' group can access the endpoint.\n\tcreateAuthorHandler := Authenticate(true, redisPool, baseCreateAuthorHandler)\n\trouter.Methods(\"POST\").Path(\"/author\").Handler(createAuthorHandler)\n\n\t//////\n\t// PUT /author/<author_id>\n\tupdateAuthorEndpoint := makeUpdateAuthorEndpoint(authorSvc)\n\tbaseUpdateAuthorHandler := httptransport.NewServer(\n\t\tupdateAuthorEndpoint,\n\t\tdecodeUpdateAuthorRequest,\n\t\tencodeResponse,\n\t)\n\t// Add middleware to authenticate the endpoint.\n\t// first parameter denotes if only 'admin' group can access the endpoint.\n\tupdateAuthorHandler := Authenticate(true, redisPool, baseUpdateAuthorHandler)\n\t// 'author_id' is used in transport.go to grab the variable 'author_id' from the path\n\trouter.Methods(\"PUT\").Path(\"/author/{author_id}\").Handler(updateAuthorHandler)\n\n\t//////////////\n\t// Start server\n\taddr := \":8080\"\n\tlogger.Log(\"msg\", \"HTTP\", \"addr\", addr)\n\tfmt.Println(\"author service up on \" + addr)\n\tlogger.Log(\"err\", http.ListenAndServe(addr, nil))\n}\n"
},
{
"alpha_fraction": 0.6721311211585999,
"alphanum_fraction": 0.6901639103889465,
"avg_line_length": 54.45454406738281,
"blob_id": "5a05b39d89b54e66a1596012c5ad6b707ed385ea",
"content_id": "aeb769a37663c71cff923672b1c116d3761f7558",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 610,
"license_type": "no_license",
"max_line_length": 201,
"num_lines": 11,
"path": "/database/README.md",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "# MySQL Database\n\nThis contains a preliminary SQL dump file that is used to bootstrap the database. It contains the *admin* user with initial password of *admin*. This can be updated via curl commands to */user/1*.\n\nProcess to update the *admin*'s password to 'super':\n\n```\n$> bearer=$( curl -X POST http://localhost:8080/authorize/token --header 'Content-Type: application/json' -d '{\"name\":\"admin\", \"password\":\"admin\"}')\n$> curl -X PUT http://localhost:8080/user/1 -H \"Authorization: ${bearer}\" -H \"Content-Type: application/json\" -d '{ \"id\": 1, \"name\":\"admin\", \"password\":\"super\"}'\n\n```\n"
},
{
"alpha_fraction": 0.7241379022598267,
"alphanum_fraction": 0.7413793206214905,
"avg_line_length": 28,
"blob_id": "7fe18235ed08d74cd05660abd822a3eab2e9925d",
"content_id": "ecaa976bf572805b2a772ee6e5eecdd37cb9d8a8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 58,
"license_type": "no_license",
"max_line_length": 32,
"num_lines": 2,
"path": "/test/encode.py",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "import sys, urllib as ul\nprint ul.quote_plus(sys.argv[1])\n"
},
{
"alpha_fraction": 0.6549707651138306,
"alphanum_fraction": 0.6633249521255493,
"avg_line_length": 20.567567825317383,
"blob_id": "8990f63357050a797660e9b1a491671ce47e80e0",
"content_id": "18699efcb02c136c4da2ae308551c60937936539",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 2394,
"license_type": "no_license",
"max_line_length": 81,
"num_lines": 111,
"path": "/images/authorization/src/github.com/hipposareevil/authorization/main.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\n// Main application\n//\n// This will create the databases, router, static files\n// and wire everything together\n\nimport (\n\t\"fmt\"\n\t\"net/http\"\n\t\"os\"\n\t\"time\"\n\n\t// mysql\n\t\"database/sql\"\n\t_ \"github.com/go-sql-driver/mysql\"\n\n\t// redis\n\t\"github.com/mediocregopher/radix.v2/pool\"\n\n\t// gorilla routing\n\t\"github.com/gorilla/mux\"\n\n\t\"github.com/go-kit/kit/log\"\n\thttptransport \"github.com/go-kit/kit/transport/http\"\n\n\t// bcrypt\n\t_ \"golang.org/x/crypto/bcrypt\"\n)\n\n// Main\nfunc main() {\n\tlogger := log.NewLogfmtLogger(os.Stderr)\n\n\t/////////////////\n\t// Make redis pool\n\tredisPool, err := pool.New(\"tcp\", \"token_db:6379\", 10)\n\tif err != nil {\n\t\tfmt.Println(\"Got error when making connection to redis: \", err)\n\t}\n\n\t/////////////////\n\t// Make Mysql db connection\n\tdb, err := sql.Open(\"mysql\", \"booksuser:books@tcp(books_db:3306)/booksdatabase\")\n\n\t// if there is an error opening the connection, handle it\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\tdefer db.Close()\n\tdb.SetMaxIdleConns(0)\n\tdb.SetConnMaxLifetime(time.Second * 10)\n\n\t///////////////////\n\t// create services and endpoints\n\n\t/////////\n\t// ROUTER\n\trouter := mux.NewRouter()\n\t// Make gorilla be router for everything\n\thttp.Handle(\"/\", router)\n\n\t/////////////////\n\t// Swagger static html file\n\thtmlDir := \"/html\"\n\n\t// Create server for swagger file\n\tfs := http.FileServer(http.Dir(htmlDir))\n\trouter.PathPrefix(\"/swagger.yaml\").Handler(http.StripPrefix(\"/\", fs))\n\n\t///////////////\n\t// 'authorization' service\n\tvar authorizeSvc AuthorizeService\n\tauthorizeSvc = authorizeService{\n\t\tdb,\n\t\tredisPool,\n\t}\n\n\t// Set up the endpoints on our service\n\n\t////////////////\n\t// Endpoints\n\n\t////////////\n\t// #1\n\t// GET /authorize/validate\n\tvalidateEndpoint := makeGetValidationEndpoint(authorizeSvc)\n\tbaseValidateHandler := httptransport.NewServer(\n\t\tvalidateEndpoint,\n\t\tdecodeValidationRequest,\n\t\tencodeResponse,\n\t)\n\trouter.Methods(\"GET\").Path(\"/authorize/validate\").Handler(baseValidateHandler)\n\n\t////////////\n\t// #2\n\t// POST /authorize/token\n\tcreateTokenEndpoint := makeCreateTokenEndpoint(authorizeSvc)\n\tbaseCreateTokenHandler := httptransport.NewServer(\n\t\tcreateTokenEndpoint,\n\t\tdecodeCreateTokenRequest,\n\t\tencodeResponse,\n\t)\n\trouter.Methods(\"POST\").Path(\"/authorize/token\").Handler(baseCreateTokenHandler)\n\n\t//////////////\n\t// Start server\n\taddr := \":8080\"\n\tlogger.Log(\"msg\", \"HTTP\", \"addr\", addr)\n\tlogger.Log(\"err\", http.ListenAndServe(addr, nil))\n}\n"
},
{
"alpha_fraction": 0.7111274600028992,
"alphanum_fraction": 0.7361827492713928,
"avg_line_length": 29.795454025268555,
"blob_id": "7c2a12bfd2f74d70ecb3978834cdf91befba8059",
"content_id": "b202607c4937d794b238d614382d26d00c02cf8c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1357,
"license_type": "no_license",
"max_line_length": 183,
"num_lines": 44,
"path": "/images.java/authorization/README.md",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "# Authorize Microservice\n\n## Introduction\n\n**/authorize** is a microservice for creating authorization tokens. Each token expires in 24 hours. The returned token should be placed into the HTTP Headers with key 'Authorization'.\n\nExample usage:\n\n* *admin* user creates user Bob with password \"s3cret\".\n* Actual user Bob makes a REST call to */authorize* with:\n```\n{ \"name\":\"bob\",\n \"password\":\"s3cret\" }\n```\n* Bob receives a response with:\n```\nBearer qwerty-1234-asdf-9876\n```\n* Bob inserts the following into the HTTP Headers for calls to any endpoint.\n```\nAuthorization : Bearer qwerty-1234-asdf-9876\n```\n\nExample of *authorize* call via curl. This is slightly different than what the *swagger* UI shows:\n```\n$> curl -X POST http://localhost:8080/authorize/token --header 'Content-Type: application/json' -d '{\"name\":\"bob, \"password\":\"s3cret}'\n\n```\n\n\n## Supported calls\nThe list of supported calls and their documentation are available via the swagger endpoint. This runs on localhost:8080/swagger/ when the application is up.\n\n\n## Dropwizard Application\nThe application listens on port 8080.\n\n## Docker \nThe Docker container will expose port 8080 to other containers on the *booknet* Docker network.\n\n## Libraries used\n\n* [dropwizard](http://www.dropwizard.io/) for microservice framework.\n* [maven](https://maven.apache.org/) for building.\n\n\n"
},
{
"alpha_fraction": 0.46888062357902527,
"alphanum_fraction": 0.47607988119125366,
"avg_line_length": 17.96916389465332,
"blob_id": "97ffb278d0399701f889a3735c9f1d0bf60c6f90",
"content_id": "b42a880a87490ae6dee3b4849b1706cf165d3d99",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 4306,
"license_type": "no_license",
"max_line_length": 84,
"num_lines": 227,
"path": "/books.sh",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env bash\n\n# Script to manage the books application. Supports\n# * build\n# * start\n# * stop\n# * clean\n# * clean-cache\n\n\n# See if docker-compose exists. If it doesn't, use the docker-compose-run container.\n# taken from https://docs.docker.com/compose/install/#/install-as-a-container\n\n\n########\n# Set up variables\n#\n########\ninitialize_variables() {\n dc=docker-compose\n if ! type \"$dc\" > /dev/null 2>&1 ; then\n echo \"Unable to find docker-compose, using script from utils\"\n dc=utils/docker-compose-run.sh\n fi\n}\n\n############\n# Usage\n# \n############\nusage() {\n echo\n echo \"Usage: $0 [build] [start] [stop] [clean]\"\n echo \"\"\n echo \"* build : Builds application\"\n echo \"* start : Starts application\"\n echo \"* stop : Stops application\"\n echo \"* clean : Cleans each application's build\"\n echo \"* clean-cache : Cleans maven & gradle repositories\"\n echo \"\"\n exit 0;\n}\n\n\n\n############\n# Stop the application\n# \n############\nstop() {\n $dc down\n echo \"Stopped 'books' app\"\n}\n\n############\n# Start the application\n# \n############\nstart() {\n # start up the containers and daemonize docker-compose\n $dc up -d\n\n echo \"\"\n echo \"Running 'books' on localhost:8080. Try http://localhost:8080/swagger/\"\n echo \"Note: the app takes a few seconds to warm up while the DB initializes.\"\n}\n\n############\n# Clean\n# remove the .m2 and .gradle directories\n# \n############\nclean-cache() {\n echo \"Cleaning up maven and gradle repositories\"\n rm -rf .m2 .gradle\n}\n\n\n############\n# Perform a build\n#\n# params:\n# 1- param passed to build.sh in each directory. This can be empty\n# \n############\n_build() {\n build_param=\"$1\"\n\n ####\n # build all projects and their docker images\n\n # get all build.sh files\n projects=$(ls images/*/build.sh)\n\n # break up into array by space\n IFS=$'\\n' projectList=(${projects//$' '/ })\n\n # go through all build files\n for i in \"${!projectList[@]}\"\n do\n project=$(dirname \"${projectList[i]}\")\n echo \"\"\n echo \" -------------------------------------------------\"\n echo \"\"\n echo \" Building project '$project'}\"\n $project/build.sh ${build_param} \n if [ $? -ne 0 ]; then\n echo \"Unable to build $project, exiting.\"\n exit 1\n fi\n\n echo \" '$project' completed.\"\n done\n\n echo \"\"\n echo \"All webservices built!\" \n}\n\n############\n# Build everything\n# \n############\nbuild() {\n _build\n}\n\n\n############\n# Build just images\n# \n############\nbuildimages() {\n _build \"buildimage\"\n}\n\n###########\n# Iterate on all of the projects and call some executable.\n#\n# params:\n# 1- script to call, e.g. 'build.sh'\n###########\n_iterate_projects() {\n script=$1\n\n # find all scripts \n projects=$(ls images/*/$script)\n\n # break up into array by space\n IFS=$'\\n' projectList=(${projects//$' '/ })\n\n # go through all build files\n for i in \"${!projectList[@]}\"\n do\n project=$(dirname \"${projectList[i]}\")\n echo \"\"\n echo \" -------------------------------------------------\"\n echo \"\"\n echo \" Running project '$project'\"\n $project/$script\n if [ $? 
-ne 0 ]; then\n echo \"Unable to execute $script for $project, exiting\"\n exit 1\n fi\n\n echo \"$script for '$project' completed.\"\n done\n}\n\n##############\n# Clean all projects\n##############\nclean() {\n _iterate_projects \"build.sh clean\"\n}\n\n\n############\n# main\n# \n############\nmain() {\n initialize_variables\n\n if [ $# -eq 0 ]\n then\n usage\n fi\n arg=$1\n\n for arg in $@\n do\n case $arg in\n \"-h\"|\"--help\")\n\t usage\n\t exit 0\n\t ;;\n \"build\")\n build\n exit 0\n ;;\n \"buildimages\")\n buildimages\n exit 0\n ;;\n \"clean\")\n clean\n exit 0\n ;;\n \"start\")\n start\n exit 0\n ;;\n \"stop\")\n stop\n exit 0\n ;;\n \"clean-cache\")\n clean-cache\n exit 0\n ;;\n esac\n done\n}\n\n\n# Call main\nmain \"$@\"\n"
},
{
"alpha_fraction": 0.6690811514854431,
"alphanum_fraction": 0.6701726317405701,
"avg_line_length": 21.198238372802734,
"blob_id": "a2e4867da4b30666ef0d69961aa89d91a5f8aa24",
"content_id": "dcb8ae1ec60b5b841b3a30b7382fa16fb12b8dfb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 10078,
"license_type": "no_license",
"max_line_length": 96,
"num_lines": 454,
"path": "/images/user_book/src/github.com/hipposareevil/user_book/transport.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\n// Transport module\n//\n// Contains:\n// - endpoint creation\n// - encode responses to client\n// - decode client requests\n// - structures used. e.g. bookRequest, postBookRequest, etc\n\nimport (\n\t\"context\"\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"net/http\"\n\t\"strconv\"\n\t\"strings\"\n\n\t\"github.com/gorilla/mux\"\n\n\t\"github.com/go-kit/kit/endpoint\"\n)\n\n//////////////////////////////////////////////////////////\n//\n// Create endpoints\n\n// GET /user_book/<user_id>\n// Make endpoint for getting books\nfunc makeGetUserBooksEndpoint(svc UserBookService) endpoint.Endpoint {\n\treturn func(ctx context.Context, request interface{}) (interface{}, error) {\n\t\t// convert request into a books specific request\n\t\treq := request.(getAllUserBooksRequest)\n\n\t\t// call actual service with data from the req\n\t\tuserBooks, err := svc.GetUserBooks(\n\t\t\treq.Bearer,\n\t\t\treq.UserId,\n\t\t\treq.Offset,\n\t\t\treq.Limit,\n\t\t\treq.BookId,\n\t\t\treq.Title,\n\t\t\treq.Tag)\n\n\t\treturn userBooksResponse{\n\t\t\tData: userBooks,\n\t\t\tErr: err,\n\t\t}, nil\n\t}\n}\n\n// GET /book/<book_id>\n// Make endpoint for getting single Book\nfunc makeGetUserBookEndpoint(svc UserBookService) endpoint.Endpoint {\n\treturn func(ctx context.Context, request interface{}) (interface{}, error) {\n\t\t// convert request into a bookRequest\n\t\treq := request.(getUserBookRequest)\n\n\t\t// call actual service with data from the req\n\t\tbook, err := svc.GetUserBook(\n\t\t\treq.Bearer,\n\t\t\treq.UserId,\n\t\t\treq.UserBookId)\n\n\t\treturn userBookResponse{\n\t\t\tData: book,\n\t\t\tErr: err,\n\t\t}, nil\n\t}\n}\n\n// DELETE /book/<book_id>\n// Make endpoint for deleting single Book\nfunc makeDeleteUserBookEndpoint(svc UserBookService) endpoint.Endpoint {\n\treturn func(ctx context.Context, request interface{}) (interface{}, error) {\n\t\t// convert request into a bookRequest\n\t\treq := request.(deleteUserBookRequest)\n\n\t\t// call actual service with data from the req\n\t\terr := svc.DeleteUserBook(\n\t\t\treq.UserId,\n\t\t\treq.UserBookId)\n\t\treturn deleteUserBookResponse{\n\t\t\tErr: err,\n\t\t}, nil\n\t}\n}\n\n// POST /book/\n// Make endpoint for creating (via post) a book\nfunc makeCreateUserBookEndpoint(svc UserBookService) endpoint.Endpoint {\n\treturn func(ctx context.Context, request interface{}) (interface{}, error) {\n\t\t// convert request into a createBookRequest\n\t\treq := request.(createUserBookRequest)\n\n\t\t// call actual service with data from the req\n\t\tnewBook, err := svc.CreateUserBook(\n\t\t\treq.Bearer,\n\t\t\treq.UserId,\n\t\t\treq.BookId,\n\t\t\treq.Rating,\n\t\t\treq.Tags,\n\t\t\treq.Review)\n\n\t\treturn createUserBookResponse{\n\t\t\tData: newBook,\n\t\t\tErr: err,\n\t\t}, nil\n\t}\n}\n\n// PUT /book/<book_id>\n// Make endpoint for updating (via PUT) a book\nfunc makeUpdateUserBookEndpoint(svc UserBookService) endpoint.Endpoint {\n\treturn func(ctx context.Context, request interface{}) (interface{}, error) {\n\t\t// convert request into a updateBookRequest\n\t\treq := request.(updateUserBookRequest)\n\n\t\t// call actual service with data from the req (putBookRequest)\n\t\tbook, err := svc.UpdateUserBook(\n\t\t\treq.Bearer,\n\t\t\treq.UserId,\n\t\t\treq.UserBookId,\n\t\t\treq.BookId,\n\t\t\treq.Rating,\n\t\t\treq.Tags,\n\t\t\treq.Review)\n\n\t\treturn updateUserBookResponse{\n\t\t\tData: book,\n\t\t\tErr: err,\n\t\t}, nil\n\t}\n}\n\n//////////////////////////////////////////////////////////\n//\n// Decode\n\n// Create a getAllUserBooksRequest 
from the context and http.Request\n// /user_book/<user_id>\n//\nfunc decodeGetAllUserBooksRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\t// Get offset, limit and bearer\n\trealOffset, realLimit := parseOffsetAndLimit(r)\n\tbearer := parseBearer(r)\n\n\t// Get userId\n\tuserId, err := parseUserId(r)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t///////////////////\n\t// Parse parameters\n\tr.ParseForm()\n\tvalues := r.Form\n\n\ttemp := values[\"tag\"]\n\ttagsString := strings.Join(temp, \",\")\n\ttags := splitCsvStringToArray(tagsString)\n\n\t// get book_id\n\ttempId := values.Get(\"book_id\")\n\tbookId, _ := strconv.Atoi(tempId)\n\n\t// get title\n\ttitle := values.Get(\"title\")\n\n\t// Make request for all books\n\tvar request getAllUserBooksRequest\n\trequest = getAllUserBooksRequest{\n\t\tBearer: bearer,\n\t\tOffset: realOffset,\n\t\tLimit: realLimit,\n\n\t\tUserId: userId,\n\t\tTitle: title,\n\t\tBookId: bookId,\n\t\tTag: tags,\n\t}\n\n\treturn request, nil\n}\n\n// Create getUserBookRequest\n// /user_book/<user_id>/<user_book_id>\n//\nfunc decodeGetUserBookRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\tbearer := parseBearer(r)\n\n\t// Get userId\n\tuserId, err := parseUserId(r)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Get user_book_id\n\tuserBookId, err := parseUserBookId(r)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Make request for all books\n\tvar request getUserBookRequest\n\trequest = getUserBookRequest{\n\t\tBearer: bearer,\n\t\tUserId: userId,\n\t\tUserBookId: userBookId,\n\t}\n\n\treturn request, nil\n\n}\n\n// Create deleteUserBookRequest\n// DELETE /user_book/<user_id>/<user_book_id>\n//\nfunc decodeDeleteUserBookRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\t// Get userId\n\tuserId, err := parseUserId(r)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Get user_book_id\n\tuserBookId, err := parseUserBookId(r)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Make request for all books\n\tvar request deleteUserBookRequest\n\trequest = deleteUserBookRequest{\n\t\tUserId: userId,\n\t\tUserBookId: userBookId,\n\t}\n\n\treturn request, nil\n\n}\n\n// Create creatUsereBookRequest\n// POST /user_book/<user_id>\nfunc decodeCreateUserBookRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\t// Get userId\n\tuserId, err := parseUserId(r)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Get bearer from headers\n\tbearer := parseBearer(r)\n\n\t///////////////////\n\t// Parse body\n\tvar request createUserBookRequest\n\tif err := json.NewDecoder(r.Body).Decode(&request); err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Set rest on update request\n\trequest.UserId = userId\n\trequest.Bearer = bearer\n\n\treturn request, nil\n}\n\n// Create updateBookRequest\n// PUT /user_book/<user_id>/<user_book_id>\nfunc decodeUpdateUserBookRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\t// Get userId\n\tuserId, err := parseUserId(r)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Get user_book_id\n\tuserBookId, err := parseUserBookId(r)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Get bearer from headers\n\tbearer := parseBearer(r)\n\n\t///////////////////\n\t// Parse body\n\tvar request updateUserBookRequest\n\tif err := json.NewDecoder(r.Body).Decode(&request); err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Set rest on update request\n\trequest.UserId = userId\n\trequest.UserBookId = userBookId\n\trequest.Bearer = bearer\n\n\treturn request, 
nil\n}\n\n/////////////////////////////\n// Helper methods\n\n// Returns the bearer id without \"Bearer \"\nfunc parseBearer(r *http.Request) string {\n\tvar realBearer string\n\tbearer := r.Header.Get(\"authorization\")\n\n\t// Strip the 'Bearer ' from header\n\tif strings.HasPrefix(bearer, \"Bearer \") {\n\t\trealBearer = strings.Replace(bearer, \"Bearer \", \"\", 1)\n\t}\n\n\treturn realBearer\n}\n\n// Decode the common parts of a request:\n// * offset\n// * limit\n//\n// Instead of erroring out, it will return defaults\n//\n// Returns the two values in order: offset & limit\nfunc parseOffsetAndLimit(r *http.Request) (int, int) {\n\t///////////////////\n\t// Parse parameters\n\tr.ParseForm()\n\tvalues := r.Form\n\n\t// Get values from the form, where 'offset' & 'limit' are parameters\n\tvar realOffset int\n\tvar realLimit int\n\n\t// Offset, use a default of 0\n\toffset := values.Get(\"offset\")\n\tif offset != \"\" {\n\t\trealOffset, _ = strconv.Atoi(offset)\n\t} else {\n\t\trealOffset = 0\n\t}\n\n\t// Limit, set a default if it doesn't exist\n\tlimit := values.Get(\"limit\")\n\tif limit != \"\" {\n\t\trealLimit, _ = strconv.Atoi(limit)\n\t} else {\n\t\t// default to get 20\n\t\trealLimit = 20\n\t}\n\n\treturn realOffset, realLimit\n}\n\n// Decode the 'user_book_id' from the request.\n//\n//// Returns the user book id\nfunc parseUserBookId(r *http.Request) (int, error) {\n\t// Demux the gorilla parsing\n\tvars := mux.Vars(r)\n\t// 'user_book_id' is set in the gorilla handling in main.go\n\tid, ok := vars[\"user_book_id\"]\n\tif !ok {\n\t\treturn 0, ErrBadRouting\n\t}\n\n\tvar userBookId int\n\tif id != \"\" {\n\t\tuserBookId, _ = strconv.Atoi(id)\n\t}\n\n\treturn userBookId, nil\n}\n\n// Decode the 'user_id' from the request.\n//\n//// Returns the user id\nfunc parseUserId(r *http.Request) (int, error) {\n\t// Demux the gorilla parsing\n\tvars := mux.Vars(r)\n\t// 'user_book_id' is set in the gorilla handling in main.go\n\tid, ok := vars[\"user_id\"]\n\tif !ok {\n\t\treturn 0, ErrBadRouting\n\t}\n\n\tvar userId int\n\tif id != \"\" {\n\t\tuserId, _ = strconv.Atoi(id)\n\t}\n\n\treturn userId, nil\n}\n\n//////////////////////////////////////////////////////////\n//\n// Encode responses to client\n\n// The response can/should be of type errorer and thus can be cast to check if there is an error\nfunc encodeResponse(ctx context.Context, w http.ResponseWriter, response interface{}) error {\n\t// Set JSON type\n\tw.Header().Set(\"Content-Type\", \"application/json; charset=utf-8\")\n\n\t// Check error\n\tif e, ok := response.(errorer); ok {\n\t\t// This is a errorer class, now check for error\n\t\tif err := e.error(); err != nil {\n\t\t\tencodeError(ctx, e.error(), w)\n\t\t\treturn nil\n\t\t}\n\t}\n\n\t// cast to dataHolder to get Data, otherwise just encode the resposne\n\tif holder, ok := response.(dataHolder); ok {\n\t\treturn json.NewEncoder(w).Encode(holder.getData())\n\t} else {\n\t\treturn json.NewEncoder(w).Encode(response)\n\t}\n}\n\n// Write the incoming err into the response writer\nfunc encodeError(_ context.Context, err error, w http.ResponseWriter) {\n\tif err == nil {\n\t\tpanic(\"encodeError with nil error\")\n\t}\n\n\tw.Header().Set(\"Content-Type\", \"application/json; charset=utf-8\")\n\n\t// Write actual error code\n\tcode := codeFrom(err)\n\tw.WriteHeader(code)\n\n\tfmt.Println(\"Sending back error '\" + err.Error() + \"'\")\n\n\t// write out the error message\n\tjson.NewEncoder(w).Encode(map[string]interface{}{\n\t\t\"code\": code,\n\t\t\"message\": 
err.Error(),\n\t})\n}\n\n// Determine the HTTP error code from the incoming error 'err'\nfunc codeFrom(err error) int {\n\tswitch err {\n\tcase ErrNotFound:\n\t\treturn http.StatusNotFound\n\tcase ErrAlreadyExists:\n\t\treturn http.StatusConflict\n\tcase ErrUnauthorized:\n\t\treturn http.StatusUnauthorized\n\tdefault:\n\t\treturn http.StatusInternalServerError\n\t}\n}\n"
},
{
"alpha_fraction": 0.612468957901001,
"alphanum_fraction": 0.6137759685516357,
"avg_line_length": 20.797719955444336,
"blob_id": "183306b7fdd763d646e0d350546ddba94e90e17f",
"content_id": "410aba56f2000f66f2ef98b92e5fd379a968ed41",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Java",
"length_bytes": 7651,
"license_type": "no_license",
"max_line_length": 83,
"num_lines": 351,
"path": "/images.java/book/src/main/java/com/wpff/core/Book.java",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package com.wpff.core;\n\nimport javax.persistence.Column;\nimport javax.persistence.Entity;\nimport javax.persistence.GeneratedValue;\nimport javax.persistence.GenerationType;\nimport javax.persistence.Id;\nimport javax.persistence.Table;\n\n/***\n * Represents a book. This is marshalled to/from the database.\n */\n@Entity\n@Table(name = \"book\")\npublic class Book implements Comparable {\n @Id\n @GeneratedValue(strategy = GenerationType.IDENTITY)\n @Column(name = \"book_id\", nullable=false)\n private int id;\n\n @Column(name = \"author_id\", nullable=false)\n private int authorId;\n\n @Column(name = \"year\", nullable=false)\n private int year;\n\n @Column(name = \"title\", unique=true, nullable = false)\n private String title;\n \n @Column(name = \"description\", unique=true, nullable = false)\n private String description; \n\n // csv list of isbns\n @Column(name = \"isbn\", unique=false, nullable = true)\n private String isbn;\n \n // csv list of subjects\n @Column(name = \"subjects\", unique=false, nullable = true)\n private String subject; \n\n // openlibrary.org 'works' location\n @Column(name = \"ol_works\", unique=false, nullable = true)\n private String olWorks;\n \n // goodreads URL for book\n @Column(name = \"goodreads_url\", unique=false, nullable = true)\n private String goodreadsUrl;\n \n\n // small image URL\n @Column(name = \"image_small\", unique=false, nullable = true)\n private String imageSmall;\n\n // medium image URL\n @Column(name = \"image_medium\", unique=false, nullable = true)\n private String imageMedium;\n\n // large image URL\n @Column(name = \"image_large\", unique=false, nullable = true)\n private String imageLarge;\n \n //////////////////////////////////////////////////\n\n\n\n /* (non-Javadoc)\n * @see java.lang.Object#hashCode()\n */\n @Override\n public int hashCode() {\n final int prime = 31;\n int result = 1;\n result = prime * result + authorId;\n result = prime * result + id;\n result = prime * result + ((imageLarge == null) ? 0 : imageLarge.hashCode());\n result = prime * result + ((imageMedium == null) ? 0 : imageMedium.hashCode());\n result = prime * result + ((imageSmall == null) ? 0 : imageSmall.hashCode());\n result = prime * result + ((isbn == null) ? 0 : isbn.hashCode());\n result = prime * result + ((olWorks == null) ? 0 : olWorks.hashCode());\n result = prime * result + ((title == null) ? 
0 : title.hashCode());\n result = prime * result + year;\n return result;\n }\n\n /* (non-Javadoc)\n * @see java.lang.Object#equals(java.lang.Object)\n */\n @Override\n public boolean equals(Object obj) {\n if (this == obj)\n return true;\n if (obj == null)\n return false;\n if (getClass() != obj.getClass())\n return false;\n Book other = (Book) obj;\n if (authorId != other.authorId)\n return false;\n if (id != other.id)\n return false;\n if (imageLarge == null) {\n if (other.imageLarge != null)\n return false;\n } else if (!imageLarge.equals(other.imageLarge))\n return false;\n if (imageMedium == null) {\n if (other.imageMedium != null)\n return false;\n } else if (!imageMedium.equals(other.imageMedium))\n return false;\n if (imageSmall == null) {\n if (other.imageSmall != null)\n return false;\n } else if (!imageSmall.equals(other.imageSmall))\n return false;\n if (isbn == null) {\n if (other.isbn != null)\n return false;\n } else if (!isbn.equals(other.isbn))\n return false;\n if (olWorks == null) {\n if (other.olWorks != null)\n return false;\n } else if (!olWorks.equals(other.olWorks))\n return false;\n if (title == null) {\n if (other.title != null)\n return false;\n } else if (!title.equals(other.title))\n return false;\n if (year != other.year)\n return false;\n return true;\n }\n\n @Override\n public int compareTo(Object o) {\n final Book that = (Book) o;\n if (this == that) return 0;\n\n return (this.id - that.id);\n }\n\n /**\n * @return the id\n */\n public int getId() {\n return id;\n }\n\n /**\n * @param id the id to set\n */\n public void setId(int id) {\n this.id = id;\n }\n\n /**\n * @return the authorId\n */\n public int getAuthorId() {\n return authorId;\n }\n\n /**\n * @param authorId the authorId to set\n */\n public void setAuthorId(int authorId) {\n this.authorId = authorId;\n }\n\n /**\n * @return the year\n */\n public int getYear() {\n return year;\n }\n\n /**\n * @param year the year to set\n */\n public void setYear(int year) {\n this.year = year;\n }\n\n /**\n * @return the title\n */\n public String getTitle() {\n return title;\n }\n\n /**\n * @param title the title to set\n */\n public void setTitle(String title) {\n this.title = title;\n }\n\n /**\n * @return the isbn\n */\n public String getIsbn() {\n return isbn;\n }\n\n /**\n * @param isbn the isbn to set\n */\n public void setIsbn(String isbn) {\n this.isbn = isbn;\n }\n\n /**\n * @return the olWorks\n */\n public String getOlWorks() {\n return olWorks;\n }\n\n /**\n * @param olWorks the olWorks to set\n */\n public void setOlWorks(String olWorks) {\n this.olWorks = olWorks;\n }\n\n /**\n * @return the imageSmall\n */\n public String getImageSmall() {\n return imageSmall;\n }\n\n /**\n * @param imageSmall the imageSmall to set\n */\n public void setImageSmall(String imageSmall) {\n this.imageSmall = imageSmall;\n }\n\n /**\n * @return the imageMedium\n */\n public String getImageMedium() {\n return imageMedium;\n }\n\n /**\n * @param imageMedium the imageMedium to set\n */\n public void setImageMedium(String imageMedium) {\n this.imageMedium = imageMedium;\n }\n\n /**\n * @return the imageLarge\n */\n public String getImageLarge() {\n return imageLarge;\n }\n\n /**\n * @param imageLarge the imageLarge to set\n */\n public void setImageLarge(String imageLarge) {\n this.imageLarge = imageLarge;\n }\n\n /**\n * @return the description\n */\n public String getDescription() {\n return description;\n }\n\n /**\n * @param description the description to set\n */\n public void setDescription(String description) 
{\n this.description = description;\n }\n\n /**\n * @return the subjects\n */\n public String getSubject() {\n return subject;\n }\n\n /**\n * @param subject the subjects to set\n */\n public void setSubject(String subject) {\n this.subject = subject;\n }\n\n \n\n /* (non-Javadoc)\n * @see java.lang.Object#toString()\n */\n @Override\n public String toString() {\n StringBuilder builder = new StringBuilder();\n builder.append(\"Book [id=\");\n builder.append(id);\n builder.append(\", authorId=\");\n builder.append(authorId);\n builder.append(\", year=\");\n builder.append(year);\n builder.append(\", title=\");\n builder.append(title);\n builder.append(\", description=\");\n builder.append(description);\n builder.append(\", isbn=\");\n builder.append(isbn);\n builder.append(\", subject=\");\n builder.append(subject);\n builder.append(\", olWorks=\");\n builder.append(olWorks);\n builder.append(\", goodreads_url=\");\n builder.append(goodreadsUrl);\n builder.append(\", imageSmall=\");\n builder.append(imageSmall);\n builder.append(\", imageMedium=\");\n builder.append(imageMedium);\n builder.append(\", imageLarge=\");\n builder.append(imageLarge);\n builder.append(\"]\");\n return builder.toString();\n }\n\n /**\n * @return the goodreadsUrl\n */\n public String getGoodreadsUrl() {\n return goodreadsUrl;\n }\n\n /**\n * @param goodreadsUrl the goodreadsUrl to set\n */\n public void setGoodreadsUrl(String goodreadsUrl) {\n this.goodreadsUrl = goodreadsUrl;\n }\n\n\n\n\n}\n"
},
{
"alpha_fraction": 0.5072786808013916,
"alphanum_fraction": 0.5152617692947388,
"avg_line_length": 21.35433006286621,
"blob_id": "569b7248256bda6bf880e761b93ede0f7ec595a1",
"content_id": "5f6e224802b45e122d13ef0423bb9eaf4b6607b7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 8518,
"license_type": "no_license",
"max_line_length": 72,
"num_lines": 381,
"path": "/test/tags.sh",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "\n######################\n#\n# tag related calls\n#\n#\n######################\n\n\n##########\n# get tag by id\n#\n##########\nget_tag_by_id() {\n tag_id=\"$1\"\n\n result=$(curl -s -w 'CODE%{http_code}CODE' \\\n -X GET ${ROOT_URL}/tag/${tag_id} \\\n -H \"authorization: $BEARER\" \\\n -H 'cache-control: no-cache' \\\n -H 'content-type: application/json')\n if [ $? -ne 0 ]; then\n error \"Error making GET to /tag for tag '$tagName'\"\n fi\n\n # get code\n code=$(echo \"$result\" | awk -FCODE '{print $2}' | xargs)\n if [[ \"$code\" -ne \"200\" ]]; then\n error \"Error getting tag '$tag_id'. code: $code: $error\"\n fi\n\n # strip code\n result=$(echo \"$result\" | sed \"s/CODE${code}CODE//g\")\n echo \"$result\"\n}\n\n\n##########\n# Get all tags\n#\n##########\nget_all_tags() {\n result=$(curl -s -w 'CODE%{http_code}CODE' \\\n -X GET \"${ROOT_URL}/tag\" \\\n -H \"authorization: $BEARER\" \\\n -H 'cache-control: no-cache' \\\n -H 'content-type: application/json')\n if [ $? -ne 0 ]; then\n error \"Error making GET to /tag for all tags\"\n fi\n\n # get code\n code=$(echo \"$result\" | awk -FCODE '{print $2}' | xargs)\n if [[ \"$code\" -ne \"200\" ]]; then\n logit \"Error getting all tags: $code: $result\"\n error \"http code: $code\"\n fi\n\n # strip code\n result=$(echo \"$result\" | sed \"s/CODE${code}CODE//g\")\n echo \"$result\"\n}\n\n##########\n# Get all tags w/ offset and limit\n#\n##########\nget_all_tags_with_offset_limit() {\n offset=$1\n limit=$2\n\n result=$(curl -s -w 'CODE%{http_code}CODE' \\\n -X GET \"${ROOT_URL}/tag?offset=${offset}&limit=${limit}\" \\\n -H \"authorization: $BEARER\" \\\n -H 'cache-control: no-cache' \\\n -H 'content-type: application/json')\n if [ $? -ne 0 ]; then\n error \"Error making GET to /tag for all tags\"\n fi\n\n # get code\n code=$(echo \"$result\" | awk -FCODE '{print $2}' | xargs)\n if [[ \"$code\" -ne \"200\" ]]; then\n logit \"Error getting all tags: $code: $result\"\n error \"http code: $code\"\n fi\n\n # strip code\n result=$(echo \"$result\" | sed \"s/CODE${code}CODE//g\")\n echo \"$result\"\n}\n\n\n\n\n########\n# delete all tags\n#\n########\ndelete_all_tags() {\n # get 1000 tags\n tags=$(get_all_tags_with_offset_limit 0 1000 )\n ids=$(echo \"${tags}\" | jq -r \".data[].id\" )\n num=$(echo \"${tags}\" | jq -r \".data | length\" )\n\n echo \"\"\n echo \"Delete all ($num) tags.\"\n\n for id in $ids\n do\n $(delete_tag $id)\n done\n}\n\n##########\n# delete tag\n#\n# params:\n# tag id\n##########\ndelete_tag() {\n tag_id=\"$1\"\n\n# logit \"delete tag $tag_id\"\n\n result=$(curl -s -w 'CODE%{http_code}CODE' \\\n -X DELETE ${ROOT_URL}/tag/${tag_id} \\\n -H \"authorization: $BEARER\" \\\n -H 'cache-control: no-cache' \\\n -H 'content-type: application/json')\n if [ $? -ne 0 ]; then\n error \"Error making DELETE to /tag for tag $tag_id\"\n fi\n\n # get code\n code=$(echo \"$result\" | awk -FCODE '{print $2}' | xargs)\n if [[ \"$code\" -ne \"200\" ]]; then\n echo \"Error deleting tag '$tag_id'. 
code: $code: $error\"\n fi\n\n# logit \"got error code from delete: $code\"\n\n # strip code\n result=$(echo \"$result\" | sed \"s/CODE${code}CODE//g\")\n}\n\n\n\n##########\n# update tag\n#\n#########\nupdate_tag() {\n tag_id=\"$1\"\n new_name=\"$2\"\n\nread -r -d '' post_data <<EOF\n{\n\"name\":\"$new_name\"\n}\nEOF\n\n\n ##########\n # update in database now\n result=$(curl -s -w 'CODE%{http_code}CODE' \\\n -X PUT \"${ROOT_URL}/tag/${tag_id}\" \\\n -H 'content-type: application/json' \\\n -H \"authorization: $BEARER\" \\\n -d \"$post_data\" \\\n -H 'cache-control: no-cache')\n if [ $? -ne 0 ]; then\n error \"Error making PUT for test tag\"\n fi\n\n # get code\n code=$(echo \"$result\" | awk -FCODE '{print $2}' | xargs)\n if [[ \"$code\" != \"200\" ]]; then\n error \"Error making PUT for test tag. code: $code: $result\"\n fi\n\n tag_result=$(echo \"$result\" | sed \"s/CODE${code}CODE//g\")\n logit \"tag updated\"\n}\n\n\n##########\n# Create tag in db\n# \n##########\ntag::create_tag() {\n name=\"$1\"\n\n read -r -d '' post_data <<EOF\n{\n\"name\":\"$name\"\n}\nEOF\n\n ##########\n # create in database now\n result=$(curl -s -w 'CODE%{http_code}CODE' \\\n -X POST \"${ROOT_URL}/tag\" \\\n -H 'content-type: application/json' \\\n -H \"authorization: $BEARER\" \\\n -d \"$post_data\" \\\n -H 'cache-control: no-cache')\n if [ $? -ne 0 ]; then\n error \"Error making POST for test tag\"\n fi\n\n # get code\n code=$(echo \"$result\" | awk -FCODE '{print $2}' | xargs)\n if [[ \"$code\" != \"200\" ]]; then\n if [[ \"$code\" == \"409\" ]]; then\n error \"Tag already exists!\"\n else\n error \"Error making POST for test tag. code: $code: $result\"\n fi\n fi\n\n tag_result=$(echo \"$result\" | sed \"s/CODE${code}CODE//g\")\n# logit \"tag created\"\n echo \"$tag_result\"\n}\n\n\n######\n# print info for tag\n# \n######\nprint_tag_info() {\n tag_info=\"$1\"\n name=$(echo \"$tag_info\" | jq -r .name)\n id=$(echo \"$tag_info\" | jq -r .id)\n\n echo \"Tag: '$name', ID: '$id'\"\n}\n\n\n\n##############\n# Clean tags\n##############\ntag::clean() {\n delete_all_tags \n\n}\n\n##########\n# Main tag test\n#\n#########\ntag::main_test() {\n echo \"Get all tags\"\n all_tags=$(get_all_tags)\n\n echo \"\"\n echo \"Delete all tags\"\n delete_all_tags \n echo \"done deleting tags\"\n\n echo \"\"\n echo \"Create tag: 'sci-fi'\"\n tag=$(tag::create_tag \"sci-fi\")\n tagname=$(echo \"$tag\" | jq -r .name)\n assert_string_equals \"sci-fi\" $tagname \"Tag's name\"\n\n echo \"\"\n echo \"Create tag: 'e-book'\"\n tag=$(tag::create_tag \"e-book\")\n tag_id=$(echo \"$tag\" | jq -r .id)\n tagname=$(echo \"$tag\" | jq -r .name)\n assert_string_equals \"e-book\" $tagname \"Tag's name\"\n\n echo \"\"\n echo \"Get single tag: $tag_id\"\n tag=$(get_tag_by_id $tag_id)\n tagname=$(echo \"$tag\" | jq -r .name)\n assert_string_equals \"e-book\" $tagname \"Tag's name\"\n \n echo \"\"\n echo \"Update single tag with new name\"\n ignore=$(update_tag $tag_id \"ebookkkks\")\n\n echo \"Verifying new tag...\"\n tag=$(get_tag_by_id $tag_id)\n tagname=$(echo \"$tag\" | jq -r .name)\n assert_string_equals \"ebookkkks\" $tagname \"Tag's updated name\"\n\n # remove everything\n tag::clean\n}\n\n\n###############\n#\n# Test the limits and offsets for large datasets\n#\n###############\ntag::test_limit_offset() {\n echo \"\"\n echo \"[[ Tag Limit/Offset test ]]\"\n\n # num tags to create\n COUNT=40\n\n echo \"Creating $COUNT tags\"\n\n idx=1\n while [ $idx -le $COUNT ]\n do\n idx=$(( $idx + 1 ))\n tagname=\"tag_${idx}\"\n result=$(tag::create_tag 
$tagname)\n done\n\n ############\n # Default returns\n # get tags and see how many\n all_tags=$(get_all_tags)\n total=$(echo \"$all_tags\" | jq -r .total)\n offset=$(echo \"$all_tags\" | jq -r .offset)\n limit=$(echo \"$all_tags\" | jq -r .limit)\n\n echo \"Checking limit/offset/total\"\n assert_equals 0 ${offset} \"offset in tags returned\"\n assert_equals $EXPECTED_DEFAULT_LIMIT $limit \"limit number tags\"\n assert_equals $COUNT $total \"total number tags\"\n\n ############\n # new limit\n echo \"\"\n echo \"Testing new limit\"\n all_tags=$(get_all_tags_with_offset_limit 0 500)\n total=$(echo \"$all_tags\" | jq -r .total)\n offset=$(echo \"$all_tags\" | jq -r .offset)\n limit=$(echo \"$all_tags\" | jq -r .limit)\n\n echo \"Checking limit/offset/total\"\n assert_equals 0 ${offset} \"offset in tags returned\"\n assert_equals $COUNT $limit \"limit number tags\"\n assert_equals $COUNT $total \"total number tags\"\n\n\n ############\n # new offset\n echo \"\"\n echo \"Testing new offset\"\n all_tags=$(get_all_tags_with_offset_limit 10 10)\n total=$(echo \"$all_tags\" | jq -r .total)\n offset=$(echo \"$all_tags\" | jq -r .offset)\n limit=$(echo \"$all_tags\" | jq -r .limit)\n\n echo \"Checking limit/offset/total\"\n assert_equals 10 ${offset} \"offset in tags returned\"\n assert_equals 10 $limit \"limit number tags\"\n assert_equals $COUNT $total \"total number tags\"\n\n # remove everything\n tag::clean\n\n echo \"\"\n echo \"[[ DONE Tag Limit/Offset test ]]\"\n}\n\n\n###############\n#\n# Test tag endpoint\n#\n###############\ntest_tag() {\n echo \"\"\n echo \"[ Tag test ]\"\n\n tag::main_test\n\n tag::test_limit_offset\n\n echo \"[ DoneTag test ]\"\n}\n"
},
{
"alpha_fraction": 0.6014235019683838,
"alphanum_fraction": 0.6014235019683838,
"avg_line_length": 26.639345169067383,
"blob_id": "b105f755b2f8b2c959ad1f0efd1781742e563cc3",
"content_id": "57ef665d9555f1c1c667b8c12e6e78a5c0bbb123",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 1686,
"license_type": "no_license",
"max_line_length": 56,
"num_lines": 61,
"path": "/images/query/src/github.com/hipposareevil/query/structures.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\n//////////////////////\n// Structures\n\ntype queryAuthorRequest struct {\n\tOffset int `json:\"offset\"`\n\tLimit int `json:\"limit\"`\n\tAuthor string `json:\"author\"`\n}\n\ntype queryTitleRequest struct {\n\tOffset int `json:\"offset\"`\n\tLimit int `json:\"limit\"`\n\tAuthor string `json:\"author\"`\n\tTitle string `json:\"title\"`\n\tIsbn string `json:\"isbn\"`\n}\n\n//////////////////////////////////\n// Response structures\n//////////////////////////////////\n\ntype Author struct {\n\tBirthDate string `json:\"birthDate\"`\n\tImageLarge string `json:\"imageLarge\"`\n\tImageMedium string `json:\"imageMedium\"`\n\tImageSmall string `json:\"imageSmall\"`\n\tName string `json:\"name\"`\n\tOlKey string `json:\"olKey\"`\n\tSubjects []string `json:\"subjects\"`\n}\n\ntype Authors struct {\n\tOffset int `json:\"offset\"`\n\tLimit int `json:\"limit\"`\n\tTotal int `json:\"total\"`\n\tData []Author `json:\"data\"`\n}\n\ntype Title struct {\n\tTitle string `json:\"title\"`\n\tAuthorKey string `json:\"authorKey\"`\n\tAuthorName string `json:\"authorName\"`\n\tDescription string `json:\"description\"`\n\tFirstPublishedYear int `json:\"firstPublishedYear\"`\n\tImageLarge string `json:\"imageLarge\"`\n\tImageMedium string `json:\"imageMedium\"`\n\tImageSmall string `json:\"imageSmall\"`\n\tOpenLibraryWorkUrl string `json:\"openLibraryWorkUrl\"`\n\tOpenLibraryKeys []string `json:\"openLibraryKeys\"`\n\tIsbns []string `json:\"isbns\"`\n\tSubjects []string `json:\"subjects\"`\n}\n\ntype Titles struct {\n\tOffset int `json:\"offset\"`\n\tLimit int `json:\"limit\"`\n\tTotal int `json:\"total\"`\n\tData []Title `json:\"data\"`\n}\n"
},
{
"alpha_fraction": 0.44565218687057495,
"alphanum_fraction": 0.4492753744125366,
"avg_line_length": 13.375,
"blob_id": "5faa270eb14e6921463b9d0ae9c03de02a4115f5",
"content_id": "994d4989896adbe8a19861f115f2f6cdcd47d0c9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 1380,
"license_type": "no_license",
"max_line_length": 59,
"num_lines": 96,
"path": "/test/test.sh",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "#!/bin/bash\n\n##################\n#\n# Test harness for books\n#\n#\n#################\n\nroot_dir=\"$(cd -P \"$(dirname \"${BASH_SOURCE[0]}\")\" && pwd)\"\n\ntrap \"exit 1\" TERM\nexport TOP_PID=$$\n\n# utilities\n. ${root_dir}/util.sh\n\n# tests\n. ${root_dir}/author.sh\n. ${root_dir}/user.sh\n. ${root_dir}/tags.sh\n. ${root_dir}/book.sh\n. ${root_dir}/user_book.sh\n. ${root_dir}/reviews.sh\n\n\n#################\n# Main function.\n# \n#################\nclean() {\n echo \"*****************\"\n echo \"** Clean **\"\n\n\n tag::clean\n\n user_book::clean\n\n # this cleans authors as well\n book::clean\n\n user::clean\n\n review::clean\n\n echo \"\"\n echo \"*****************\"\n\n}\n\n\n#################\n# Main function.\n# \n#################\nmain() {\n # Initialize\n initialize_variables\n\n # authorize\n authorize_admin\n\n # parse options\n while getopts \"cx\" opt; do\n case \"$opt\" in\n c) clean\n exit 1\n ;;\n x) NO_FAIL=1\n ;;\n \\?) #unknown\n usage\n ;;\n esac\n done\n shift $(($OPTIND - 1))\n\n # clean first\n logit \"Cleaning first.\"\n ignore=$(clean)\n\n test_tag\n test_user\n test_author\n test_book\n test_user_books\n test_reviews\n\n echo \"\"\n echo \"[[[ Done with all tests ]]]\"\n echo \"Passed $PASSED_TESTS tests\"\n}\n\n# main\nmain \"$@\"\n"
},
{
"alpha_fraction": 0.7318611741065979,
"alphanum_fraction": 0.7397476434707642,
"avg_line_length": 27.81818199157715,
"blob_id": "4fec600f415ae06e86446a038206f319bca24cbb",
"content_id": "2c5d37262e7f1ce0be35cd9411b1b833f11a937b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Dockerfile",
"length_bytes": 634,
"license_type": "no_license",
"max_line_length": 110,
"num_lines": 22,
"path": "/images/gateway/Dockerfile",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "# Basic nginx image with specific config and content\nFROM nginx\nCOPY conf /etc/nginx\nCOPY content/* /usr/share/nginx/html/\n\nVOLUME /etc/docker/certs\n\n# Args passed in via 'docker build'\n# Used by the LABELs\nARG BUILD_TIME\nARG VERSION\n\n# Putting LABEL last so we can re-use the preceding caching layers\nLABEL org.label-schema.build-date=\"$BUILD_TIME\" \\\n org.label-schema.vendor=\"github.com/hipposareevil\" \\\n org.label-schema.version=\"$VERSION\" \\\n org.label-schema.description=\"Frontend proxy (API Gateway) to the microservices in the books project.\" \\\n org.label-schema.name=\"books.gateway\" \n\n\nEXPOSE 80\nEXPOSE 443\n"
},
{
"alpha_fraction": 0.5036481618881226,
"alphanum_fraction": 0.5139846205711365,
"avg_line_length": 19.05691146850586,
"blob_id": "9bcea84a613c3af07fc11df5d0304ca976bcdcea",
"content_id": "e670d731b17e27ed55a00e15af94a9a01199c267",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 4950,
"license_type": "no_license",
"max_line_length": 99,
"num_lines": 246,
"path": "/test/util.sh",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "###############\n# constants\n\n# the default 'limit'\nEXPECTED_DEFAULT_LIMIT=20\n\n# Fonts\nbold=$(tput bold)\nnormal=$(tput sgr0)\n\n\n\n########\n# Set up variables\n#\n########\ninitialize_variables() {\n TOKEN_FILE=auth.token\n rm -f $TOKEN_FILE\n\n TOKEN_FILE_second=auth.token\n rm -f $TOKEN_FILE_second\n\n\n ROOT_URL=\"http://localhost:8080\"\n ADMIN_USER=\"admin\"\n ADMIN_PASSWORD=\"admin\"\n\n SECOND_USER_PASSWORD=\"otherpassword\"\n\n # flag to keep running when error is hit\n NO_FAIL=0\n\n # count of passed tests\n PASSED_TESTS=0\n}\n\n\n\n######\n# Authorize\n#\n######\nauthorize_admin() {\n if [ ! -e $TOKEN_FILE ]; then\n echo \"[Generate new authentication file]\"\n\nread -r -d '' data <<EOF\n{\n\"name\": \"${ADMIN_USER}\",\n\"password\":\"${ADMIN_PASSWORD}\"\n}\nEOF\n\n token=$(curl -s -X POST \\\n ${ROOT_URL}/authorize/token \\\n -H 'cache-control: no-cache' \\\n -H 'content-type: application/json' \\\n -d \"${data}\"\n )\n\n echo \"$token\" > $TOKEN_FILE\n fi\n\n\n # get variables\n BEARER=$(<$TOKEN_FILE jq -r '.token')\n if [ -z \"$BEARER\" ] || [[ \"$BEARER\" == \"null\" ]]; then\n error \"Couldn't get bearer from authentication token.\"\n fi\n USER_ID=$(<$TOKEN_FILE jq -r '.userId')\n if [ -z \"$USER_ID\" ] || [[ \"$USER_ID\" == \"null\" ]]; then\n error \"Couldn't get user ID from authentication token.\"\n fi\n\n echo \"[Got authentication, user ID: ${USER_ID}]\"\n}\n\n\n#######\n# Authorize a second user\n#\n# takes name & password\n#######\nauthorize_second_user() {\n name=$1\n password=$2\n\n # create data for POST\nread -r -d '' data <<EOF\n{\n\"name\": \"${name}\",\n\"password\":\"${password}\"\n}\nEOF\n\n token=$(curl -s -X POST \\\n ${ROOT_URL}/authorize/token \\\n -H 'cache-control: no-cache' \\\n -H 'content-type: application/json' \\\n -d \"${data}\"\n )\n echo \"$token\" > $TOKEN_FILE_second\n \n\n # get variables\n BEARER_second=$(<$TOKEN_FILE_second jq -r '.token')\n if [ -z \"$BEARER_second\" ] || [[ \"$BEARER_second\" == \"null\" ]]; then\n error \"Couldn't get bearer from authentication token for 2nd user.\"\n fi\n USER_ID_second=$(<$TOKEN_FILE_second jq -r '.userId')\n if [ -z \"$USER_ID_second\" ] || [[ \"$USER_ID_second\" == \"null\" ]]; then\n error \"Couldn't get user ID from authentication token for 2nd user.\"\n fi\n}\n\n\n\n##########\n# URL encode a string\n#\n##########\nurl_encode() {\n what=$1\n result=$(python ./encode.py \"$what\")\n echo \"$result\"\n}\n\n\n############\n# asserts that the incoming string values are equal\n#\n# params:\n# 1- expected value\n# 2- actual value\n# 3- error message\n#############\nassert_string_equals() {\n expected=\"$1\"\n actual=\"$2\"\n message=\"$3\"\n\n if [[ \"$expected\" != \"$actual\" ]]; then\n error \"${bold}✗ ${normal} Incorrect '${message}'. Expected: '$expected', got '$actual'.\"\n else\n echo \"✓ Correct '${message}'. 
Got '$expected'.\"\n PASSED_TESTS=$(( $PASSED_TESTS + 1))\n fi\n}\n\n############\n# asserts that the incoming csv contains the desired value\n#\n# params:\n# 1- csv string\n# 2- desired value\n# 3- what\n#############\nassert_contains() {\n csv=\"$1\"\n value=\"$2\"\n what=\"$3\"\n\n if [[ $csv != *\"${value}\"* ]]; then\n error \"${bold}✗ ${normal} '${what}' [$csv] is missing '$value'\"\n else\n echo \"✓ '${what}' contains '$value' ---> [$csv]\"\n PASSED_TESTS=$(( $PASSED_TESTS + 1))\n fi\n}\n\n\n\n############\n# asserts that the incoming number values are equal\n#\n# params:\n# 1- expected value\n# 2- actual value\n# 3- error message\n#############\nassert_equals() {\n expected=\"$1\"\n actual=\"$2\"\n message=\"$3\"\n\n if [ $expected -ne $actual ]; then\n error \"${bold}✗ ${normal} Incorrect '${message}'. Expected: '$expected', got '$actual'.\"\n else\n echo \"✓ Correct '${message}'. Got '$expected'.\"\n PASSED_TESTS=$(( $PASSED_TESTS + 1))\n fi\n}\n\n############\n# asserts that the incoming number values are NOT equal\n#\n# params:\n# 1- expected value\n# 2- actual value\n# 3- error message\n#############\nassert_not_equals() {\n expected=\"$1\"\n actual=\"$2\"\n message=\"$3\"\n\n if [ $expected -eq $actual ]; then\n error \"${bold}✗ ${normal} Incorrect '${message}'. Expected NOT '$expected', got '$actual'.\"\n else\n echo \"✓ Correct '${message}'. Got '$actual', correctly NOT '$expected'\"\n PASSED_TESTS=$(( $PASSED_TESTS + 1))\n fi\n}\n\n\n######\n# Error out\n######\nerror() {\n >&2 echo \"\"\n >&2 echo \"****************************\"\n >&2 echo \"$@\"\n >&2 echo \"****************************\"\n if [ ${NO_FAIL} -ne 1 ]; then\n kill -s TERM $TOP_PID\n else\n >&2 echo \"NO_FAIL was set, not exiting from error. \"\n >&2 echo \"\"\n fi\n\n}\n#######\n# Log\n######\nlogit() {\n >&2 echo \"[$@]\"\n}\n\n\n########\n# print incoming data to jq\n#######\njqit() {\n echo \"$1\" | jq -r .\n}\n"
},
{
"alpha_fraction": 0.7277277112007141,
"alphanum_fraction": 0.7437437176704407,
"avg_line_length": 34.67856979370117,
"blob_id": "18ace25f16b8355934c14e07464c77674cab8635",
"content_id": "679b9b8aeba048a36df8b6ef628c3acf0a6832c3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 999,
"license_type": "no_license",
"max_line_length": 191,
"num_lines": 28,
"path": "/metrics/README.md",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "# Metrics\n\n## Introduction\n\nMetrics are provided via [Dropwizard](http://www.dropwizard.io/) and sent to a [graphite](https://graphiteapp.org/) server for collection. This is exposed via [grafana](https://grafana.com/).\n\nBy default these two services are _not_ running as graphite utilizes a fair amount of CPU. To enable, run the following from the _metrics_ subdirectory:\n~~~~\nbooks/metrics> docker-compose up -d\n~~~~\n\nThis will bring the graphite and grafana servers up and join them to the existing *books_booknet* docker network.\n\nrun the following to disconnect the two servers:\n~~~~\nbooks/metrics> docker-compose down\n~~~~\n\n\n## Exposed Ports\n\nGrafana is viewable at [http://localhost:3000/](http://localhost:3000/). The default username & password is admin/admin.\n\nThe Graphite UI is viewable at [http://localhost:3001/](http://localhost:3001/).\n\n## Volume Mounts\n\nBoth servers utilize a volume mount to the */metrics/* subdirectory. This is customizable via the _docker-compose.yml_ file.\n"
},
{
"alpha_fraction": 0.6777259111404419,
"alphanum_fraction": 0.6818939447402954,
"avg_line_length": 31.421621322631836,
"blob_id": "ce502e24399e0ad52b822233c00efff8f8f285a0",
"content_id": "b4d14a257ec08748c37bb3a41a88412bcef68404",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Java",
"length_bytes": 5998,
"license_type": "no_license",
"max_line_length": 179,
"num_lines": 185,
"path": "/images.java/authorization/src/main/java/com/wpff/resources/AuthResource.java",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package com.wpff.resources;\n\nimport java.util.UUID;\n\nimport javax.ws.rs.Consumes;\nimport javax.ws.rs.GET;\nimport javax.ws.rs.HeaderParam;\nimport javax.ws.rs.NotFoundException;\nimport javax.ws.rs.POST;\nimport javax.ws.rs.Path;\nimport javax.ws.rs.Produces;\nimport javax.ws.rs.WebApplicationException;\nimport javax.ws.rs.core.Context;\nimport javax.ws.rs.core.MediaType;\nimport javax.ws.rs.core.Response;\n\n// password encryption\nimport org.jasypt.util.password.BasicPasswordEncryptor;\n\nimport com.google.common.net.HttpHeaders;\nimport com.wpff.core.Bearer;\n// books\nimport com.wpff.core.Credentials;\nimport com.wpff.core.User;\nimport com.wpff.db.UserDAO;\n\nimport io.dropwizard.hibernate.UnitOfWork;\n// Swagger\nimport io.swagger.annotations.Api;\nimport io.swagger.annotations.ApiOperation;\nimport io.swagger.annotations.ApiParam;\n// Jedis\nimport redis.clients.jedis.Jedis;\n\n\n\n/**\n * Resource at /authorize that authenticates/authorizes and de-authenticates a user.\n *\n * Successful authentication will create and store a token associated with\n * the user, which is then used later via the TokenFilter to authenticate\n * Resource calls (like GET /user/bob)\n */\n@Api(\"/authorize\")\n@Path(\"/authorize\")\n@Produces(MediaType.APPLICATION_JSON)\n@Consumes(MediaType.APPLICATION_JSON)\npublic class AuthResource {\n\n // Static Bearer text\n private static String BEARER = \"Bearer\";\n\n /**\n * DAO used to get a User and the associated password\n * that is then used for authorization\n */\n private final UserDAO userDAO;\n\n /**\n * Create new AuthResource with a user DAO for validating username/password\n *\n * @param userDAO DAO used find a user for authentication\n */\n public AuthResource(UserDAO userDAO) {\n this.userDAO = userDAO;\n }\n\n /**\n * Validate an authorization token.\n * \n * This utilizes code from TokenFilter\n * \n * @param authHeader\n * Header to validate\n * @param jedis\n * Jedis instance used to validate token data. (INJECTED)\n * @return Response\n */\n @ApiOperation(\n value = \"Validates an authentication token\",\n notes = \"Returns a 200 if the authorization header is OK, 401 otherwise\")\n @GET\n @Path(\"validate\")\n public Response validate(\n @HeaderParam(HttpHeaders.AUTHORIZATION) String authHeader, \n @Context Jedis jedis) {\n // validate authorization bearer\n\n if ((authHeader == null) || (!authHeader.startsWith(BEARER))) {\n throw new WebApplicationException(\"Invalid Authorization header.\", Response.Status.UNAUTHORIZED);\n }\n \n // Grab token text from Header\n String token = authHeader.substring(BEARER.length() + 1);\n token = token.trim();\n \n // Get username and group from Jedis.\n String redisHashName = \"user:\" + token;\n String username = jedis.hget(redisHashName, \"name\");\n\n if ((username == null) || (username.isEmpty())) {\n throw new WebApplicationException(\"Invalid Authorization header.\", Response.Status.UNAUTHORIZED);\n }\n\n return Response.ok().build();\n }\n\n /**\n * Creates an authorization token for an incoming user.\n * If there is no matching user in the database an error is thrown.\n *\n * The resulting token is to be put in the HTTP headers. e.g.\n * Authorization : Bearer qwerty-1234-asdf-9876\n * \n * @param creds A credentials bean with name and password.\n * @param jedis Jedis instance used to store token data. 
(INJECTED)\n * @return Bearer with authentication token and ID.\n */\n @ApiOperation(\n value=\"Creates authentication token which is then used for various endpoints.\",\t \n notes=\"Token is created for the user being authenticated. Token is of form 'Bearer qwerty-1234-asdf-9876'. Where required, it should be put in the HTTP Headers with key 'AUTHORIZATION'.\"\n )\n @POST\n @UnitOfWork\n @Path(\"/token\")\n public Bearer authenticate(\n @ApiParam(value = \"Credentials for creating authentication token\", required = true) \n Credentials creds,\n @Context Jedis jedis) {\n\n // Get user/password from incoming JSON/bean\n String userToAuthenticate = creds.getName();\n String userPassword = creds.getPassword();\n\n // See if there is a matching user in the database\n User userInDatabase = findSafely(userToAuthenticate);\n\n // User exists. Take incoming password and compare against\n // the encrypted one\n BasicPasswordEncryptor passwordEncryptor = new BasicPasswordEncryptor();\n if (passwordEncryptor.checkPassword(userPassword, userInDatabase.getPassword())) {\n // password is OK. Create and return a token\n String token = UUID.randomUUID().toString();\n String fullToken = BEARER + \" \" + token;\n\n ///////////\n // Create token in redis that will last a week\n \n // hset user:token name \"user name\"\n // hset user:token group \"user group\"\n String redisHashName = \"user:\" + token;\n jedis.hset(redisHashName, \"name\", userInDatabase.getName());\n jedis.hset(redisHashName, \"group\", userInDatabase.getUserGroup());\n jedis.expire(redisHashName, 60 * 60 * 24 * 7);\n \n // Create Bearer bean to return to user\n Bearer tokenToReturn = new Bearer();\n tokenToReturn.setToken(fullToken);\n tokenToReturn.setUserId(userInDatabase.getId());\n tokenToReturn.setGroupName(userInDatabase.getUserGroup());\n \n return tokenToReturn;\n }\n else {\n // bad password\n System.out.println(\"AuthResource.authenticate: invalid password for '\" + userToAuthenticate + \"'\");\n throw new WebApplicationException(Response.Status.UNAUTHORIZED);\n }\n }\n\n\n /****************************************************************\n\n Helper methods\n\n ****************************************************************/\n\n /**\n * Look for User by incoming name. If returned User is null, throw Not Found (404).\n */\n private User findSafely(String name) {\n return this.userDAO.findByName(name).orElseThrow(() -> new NotFoundException(\"No user by name '\" + name + \"'\"));\n }\n\n}\n"
},
{
"alpha_fraction": 0.6486486196517944,
"alphanum_fraction": 0.6525096297264099,
"avg_line_length": 18.185184478759766,
"blob_id": "02fd687e7f26afe2c04efdb035a0b7219a85ae2f",
"content_id": "e68f71c8475b2ae2f12a6c44755217cfbd3a0f41",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 518,
"license_type": "no_license",
"max_line_length": 56,
"num_lines": 27,
"path": "/images/book/src/github.com/hipposareevil/book/utils.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\nimport (\n\t\"strconv\"\n\t\"strings\"\n)\n\n////////////\n// Split a CSV string into array\nfunc splitCsvStringToArray(subjectCsv string) []string {\n\tif len(subjectCsv) > 0 {\n\t\treturn strings.Split(subjectCsv, \",\")\n\t} else {\n\t\treturn make([]string, 0)\n\t}\n}\n\n////////////\n// Convert incoming int array to CSV string\nfunc convertIntArrayToCsv(intArray []int) string {\n\ttempArray := make([]string, len(intArray))\n\tfor i, v := range intArray {\n\t\ttempArray[i] = strconv.Itoa(v)\n\t}\n\n\treturn strings.Join(tempArray, \",\")\n}\n"
},
{
"alpha_fraction": 0.6662784218788147,
"alphanum_fraction": 0.6662784218788147,
"avg_line_length": 17.26595687866211,
"blob_id": "055b1cad24b13212808026b984ea4e2d55034004",
"content_id": "2023ae230d590da0e1bce9f193f369b3f37b3fc6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Java",
"length_bytes": 1717,
"license_type": "no_license",
"max_line_length": 97,
"num_lines": 94,
"path": "/images.java/user_book/src/main/java/com/wpff/core/Tag.java",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package com.wpff.core;\n\nimport java.util.Objects;\n\nimport javax.persistence.Column;\nimport javax.persistence.Entity;\nimport javax.persistence.GeneratedValue;\nimport javax.persistence.GenerationType;\nimport javax.persistence.Id;\nimport javax.persistence.NamedQueries;\nimport javax.persistence.NamedQuery;\nimport javax.persistence.Table;\n\nimport io.swagger.annotations.ApiModelProperty;\n\n/**\n * Named query to select all tags\n */\n@Entity\n@Table(name = \"tag\")\n@NamedQueries({ @NamedQuery(name = \"com.wpff.core.Tag.findAll\", query = \"SELECT u FROM Tag u\") })\n/**\n * Tag class\n */\npublic class Tag {\n\n\t/**\n\t * Tag ID.\n\t */\n\t@Id\n\t@GeneratedValue(strategy = GenerationType.IDENTITY)\n\t@Column(name = \"tag_id\", unique = true, nullable = false)\n\t@ApiModelProperty(hidden = true)\n\tprivate int id;\n\n\t/**\n\t * Name of Tag\n\t */\n\t@Column(name = \"name\", unique = true, nullable = false)\n\tprivate String name;\n\n\t/**\n\t * Default constructor\n\t */\n\tpublic Tag() {\n\t}\n\n\tpublic Tag(String name, int id) {\n\t\tthis.name = name;\n\t\tthis.id = id;\n\t}\n\n\tpublic String toString() {\n\t\treturn \"Tag[id=\" + id + \", \" + \"name='\" + name + \"']\";\n\t}\n\n\n\tpublic String getName() {\n\t\treturn name;\n\t}\n\n\tpublic void setName(String name) {\n\t\tthis.name = name;\n\t}\n\n\t// @ApiModelProperty(hidden=true)\n\tpublic int getId() {\n\t\treturn this.id;\n\t}\n\n\t// @ApiModelProperty(hidden=true)\n\tpublic void setId(int id) {\n\t\tthis.id = id;\n\t}\n\n\t@Override\n\tpublic boolean equals(Object o) {\n\t\tif (this == o) {\n\t\t\treturn true;\n\t\t}\n\t\tif (!(o instanceof Tag)) {\n\t\t\treturn false;\n\t\t}\n\n\t\tfinal Tag that = (Tag) o;\n\n\t\treturn Objects.equals(this.name, that.name) && Objects.equals(this.id, that.id);\n\t}\n\n\t@Override\n\tpublic int hashCode() {\n\t\treturn Objects.hash(this.name, this.id);\n\t}\n}\n"
},
{
"alpha_fraction": 0.6392674446105957,
"alphanum_fraction": 0.6402612328529358,
"avg_line_length": 21.291139602661133,
"blob_id": "de8630f5134b052f816628b9e0382ff1b7830242",
"content_id": "3cab948bebddb74189ae6a0c2edfc0202dd364ee",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 7044,
"license_type": "no_license",
"max_line_length": 116,
"num_lines": 316,
"path": "/images/tag/src/github.com/hipposareevil/tag/service.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\n// Tag service\n// Supports:\n// - DELETE at /tag/<tag_id> to delete a single 'tag'\n// - PUT at /tag/<tag_id> to update a 'tag'\n// - GET at /tag/<tag_id> to return a single 'tag'\n// - GET at /tag to return all 'tags'\n// - POST at /tag to create a new 'tag'\n\nimport (\n\t\"fmt\"\n\t\"strings\"\n\n\t\"errors\"\n\n\t\"encoding/json\"\n\n\t\"database/sql\"\n\t_ \"github.com/go-sql-driver/mysql\"\n)\n\nconst TAG_CACHE = \"tag\"\n\n// Service interface exposed to clients\ntype TagService interface {\n\t// GetTag: bearer, tag_id\n\tGetTag(string, int) (Tag, error)\n\n\t// Get tags: bearer, offset, limit\n\tGetTags(string, int, int) (Tags, error)\n\n\t// GetTag: bearer, tag_id\n\tDeleteTag(string, int) error\n\n\t// Create tag: bearer, tag name\n\tCreateTag(string, string) (Tag, error)\n\n\t// Update tag: bearer, tag name\n\tUpdateTag(string, string, int) error\n}\n\n////////////////////////\n// Actual service\n// This takes the following:\n// - mysqlDb DB for MySQL\n// - cache layer\ntype tagService struct {\n\tmysqlDb *sql.DB\n\tcache CacheLayer\n}\n\n//////////\n// METHODS on tagService\n\n////////////////\n// Get tag\n//\n// params:\n// bearer: Authorization bearer\n// tagId : ID of tag to get\n//\n// returns:\n// tag\n// error\nfunc (theService tagService) GetTag(bearer string, tagId int) (Tag, error) {\n\tfmt.Println(\" \")\n\tfmt.Println(\"-- GetTag --\")\n\n\t////////////////////\n\t// Get data from mysql\n\t// mysql\n\tif err := theService.mysqlDb.Ping(); err != nil {\n\t\ttheService.mysqlDb.Close()\n\t\treturn Tag{}, errors.New(\"unable to ping mysql\")\n\t}\n\n\t// Make query\n\tvar tag Tag\n\t// Scan the DB info into 'tag' variable\n\terr := theService.mysqlDb.QueryRow(\"SELECT tag_id, name FROM tag WHERE tag_id = ?\", tagId).Scan(&tag.ID, &tag.Name)\n\n\tswitch {\n\tcase err == sql.ErrNoRows:\n\t\treturn Tag{}, ErrNotFound\n\tcase err != nil:\n\t\tfmt.Println(\"Got error from select: \", err)\n\t\treturn Tag{}, ErrServerError\n\t}\n\n\treturn tag, nil\n}\n\n////////////////\n// Get tags\n//\n// params:\n// bearer: authorization bearer\n// offset : offset into list\n// limit : number of items to get from list\n//\n// returns:\n// tags\n// error\nfunc (theService tagService) GetTags(bearer string, offset int, limit int) (Tags, error) {\n\tfmt.Println(\" \")\n\tfmt.Println(\"-- GetTags --\")\n\n\t////////////////////\n\t// Get data from mysql\n\t// mysql\n\tif err := theService.mysqlDb.Ping(); err != nil {\n\t\ttheService.mysqlDb.Close()\n\t\treturn Tags{}, errors.New(\"unable to ping mysql\")\n\t}\n\n\t// Get total number of rows\n\tvar totalNumberOfRows int\n\t_ = theService.mysqlDb.QueryRow(\"SELECT COUNT(*) from tag\").Scan(&totalNumberOfRows)\n\n\tif limit > totalNumberOfRows {\n\t\tlimit = totalNumberOfRows\n\t}\n\n\t// Make query\n\tresults, err := theService.mysqlDb.Query(\"SELECT tag_id, name FROM tag LIMIT ?,? 
\", offset, limit)\n\tif err != nil {\n\t\treturn Tags{}, errors.New(\"unable to query mysql\")\n\t}\n\n\t// slice of Tag entities\n\tdatum := make([]Tag, 0, 0)\n\n\t// Parse results\n\tfor results.Next() {\n\t\tvar tag Tag\n\t\t// For each row, scan the result into our tag composite object:\n\t\t// tag_id, name\n\t\terr = results.Scan(&tag.ID, &tag.Name)\n\t\tif err != nil {\n\t\t\treturn Tags{}, errors.New(\"unable to query mysql\")\n\t\t}\n\t\t// and then print out the tag's Name attribute\n\t\tdatum = append(datum, tag)\n\t}\n\n\t// Create Tags to return\n\treturnValue := Tags{\n\t\tOffset: offset,\n\t\tLimit: limit,\n\t\tTotal: totalNumberOfRows,\n\t\tData: datum,\n\t}\n\n\t// Save all tags into cache if we have values\n\tif totalNumberOfRows > 0 {\n\t\ttagsAsBytes, err := json.Marshal(returnValue)\n\t\tif err == nil {\n\t\t\t// save to cache\n\t\t\tfmt.Println(\"Saving bytes to tag cache\")\n\t\t\tgo theService.cache.SetBytes(TAG_CACHE, 0, tagsAsBytes)\n\t\t} else {\n\t\t\tfmt.Println(\"Unable to save tags to cache:\", err)\n\t\t}\n\t} else {\n\t\tfmt.Println(\"Not saving tags to cache as there are no datum yet\")\n\t}\n\n\treturn returnValue, nil\n}\n\n////////////////\n// Delete tag\n//\n// params:\n// bearer: Authorization bearer\n// tagId : ID of tag to delete\n//\n// returns:\n// error\nfunc (theService tagService) DeleteTag(bearer string, tagId int) error {\n\tfmt.Println(\" \")\n\tfmt.Println(\"-- DeleteTag --\")\n\n\t////////////////////\n\t// Get data from mysql\n\t// mysql\n\tif err := theService.mysqlDb.Ping(); err != nil {\n\t\ttheService.mysqlDb.Close()\n\t\treturn errors.New(\"unable to ping mysql\")\n\t}\n\n\t// Verify the tag exists, if not, throw ErrNotFound\n\t_, getErr := theService.GetTag(bearer, tagId)\n\tif getErr != nil {\n\t\treturn getErr\n\t}\n\n\t// Make DELETE query\n\t_, err := theService.mysqlDb.Exec(\"DELETE FROM tag WHERE tag_id = ?\", tagId)\n\n\t// Delete from tagmapping as well.\n\t// Ignore the error for now.\n\t_, _ = theService.mysqlDb.Exec(\"DELETE FROM tagmapping WHERE tag_id = ?\", tagId)\n\n\t// Delete the cache\n\tfmt.Println(\"Clearing TAG cache as we deleted a tag\")\n\ttheService.cache.Clear(TAG_CACHE, 0)\n\n\treturn err\n}\n\n////////////////\n// CreateTag\n//\n// params:\n// bearer: authorization bearer\n// name: name of new tag\n//\n// returns:\n// tag\n// error\nfunc (theService tagService) CreateTag(bearer string, tagName string) (Tag, error) {\n\tfmt.Println(\" \")\n\tfmt.Println(\"-- CreateTag --\")\n\n\t////////////////////\n\t// Get data from mysql\n\t// mysql\n\tif err := theService.mysqlDb.Ping(); err != nil {\n\t\ttheService.mysqlDb.Close()\n\t\treturn Tag{}, errors.New(\"unable to ping mysql\")\n\t}\n\n\t// Make query\n\tstmt, err := theService.mysqlDb.Prepare(\"INSERT INTO tag SET name=?\")\n\tdefer stmt.Close()\n\tif err != nil {\n\t\tfmt.Println(\"Error preparing DB: \", err)\n\t\treturn Tag{}, errors.New(\"Unable to prepare a DB statement: \")\n\t}\n\n\tres, err := stmt.Exec(tagName)\n\tif err != nil {\n\t\tfmt.Println(\"Error inserting into DB: \", err)\n\t\tif strings.Contains(err.Error(), \"Duplicate entry \") {\n\t\t\treturn Tag{}, ErrAlreadyExists\n\t\t} else {\n\t\t\treturn Tag{}, errors.New(\"Unable to run INSERT against DB: \")\n\t\t}\n\t}\n\n\t// get the id\n\tid, _ := res.LastInsertId()\n\n\t// Create tag\n\tvar tag Tag\n\ttag = Tag{\n\t\tID: int(id),\n\t\tName: tagName,\n\t}\n\n\t// Delete the cache\n\tfmt.Println(\"Clearing TAG cache as we added a tag\")\n\ttheService.cache.Clear(TAG_CACHE, 0)\n\n\treturn tag, 
nil\n}\n\n////////////////\n// UpdateTag\n//\n// params:\n// bearer: authorization bearer\n// tagName: new name of tag\n// tagId: id of tag to update\n//\n// returns:\n// tag\n// error\nfunc (theService tagService) UpdateTag(bearer string, tagName string, tagId int) error {\n\tfmt.Println(\" \")\n\tfmt.Println(\"-- CreateTag --\")\n\n\t////////////////////\n\t// Get data from mysql\n\t// mysql\n\tif err := theService.mysqlDb.Ping(); err != nil {\n\t\ttheService.mysqlDb.Close()\n\t\treturn errors.New(\"unable to ping mysql\")\n\t}\n\n\t// Make query\n\tstmt, err := theService.mysqlDb.Prepare(\"UPDATE tag SET name=? WHERE tag_id = ?\")\n\tdefer stmt.Close()\n\tif err != nil {\n\t\tfmt.Println(\"Error preparing DB: \", err)\n\t\treturn errors.New(\"Unable to prepare a DB statement: \")\n\t}\n\n\t// Delete the cache\n\tfmt.Println(\"Clearing TAG cache as we updated a tag\")\n\ttheService.cache.Clear(TAG_CACHE, 0)\n\n\t_, err = stmt.Exec(tagName, tagId)\n\tif err != nil {\n\t\tfmt.Println(\"Error updatingDB: \", err)\n\t\tif strings.Contains(err.Error(), \"Duplicate entry \") {\n\t\t\treturn ErrAlreadyExists\n\t\t} else {\n\t\t\treturn errors.New(\"Unable to run update against DB: \")\n\t\t}\n\t}\n\n\treturn nil\n}\n"
},
{
"alpha_fraction": 0.7033236622810364,
"alphanum_fraction": 0.707338809967041,
"avg_line_length": 27.373416900634766,
"blob_id": "419f166a88d33a03c6bda88360b042e533478c85",
"content_id": "ad78400cbea0174b519059a1bdd2cb42b32214d2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 4483,
"license_type": "no_license",
"max_line_length": 82,
"num_lines": 158,
"path": "/images/user/src/github.com/hipposareevil/user/main.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\n// Main application\n//\n// This will create the databases, router, static files\n// and wire everything together\n\nimport (\n\t\"fmt\"\n\t\"net/http\"\n\t\"os\"\n\t\"time\"\n\n\t// mysql\n\t\"database/sql\"\n\t_ \"github.com/go-sql-driver/mysql\"\n\n\t// redis\n\t\"github.com/mediocregopher/radix.v2/pool\"\n\n\t// gorilla routing\n\t\"github.com/gorilla/mux\"\n\n\t// metrics\n\n\t\"github.com/go-kit/kit/log\"\n\thttptransport \"github.com/go-kit/kit/transport/http\"\n)\n\n// Main\nfunc main() {\n\tlogger := log.NewLogfmtLogger(os.Stderr)\n\n\t/////////////////\n\t// Make redis pool\n\tredisPool, err := pool.New(\"tcp\", \"books.token_db:6379\", 10)\n\tif err != nil {\n\t\tfmt.Println(\"Got error when making connection to redis: \", err)\n\t}\n\n\t/////////////////\n\t// Make Mysql db connection\n\tdb, err := sql.Open(\"mysql\", \"booksuser:books@tcp(books.db:3306)/booksdatabase\")\n\n\t// if there is an error opening the connection, handle it\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\tdefer db.Close()\n\tdb.SetMaxIdleConns(0)\n\tdb.SetConnMaxLifetime(time.Second * 10)\n\n\t///////////////////\n\t// create services and endpoints\n\n\t/////////\n\t// ROUTER\n\trouter := mux.NewRouter()\n\t// Make gorilla be router for everything\n\thttp.Handle(\"/\", router)\n\n\t/////////////////\n\t// Swagger static html file\n\thtmlDir := \"/html\"\n\n\t// Create server for swagger file\n\tfs := http.FileServer(http.Dir(htmlDir))\n\trouter.PathPrefix(\"/swagger.yaml\").Handler(http.StripPrefix(\"/\", fs))\n\n\t///////////////\n\t// 'user' service\n\tvar userSvc UserService\n\tuserSvc = userService{db}\n\n\t// Set up the endpoints on our service\n\t//\n\t// Note: the Authentication middleware is done on each endpoint\n\t// individually so we can tightly control each one as some\n\t// care about only accepting 'admin' group.\n\n\t////////////////\n\t// Endpoints\n\n\t//////\n\t// GET /user (all users)\n\tusersEndpoint := makeGetUsersEndpoint(userSvc)\n\tbaseUsersHandler := httptransport.NewServer(\n\t\tusersEndpoint,\n\t\tdecodeGetAllUsersRequest,\n\t\tencodeResponse,\n\t)\n\t// Add middleware to authenticate the endpoint.\n\t// first parameter denotes if only 'admin' group can access the endpoint.\n\tusersHandler := Authenticate(false, redisPool, baseUsersHandler)\n\trouter.Methods(\"GET\").Path(\"/user\").Handler(usersHandler)\n\n\t//////\n\t// GET /user/<user_id>\n\tuserEndpoint := makeGetUserEndpoint(userSvc)\n\tbaseUserHandler := httptransport.NewServer(\n\t\tuserEndpoint,\n\t\tdecodeGetUserRequest,\n\t\tencodeResponse,\n\t)\n\t// Add middleware to authenticate the endpoint.\n\t// first parameter denotes if only 'admin' group can access the endpoint.\n\tuserHandler := Authenticate(false, redisPool, baseUserHandler)\n\t// 'user_id' is used in transport.go to grab the variable 'user_id' from the path\n\trouter.Methods(\"GET\").Path(\"/user/{user_id}\").Handler(userHandler)\n\n\t//////\n\t// DELETE /user/<user_id>\n\tdeleteUserEndpoint := makeDeleteUserEndpoint(userSvc)\n\tbaseDeleteUserHandler := httptransport.NewServer(\n\t\tdeleteUserEndpoint,\n\t\tdecodeDeleteUserRequest,\n\t\tencodeResponse,\n\t)\n\t// Add middleware to authenticate the endpoint.\n\t// first parameter denotes if only 'admin' group can access the endpoint.\n\tdeleteUserHandler := Authenticate(true, redisPool, baseDeleteUserHandler)\n\t// 'user_id' is used in transport.go to grab the variable 'user_id' from the 
path\n\trouter.Methods(\"DELETE\").Path(\"/user/{user_id}\").Handler(deleteUserHandler)\n\n\t//////\n\t// POST /user\n\tcreateUserEndpoint := makeCreateUserEndpoint(userSvc)\n\tbaseCreateUserHandler := httptransport.NewServer(\n\t\tcreateUserEndpoint,\n\t\tdecodeCreateUserRequest,\n\t\tencodeResponse,\n\t)\n\t// Add middleware to authenticate the endpoint.\n\t// first parameter denotes if only 'admin' group can access the endpoint.\n\tcreateUserHandler := Authenticate(true, redisPool, baseCreateUserHandler)\n\trouter.Methods(\"POST\").Path(\"/user\").Handler(createUserHandler)\n\n\t//////\n\t// PUT /user/<user_id>\n\tupdateUserEndpoint := makeUpdateUserEndpoint(userSvc)\n\tbaseUpdateUserHandler := httptransport.NewServer(\n\t\tupdateUserEndpoint,\n\t\tdecodeUpdateUserRequest,\n\t\tencodeResponse,\n\t)\n\t// Add middleware to authenticate the endpoint.\n\t// first parameter denotes if only 'admin' group can access the endpoint.\n\tupdateUserHandler := Authenticate(true, redisPool, baseUpdateUserHandler)\n\t// 'user_id' is used in transport.go to grab the variable 'user_id' from the path\n\trouter.Methods(\"PUT\").Path(\"/user/{user_id}\").Handler(updateUserHandler)\n\n\t//////////////\n\t// Start server\n\taddr := \":8080\"\n\tlogger.Log(\"msg\", \"HTTP\", \"addr\", addr)\n\tfmt.Println(\"user service up on \" + addr)\n\tlogger.Log(\"err\", http.ListenAndServe(addr, nil))\n}\n"
},
{
"alpha_fraction": 0.6496062874794006,
"alphanum_fraction": 0.6496062874794006,
"avg_line_length": 15.354838371276855,
"blob_id": "9c656e0a1bce9799d2947f571633b6a22dbf8de1",
"content_id": "cb151c552549d219ba44290b961030815a555905",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Java",
"length_bytes": 508,
"license_type": "no_license",
"max_line_length": 61,
"num_lines": 31,
"path": "/images.java/user/src/main/java/com/wpff/core/VisableUser.java",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package com.wpff.core;\n\n\n/**\n * Truncated version of User.\n * This will be returned from 'getUsers'.\n */\npublic class VisableUser {\n private int id;\n private String name;\n private String userGroup;\n\n public VisableUser(String name, String userGroup, int id) {\n this.name = name;\n this.userGroup = userGroup;\n this.id = id;\n }\n \n public String getUserGroup() {\n\t return this.userGroup;\n }\n\n public int getId() {\n return this.id;\n }\n\n public String getName() {\n return name;\n }\n\n}\n\n"
},
{
"alpha_fraction": 0.6524493098258972,
"alphanum_fraction": 0.6545608043670654,
"avg_line_length": 21.339622497558594,
"blob_id": "cc48d7e89f1a824e3a31adffd3f170b10ae34eda",
"content_id": "82b08fc70569769befa52462db9e18fdc90ffe7d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 4736,
"license_type": "no_license",
"max_line_length": 96,
"num_lines": 212,
"path": "/images/review/src/github.com/hipposareevil/review/transport.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\n// Transport module\n//\n// Contains:\n// - endpoint creation\n// - encode responses to client\n// - decode client requests\n// - structures used. e.g. bookRequest, postBookRequest, etc\n\nimport (\n\t\"context\"\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"net/http\"\n\t\"strconv\"\n\t\"strings\"\n\n\t\"github.com/gorilla/mux\"\n\n\t\"github.com/go-kit/kit/endpoint\"\n)\n\n//////////////////////////////////////////////////////////\n//\n// Create endpoints\n\n// GET /review/<book_id>\n// Make endpoint for getting reviews\nfunc makeGetReviewsEndpoint(svc ReviewService) endpoint.Endpoint {\n\n\treturn func(ctx context.Context, request interface{}) (interface{}, error) {\n\t\t// convert request into a reviews specific request\n\t\treq := request.(getReviewsRequest)\n\n\t\t// call actual service with data from the req\n\t\treviews, err := svc.GetReviews(\n\t\t\treq.Bearer,\n\t\t\treq.Offset,\n\t\t\treq.Limit,\n\t\t\treq.BookId)\n\n\t\treturn reviewResponse{\n\t\t\tData: reviews,\n\t\t\tErr: err,\n\t\t}, nil\n\t}\n}\n\n//////////////////////////////////////////////////////////\n//\n// Decode\n\n// Create a getReviewRequestfrom the context and http.Request\n// /review/<book_id>\n//\nfunc decodeGetReviewsRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\t// Get offset, limit and bearer\n\trealOffset, realLimit := parseOffsetAndLimit(r)\n\tbearer := parseBearer(r)\n\n\t// Get book id\n\tbookId, err := parseBookId(r)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Make request for all books\n\tvar request getReviewsRequest\n\trequest = getReviewsRequest{\n\t\tBearer: bearer,\n\t\tOffset: realOffset,\n\t\tLimit: realLimit,\n\t\tBookId: bookId,\n\t}\n\n\treturn request, nil\n}\n\n/////////////////////////////\n// Helper methods\n\n// Returns the bearer id without \"Bearer \"\nfunc parseBearer(r *http.Request) string {\n\tvar realBearer string\n\tbearer := r.Header.Get(\"authorization\")\n\n\t// Strip the 'Bearer ' from header\n\tif strings.HasPrefix(bearer, \"Bearer \") {\n\t\trealBearer = strings.Replace(bearer, \"Bearer \", \"\", 1)\n\t}\n\n\treturn realBearer\n}\n\n// Decode the common parts of a request:\n// * offset\n// * limit\n//\n// Instead of erroring out, it will return defaults\n//\n// Returns the two values in order: offset & limit\nfunc parseOffsetAndLimit(r *http.Request) (int, int) {\n\t///////////////////\n\t// Parse parameters\n\tr.ParseForm()\n\tvalues := r.Form\n\n\t// Get values from the form, where 'offset' & 'limit' are parameters\n\tvar realOffset int\n\tvar realLimit int\n\n\t// Offset, use a default of 0\n\toffset := values.Get(\"offset\")\n\tif offset != \"\" {\n\t\trealOffset, _ = strconv.Atoi(offset)\n\t} else {\n\t\trealOffset = 0\n\t}\n\n\t// Limit, set a default if it doesn't exist\n\tlimit := values.Get(\"limit\")\n\tif limit != \"\" {\n\t\trealLimit, _ = strconv.Atoi(limit)\n\t} else {\n\t\t// default to get 20\n\t\trealLimit = 20\n\t}\n\n\treturn realOffset, realLimit\n}\n\n// Decode the 'book_id' from the request.\n//\n//// Returns the book id\nfunc parseBookId(r *http.Request) (int, error) {\n\t// Demux the gorilla parsing\n\tvars := mux.Vars(r)\n\t// 'book_id' is set in the gorilla handling in main.go\n\tid, ok := vars[\"book_id\"]\n\tif !ok {\n\t\treturn 0, ErrBadRouting\n\t}\n\n\tvar bookId int\n\tif id != \"\" {\n\t\tbookId, _ = strconv.Atoi(id)\n\t}\n\n\treturn bookId, nil\n}\n\n//////////////////////////////////////////////////////////\n//\n// Encode responses to client\n\n// The response can/should be of type errorer 
and thus can be cast to check if there is an error\nfunc encodeResponse(ctx context.Context, w http.ResponseWriter, response interface{}) error {\n\t// Set JSON type\n\tw.Header().Set(\"Content-Type\", \"application/json; charset=utf-8\")\n\n\t// Check error\n\tif e, ok := response.(errorer); ok {\n\t\t// This is a errorer class, now check for error\n\t\tif err := e.error(); err != nil {\n\t\t\tencodeError(ctx, e.error(), w)\n\t\t\treturn nil\n\t\t}\n\t}\n\n\t// cast to dataHolder to get Data, otherwise just encode the resposne\n\tif holder, ok := response.(dataHolder); ok {\n\t\treturn json.NewEncoder(w).Encode(holder.getData())\n\t} else {\n\t\treturn json.NewEncoder(w).Encode(response)\n\t}\n}\n\n// Write the incoming err into the response writer\nfunc encodeError(_ context.Context, err error, w http.ResponseWriter) {\n\tif err == nil {\n\t\tpanic(\"encodeError with nil error\")\n\t}\n\n\tw.Header().Set(\"Content-Type\", \"application/json; charset=utf-8\")\n\n\t// Write actual error code\n\tcode := codeFrom(err)\n\tw.WriteHeader(code)\n\n\tfmt.Println(\"Sending back error '\" + err.Error() + \"'\")\n\n\t// write out the error message\n\tjson.NewEncoder(w).Encode(map[string]interface{}{\n\t\t\"code\": code,\n\t\t\"message\": err.Error(),\n\t})\n}\n\n// Determine the HTTP error code from the incoming error 'err'\nfunc codeFrom(err error) int {\n\tswitch err {\n\tcase ErrNotFound:\n\t\treturn http.StatusNotFound\n\tcase ErrAlreadyExists:\n\t\treturn http.StatusConflict\n\tcase ErrUnauthorized:\n\t\treturn http.StatusUnauthorized\n\tdefault:\n\t\treturn http.StatusInternalServerError\n\t}\n}\n"
},
{
"alpha_fraction": 0.660649836063385,
"alphanum_fraction": 0.7400721907615662,
"avg_line_length": 35.93333435058594,
"blob_id": "b78daa8c1a5ef99d4091805a3caf14a88b8f53ca",
"content_id": "bdb6e954e3857aab1e16efbfee1930898a96d2b0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 554,
"license_type": "no_license",
"max_line_length": 168,
"num_lines": 15,
"path": "/images.java/authorization/startDatabases.sh",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env bash\n\n# Download and start Redis and MySQL. Do MySQL first as it's slow to start up\n# these will both be exposed to the localhost/127.0.0.1\n\necho \"Downloading and running mysql\"\ndocker run --name db -p 3306:3306 -e MYSQL_USER=booksuser -e MYSQL_PASSWORD=books -e MYSQL_ROOT_PASSWORD=booksit -e MYSQL_DATABASE=booksdatabase -d mysql:latest\n\necho \"mysql started on 127.0.0.1:3306\"\necho \"\"\n\necho \"Downloading and starting Redis\"\ndocker run --name redis -p 6379:6379 -d redis:3.0-alpine\necho \"redis started on 127.0.0.1:6379\"\necho \"\"\n"
},
{
"alpha_fraction": 0.5532439351081848,
"alphanum_fraction": 0.5576238632202148,
"avg_line_length": 23.675676345825195,
"blob_id": "77e9a9893a3f36b3b2fc2312494a676908be5745",
"content_id": "03963f454377644e697a6a7104532ffbe64e6d21",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 3653,
"license_type": "no_license",
"max_line_length": 92,
"num_lines": 148,
"path": "/test/reviews.sh",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "\n######################\n#\n# review related calls\n#\n#\n######################\n\n##################\n#\n# Get reviews\n#\n# params:\n# 1- (optional) bearer string\n##################\nget_reviews_for_book() {\n bearer=\"$BEARER\"\n book_id=$1\n\n # if bearer string is specified, use it, otherwise\n # we default to the original\n if [ $# -eq 2 ]; then\n bearer=\"$2\"\n fi\n\n result=$(curl -s -w 'CODE%{http_code}CODE' \\\n -X GET \"${ROOT_URL}/review/${book_id}\" \\\n -H 'content-type: application/json' \\\n -H \"authorization: ${bearer}\" \\\n -d \"$post_data\" \\\n -H 'cache-control: no-cache')\n\n if [ $? -ne 0 ]; then\n error \"Error making GET to /review for book '$book_id'\"\n fi\n\n # get code\n code=$(echo \"$result\" | awk -FCODE '{print $2}' | xargs)\n if [[ \"$code\" -ne \"200\" ]]; then\n error \"Error making GET to /review for book '$book_id': code: $code, error: $error\"\n fi\n\n # strip code\n result=$(echo \"$result\" | sed \"s/CODE${code}CODE//g\")\n echo \"$result\"\n}\n\n\n###############\n#\n# Clean user books\n#\n###############\nreview::clean() {\n echo \"\"\n delete_all_user_books\n\n book::clean\n tag::clean\n user::clean\n}\n\n\n##############\n#\n# main test\n#\n#\n##############\nreview::main_test() {\n echo \"[ Review main test ]\"\n\n\n # create userbook for admin/1\n # create new user\n \n ####\n # create tag, author, book, etc\n echo \"\"\n echo \"Create tag\"\n result=$(tag::create_tag \"testit\")\n echo \"Create author\"\n author=$(create_author_lengle)\n author_id=$(echo \"$author\" | jq -r .id)\n \n book_title=\"generic book for review test\"\n echo \"Create book '$book_title' for author '$author_id'\"\n generic_book=$(book::create_book \"$book_title\" \"$author_id\")\n book_id=$(echo $generic_book | jq -r .id)\n\n admin_user_book=$(create_generic_user_book $book_id)\n\n # Create second user to get reviews from\n second_user=$(user::create_user \"notadmin\")\n authorize_second_user \"notadmin\" \"${SECOND_USER_PASSWORD}\"\n\n # get reviews as 2nd user\n reviews=$(get_reviews_for_book $book_id \"$BEARER_second\")\n\n # Validate review\n total=$(echo \"$reviews\" | jq -r .total)\n offset=$(echo \"$reviews\" | jq -r .offset)\n limit=$(echo \"$reviews\" | jq -r .limit)\n\n echo \"\"\n echo \"Checking limit/offset/total\"\n assert_equals 0 ${offset} \"offset in reviews returned\"\n assert_equals 1 $total \"total number reviews\"\n assert_equals 1 ${limit} \"limit number reviews\"\n\n num=$(echo \"${reviews}\" | jq -r \".data | length\" )\n assert_equals 1 $num \"Number of reviews\"\n\n first_review=$(echo \"$reviews\" | jq -r '.data[0]')\n\n echo \"\"\n echo \"Checking review contents\"\n review_books_id=$(echo \"$first_review\" | jq -r .bookId)\n review_rating=$(echo \"$first_review\" | jq -r .rating)\n review_review=$(echo \"$first_review\" | jq -r .review)\n review_tags=$(echo \"$first_review\" | jq -r '.tags | join(\", \")')\n review_user=$(echo \"$first_review\" | jq -r .userName)\n\n assert_equals $bookId $review_books_id \"Review's book id\"\n assert_string_equals \"false\" $review_rating \"Review's rating\"\n assert_string_equals \"super review for generic book\" \"$review_review\" \"Review's review\"\n assert_string_equals \"admin\" \"$review_user\" \"Review's user\"\n assert_contains \"$review_tags\" \"testit\" \"Review's tags\"\n\n\n # clean up\n review::clean\n echo \"[ End review main test ]\"\n}\n\n\n###############\n#\n# Test review endpoint\n#\n###############\ntest_reviews() {\n echo \"\"\n echo \"[ Review test ]\"\n \n 
review::main_test\n\n echo \"[ Done Review test ]\"\n}\n"
},
{
"alpha_fraction": 0.6142131686210632,
"alphanum_fraction": 0.6326903700828552,
"avg_line_length": 24.51813507080078,
"blob_id": "01f2113a6f26d61b66539d5ed4b21d899115d0bd",
"content_id": "1b1bc0a9f4a6726aa3e15b22ab5d082f8b9704d3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "YAML",
"length_bytes": 4925,
"license_type": "no_license",
"max_line_length": 296,
"num_lines": 193,
"path": "/docker-compose.yml",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "version: '3.2'\n\n###############\n# NOTES:\n# \n# .env file can have the following variables:\n# BOOK_REPOSITORY = location of repository or repo name\n# BOOK_VERSION = version of book images, e.g. 1.0.1\n###############\n\nservices:\n\n # Query openlibrary\n query:\n container_name: books.query\n image: ${BOOK_REPOSITORY}books.query:${BOOK_VERSION:-latest}\n expose:\n - 8080\n restart: always\n logging:\n driver: \"json-file\"\n networks:\n - booknet\n\n # List and manage book authors\n author:\n container_name: books.author\n image: ${BOOK_REPOSITORY}books.author:${BOOK_VERSION:-latest}\n expose:\n - 8080\n restart: always\n logging:\n driver: \"json-file\"\n networks:\n - booknet\n\n # List and manage books\n book:\n container_name: books.book\n image: ${BOOK_REPOSITORY}books.book:${BOOK_VERSION:-latest}\n expose:\n - 8080\n restart: always\n logging:\n driver: \"json-file\"\n networks:\n - booknet\n\n # Manage users\n user:\n container_name: books.user\n image: ${BOOK_REPOSITORY}books.user:${BOOK_VERSION:-latest}\n expose:\n - 8080\n restart: always\n logging:\n driver: \"json-file\"\n networks:\n - booknet\n\n # Manage books for user\n user_book:\n container_name: books.user_book\n image: ${BOOK_REPOSITORY}books.user_book:${BOOK_VERSION:-latest}\n expose:\n - 8080\n restart: always\n logging:\n driver: \"json-file\"\n networks:\n - booknet\n\n # Manage tags\n tag:\n container_name: books.tag\n image: ${BOOK_REPOSITORY}books.tag:${BOOK_VERSION:-latest}\n expose:\n - 8080\n restart: always\n logging:\n driver: \"json-file\"\n networks:\n - booknet\n\n # List reviews\n review:\n container_name: books.review\n image: ${BOOK_REPOSITORY}books.review:${BOOK_VERSION:-latest}\n expose:\n - 8080\n restart: always\n logging:\n driver: \"json-file\"\n networks:\n - booknet\n\n # Manage authorizations\n authorize:\n container_name: books.authorize\n image: ${BOOK_REPOSITORY}books.authorize:${BOOK_VERSION:-latest}\n expose:\n - 8080\n restart: always\n logging:\n driver: \"json-file\"\n networks:\n - booknet\n\n # Frontend webpage + js\n frontend:\n container_name: books.frontend\n# DEV with hot loading\n image: ${BOOK_REPOSITORY}books.frontend:${BOOK_VERSION:-latest}-dev\n# PROD:\n# image: ${BOOK_REPOSITORY}books.frontend:${BOOK_VERSION:-latest}\n restart: always\n logging:\n driver: \"json-file\"\n volumes:\n - \"./images/frontend/content/mybooks/:/scratch/\"\n networks:\n - booknet\n\n # Gateway-api runs on port 80 internally\n # It also exposes a swagger.yaml file for the 'swagger' container below.\n gateway:\n container_name: books.gateway\n image: ${BOOK_REPOSITORY}books.gateway:${BOOK_VERSION:-latest}\n ports:\n - 8080:80 \n restart: always\n logging:\n driver: \"json-file\"\n networks:\n - booknet\n\n # MySQL database for storing books, authors and users.\n # This loads the books.sql database on initialization\n books_db:\n container_name: books.db\n image: mysql:latest\n volumes:\n - \"./database/mysql/:/var/lib/mysql\"\n - \"./database/initial/books.sql:/docker-entrypoint-initdb.d/books.sql\"\n restart: always\n logging:\n driver: \"json-file\"\n networks:\n - booknet\n environment:\n - MYSQL_ROOT_PASSWORD=booksit \n - MYSQL_DATABASE=booksdatabase\n - MYSQL_USER=booksuser \n - MYSQL_PASSWORD=books \n\n\n # Redis key/value store for user and tokens\n token_db:\n container_name: books.token_db\n image: redis:3.2-alpine\n restart: always\n logging:\n driver: \"json-file\"\n volumes:\n - \"./database/redis/:/data\"\n networks:\n - booknet\n \n\n # Swagger-UI running on port 8080 
that is a single UI showing all of the\n # microservices' swagger information.\n swagger:\n container_name: books.swagger\n image: hipposareevil/swagger-combine\n restart: always\n logging:\n driver: \"json-file\"\n expose:\n - 8080\n networks:\n - booknet\n environment:\n # DEPLOY_HOST_NAME: Name of host where this is going to be deployed.\n # COMBINE_URLS: comma separated list of URLs to gather yaml entries\n # Later yaml files will override previous yaml entries.\n # For example, gateway:80/swagger.yaml contains 'info' which will override any existing 'info' entries.\n - COMBINE_URLS=books.author:8080/swagger.yaml,books.user:8080/swagger.yaml,books.authorize:8080/swagger.yaml,books.book:8080/swagger.yaml,books.tag:8080/swagger.yaml,books.user_book:8080/swagger.yaml,books.query:8080/swagger.yaml,books.gateway:80/swagger.yaml,books.review:8080/swagger.yaml\n - DEPLOY_HOST_NAME=${DEPLOY_HOST_NAME}\n\n\nnetworks:\n # network for all containers\n booknet:\n"
},
{
"alpha_fraction": 0.6631391644477844,
"alphanum_fraction": 0.6663872599601746,
"avg_line_length": 23.101009368896484,
"blob_id": "8bbab9e7b02759e9e5122d8c04c848747f302218",
"content_id": "65578ac5fe66cdce9121a036e4bc27900811f82e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 9544,
"license_type": "no_license",
"max_line_length": 128,
"num_lines": 396,
"path": "/images/query/src/github.com/hipposareevil/query/service.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\n// Query service\n\nimport (\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"io/ioutil\"\n\t\"net/http\"\n\t\"net/url\"\n\t\"sort\"\n\t\"strconv\"\n\t\"strings\"\n\t\"time\"\n)\n\n// Service interface exposed to clients\ntype QueryService interface {\n\tQueryAuthor(string, int, int) (Authors, error)\n\tQueryTitle(string, string, string, int, int) (Titles, error)\n}\n\n////////////////////////\n// Actual service\ntype queryService struct{}\n\n//////////\n// METHODS on queryService\n\n// Constants for URLs in openlibrary\nconst BASE_AUTHOR_URL = \"https://openlibrary.org/search/authors?q=\"\nconst BASE_TITLE_URL = \"https://openlibrary.org/search?\"\n\nconst ROOT_URL = \"https://openlibrary.org\"\nconst WORKS_BASE_URL = \"https://openlibrary.org\"\n\n// for images\nconst ROOT_AUTHOR_IMAGE = \"https://covers.openlibrary.org/a/olid/\"\nconst ROOT_COVER_IMAGE = \"https://covers.openlibrary.org/b/\"\n\n////////////////\n// Query Authors\n//\n// returns:\n// Authors\n// error\nfunc (theService queryService) QueryAuthor(authorName string, offset int, limit int) (Authors, error) {\n\tfmt.Println(\" \")\n\tfmt.Println(\"-- QueryAuthor --\")\n\n\tauthorName = url.QueryEscape(authorName)\n\tqueryUrl := BASE_AUTHOR_URL + authorName\n\tfmt.Println(\"looking for:\", queryUrl)\n\n\t// Make request to openlibrary\n\tbody, err := makeRequest(queryUrl)\n\tif err != nil {\n\t\tfmt.Println(\"Unable to make request to openlibrary for authors: \", err)\n\t\treturn Authors{}, err\n\t}\n\n\t// parse Authors response\n\topenlibraryAuthors := OpenLibraryAuthors{}\n\tjsonErr := json.Unmarshal(body, &openlibraryAuthors)\n\tif jsonErr != nil {\n\t\tfmt.Println(\"Unable to unmarshall response from openlibrary for author query:\", jsonErr)\n\t\treturn Authors{}, err\n\t}\n\n\t// data to return\n\tdatum := make([]Author, 0, 0)\n\n\t// Convert openlibrary response into our structure\n\tfor _, current := range openlibraryAuthors.Data {\n\t\t// Parse this author\n\n\t\t// Update subjects to not have a few entries we don't want\n\t\tnewSubjects := make([]string, 0, len(current.Subjects))\n\t\tfor _, subject := range current.Subjects {\n\t\t\tif (!strings.Contains(subject, \"Accessible book\")) &&\n\t\t\t\t(!strings.Contains(subject, \"Protected DAISY\")) {\n\t\t\t\t// add to subjects\n\t\t\t\tnewSubjects = append(newSubjects, subject)\n\t\t\t}\n\t\t}\n\n\t\t// images\n\t\tsmallImage := ROOT_AUTHOR_IMAGE + current.OlKey + \"-S.jpg\"\n\t\tmediumImage := ROOT_AUTHOR_IMAGE + current.OlKey + \"-M.jpg\"\n\t\tlargeImage := ROOT_AUTHOR_IMAGE + current.OlKey + \"-L.jpg\"\n\n\t\t// Create author to return\n\t\tnewAuthor := Author{\n\t\t\tBirthDate: current.BirthDate,\n\t\t\tName: current.Name,\n\t\t\tOlKey: current.OlKey,\n\t\t\tSubjects: newSubjects,\n\t\t\tImageSmall: smallImage,\n\t\t\tImageMedium: mediumImage,\n\t\t\tImageLarge: largeImage,\n\t\t}\n\t\t// Add to data\n\t\tdatum = append(datum, newAuthor)\n\t}\n\n\t////////////////////\n\t// Update the offset/limit\n\n\t// Get the total number of rows\n\trealNumberRows := len(openlibraryAuthors.Data)\n\n\t// fix offset\n\tif (offset > realNumberRows) || (offset < 0) {\n\t\toffset = 0\n\t}\n\n\t// fix limit\n\tif limit <= 0 {\n\t\tlimit = 20\n\t}\n\n\t// determine slice of datum to use\n\twhereToEnd := offset + limit\n\tif whereToEnd > realNumberRows {\n\t\twhereToEnd = realNumberRows\n\t}\n\n\tdatum = datum[offset:whereToEnd]\n\n\tauthorsToReturn := Authors{\n\t\tOffset: offset,\n\t\tLimit: len(datum),\n\t\tTotal: realNumberRows,\n\t\tData: datum,\n\t}\n\n\treturn 
authorsToReturn, nil\n}\n\n////////////////\n// Query Titles\n//\n// returns:\n// Titles\n// error\nfunc (theService queryService) QueryTitle(authorName string, title string, isbn string, offset int, limit int) (Titles, error) {\n\tfmt.Println(\" \")\n\tfmt.Println(\"-- QueryTitle --\")\n\n\t// base url for titles\n\tqueryUrl := BASE_TITLE_URL\n\n\t// string used in concating\n\tand := \"\"\n\n\tif len(title) > 0 {\n\t\ttitle = url.QueryEscape(title)\n\t\tqueryUrl += and + \"title=\" + title\n\t\tand = \"&\"\n\t}\n\tif len(isbn) > 0 {\n\t\tisbn = url.QueryEscape(isbn)\n\t\tqueryUrl += and + \"isbn=\" + isbn\n\t\tand = \"&\"\n\t}\n\tif len(authorName) > 0 {\n\t\tauthorName = url.QueryEscape(authorName)\n\t\tqueryUrl += and + \"author=\" + authorName\n\t\tand = \"&\"\n\t}\n\tfmt.Println(\"looking for:\", queryUrl)\n\n\t// Make request to openlibrary\n\tbody, err := makeRequest(queryUrl)\n\tif err != nil {\n\t\tfmt.Println(\"Unable to make request to openlibrary for titles: \", err)\n\t\treturn Titles{}, err\n\t}\n\n\t// parse Titles response\n\topenlibraryTitles := OpenLibraryTitles{}\n\tjsonErr := json.Unmarshal(body, &openlibraryTitles)\n\tif jsonErr != nil {\n\t\tfmt.Println(\"Unable to unmarshall response from openlibrary for title query:\", jsonErr)\n\t\treturn Titles{}, err\n\t}\n\n\t// data to return\n\tdatum := make([]Title, 0, 0)\n\n\t// Convert openlibrary response into our structure\n\tfor _, current := range openlibraryTitles.Data {\n\t\t// Parse this title\n\n\t\tfmt.Println(\"\")\n\t\tfmt.Println(\"Current: \", current)\n\n\t\t// Update subjects to not have a few entries we don't want\n\t\tnewSubjects := make([]string, 0, len(current.Subjects))\n\t\tfor _, subject := range current.Subjects {\n\t\t\tif (!strings.Contains(subject, \"Accessible book\")) &&\n\t\t\t\t(!strings.Contains(subject, \"In library\")) &&\n\t\t\t\t(!strings.Contains(subject, \"Protected DAISY\")) {\n\t\t\t\t// add to subjects\n\t\t\t\tnewSubjects = append(newSubjects, subject)\n\t\t\t}\n\t\t}\n\n\t\t// description\n\t\tdescription := getDescription(current.WorksKey)\n\n\t\t// work url\n\t\tworkUrl := ROOT_URL + current.WorksKey\n\n\t\t// author name\n\t\tvar authorName string\n\t\tif len(current.AuthorNames) > 0 {\n\t\t\tauthorName = current.AuthorNames[0]\n\t\t}\n\n\t\t// author key\n\t\tvar authorKey string\n\t\tif len(current.AuthorKeys) > 0 {\n\t\t\tauthorKey = current.AuthorKeys[0]\n\t\t}\n\n\t\t// images\n\t\tsmallImage := createCoverImage(current, \"-S.jpg\")\n\t\tmediumImage := createCoverImage(current, \"-M.jpg\")\n\t\tlargeImage := createCoverImage(current, \"-L.jpg\")\n\n\t\t// Create title to return\n\t\tnewTitle := Title{\n\t\t\t// These are derived\n\t\t\tAuthorKey: authorKey,\n\t\t\tAuthorName: authorName,\n\t\t\tDescription: description,\n\t\t\tOpenLibraryWorkUrl: workUrl,\n\t\t\tImageSmall: smallImage,\n\t\t\tImageMedium: mediumImage,\n\t\t\tImageLarge: largeImage,\n\n\t\t\t// These are passed through\n\t\t\tTitle: current.Title,\n\t\t\tFirstPublishedYear: current.FirstPublishYear,\n\t\t\tOpenLibraryKeys: current.EditionKeys,\n\t\t\tIsbns: current.Isbns,\n\t\t\tSubjects: current.Subjects,\n\t\t}\n\n\t\t// Add to data\n\t\tdatum = append(datum, newTitle)\n\t}\n\n\t// Sort datum by # of isbns\n\tsort.Slice(datum, func(i, j int) bool {\n\t\treturn len(datum[i].Isbns) > len(datum[j].Isbns)\n\t})\n\n\t////////////////////\n\t// Update the offset/limit\n\n\t// Get the total number of rows\n\trealNumberRows := len(openlibraryTitles.Data)\n\n\t// fix offset\n\tif (offset > realNumberRows) || 
(offset < 0) {\n\t\toffset = 0\n\t}\n\n\t// fix limit\n\tif limit <= 0 {\n\t\tlimit = 20\n\t}\n\n\t// determine slice of datum to use\n\twhereToEnd := offset + limit\n\tif whereToEnd > realNumberRows {\n\t\twhereToEnd = realNumberRows\n\t}\n\n\tdatum = datum[offset:whereToEnd]\n\n\ttitlesToReturn := Titles{\n\t\tOffset: offset,\n\t\tLimit: len(datum),\n\t\tTotal: realNumberRows,\n\t\tData: datum,\n\t}\n\n\treturn titlesToReturn, nil\n}\n\n//////////////////////////////////////////////////\n\n// Create image for a title/book\n//\n// params:\n// book: book to get cover for\n// size: size string\nfunc createCoverImage(book OpenLibraryTitle, size string) string {\n\tfmt.Println(\"\")\n\tfmt.Println(\"Create image for book: \", book.CoverImage)\n\n\t// Check the cover image value itself\n\tif book.CoverImage > 0 {\n\t\timageUrl := ROOT_COVER_IMAGE + \"id/\" + strconv.Itoa(book.CoverImage) + size\n\t\treturn imageUrl\n\t}\n\n\t// If that fails, try the first ISBN\n\tif len(book.Isbns) > 0 {\n\t\tfirstIsbn := book.Isbns[0]\n\t\timageUrl := ROOT_COVER_IMAGE + \"isbn/\" + firstIsbn + size\n\t\treturn imageUrl\n\t}\n\n\treturn \"\"\n}\n\n// Get description by querying openlibrary again\n//\n// params:\n// worksKey: openlibrary works key\nfunc getDescription(worksKey string) string {\n\tfmt.Println(\"Get description for works key: \", worksKey)\n\n\tqueryUrl := WORKS_BASE_URL + worksKey\n\n\tfmt.Println(\"get description via url: \", queryUrl)\n\n\t// Make request to openlibrary\n\tbody, err := makeRequest(queryUrl)\n\tif err != nil {\n\t\tfmt.Println(\"Unable to make request to openlibrary for getting description: \", err)\n\t\treturn \"\"\n\t}\n\n\t// parse response\n\tdescription := OpenLibraryDescription{}\n\tjsonErr := json.Unmarshal(body, &description)\n\tif jsonErr != nil {\n\t\tfmt.Println(\"Unable to unmarshall response from openlibrary for description query:\", jsonErr)\n\t\treturn \"\"\n\t}\n\n\treturn description.Description.Value\n}\n\n// Perform the boilerplate portion of making an http request\n//\n// param:\n// URL to query\n//\nfunc makeRequest(queryUrl string) ([]byte, error) {\n\t///////////////\n\t// make client\n\tsuperClient := http.Client{\n\t\tTimeout: time.Second * 2, // Maximum of 2 secs\n\t}\n\n\t// make request object\n\treq, err := http.NewRequest(http.MethodGet, queryUrl, nil)\n\tif err != nil {\n\t\tfmt.Println(\"Unable to make new request to openlibrary for url: \", queryUrl, \" error: \", err)\n\t\treturn nil, err\n\t}\n\n\t// set headers\n\treq.Header.Set(\"User-Agent\", \"query-service-client\")\n\treq.Header.Set(\"accept\", \"application/json\")\n\treq.Header.Set(\"content-type\", \"application/json\")\n\n\t// send request\n\tres, getErr := superClient.Do(req)\n\tif getErr != nil {\n\t\tfmt.Println(\"Unable to make send request to openlibrary for url: \", queryUrl, \" error: \", getErr)\n\t\treturn nil, err\n\t}\n\n\t// Check status code\n\tif !strings.Contains(res.Status, \"200\") {\n\t\tfmt.Println(\"Unable to connect to openlibrary for url: \", queryUrl, \" HTTP status: \", res.Status)\n\t\treturn nil, err\n\t}\n\n\t// parse body\n\tbody, readErr := ioutil.ReadAll(res.Body)\n\tif readErr != nil {\n\t\tfmt.Println(\"Unable to parse response from openlibrary: \", readErr)\n\t\treturn nil, err\n\t}\n\n\treturn body, nil\n\n}\n"
},
{
"alpha_fraction": 0.6916099786758423,
"alphanum_fraction": 0.6916099786758423,
"avg_line_length": 19.752941131591797,
"blob_id": "843f650908c337e60846b7ba2d2c01fdfc7e2572",
"content_id": "c7c0dd178712156620d16fdbd7c0890fc1023071",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 1764,
"license_type": "no_license",
"max_line_length": 63,
"num_lines": 85,
"path": "/images/author/src/github.com/hipposareevil/author/endpoints.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\n// Base for all responses\ntype errorer interface {\n\terror() error\n}\n\n// interface for holding data\ntype dataHolder interface {\n\t// Get the data\n\tgetData() interface{}\n}\n\n////////////////\n// Responses are passed to 'transport.encodeResponse'\n\n////////////////////\n/// Authors (all)\n// response for authors (vs. single author)\ntype authorsResponse struct {\n\tData Authors `json:\"all,omitempty\"`\n\tErr error `json:\"err,omitempty\"`\n}\n\nfunc (theResponse authorsResponse) error() error {\n\treturn theResponse.Err\n}\n\nfunc (theResponse authorsResponse) getData() interface{} {\n\treturn theResponse.Data\n}\n\n////////////////////\n/// AUTHOR (single)\n// response for author (single)\ntype authorResponse struct {\n\tData Author `json:\"all,omitempty\"`\n\tErr error `json:\"err,omitempty\"`\n}\n\nfunc (theResponse authorResponse) error() error {\n\treturn theResponse.Err\n}\n\nfunc (theResponse authorResponse) getData() interface{} {\n\treturn theResponse.Data\n}\n\n////////////////////\n/// DELETE AUTHOR (single)\n// response for author (single)\ntype deleteAuthorResponse struct {\n\tErr error `json:\"err,omitempty\"`\n}\n\nfunc (theResponse deleteAuthorResponse) error() error {\n\treturn theResponse.Err\n}\n\n////////////////////\n/// Create AUTHOR\n// response for create author\ntype createAuthorResponse struct {\n\tData Author `json:\"all,omitempty\"`\n\tErr error `json:\"err,omitempty\"`\n}\n\nfunc (theResponse createAuthorResponse) error() error {\n\treturn theResponse.Err\n}\n\nfunc (theResponse createAuthorResponse) getData() interface{} {\n\treturn theResponse.Data\n}\n\n////////////////////\n/// Update AUTHOR\n// response for update author\ntype updateAuthorResponse struct {\n\tErr error `json:\"err,omitempty\"`\n}\n\nfunc (theResponse updateAuthorResponse) error() error {\n\treturn theResponse.Err\n}\n"
},
{
"alpha_fraction": 0.6406391263008118,
"alphanum_fraction": 0.6885740160942078,
"avg_line_length": 25.11811065673828,
"blob_id": "188893618314194d1b03f0e90b21f7d17193fdbb",
"content_id": "184f6acb0c9c2f1690e7b3765b784f8bab862519",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "SQL",
"length_bytes": 3317,
"license_type": "no_license",
"max_line_length": 116,
"num_lines": 127,
"path": "/database/initial/books.sql",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "-- TAG\nDROP TABLE IF EXISTS `tag`;\n\n\nCREATE TABLE `tag` (\n `tag_id` int(11) NOT NULL AUTO_INCREMENT,\n `name` varchar(255) NOT NULL UNIQUE,\n PRIMARY KEY (`tag_id`)\n) ENGINE=InnoDB DEFAULT CHARSET=latin1;\n\nLOCK TABLES `tag` WRITE;\n\nINSERT INTO `tag` VALUES (1, \"sci-fi\");\nINSERT INTO `tag` VALUES (2, \"e-book\");\n\nUNLOCK TABLES;\n\n\n\n-- User Book\n-- Rating: 0 thumbs down, 1 thumbs up\n\nDROP TABLE IF EXISTS `userbook`;\n\nCREATE TABLE `userbook` (\n `user_book_id` int(11) NOT NULL AUTO_INCREMENT,\n `user_id` int(11) NOT NULL,\n `book_id` int(11) NOT NULL,\n `rating` tinyint(1) DEFAULT 0,\n `date_added` DATETIME DEFAULT NULL,\n `review` varchar(8000) DEFAULT '',\n PRIMARY KEY (`user_book_id`)\n) ENGINE=InnoDB DEFAULT CHARSET=latin1;\n\nALTER TABLE userbook ADD CONSTRAINT unique_book UNIQUE(user_id, book_id);\n\n\n-- TAG MAP\nDROP TABLE IF EXISTS `tagmapping`;\n \n\nCREATE TABLE `tagmapping` (\n `user_book_id` int(11) NOT NULL,\n `user_id` int(11) NOT NULL,\n `tag_id` int(11) NOT NULL\n) ENGINE=InnoDB DEFAULT CHARSET=latin1;\n\n\n-- USER\n\nDROP TABLE IF EXISTS `user`;\n\n\nCREATE TABLE `user` (\n `user_id` int(11) NOT NULL AUTO_INCREMENT,\n `name` varchar(255) NOT NULL UNIQUE,\n `user_group` varchar(255) DEFAULT '',\n `data` varchar(2048) DEFAULT '',\n `password` varchar(2048) DEFAULT '',\n PRIMARY KEY (`user_id`)\n) ENGINE=InnoDB DEFAULT CHARSET=latin1;\n\nALTER TABLE user ADD CONSTRAINT unique_user UNIQUE(name);\n\nLOCK TABLES `user` WRITE;\n\n-- Insert initial admin/admin user.\n-- This was encrypted via bcrypt: https://godoc.org/golang.org/x/crypto/bcrypt\n\nINSERT INTO `user` VALUES (1, 'admin', 'admin', '', '$2a$04$H8mgQszUXgk95cafRxfc5e1Yb1wGi8hbiysxtMSHNclcjNWmDqGsG');\n\nUNLOCK TABLES;\n\n\n\n--\n-- Table structure for table `author`\n--\n\n\nDROP TABLE IF EXISTS `author`;\n/*!40101 SET @saved_cs_client = @@character_set_client */;\n/*!40101 SET character_set_client = utf8 */;\nCREATE TABLE `author` (\n `author_id` int(11) NOT NULL AUTO_INCREMENT,\n `name` varchar(40) DEFAULT '' UNIQUE,\n `birth_date` varchar(50) DEFAULT '',\n `subjects` varchar(2000) DEFAULT '',\n `image_small` varchar(250) DEFAULT '',\n `image_medium` varchar(250) DEFAULT '',\n `image_large` varchar(250) DEFAULT '',\n `goodreads_url` varchar(500) DEFAULT '',\n `ol_key` varchar(100) DEFAULT '',\n PRIMARY KEY (`author_id`)\n) ENGINE=MyISAM AUTO_INCREMENT=1 DEFAULT CHARSET=latin1;\n\nALTER TABLE author ADD CONSTRAINT unique_author UNIQUE(name);\n\n\n/*!40101 SET character_set_client = @saved_cs_client */;\n\n\n \n\n--\n-- Table structure for table `book`\n--\n\nDROP TABLE IF EXISTS `book`;\n/*!40101 SET @saved_cs_client = @@character_set_client */;\n/*!40101 SET character_set_client = utf8 */;\nCREATE TABLE `book` (\n `book_id` int(11) NOT NULL AUTO_INCREMENT,\n `author_id` int(11) NOT NULL DEFAULT '0',\n `year` char(11) DEFAULT NULL,\n `title` char(100) DEFAULT NULL,\n `isbn` varchar(1200) DEFAULT NULL,\n `subjects` varchar(2000) DEFAULT NULL,\n `ol_works` char(100) DEFAULT NULL,\n `goodreads_url` varchar(500) DEFAULT NULL,\n `description` varchar(3000) DEFAULT NULL,\n `image_small` varchar(1000) DEFAULT NULL,\n `image_medium` varchar(1000) DEFAULT NULL,\n `image_large` varchar(1000) DEFAULT NULL,\n PRIMARY KEY (`book_id`)\n) ENGINE=MyISAM AUTO_INCREMENT=1 DEFAULT CHARSET=latin1;\n/*!40101 SET character_set_client = @saved_cs_client */;\n"
},
{
"alpha_fraction": 0.6755985617637634,
"alphanum_fraction": 0.6772112846374512,
"avg_line_length": 22.570175170898438,
"blob_id": "b7a190cea0cdb115571f7018ba9bca2e74cebcda",
"content_id": "bc26ecf016d6d6e88f0b5d382bee84597be19bc2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 8061,
"license_type": "no_license",
"max_line_length": 96,
"num_lines": 342,
"path": "/images/user/src/github.com/hipposareevil/user/transport.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\n// Transport module\n//\n// Contains:\n// - endpoint creation\n// - encode responses to client\n// - decode client requests\n// - structures used. e.g. userRequest, postUserRequest, etc\n\nimport (\n\t\"context\"\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"net/http\"\n\t\"strconv\"\n\t\"strings\"\n\n\t\"github.com/gorilla/mux\"\n\n\t\"github.com/go-kit/kit/endpoint\"\n)\n\n//////////////////////////////////////////////////////////\n//\n// Create endpoints\n\n// GET /user/\n// Make endpoint for getting users\nfunc makeGetUsersEndpoint(svc UserService) endpoint.Endpoint {\n\treturn func(ctx context.Context, request interface{}) (interface{}, error) {\n\t\t// convert request into a users specific request\n\t\treq := request.(getAllUsersRequest)\n\n\t\t// call actual service with data from the req\n\t\tusers, err := svc.GetUsers(req.Offset, req.Limit)\n\t\treturn usersResponse{\n\t\t\tData: users,\n\t\t\tErr: err,\n\t\t}, nil\n\t}\n}\n\n// GET /user/<user_id>\n// Make endpoint for getting single User\nfunc makeGetUserEndpoint(svc UserService) endpoint.Endpoint {\n\treturn func(ctx context.Context, request interface{}) (interface{}, error) {\n\t\t// convert request into a userRequest\n\t\treq := request.(getUserRequest)\n\n\t\t// call actual service with data from the req\n\t\tuser, err := svc.GetUser(req.UserId)\n\t\treturn userResponse{\n\t\t\tData: user,\n\t\t\tErr: err,\n\t\t}, nil\n\t}\n}\n\n// DELETE /user/<user_id>\n// Make endpoint for deleting single User\nfunc makeDeleteUserEndpoint(svc UserService) endpoint.Endpoint {\n\treturn func(ctx context.Context, request interface{}) (interface{}, error) {\n\t\t// convert request into a userRequest\n\t\treq := request.(deleteUserRequest)\n\n\t\t// call actual service with data from the req\n\t\terr := svc.DeleteUser(req.UserId)\n\t\treturn deleteUserResponse{\n\t\t\tErr: err,\n\t\t}, nil\n\t}\n}\n\n// POST /user/\n// Make endpoint for creating (via post) a user\nfunc makeCreateUserEndpoint(svc UserService) endpoint.Endpoint {\n\treturn func(ctx context.Context, request interface{}) (interface{}, error) {\n\t\t// convert request into a createUserRequest\n\t\treq := request.(createUserRequest)\n\n\t\t// call actual service with data from the req\n\t\tnewUser, err := svc.CreateUser(req.Name,\n\t\t\treq.UserGroup,\n\t\t\treq.Data,\n\t\t\treq.Password)\n\n\t\treturn createUserResponse{\n\t\t\tData: newUser,\n\t\t\tErr: err,\n\t\t}, nil\n\t}\n}\n\n// PUT /user/<user_id>\n// Make endpoint for updating (via PUT) a user\nfunc makeUpdateUserEndpoint(svc UserService) endpoint.Endpoint {\n\treturn func(ctx context.Context, request interface{}) (interface{}, error) {\n\t\t// convert request into a updateUserRequest\n\t\treq := request.(updateUserRequest)\n\n\t\t// call actual service with data from the req (putUserRequest)\n\t\terr := svc.UpdateUser(req.Id,\n\t\t\treq.Name,\n\t\t\treq.UserGroup,\n\t\t\treq.Data,\n\t\t\treq.Password)\n\n\t\treturn updateUserResponse{\n\t\t\tErr: err,\n\t\t}, nil\n\t}\n}\n\n//////////////////////////////////////////////////////////\n//\n// Decode\n\n// Create a getAllUsersRequest from the context and http.Request\n// /user/\n//\n// The getAllUsersRequest has 3 variables:\n// - Offset Offset into the query\n// - Limit Number of values to return\nfunc decodeGetAllUsersRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\trealOffset, realLimit := parseOffsetAndLimit(r)\n\n\t// Make request for all users\n\tvar request getAllUsersRequest\n\trequest = getAllUsersRequest{\n\t\tOffset: 
realOffset,\n\t\tLimit: realLimit,\n\t}\n\n\treturn request, nil\n}\n\n// Create getUserRequest\n// /user/id\n//\n// The userRequest has 2 variables:\n// - UserId ID of user taken from the path\nfunc decodeGetUserRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\tuserId, err := parseUserId(r)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Make request for single user\n\tvar request getUserRequest\n\trequest = getUserRequest{\n\t\tUserId: userId,\n\t}\n\n\treturn request, nil\n}\n\n// Create deleteUserRequest\n// DELETE /user/id\n//\n// The (delete) userRequest has 2 variables:\n// - UserId ID of user taken from the path\nfunc decodeDeleteUserRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\tuserId, err := parseUserId(r)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Make request to delete user\n\tvar request deleteUserRequest\n\trequest = deleteUserRequest{\n\t\tUserId: userId,\n\t}\n\n\treturn request, nil\n}\n\n// Create createUserRequest\n// POST /user\nfunc decodeCreateUserRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\t///////////////////\n\t// Parse body\n\tvar newUser createUserRequest\n\tif err := json.NewDecoder(r.Body).Decode(&newUser); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn newUser, nil\n}\n\n// Create updateUserRequest\n// PUT /user/id\nfunc decodeUpdateUserRequest(_ context.Context, r *http.Request) (interface{}, error) {\n\tuserId, err := parseUserId(r)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t///////////////////\n\t// Parse body\n\tvar updateUser updateUserRequest\n\tif err := json.NewDecoder(r.Body).Decode(&updateUser); err != nil {\n\t\treturn nil, err\n\t}\n\n\tupdateUser.Id = userId\n\n\treturn updateUser, nil\n}\n\n// Decode the common parts of a request:\n// * offset\n// * limit\n//\n// Instead of erroring out, it will return defaults\n//\n// Returns the two values in order: offset & limit\nfunc parseOffsetAndLimit(r *http.Request) (int, int) {\n\t///////////////////\n\t// Parse parameters\n\tr.ParseForm()\n\tvalues := r.Form\n\n\t// Get values from the form, where 'offset' & 'limit' are parameters\n\tvar realOffset int\n\tvar realLimit int\n\n\t// Offset, use a default of 0\n\toffset := values.Get(\"offset\")\n\tif offset != \"\" {\n\t\trealOffset, _ = strconv.Atoi(offset)\n\t} else {\n\t\trealOffset = 0\n\t}\n\n\t// Limit, set a default if it doesn't exist\n\tlimit := values.Get(\"limit\")\n\tif limit != \"\" {\n\t\trealLimit, _ = strconv.Atoi(limit)\n\t} else {\n\t\t// default to get 20\n\t\trealLimit = 20\n\t}\n\n\treturn realOffset, realLimit\n}\n\n// Decode the bearer string from the request\n//\n// Returns the bearer id without \"Bearer \"\nfunc parseBearer(r *http.Request) string {\n\tvar realBearer string\n\tbearer := r.Header.Get(\"authorization\")\n\n\t// Strip the 'Bearer ' from header\n\tif strings.HasPrefix(bearer, \"Bearer \") {\n\t\trealBearer = strings.Replace(bearer, \"Bearer \", \"\", 1)\n\t}\n\n\treturn realBearer\n}\n\n// Decode the 'user_id' from the request.\n//\n//// Returns the user id\nfunc parseUserId(r *http.Request) (int, error) {\n\t// Demux the gorilla parsing\n\tvars := mux.Vars(r)\n\t// 'user_id' is set in the gorilla handling in main.go\n\tid, ok := vars[\"user_id\"]\n\tif !ok {\n\t\treturn 0, ErrBadRouting\n\t}\n\n\tvar userId int\n\tif id != \"\" {\n\t\tuserId, _ = strconv.Atoi(id)\n\t}\n\n\treturn userId, nil\n}\n\n//////////////////////////////////////////////////////////\n//\n// Encode responses to client\n\n// The response can/should be 
of type errorer and thus can be cast to check if there is an error\nfunc encodeResponse(ctx context.Context, w http.ResponseWriter, response interface{}) error {\n\t// Set JSON type\n\tw.Header().Set(\"Content-Type\", \"application/json; charset=utf-8\")\n\n\t// Check error\n\tif e, ok := response.(errorer); ok {\n\t\t// This is an errorer class, now check for error\n\t\tif err := e.error(); err != nil {\n\t\t\tencodeError(ctx, e.error(), w)\n\t\t\treturn nil\n\t\t}\n\t}\n\n\t// cast to dataHolder to get Data, otherwise just encode the response\n\tif holder, ok := response.(dataHolder); ok {\n\t\treturn json.NewEncoder(w).Encode(holder.getData())\n\t} else {\n\t\treturn json.NewEncoder(w).Encode(response)\n\t}\n}\n\n// Write the incoming err into the response writer\nfunc encodeError(_ context.Context, err error, w http.ResponseWriter) {\n\tif err == nil {\n\t\tpanic(\"encodeError with nil error\")\n\t}\n\n\tw.Header().Set(\"Content-Type\", \"application/json; charset=utf-8\")\n\n\t// Write actual error code\n\tcode := codeFrom(err)\n\tw.WriteHeader(code)\n\n\tfmt.Println(\"Sending back error '\" + err.Error() + \"'\")\n\n\t// write out the error message\n\tjson.NewEncoder(w).Encode(map[string]interface{}{\n\t\t\"code\": code,\n\t\t\"message\": err.Error(),\n\t})\n}\n\n// Determine the HTTP error code from the incoming error 'err'\nfunc codeFrom(err error) int {\n\tswitch err {\n\tcase ErrNotFound:\n\t\treturn http.StatusNotFound\n\tcase ErrAlreadyExists:\n\t\treturn http.StatusConflict\n\tcase ErrUnauthorized:\n\t\treturn http.StatusUnauthorized\n\tdefault:\n\t\treturn http.StatusInternalServerError\n\t}\n}\n"
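These decode and encode functions only take effect once they are bound to routes. Below is a minimal sketch of that wiring with go-kit's HTTP transport and gorilla/mux, assuming the same package as transport.go; the handler constructor name and exact route layout are illustrative, since this entry does not show the repo's actual main.go. Note the `{user_id}` route variable matches what parseUserId reads out of mux.Vars.

```go
package main

import (
	"net/http"

	httptransport "github.com/go-kit/kit/transport/http"
	"github.com/gorilla/mux"
)

// makeUserHandler (hypothetical name) pairs each endpoint with its decoder
// and the shared encodeResponse, one httptransport.Server per route.
func makeUserHandler(svc UserService) http.Handler {
	r := mux.NewRouter()

	r.Methods("GET").Path("/user").Handler(httptransport.NewServer(
		makeGetUsersEndpoint(svc),
		decodeGetAllUsersRequest,
		encodeResponse,
	))

	r.Methods("GET").Path("/user/{user_id}").Handler(httptransport.NewServer(
		makeGetUserEndpoint(svc),
		decodeGetUserRequest,
		encodeResponse,
	))

	r.Methods("DELETE").Path("/user/{user_id}").Handler(httptransport.NewServer(
		makeDeleteUserEndpoint(svc),
		decodeDeleteUserRequest,
		encodeResponse,
	))

	return r
}
```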
},
{
"alpha_fraction": 0.6934673190116882,
"alphanum_fraction": 0.7145728468894958,
"avg_line_length": 37.269229888916016,
"blob_id": "cdbb10d420cb7c1c034a07081f1a40e8b9a84db1",
"content_id": "f1f2650ce38a68efd7e287cafdd95cacf6ab89e8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Dockerfile",
"length_bytes": 995,
"license_type": "no_license",
"max_line_length": 153,
"num_lines": 26,
"path": "/images.java/user_book/Dockerfile",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "FROM openjdk:8-jdk-alpine\n\n# Args passed in via 'docker build'\n# Used by the LABELs\nARG BUILD_TIME\nARG VERSION\n\n# setup env\nRUN alias ll='ls -la' && \\\n apk -U add --no-cache bash curl && \\\n# put jar and wait script in /opt/docker\n mkdir -p /opt/docker/\n\nCOPY target/user_book-1.0.jar /opt/docker/\nCOPY user_book.cfg.yml /opt/docker/\nCOPY waitforit.sh /opt/docker/\n\n# Putting LABEL last so we can re-use the preceding caching layers\nLABEL org.label-schema.build-date=\"$BUILD_TIME\" \\\n org.label-schema.vendor=\"github.com/hipposareevil\" \\\n org.label-schema.version=\"$VERSION\" \\\n org.label-schema.description=\"Microservice for managing users books in the project. Each user book is a connection between a User and a Book\" \\\n org.label-schema.name=\"books.user_book\" \n\n# wait for the DB at url books_db:3306 for 240 seconds\nENTRYPOINT [\"/opt/docker/waitforit.sh\", \"books_db:3306\", \"240\", \"java -Xmx64m -jar /opt/docker/user_book-1.0.jar server /opt/docker/user_book.cfg.yml\"]\n"
},
{
"alpha_fraction": 0.6722893714904785,
"alphanum_fraction": 0.67289799451828,
"avg_line_length": 24.903152465820312,
"blob_id": "8a417cb6e9095160e71ca71145cfb548871c96a0",
"content_id": "b27caed4bd542e7bad6f68f93eb05ee40cbb1051",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 11501,
"license_type": "no_license",
"max_line_length": 158,
"num_lines": 444,
"path": "/images/book/src/github.com/hipposareevil/book/service.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\n// Book service\n\nimport (\n\t\"database/sql\"\n\t\"encoding/json\"\n\t\"errors\"\n\t\"fmt\"\n\t_ \"github.com/go-sql-driver/mysql\"\n\t\"strings\"\n)\n\n// namespace for author cache, this will be indexed by author id\nconst AUTHOR_CACHE = \"author.name\"\n\n// namespace for this book cache, this will be indexed by book id\nconst BOOK_CACHE = \"book.info\"\n\n// Service interface exposed to clients\ntype BookService interface {\n\t// GetBooks: offset, limit, title, authorIds, titleIds\n\t// first param: bearer\n\tGetBooks(string, int, int, string, []int, []int, string) (Books, error)\n\n\t// GetBook: bearer, id\n\tGetBook(string, int) (Book, error)\n\n\t// DeleteBook: id\n\tDeleteBook(int) error\n\n\t// CreateBook (see createBookRequest for params)\n\t// first param: bearer\n\tCreateBook(string, int, string, int, string, string, string, string, []string, string, []string, string) (Book, error)\n\n\t// UpdateBook\n\t// Same as CreateBook but the first param is the ID of book to update\n\t// first param: bearer\n\tUpdateBook(string, int, int, string, int, string, string, string, string, []string, string, []string, string) (Book, error)\n}\n\n////////////////////////\n// Actual service\n// This takes the following:\n// - mysqlDb DB for MySQL\ntype bookService struct {\n\tmysqlDb *sql.DB\n\tcache CacheLayer\n}\n\n//////////\n// METHODS on bookService\n\n////////////////\n// Get Book\n//\n// params:\n// bearer: Bookization bearer\n// bookId : ID of book to get\n//\n// returns:\n// book\n// error\nfunc (theService bookService) GetBook(bearer string, bookId int) (Book, error) {\n\tfmt.Println(\"[GetBook]\")\n\n\t////////////////////\n\t// Get data from mysql\n\t// mysql\n\tif err := theService.mysqlDb.Ping(); err != nil {\n\t\ttheService.mysqlDb.Close()\n\t\treturn Book{}, errors.New(\"unable to ping mysql\")\n\t}\n\n\t// Make query\n\tvar book Book\n\tvar subjectAsCsv string\n\tvar isbnsAsCsv string\n\n\t// Scan the DB info into 'book' composite variable\n\terr := theService.mysqlDb.\n\t\tQueryRow(\"SELECT \"+\n\t\t\t\"book_id, author_id, year, title, \"+\n\t\t\t\"isbn, subjects, ol_works, description, \"+\n\t\t\t\"image_small, image_medium, image_large, goodreads_url \"+\n\t\t\t\"FROM book WHERE book_id = ?\", bookId).\n\t\tScan(&book.Id, &book.AuthorId, &book.FirstPublishedYear, &book.Title,\n\t\t\t&isbnsAsCsv, &subjectAsCsv, &book.OpenlibraryWorkUrl, &book.Description,\n\t\t\t&book.ImageSmall, &book.ImageMedium, &book.ImageLarge, &book.GoodReadsUrl)\n\n\tswitch {\n\tcase err == sql.ErrNoRows:\n\t\treturn Book{}, ErrNotFound\n\tcase err != nil:\n\t\tfmt.Println(\"[GetBook] Got error from select: \", err)\n\t\treturn Book{}, ErrServerError\n\t}\n\n\t// Get the author name\n\tif len(bearer) > 0 {\n\t\tbook.AuthorName = getAuthorNameById(theService.cache, bearer, book.AuthorId)\n\t}\n\n\t// Convert subjects from CSV to string array\n\tbook.Subjects = splitCsvStringToArray(subjectAsCsv)\n\n\t// Convert isbns from CSV to string array\n\tbook.Isbns = splitCsvStringToArray(isbnsAsCsv)\n\n\t// Save to cache\n\tbookAsBytes, err := json.Marshal(book)\n\tif err == nil {\n\t\tgo theService.cache.SetBytes(BOOK_CACHE, bookId, bookAsBytes)\n\t} else {\n\t\tfmt.Println(\"[GetBook] Unable to save book to cache:\", err)\n\t}\n\n\treturn book, nil\n}\n\n////////////////\n// Get books\n//\n// returns:\n// books\n// error\nfunc (theService bookService) GetBooks(bearer string, offset int, limit int, title string, authorIds []int, bookIds []int, authorName string) (Books, error) 
{\n\t////////////////////\n\t// Get data from mysql\n\t// mysql\n\tif err := theService.mysqlDb.Ping(); err != nil {\n\t\ttheService.mysqlDb.Close()\n\t\treturn Books{}, errors.New(\"unable to ping mysql\")\n\t}\n\n\t// Get total number of rows\n\tvar totalNumberOfRows int\n\t_ = theService.mysqlDb.QueryRow(\"SELECT COUNT(*) from book\").Scan(&totalNumberOfRows)\n\n\tif limit > totalNumberOfRows {\n\t\tlimit = totalNumberOfRows\n\t}\n\n\t// Make select string\n\tselectString := \"SELECT \" +\n\t\t\"book_id, author_id, year, title, \" +\n\t\t\"isbn, subjects, ol_works, description, \" +\n\t\t\"image_small, image_medium, image_large, goodreads_url \" +\n\t\t\"FROM book \"\n\n\tfmt.Println(\"[GetBooks]\")\n\n\t// Update query according to which other queryParams come in (title, authorIds, bookIds)\n\tupdated := false\n\tvar appendedString string\n\n\t// Title\n\tif len(title) > 0 {\n\t\tupdated = true\n\t\tappendedString = \"WHERE title LIKE '%\" + title + \"%' \"\n\t}\n\n\t// Author name\n\t// convert into authorIds\n\tif len(authorName) > 0 {\n\t\tids := getAuthorIdsByName(bearer, authorName)\n\t\tauthorIds = append(authorIds, ids...)\n\t}\n\n\t// Author IDs\n\tif len(authorIds) > 0 {\n\t\tvar prependValue string\n\n\t\tif updated {\n\t\t\tprependValue = \" OR\"\n\t\t} else {\n\t\t\tprependValue = \" WHERE\"\n\t\t}\n\t\tauthorIdsAsCsv := convertIntArrayToCsv(authorIds)\n\t\tappendedString += prependValue + \" author_id in (\" + authorIdsAsCsv + \")\"\n\t\tupdated = true\n\t}\n\t// Book IDs\n\tif len(bookIds) > 0 {\n\t\tvar prependValue string\n\n\t\tif updated {\n\t\t\tprependValue = \" OR\"\n\t\t} else {\n\t\t\tprependValue = \" WHERE\"\n\t\t}\n\t\tbookIdsAsCsv := convertIntArrayToCsv(bookIds)\n\t\tappendedString += prependValue + \" book_id in (\" + bookIdsAsCsv + \")\"\n\n\t\tupdated = true\n\t}\n\n\t// Re get total # of rows for the return value\n\tcountQuery := \"SELECT COUNT(*) FROM book \" + appendedString\n\t_ = theService.mysqlDb.QueryRow(countQuery).Scan(&totalNumberOfRows)\n\n\t// real select string\n\tselectString = selectString + appendedString\n\n\t// Make query\n\tresults, err := theService.mysqlDb.\n\t\tQuery(selectString+\n\t\t\t\"LIMIT ?,?\", offset, limit)\n\n\tif err != nil {\n\t\tfmt.Println(\"[GetBooks] Got error from mysql: \" + err.Error())\n\t\treturn Books{}, errors.New(\"unable to create query in mysql\")\n\t}\n\n\t// slice of Book entities\n\tdatum := make([]Book, 0, 0)\n\n\t// Parse results\n\tfor results.Next() {\n\t\tvar book Book\n\n\t\tvar subjectAsCsv string\n\t\tvar isbnsAsCsv string\n\n\t\t// For each row, scan the result into our book composite object:\n\t\terr = results.\n\t\t\tScan(&book.Id, &book.AuthorId, &book.FirstPublishedYear, &book.Title,\n\t\t\t\t&isbnsAsCsv, &subjectAsCsv, &book.OpenlibraryWorkUrl, &book.Description,\n\t\t\t\t&book.ImageSmall, &book.ImageMedium, &book.ImageLarge, &book.GoodReadsUrl)\n\n\t\tif err != nil {\n\t\t\tfmt.Println(\"[GetBooks] Got error from mysql when getting all books: \" + err.Error())\n\t\t\treturn Books{}, errors.New(\"Unable to scan mysql for all books.\")\n\t\t}\n\n\t\t// Get the author name\n\t\tbook.AuthorName = getAuthorNameById(theService.cache, bearer, book.AuthorId)\n\n\t\t// Convert subjects from CSV to string array\n\t\tbook.Subjects = splitCsvStringToArray(subjectAsCsv)\n\n\t\t// Convert isbns from CSV to string array\n\t\tbook.Isbns = splitCsvStringToArray(isbnsAsCsv)\n\n\t\tdatum = append(datum, book)\n\n\t\t// Save to cache\n\t\tbookAsBytes, err := json.Marshal(book)\n\t\tif err == nil {\n\t\t\tgo 
theService.cache.SetBytes(BOOK_CACHE, book.Id, bookAsBytes)\n\t\t} else {\n\t\t\tfmt.Println(\"[GetBooks] Unable to save book to cache:\", err)\n\t\t}\n\t}\n\n\t// reset the limit (number of things being returned)\n\tlimit = len(datum)\n\n\t// Create Books to return\n\treturnValue := Books{\n\t\tOffset: offset,\n\t\tLimit: limit,\n\t\tTotal: totalNumberOfRows,\n\t\tData: datum,\n\t}\n\n\treturn returnValue, nil\n}\n\n////////////////\n// Delete book\n//\n// params:\n// bookId : ID of book to delete\n//\n// returns:\n// error\nfunc (theService bookService) DeleteBook(bookId int) error {\n\tfmt.Println(\"[DeleteBook]\")\n\n\t////////////////////\n\t// Get data from mysql\n\t// mysql\n\tif err := theService.mysqlDb.Ping(); err != nil {\n\t\ttheService.mysqlDb.Close()\n\t\treturn errors.New(\"unable to ping mysql\")\n\t}\n\n\t// Verify the book exists, if not, throw ErrNotFound\n\t// first param is empty for the bearer as we don't need to get the extra info\n\t_, getErr := theService.GetBook(\"\", bookId)\n\tif getErr != nil {\n\t\treturn getErr\n\t}\n\n\t// Make DELETE query\n\t_, err := theService.mysqlDb.Exec(\"DELETE FROM book WHERE book_id = ?\", bookId)\n\n\t// remove from cache\n\ttheService.cache.Clear(BOOK_CACHE, bookId)\n\n\treturn err\n}\n\n////////////////\n// CreateBook\n//\n// returns:\n// book\n// error\nfunc (theService bookService) CreateBook(\n\tbearer string,\n\tauthorId int,\n\tdescription string,\n\tfirstPublishedYear int,\n\tgoodReadsUrl string,\n\timageLarge string,\n\timageMedium string,\n\timageSmall string,\n\tisbns []string,\n\topenlibraryWorkUrl string,\n\tsubjects []string,\n\ttitle string) (Book, error) {\n\n\tfmt.Println(\"[CreateBook]\")\n\n\t////////////////////\n\t// verify mysql\n\t// mysql\n\tif err := theService.mysqlDb.Ping(); err != nil {\n\t\ttheService.mysqlDb.Close()\n\t\treturn Book{}, errors.New(\"unable to ping mysql\")\n\t}\n\n\t// Convert []string to string as csv for database\n\tsubjectsAsCsv := strings.Join(subjects[:], \",\")\n\tisbnsAsCsv := strings.Join(isbns[:], \",\")\n\n\t// Make prepared insert statement\n\tstmt, err := theService.mysqlDb.\n\t\tPrepare(\"INSERT INTO book SET \" +\n\t\t\t\"author_id=?, year=?, title=?, isbn=?, subjects=?, \" +\n\t\t\t\"ol_works=?, goodreads_url=?, description=?, \" +\n\t\t\t\"image_small=?, image_medium=?, image_large=?\")\n\n\tdefer stmt.Close()\n\tif err != nil {\n\t\tfmt.Println(\"[CreateBook] Error preparing DB: \", err)\n\t\treturn Book{}, errors.New(\"Unable to prepare a DB statement: \")\n\t}\n\n\tres, err := stmt.Exec(authorId, firstPublishedYear, title, isbnsAsCsv, subjectsAsCsv,\n\t\topenlibraryWorkUrl, goodReadsUrl, description,\n\t\timageSmall, imageMedium, imageLarge)\n\n\tif err != nil {\n\t\tfmt.Println(\"[CreateBook] Error inserting into DB: \", err)\n\t\tif strings.Contains(err.Error(), \"Duplicate entry \") {\n\t\t\treturn Book{}, ErrAlreadyExists\n\t\t} else {\n\t\t\treturn Book{}, errors.New(\"Unable to run INSERT against DB: \")\n\t\t}\n\t}\n\n\t// get the id\n\tid, _ := res.LastInsertId()\n\n\t// get the book back\n\tbookToReturn, err := theService.GetBook(bearer, int(id))\n\n\treturn bookToReturn, nil\n}\n\n////////////////\n// UpdateBook\n//\n// returns:\n// error\nfunc (theService bookService) UpdateBook(\n\tbearer string,\n\tbookId int,\n\tauthorId int,\n\tdescription string,\n\tfirstPublishedYear int,\n\tgoodReadsUrl string,\n\timageLarge string,\n\timageMedium string,\n\timageSmall string,\n\tisbns []string,\n\topenlibraryWorkUrl string,\n\tsubjects []string,\n\ttitle string) 
(Book, error) {\n\n\tfmt.Println(\"[UpdateBook]\")\n\n\t////////////////////\n\t// Get data from mysql\n\t// mysql\n\tif err := theService.mysqlDb.Ping(); err != nil {\n\t\ttheService.mysqlDb.Close()\n\t\treturn Book{}, errors.New(\"unable to ping mysql\")\n\t}\n\n\t// Make query\n\tstmt, err := theService.mysqlDb.\n\t\tPrepare(\"UPDATE book SET \" +\n\t\t\t\"author_id=COALESCE(NULLIF(?,''),author_id), \" +\n\t\t\t\"year=COALESCE(NULLIF(?,''),year), \" +\n\t\t\t\"title=COALESCE(NULLIF(?,''),title), \" +\n\t\t\t\"isbn=COALESCE(NULLIF(?,''),isbn), \" +\n\t\t\t\"subjects=COALESCE(NULLIF(?,''),subjects), \" +\n\t\t\t\"ol_works=COALESCE(NULLIF(?,''),ol_works), \" +\n\t\t\t\"goodreads_url=COALESCE(NULLIF(?,''),goodreads_url), \" +\n\t\t\t\"description=COALESCE(NULLIF(?,''),description), \" +\n\t\t\t\"image_small=COALESCE(NULLIF(?,''),image_small), \" +\n\t\t\t\"image_medium=COALESCE(NULLIF(?,''),image_medium), \" +\n\t\t\t\"image_large=COALESCE(NULLIF(?,''),image_large) \" +\n\t\t\t\"WHERE book_id = ?\")\n\tdefer stmt.Close()\n\tif err != nil {\n\t\tfmt.Println(\"[UpdateBook] Error preparing DB: \", err)\n\t\treturn Book{}, errors.New(\"Unable to prepare a DB statement: \")\n\t}\n\n\t// Convert []string to string as csv for database\n\tsubjectsAsCsv := strings.Join(subjects[:], \",\")\n\tisbnsAsCsv := strings.Join(isbns[:], \",\")\n\n\t_, err = stmt.Exec(authorId, firstPublishedYear, title,\n\t\tisbnsAsCsv, subjectsAsCsv, openlibraryWorkUrl, goodReadsUrl, description,\n\t\timageSmall, imageMedium, imageLarge, bookId)\n\n\tif err != nil {\n\t\tfmt.Println(\"[UpdateBook] Error updatingDB for book: \", err)\n\t\treturn Book{}, errors.New(\"Unable to run update against DB for book: \")\n\t}\n\n\t// This next call will save the book into the cache and overwrite existing data\n\n\t// get the book back\n\tbookToReturn, err := theService.GetBook(bearer, bookId)\n\n\treturn bookToReturn, err\n}\n"
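One caveat in GetBooks above: the title filter is spliced into the SQL by string concatenation (WHERE title LIKE '%...%'), so a title containing a quote can break the query or inject into it. A hedged sketch of the same filter using a bound placeholder instead; the column names come from the SELECT above, but the function itself is illustrative:

```go
package main

import "database/sql"

// queryBooksByTitle passes the title as a bound parameter, letting the
// driver do the escaping. The caller is responsible for closing the rows.
func queryBooksByTitle(db *sql.DB, title string, offset, limit int) (*sql.Rows, error) {
	return db.Query(
		"SELECT book_id, author_id, year, title FROM book "+
			"WHERE title LIKE ? "+
			"LIMIT ?,?",
		"%"+title+"%", offset, limit)
}
```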
},
{
"alpha_fraction": 0.7541974186897278,
"alphanum_fraction": 0.7595701813697815,
"avg_line_length": 52.17856979370117,
"blob_id": "27b2b8f036e87721f4c60fc1b5daf7ea22750ddf",
"content_id": "01a7bbc06f56267375ec26432e732fcca40c4109",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1489,
"license_type": "no_license",
"max_line_length": 324,
"num_lines": 28,
"path": "/images/user_book/README.md",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "# User Book Microservice\n\n## Introduction\n\n**/user_book** is a microservice for creating, listing, updating and deleting *user books* from the database. Each *user book* is a link between a *User* and a *Book*.\n\nFor example, user Bob has two *user books* in his *to read* list. He would create a *user book* for each of those books and add the tag *to read* to each book\n\n## Supported calls\nThe list of supported calls and their documentation are available via the swagger endpoint. This runs on localhost:8080/swagger/ when the application is up.\n\n\n## Authorization\nIt is necessary to authorize all REST calls to this endpoint. This is done by obtaining an authorization token from the */authorize* endpoint and adding it to the HTTP headees with the key *AUTHORIZATION*. See [/authorize](https://github.com/hipposareevil/books/blob/master/images/authorize/README.md) for more information.\n\n## Go-kit Application\nThis uses go-kit for the framework and dep for the management of the dependencies (kindof like maven). A *vendor* directory will be created by dep in the *src/github.com/hipposareevil* sub-directory.\n\n\n## Docker \nThe Docker container will expose port 8080 to other containers on the *booknet* Docker network.\n\n## Libraries used\n\n* [go](https://golang.org/)\n* [go-kit](https://github.com/go-kit/kit) - microservice framework.\n* [dep](https://github.com/golang/dep) - depdendency management tool.\n* [bcrypt](https://godoc.org/golang.org/x/crypto/bcrypt) - encryption library\n"
},
{
"alpha_fraction": 0.6589791178703308,
"alphanum_fraction": 0.6632841229438782,
"avg_line_length": 21.74125862121582,
"blob_id": "95cc503d11e34a3f1a554b7606a7aa5e42f90ba2",
"content_id": "4a3a6c6574cdb636f66e65b67ad5967fd6feccb0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 3252,
"license_type": "no_license",
"max_line_length": 84,
"num_lines": 143,
"path": "/images/book/src/github.com/hipposareevil/book/other_services.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\n///////////////////\n// Set of functions to make calls to other services\n\nimport (\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"io/ioutil\"\n\t\"net/http\"\n\t\"net/url\"\n\t\"strconv\"\n\t\"strings\"\n\t\"time\"\n)\n\n// Struct returned by /author service\ntype Author struct {\n\tName string `json:\"name\"`\n\tAuthorId int `json:\"id\"`\n}\n\ntype Authors struct {\n\tOffset int `json:\"offset\"`\n\tLimit int `json:\"limit\"`\n\tTotal int `json:\"total\"`\n\tData []Author `json:\"data\"`\n}\n\n////////////\n// Query the /author endpoint for author information by name\n// will return array of IDs\n//\nfunc getAuthorIdsByName(bearer string, authorName string) []int {\n\tvar ids []int\n\n\t// Make request to other service\n\tauthorName = url.QueryEscape(authorName)\n\n\tfullUrl := \"http://author:8080/author?name=\" + authorName\n\tbody, err := makeRequest(bearer, fullUrl)\n\tif err != nil {\n\t\tfmt.Println(\"Unable to make request to /author: \", err)\n\t\treturn ids\n\t}\n\n\t// get author info\n\tauthors := Authors{}\n\tjsonErr := json.Unmarshal(body, &authors)\n\tif jsonErr != nil {\n\t\tfmt.Println(\"Unable to unmarshall response from /author\")\n\t\treturn ids\n\t}\n\n\t// get each author\n\tfor _, current := range authors.Data {\n\t\tids = append(ids, current.AuthorId)\n\t}\n\n\treturn ids\n}\n\n////////////\n// Query the /author endpoint for author name via ID\n//\nfunc getAuthorNameById(cache CacheLayer, bearer string, authorId int) string {\n\t// Check cache\n\tauthorName := cache.Get(AUTHOR_CACHE, authorId)\n\tif len(authorName) > 0 {\n\t\tfmt.Println(\"Got Author name from cache.\")\n\t\treturn authorName\n\t}\n\n\t// no cache\n\n\t// Make request to other service\n\tfullUrl := \"http://author:8080/author/\" + strconv.Itoa(authorId)\n\tbody, err := makeRequest(bearer, fullUrl)\n\tif err != nil {\n\t\tfmt.Println(\"Unable to make request to /author: \", err)\n\t\treturn \"\"\n\t}\n\n\t// get author info\n\tauthorBean := Author{}\n\tjsonErr := json.Unmarshal(body, &authorBean)\n\tif jsonErr != nil {\n\t\tfmt.Println(\"Unable to unmarshall response from /author\")\n\t\treturn \"\"\n\t}\n\n\treturn authorBean.Name\n}\n\n// Perform the boilerplate portion of making an http request\n//\n// param:\n// bearer\n// URL to query\n//\nfunc makeRequest(bearer string, queryUrl string) ([]byte, error) {\n\t///////////////\n\t// make client\n\tsuperClient := http.Client{\n\t\tTimeout: time.Second * 2, // Maximum of 2 secs\n\t}\n\n\t// make request object\n\treq, err := http.NewRequest(http.MethodGet, queryUrl, nil)\n\tif err != nil {\n\t\tfmt.Println(\"Unable to make new request to url: \", queryUrl, \" error: \", err)\n\t\treturn nil, err\n\t}\n\n\t// set headers\n\treq.Header.Set(\"User-Agent\", \"review-service-client\")\n\treq.Header.Set(\"accept\", \"application/json\")\n\treq.Header.Set(\"content-type\", \"application/json\")\n\treq.Header.Set(\"authorization\", \"Bearer \"+bearer)\n\n\t// send request\n\tres, getErr := superClient.Do(req)\n\tif getErr != nil {\n\t\tfmt.Println(\"Unable to make send request to url: \", queryUrl, \" error: \", getErr)\n\t\treturn nil, err\n\t}\n\n\t// Check status code\n\tif !strings.Contains(res.Status, \"200\") {\n\t\tfmt.Println(\"Unable to connect to url: \", queryUrl, \" HTTP status: \", res.Status)\n\t\treturn nil, err\n\t}\n\n\t// parse body\n\tbody, readErr := ioutil.ReadAll(res.Body)\n\tif readErr != nil {\n\t\tfmt.Println(\"Unable to parse response from url: \", queryUrl, \" :\", readErr)\n\t\treturn nil, err\n\t}\n\n\treturn 
body, nil\n\n}\n"
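makeRequest can be exercised without a live /author container by pointing it at an httptest stub. A sketch, assuming it runs in the same package as the file above; the JSON payload mirrors the Author struct:

```go
package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"
)

func main() {
	// stand in for the real author:8080 service
	stub := httptest.NewServer(http.HandlerFunc(
		func(w http.ResponseWriter, r *http.Request) {
			w.Header().Set("Content-Type", "application/json")
			fmt.Fprint(w, `{"name":"Ursula K. Le Guin","id":7}`)
		}))
	defer stub.Close()

	body, err := makeRequest("fake-bearer", stub.URL)
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	fmt.Println("got body:", string(body))
}
```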
},
{
"alpha_fraction": 0.6541353464126587,
"alphanum_fraction": 0.6760651469230652,
"avg_line_length": 26.05084800720215,
"blob_id": "43f809ee06753e3a1d59020a79d5ae510941d3f9",
"content_id": "a33ce1f8116509b32a4906326539a321cb5e55a0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Gradle",
"length_bytes": 1596,
"license_type": "no_license",
"max_line_length": 85,
"num_lines": 59,
"path": "/images.java/query/build.gradle",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "buildscript {\n repositories {\n mavenCentral()\n }\n dependencies {\n classpath(\"org.springframework.boot:spring-boot-gradle-plugin:1.4.2.RELEASE\")\n }\n}\n\napply plugin: 'java'\napply plugin: 'eclipse'\napply plugin: 'idea'\napply plugin: 'org.springframework.boot'\n\njar {\n baseName = 'query'\n version = '1.0'\n}\n\nrepositories {\n mavenCentral()\n}\n\nsourceCompatibility = 1.8\ntargetCompatibility = 1.8\n\ndependencies {\n // tag::jetty[]\n compile(\"org.springframework.boot:spring-boot-starter-web\") {\n exclude module: \"spring-boot-starter-tomcat\"\n }\n compile(\"org.springframework.boot:spring-boot-starter-jetty\")\n // end::jetty[]\n \n // tag::actuator[]\n compile(\"org.springframework.boot:spring-boot-starter-actuator\")\n // end::actuator[]\n testCompile(\"junit:junit\")\n\n // wpff common\n compile fileTree(dir: './lib/', include: ['mybooks_common*.jar'])\n\n // REST\n compile(\"org.springframework.boot:spring-boot-starter-web\")\n\n // google api\n compile(\"com.google.apis:google-api-services-books:v1-rev57-1.19.0\")\n compile(\"com.google.http-client:google-http-client-jackson2:1.19.0\")\n\n // Swagger\n compile(\"io.springfox:springfox-swagger-ui:2.2.2\")\n compile(\"io.springfox:springfox-swagger2:2.2.2\")\n\n // https://mvnrepository.com/artifact/commons-beanutils/commons-beanutils\n compile group: 'commons-beanutils', name: 'commons-beanutils', version: '1.8.3'\n\n // https://mvnrepository.com/artifact/org.apache.commons/commons-lang3\n compile group: 'org.apache.commons', name: 'commons-lang3', version: '3.0'\n}\n"
},
{
"alpha_fraction": 0.6608981490135193,
"alphanum_fraction": 0.661555290222168,
"avg_line_length": 20.533018112182617,
"blob_id": "7c3d0efaf9f2aa1ecba3b86b2237d950a2d9928d",
"content_id": "56168540a7048c8d0da4d1f5c631533c9282b524",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 4565,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 212,
"path": "/images/tag/src/github.com/hipposareevil/tag/cache.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\n// Cache layer\n\nimport (\n\t\"fmt\"\n\t\"strconv\"\n\n\t\"time\"\n\n\t\"github.com/mediocregopher/radix.v2/pool\"\n\t_ \"github.com/mediocregopher/radix.v2/redis\"\n)\n\n// Cache layer\ntype CacheLayer interface {\n\t// Set a key/value for a namespace\n\tSet(string, int, string)\n\n\t// Set a key/(byte)value for a namespace\n\tSetBytes(string, int, []byte)\n\n\t// Set multiple k/vs\n\tSetMultiple(string, map[int]string)\n\n\t// Get a key/value from the cache\n\tGet(string, int) string\n\n\t// Get a key/value from the cache\n\tGetBytes(string, int) []byte\n\n\t// Clear one key in the namespace\n\tClear(string, int)\n\n\t// Clear all k/v from namespace\n\tClearAll(string)\n}\n\n// actual service\ntype cacheLayer struct {\n\tredisPool *pool.Pool\n}\n\n////////////\n// Set a value in the cache\n//\n// params:\n// namespace Namespace for k/v\n// key Key to store\n// value Value to store\nfunc (theCache cacheLayer) Set(namespace string, key int, value string) {\n\tstart := time.Now()\n\n\tconn, err := theCache.redisPool.Get()\n\tif err != nil {\n\t\tfmt.Println(\"Unable to get Redis for setting cache: \", err)\n\t\treturn\n\t}\n\tdefer theCache.redisPool.Put(conn)\n\n\t// convert key to string\n\tkeyAsString := strconv.Itoa(key)\n\n\t// Set a value\n\terr = conn.Cmd(\"HSET\", namespace, key, value).Err\n\tif err != nil {\n\t\tfmt.Println(\"Unable to set cache for \"+namespace+\".\"+keyAsString+\": \", err)\n\t}\n\n\tt := time.Now()\n\telapsed := t.Sub(start)\n\tfmt.Println(\"cache.Set: \", elapsed)\n}\n\n////////////\n// Set a value in the cache\n//\n// params:\n// namespace Namespace for k/v\n// key Key to store\n// value Byte Value to store\nfunc (theCache cacheLayer) SetBytes(namespace string, key int, value []byte) {\n\tvalueAsString := string(value[:])\n\ttheCache.Set(namespace, key, valueAsString)\n}\n\n////////////\n// Set a value in the cache\n//\n// params:\n// namespace: Namespace for k/v\n// kvMap: array of key values\nfunc (theCache cacheLayer) SetMultiple(namespace string, kvMap map[int]string) {\n\tstart := time.Now()\n\n\tconn, err := theCache.redisPool.Get()\n\tif err != nil {\n\t\tfmt.Println(\"Unable to get Redis for setting cache: \", err)\n\t\treturn\n\t}\n\tdefer theCache.redisPool.Put(conn)\n\n\t// Iterate over k/v map\n\tfor key, value := range kvMap {\n\t\t// convert key to string\n\t\tkeyAsString := strconv.Itoa(key)\n\n\t\t// Set a value\n\t\terr = conn.Cmd(\"HSET\", namespace, key, value).Err\n\t\tif err != nil {\n\t\t\tfmt.Println(\"Unable to set cache for \"+namespace+\".\"+keyAsString+\": \", err)\n\t\t}\n\t}\n\n\tt := time.Now()\n\telapsed := t.Sub(start)\n\tfmt.Println(\"cache.SetMultiple: \", elapsed)\n}\n\n////////////\n// Clear a value in the cache\n//\n// params:\n// namespace Namespace for k/v\n// key Key to clear\nfunc (theCache cacheLayer) Clear(namespace string, key int) {\n\tconn, err := theCache.redisPool.Get()\n\tif err != nil {\n\t\tfmt.Println(\"Unable to get Redis for clearing cache: \", err)\n\t\treturn\n\t}\n\tdefer theCache.redisPool.Put(conn)\n\n\t// convert key to string\n\tkeyAsString := strconv.Itoa(key)\n\n\t// clear\n\terr = conn.Cmd(\"HDEL\", namespace, key).Err\n\tif err != nil {\n\t\tfmt.Println(\"Unable to delete cache for \"+namespace+\".\"+keyAsString+\": \", err)\n\t}\n}\n\n////////////\n// Clear all values in the cache for a namespace\n//\n// params:\n// namespace Namespace for k/v\nfunc (theCache cacheLayer) ClearAll(namespace string) {\n\tconn, err := theCache.redisPool.Get()\n\tif err != nil 
{\n\t\tfmt.Println(\"Unable to get Redis for clearing cache: \", err)\n\t\treturn\n\t}\n\tdefer theCache.redisPool.Put(conn)\n\n\t// clear\n\terr = conn.Cmd(\"DEL\", namespace).Err\n\tif err != nil {\n\t\tfmt.Println(\"Unable to delete cache for \"+namespace+\": \", err)\n\t}\n}\n\n////////////\n// Get a byte array value from the cache\n//\n// params:\n// namespace Namespace for k/v\n// key Key to retrieve\n//\n// returns:\n// value, or empty byte array if none exists\nfunc (theCache cacheLayer) GetBytes(namespace string, key int) []byte {\n\tstringValue := theCache.Get(namespace, key)\n\tvar byteValue []byte\n\n\tif len(stringValue) > 0 {\n\t\tbyteValue = []byte(stringValue)\n\t}\n\n\treturn byteValue\n}\n\n////////////\n// Get a value from the cache\n//\n// params:\n// namespace Namespace for k/v\n// key Key to retrieve\n//\n// returns:\n// value, or \"\" if none exists\nfunc (theCache cacheLayer) Get(namespace string, key int) string {\n\tconn, err := theCache.redisPool.Get()\n\tif err != nil {\n\t\tfmt.Println(\"Unable to get Redis for getting cache: \", err)\n\t\treturn \"\"\n\t}\n\tdefer theCache.redisPool.Put(conn)\n\n\t// convert key to string\n\tkeyAsString := strconv.Itoa(key)\n\n\t// Set a value\n\tvalue, err := conn.Cmd(\"HGET\", namespace, key).Str()\n\tif err != nil {\n\t\tfmt.Println(\"Unable to get cache for \"+namespace+\".\"+keyAsString+\": \", err)\n\t\tvalue = \"\"\n\t}\n\n\treturn value\n}\n"
},
{
"alpha_fraction": 0.6561368107795715,
"alphanum_fraction": 0.6569416522979736,
"avg_line_length": 24.228425979614258,
"blob_id": "371f9a6c0acaf1b7f81e38805c59323eefb2561f",
"content_id": "07c4590d8c42f3022aef1bf1cdba1e5f4c94f343",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Java",
"length_bytes": 4970,
"license_type": "no_license",
"max_line_length": 130,
"num_lines": 197,
"path": "/images.java/author/src/main/java/com/wpff/resources/AuthorHelper.java",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package com.wpff.resources;\n\nimport java.lang.reflect.InvocationTargetException;\nimport java.util.List;\n\nimport javax.ws.rs.NotFoundException;\n\nimport org.apache.commons.beanutils.BeanUtils;\n\nimport com.wpff.common.cache.Cache;\nimport com.wpff.common.result.Segment;\nimport com.wpff.core.Author;\nimport com.wpff.db.AuthorDAO;\nimport com.wpff.query.AuthorQuery;\n\nimport io.dropwizard.hibernate.UnitOfWork;\n\n/**\n * Helper for managing DAO\n *\n */\npublic class AuthorHelper {\n \n /**\n * Author DAO\n */\n private final AuthorDAO authorDAO;\n\n /**\n * Redis cache\n */\n private final Cache cache;\n \n /**\n * Create new helper\n * \n * @param authorDao\n * DAO used by helper\n * @param cache\n * Redis cache\n */\n public AuthorHelper(AuthorDAO authorDao, Cache cache) {\n this.authorDAO= authorDao;\n this.cache = cache;\n \n this.cache.clear(\"author.name\");\n }\n \n /**\n * Helper to convert a list into a csv of those values\n * \n * @param values\n * @return the list of values as a CSV string\n */\n static String convertListToCsv(List<String> values) {\n String csvString = \"\";\n if (values != null) {\n for (String s : values) {\n csvString += s + \",\";\n }\n // trim last comma\n csvString = csvString.substring(0, csvString.length()); \n }\n return csvString;\n }\n \n\n /**\n * Look for author by incoming id. If returned Author is null, throw 404.\n * \n * @param id\n * ID of author to look for\n * @return Author in database\n */\n @UnitOfWork\n Author findById(int id) {\n return authorDAO.findById(id).orElseThrow(() -> new NotFoundException(\"No author by id \" + id));\n }\n \n /**\n * Get all authors\n * \n * @param desiredSegment\n * offset and limit for the query\n * @return list of Authors\n */\n @UnitOfWork\n List<Author> findAll(Segment desiredSegment) {\n return authorDAO.findAll(desiredSegment);\n }\n \n /**\n * Get total number of authors\n * @return Number of authors\n */\n \t@UnitOfWork\n long getTotalNumberAuthors() {\n \t return authorDAO.getNumberOfAuthors();\n }\n\n /**\n * Get set of Authors by name\n * \n * @param authorQuery\n * author name\n * @param desiredSegment\n * offset and limit for the query \n * @returnlist of Authors\n */\n @UnitOfWork\n List<Author> findByName(String authorQuery, Segment desiredSegment) {\n return authorDAO.findByName(authorQuery, desiredSegment);\n }\n\n /**\n * Create an author in the database\n * \n * @param authorToCreate\n * what to create\n * @return author in db\n */\n @UnitOfWork\n Author createAuthor(Author authorToCreate) {\n Author author = this.authorDAO.create(authorToCreate);\n System.out.println(\"Created author: \" + author.getId());\n return author;\n }\n \n /**\n * Update an Author in the database\n * \n * @param authorBean\n * Author to update in database\n * @param authorId\n * Author's ID\n * @return Updated author from database\n * @throws InvocationTargetException\n * @throws IllegalAccessException\n */\n @UnitOfWork\n Author updateAuthor(AuthorQuery authorBean, int authorId) \n throws IllegalAccessException, InvocationTargetException {\n Author authorToUpdate = authorDAO.findById(authorId).orElseThrow(() -> new NotFoundException(\"No author by id \" + authorId));\n \n // Copy over non null values\n copyProperty(authorToUpdate, \"name\", authorBean.getName());\n copyProperty(authorToUpdate, \"birthDate\", authorBean.getBirthDate());\n copyProperty(authorToUpdate, \"olKey\", authorBean.getOlKey());\n copyProperty(authorToUpdate, \"goodreads_url\", authorBean.getGoodreadsUrl());\n 
copyProperty(authorToUpdate, \"imageSmall\", authorBean.getImageSmall());\n copyProperty(authorToUpdate, \"imageLarge\", authorBean.getImageLarge());\n copyProperty(authorToUpdate, \"imageMedium\", authorBean.getImageMedium());\n \n // Make subjects in DB a CSV string\n authorToUpdate.setSubjectsAsCsv(convertListToCsv(authorBean.getSubjects()));\n \n this.authorDAO.update(authorToUpdate);\n \n // Clear the cache\n this.cache.clear(\"author.name\", authorId);\n return authorToUpdate;\n }\n\n \n /**\n * Delete an author by ID\n * \n * @param authorId\n * ID of author to delete\n */\n @UnitOfWork\n void deleteAuthor(int authorId) {\n Author deleteMe = this.findById(authorId);\n \n // Clear the cache\n this.cache.clear(\"author.name\", authorId);\n \n this.authorDAO.delete(deleteMe);\n }\n \n \n /**\n * Copy non null property\n * \n * @param destination\n * @param field\n * @param value\n * @throws InvocationTargetException \n * @throws IllegalAccessException \n */\n private static void copyProperty(Object destination, String field, Object value) \n throws IllegalAccessException, InvocationTargetException {\n if (value != null) {\n BeanUtils.copyProperty(destination, field, value);\n }\n }\n}\n"
},
{
"alpha_fraction": 0.6848101019859314,
"alphanum_fraction": 0.6848101019859314,
"avg_line_length": 18.268293380737305,
"blob_id": "4b2a0167eb8fa2937a2807daedf093f1cf68322d",
"content_id": "2c2dd6d2de563fad739d43b9a0d4f4c2b7772e62",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Java",
"length_bytes": 790,
"license_type": "no_license",
"max_line_length": 61,
"num_lines": 41,
"path": "/images.java/query/src/main/java/wpff/openlibrary/beans/TitleDocs.java",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package wpff.openlibrary.beans;\n\nimport java.util.List;\n\nimport com.fasterxml.jackson.annotation.JsonIgnoreProperties;\n\n/**\n * Wrapper for Titles when parsing data from openlibrary:\n * 'https://openlibrary.org/search?title=X&author=Y'\n * The returned json starts with:\n * <pre>\n * {\n \"docs\": [\n {\n \"title_suggest\": \".....\n * </pre>\n * So this class gets us to the array of Titles\n */\n@JsonIgnoreProperties(ignoreUnknown=true)\npublic class TitleDocs {\n\tList<OpenLibraryTitle> doc;\n\t\n\tint numFound;\n\n\tpublic int getNumFound() {\n\t\treturn numFound;\n\t}\n\n\tpublic void setNumFound(int numFound) {\n\t\tthis.numFound = numFound;\n\t}\n\n\t\n\tpublic List<OpenLibraryTitle> getDocs() {\n\t\treturn doc;\n\t}\n\n\tpublic void setDocs(List<OpenLibraryTitle> titles) {\n\t\tthis.doc = titles;\n\t}\n}\n"
},
{
"alpha_fraction": 0.6472222208976746,
"alphanum_fraction": 0.6476389169692993,
"avg_line_length": 21.712934494018555,
"blob_id": "09e4a0838b5c5cdcf35645265d5a536bbe08696e",
"content_id": "6fe84842debe164ad23fc33bed6dad0399256aaa",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Go",
"length_bytes": 7200,
"license_type": "no_license",
"max_line_length": 125,
"num_lines": 317,
"path": "/images/user/src/github.com/hipposareevil/user/service.go",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package main\n\n// User service\n\nimport (\n\t\"fmt\"\n\t\"strings\"\n\n\t\"errors\"\n\n\t\"database/sql\"\n\t_ \"github.com/go-sql-driver/mysql\"\n\n\t// password encryption\n\t\"golang.org/x/crypto/bcrypt\"\n)\n\n// Service interface exposed to clients\ntype UserService interface {\n\t// GetUsers: offset, limit\n\tGetUsers(int, int) (Users, error)\n\n\t// GetUser: id\n\tGetUser(int) (User, error)\n\n\t// DeleteUser: id\n\tDeleteUser(int) error\n\n\t// CreateUser\n\t// name, usergroup, data, password\n\tCreateUser(string, string, string, string) (User, error)\n\n\t// UpdateUser\n\t// id, name, usergroup, data, password\n\tUpdateUser(int, string, string, string, string) error\n}\n\n////////////////////////\n// Actual service\n// This takes the following:\n// - mysqlDb DB for MySQL\ntype userService struct {\n\tmysqlDb *sql.DB\n}\n\n//////////\n// METHODS on userService\n\n////////////////\n// Get User\n//\n// params:\n// bearer: Authorization bearer\n// userId : ID of user to get\n//\n// returns:\n// user\n// error\nfunc (theService userService) GetUser(userId int) (User, error) {\n\t////////////////////\n\t// Get data from mysql\n\t// mysql\n\tif err := theService.mysqlDb.Ping(); err != nil {\n\t\ttheService.mysqlDb.Close()\n\t\tfmt.Println(\"got ping error: \", err)\n\t\treturn User{}, errors.New(\"unable to ping mysql\")\n\t}\n\n\t// Make query\n\tvar user User\n\t// Scan the DB info into 'user' variable\n\terr := theService.mysqlDb.\n\t\tQueryRow(\"SELECT user_id, name, user_group, data FROM user WHERE user_id = ?\", userId).\n\t\tScan(&user.Id, &user.Name, &user.UserGroup, &user.Data)\n\n\tswitch {\n\tcase err == sql.ErrNoRows:\n\t\treturn User{}, ErrNotFound\n\tcase err != nil:\n\t\tfmt.Println(\"Got error from select: \", err)\n\t\treturn User{}, ErrServerError\n\tdefault:\n\t\tfmt.Println(\"got user!\", user)\n\t}\n\n\treturn user, nil\n}\n\n////////////////\n// Get users\n//\n// params:\n// offset : offset into list\n// limit : number of items to get from list\n//\n// returns:\n// users\n// error\nfunc (theService userService) GetUsers(offset int, limit int) (Users, error) {\n\t////////////////////\n\t// Get data from mysql\n\t// mysql\n\tif err := theService.mysqlDb.Ping(); err != nil {\n\t\ttheService.mysqlDb.Close()\n\t\tfmt.Println(\"got ping error: \", err)\n\t\treturn Users{}, errors.New(\"unable to ping mysql\")\n\t}\n\n\t// Get total number of rows\n\tvar totalNumberOfRows int\n\t_ = theService.mysqlDb.QueryRow(\"SELECT COUNT(*) from user\").Scan(&totalNumberOfRows)\n\n\tif limit > totalNumberOfRows {\n\t\tlimit = totalNumberOfRows\n\t}\n\n\t// Make query\n\tresults, err := theService.mysqlDb.\n\t\tQuery(\"SELECT user_id, name, user_group, data FROM user LIMIT ?,? 
\", offset, limit)\n\tif err != nil {\n\t\tfmt.Println(\"Got error from mysql: \" + err.Error())\n\t\treturn Users{}, errors.New(\"unable to query mysql\")\n\t}\n\n\t// slice of User entities\n\tdatum := make([]User, 0, 0)\n\n\t// Parse results\n\tfor results.Next() {\n\t\tvar user User\n\t\t// For each row, scan the result into our user composite object:\n\t\t// user_id, name\n\t\terr = results.Scan(&user.Id, &user.Name, &user.UserGroup, &user.Data)\n\t\tif err != nil {\n\t\t\tfmt.Println(\"Got error from mysql: \" + err.Error())\n\t\t\treturn Users{}, errors.New(\"Unable to query mysql\")\n\t\t}\n\t\tdatum = append(datum, user)\n\t}\n\n\t// Create Users to return\n\treturnValue := Users{\n\t\tOffset: offset,\n\t\tLimit: limit,\n\t\tTotal: totalNumberOfRows,\n\t\tData: datum,\n\t}\n\n\treturn returnValue, nil\n}\n\n////////////////\n// Delete user\n//\n// params:\n// userId : ID of user to delete\n//\n// returns:\n// error\nfunc (theService userService) DeleteUser(userId int) error {\n\t////////////////////\n\t// Get data from mysql\n\t// mysql\n\tif err := theService.mysqlDb.Ping(); err != nil {\n\t\ttheService.mysqlDb.Close()\n\t\tfmt.Println(\"got ping error: \", err)\n\t\treturn errors.New(\"unable to ping mysql\")\n\t}\n\n\t// Verify the user exists, if not, throw ErrNotFound\n\t_, getErr := theService.GetUser(userId)\n\tif getErr != nil {\n\t\treturn getErr\n\t}\n\n\t// Make DELETE query\n\t_, err := theService.mysqlDb.Exec(\"DELETE FROM user WHERE user_id = ?\", userId)\n\n\treturn err\n}\n\n////////////////\n// CreateUser\n//\n// params:\n// name: name of new user\n// userGroup\n// data\n// password (unencrypted at this point)\n//\n// returns:\n// user\n// error\nfunc (theService userService) CreateUser(userName string, userGroup string, data string, password string) (User, error) {\n\t////////////////////\n\t// verify mysql\n\t// mysql\n\tif err := theService.mysqlDb.Ping(); err != nil {\n\t\ttheService.mysqlDb.Close()\n\t\tfmt.Println(\"got ping error: \", err)\n\t\treturn User{}, errors.New(\"unable to ping mysql\")\n\t}\n\n\t// encrypt password\n\tencryptedPassword, err := encrypt(password)\n\tif err != nil {\n\t\tfmt.Println(\"Unable to encrypt password\")\n\t\treturn User{}, errors.New(\"Unable to encrypt password\")\n\t}\n\n\t// Make insert\n\tstmt, err := theService.mysqlDb.\n\t\tPrepare(\"INSERT INTO user SET name=?, user_group=?, data=?, password=?\")\n\tdefer stmt.Close()\n\tif err != nil {\n\t\tfmt.Println(\"Error preparing DB: \", err)\n\t\treturn User{}, errors.New(\"Unable to prepare a DB statement: \")\n\t}\n\n\tres, err := stmt.Exec(userName, userGroup, data, encryptedPassword)\n\tif err != nil {\n\t\tfmt.Println(\"Error inserting into DB: \", err)\n\t\tif strings.Contains(err.Error(), \"Duplicate entry \") {\n\t\t\treturn User{}, ErrAlreadyExists\n\t\t} else {\n\t\t\treturn User{}, errors.New(\"Unable to run INSERT against DB: \")\n\t\t}\n\t}\n\n\t// get the id\n\tid, _ := res.LastInsertId()\n\n\t// Create user\n\tvar user User\n\tuser = User{\n\t\tId: int(id),\n\t\tName: userName,\n\t\tData: data,\n\t\tUserGroup: userGroup,\n\t}\n\n\treturn user, nil\n}\n\n////////////////\n// UpdateUser\n//\n// params:\n// userId: id of user to update\n// userName: new name of user\n// userGroup\n// data\n// password (unencrypted at this point)\n//\n// returns:\n// error\nfunc (theService userService) UpdateUser(userId int, userName string, userGroup string, data string, password string) error {\n\t////////////////////\n\t// Get data from mysql\n\t// mysql\n\tif err := 
theService.mysqlDb.Ping(); err != nil {\n\t\ttheService.mysqlDb.Close()\n\t\tfmt.Println(\"got ping error: \", err)\n\t\treturn errors.New(\"unable to ping mysql\")\n\t}\n\n\t// encrypt password\n\tencryptedPassword, err := encrypt(password)\n\tif err != nil {\n\t\treturn errors.New(\"Unable to encrypt password\")\n\t}\n\n\t// change empty to null\n\tif len(password) <= 0 {\n\t\tencryptedPassword = \"\"\n\t}\n\n\t// Make query\n\tstmt, err := theService.mysqlDb.\n\t\tPrepare(\"UPDATE user SET \" +\n\t\t\t\"name=COALESCE(NULLIF(?,''),name), \" +\n\t\t\t\"data=COALESCE(NULLIF(?,''),data), \" +\n\t\t\t\"user_group=COALESCE(NULLIF(?,''),user_group), \" +\n\t\t\t\"password=COALESCE(NULLIF(?,''),password) \" +\n\t\t\t\"WHERE user_id = ?\")\n\tdefer stmt.Close()\n\tif err != nil {\n\t\tfmt.Println(\"Error preparing DB: \", err)\n\t\treturn errors.New(\"Unable to prepare a DB statement: \")\n\t}\n\n\t_, err = stmt.Exec(userName, data, userGroup, encryptedPassword, userId)\n\tif err != nil {\n\t\tfmt.Println(\"Error updatingDB: \", err)\n\t\treturn errors.New(\"Unable to run update against DB: \")\n\t}\n\n\treturn nil\n}\n\n// Encrypt the incoming plaintext password.\n// Returns a new string w/ encrypted text.\nfunc encrypt(password string) (string, error) {\n\tasByte := []byte(password)\n\n\tencrypted, err := bcrypt.GenerateFromPassword(asByte, bcrypt.MinCost)\n\n\tif err != nil {\n\t\tfmt.Println(\"Error encrypting password.\")\n\t\tfmt.Println(err)\n\t\treturn \"\", err\n\t}\n\n\tencryptedPassword := string(encrypted[:])\n\n\treturn encryptedPassword, nil\n}\n"
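The encrypt helper above hashes with bcrypt.MinCost (cost 4), which keeps tests fast but is weak against offline brute force; bcrypt.DefaultCost is the usual production choice. A sketch of hashing plus the verification step a login path would pair with it:

```go
package main

import (
	"fmt"

	"golang.org/x/crypto/bcrypt"
)

func main() {
	// DefaultCost (10) rather than the MinCost used above
	hash, err := bcrypt.GenerateFromPassword([]byte("s3cret"), bcrypt.DefaultCost)
	if err != nil {
		fmt.Println("hash error:", err)
		return
	}

	// CompareHashAndPassword returns nil only on a match
	if err := bcrypt.CompareHashAndPassword(hash, []byte("s3cret")); err != nil {
		fmt.Println("password mismatch")
	} else {
		fmt.Println("password ok")
	}
}
```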
},
{
"alpha_fraction": 0.7382592558860779,
"alphanum_fraction": 0.7470256686210632,
"avg_line_length": 40.97368240356445,
"blob_id": "90444094031c36624e26bc16fb037ec25f7b5417",
"content_id": "5748397123f5ca0d20aaaa6c00d919dfbe1fdb95",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1597,
"license_type": "no_license",
"max_line_length": 324,
"num_lines": 38,
"path": "/images.java/user_book/README.md",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "# User Book Microservice\n\n## Introduction\n\n**/user_book** is a microservice for creating, listing, updating and deleting *user books* from the database. Each *user book* is a link between a *User* and a *Book*.\n\nFor example, user Bob has two *user books* in his *to read* list. He would create a *user book* for each of those books and add the tag *to read* to each book.\n\n\n## Supported calls\nThe list of supported calls and their documentation are available via the swagger endpoint. This runs on localhost:8080/swagger/ when the application is up.\n\n## Fields for a User Book\nA User Book entry has the following fields:\n\nField | Purpose\n--- | ---\nid | Unique ID of the user book. \nrating | 1 for thumbs up, 0 for thumbs down.\ntags | list of Tags for this user book.\nbookId | ID of Book. Used in the */book* endpoint.\nuserId | ID of the User. Used in the */user* endpoint.\ndata | Optional metadata from the user, e.g. notes on the book.\n\n## Authorization\nIt is necessary to authorize all REST calls to this endpoint. This is done by obtaining an authorization token from the */authorize* endpoint and adding it to the HTTP headees with the key *AUTHORIZATION*. See [/authorize](https://github.com/hipposareevil/books/blob/master/images/authorize/README.md) for more information.\n\n\n## Dropwizard Application\nThe application listens on port 8080.\n\n## Docker \nThe Docker container will expose port 8080 to other containers on the *booknet* Docker network.\n\n## Libraries used\n\n* [dropwizard](http://www.dropwizard.io/) for microservice framework.\n* [maven](https://maven.apache.org/) for building.\n\n\n"
},
{
"alpha_fraction": 0.6449784636497498,
"alphanum_fraction": 0.6449784636497498,
"avg_line_length": 19.612150192260742,
"blob_id": "b7b4d9a9fdc6003494a92fa4642779c5363e66ca",
"content_id": "a4bbd9a4a3735176bc6608719d0c4e04806c6a2e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Java",
"length_bytes": 4411,
"license_type": "no_license",
"max_line_length": 65,
"num_lines": 214,
"path": "/images.java/author/src/main/java/com/wpff/core/Author.java",
"repo_name": "hipposareevil/books",
"src_encoding": "UTF-8",
"text": "package com.wpff.core;\n\nimport javax.persistence.Column;\nimport javax.persistence.Entity;\nimport javax.persistence.GeneratedValue;\nimport javax.persistence.GenerationType;\nimport javax.persistence.Id;\nimport javax.persistence.Table;\n\n\n/***\n * Represents an author. This is marshalled to/from the database.\n */\n@Entity\n@Table(name = \"author\")\npublic class Author implements Comparable {\n @Id\n @GeneratedValue(strategy = GenerationType.IDENTITY)\n @Column(name = \"author_id\", unique=true, nullable = false)\n private int id;\n\n @Column(name = \"name\", unique=true, nullable = false)\n private String name;\n\n @Column(name = \"birth_date\", unique=true, nullable = false)\n private String birthDate;\n\n // small image URL\n @Column(name = \"image_small\", unique=false, nullable = true)\n private String imageSmall;\n\n // medium image URL\n @Column(name = \"image_medium\", unique=false, nullable = true)\n private String imageMedium;\n\n // large image URL\n @Column(name = \"image_large\", unique=false, nullable = true)\n private String imageLarge;\n\n // openlibrary.org 'author key' location\n @Column(name = \"ol_key\", unique=false, nullable = true)\n private String olKey;\n\n // goodreads URL for author\n @Column(name = \"goodreads_url\", unique=false, nullable = true)\n private String goodreadsUrl;\n\n // openlibrary.org 'author key' location\n @Column(name = \"subjects\", unique=false, nullable = true)\n private String subjectsAsCsv;\n\n //////////////////////////////////\n \n /**\n * @return the id\n */\n public int getId() {\n return id;\n }\n\n /**\n * @param id the id to set\n */\n public void setId(int id) {\n this.id = id;\n }\n\n /**\n * @return the name\n */\n public String getName() {\n return name;\n }\n\n /**\n * @param name the name to set\n */\n public void setName(String name) {\n this.name = name;\n }\n\n /**\n * @return the birthDate\n */\n public String getBirthDate() {\n return birthDate;\n }\n\n /**\n * @param birthDate the birthDate to set\n */\n public void setBirthDate(String birthDate) {\n this.birthDate = birthDate;\n }\n\n /**\n * @return the imageSmall\n */\n public String getImageSmall() {\n return imageSmall;\n }\n\n /**\n * @param imageSmall the imageSmall to set\n */\n public void setImageSmall(String imageSmall) {\n this.imageSmall = imageSmall;\n }\n\n /**\n * @return the imageMedium\n */\n public String getImageMedium() {\n return imageMedium;\n }\n\n /**\n * @param imageMedium the imageMedium to set\n */\n public void setImageMedium(String imageMedium) {\n this.imageMedium = imageMedium;\n }\n\n /**\n * @return the imageLarge\n */\n public String getImageLarge() {\n return imageLarge;\n }\n\n /**\n * @param imageLarge the imageLarge to set\n */\n public void setImageLarge(String imageLarge) {\n this.imageLarge = imageLarge;\n }\n\n /**\n * @return the olKey\n */\n public String getOlKey() {\n return olKey;\n }\n\n /**\n * @param olKey the olKey to set\n */\n public void setOlKey(String olKey) {\n this.olKey = olKey;\n }\n\n @Override\n public int compareTo(Object o) {\n Author other = (Author) o;\n return Integer.compare(this.id, other.id);\n }\n\n /**\n * @return the subjects\n */\n public String getSubjectsAsCsv() {\n return subjectsAsCsv;\n }\n\n /**\n * @param subjects the subjects to set\n */\n public void setSubjectsAsCsv(String subjects) {\n this.subjectsAsCsv = subjects;\n }\n\n /* (non-Javadoc)\n * @see java.lang.Object#toString()\n */\n @Override\n public String toString() {\n StringBuilder builder = new StringBuilder();\n 
builder.append(\"Author [id=\");\n builder.append(id);\n builder.append(\", name=\");\n builder.append(name);\n builder.append(\", birthDate=\");\n builder.append(birthDate);\n builder.append(\", imageSmall=\");\n builder.append(imageSmall);\n builder.append(\", imageMedium=\");\n builder.append(imageMedium);\n builder.append(\", imageLarge=\");\n builder.append(imageLarge);\n builder.append(\", olKey=\");\n builder.append(olKey);\n builder.append(\", subjectsAsCsv=\");\n builder.append(subjectsAsCsv);\n builder.append(\"]\");\n return builder.toString();\n }\n\n /**\n * @return the goodreadsUrl\n */\n public String getGoodreadsUrl() {\n return goodreadsUrl;\n }\n\n /**\n * @param goodreadsUrl the goodreadsUrl to set\n */\n public void setGoodreadsUrl(String goodreadsUrl) {\n this.goodreadsUrl = goodreadsUrl;\n }\n\n\n\n}\n"
}
] | 129 |
PWalis/Music-Segmenter | https://github.com/PWalis/Music-Segmenter | 3272df56752feabf2491ea3cdbca434ad1ab48a7 | ad3017afd8c837a765ae8627524745743a81fd1d | 1630fe86f04f3ffb5107ab37b264467ad1a7a4df | refs/heads/main | 2023-02-26T07:38:59.251401 | 2021-01-29T22:40:49 | 2021-01-29T22:40:49 | 324,621,752 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6721375584602356,
"alphanum_fraction": 0.6834701299667358,
"avg_line_length": 38.38461685180664,
"blob_id": "d517dbc7a507db86b8914fd840ab17e02300b76e",
"content_id": "51898b16b3e5df3787a1139aac58427b92e987d5",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2561,
"license_type": "permissive",
"max_line_length": 147,
"num_lines": 65,
"path": "/python/spotify_api.py",
"repo_name": "PWalis/Music-Segmenter",
"src_encoding": "UTF-8",
"text": "import spotipy\nimport sys\nimport random\nfrom decouple import config\nfrom spotipy.oauth2 import SpotifyOAuth\nimport json\n\n# Scope allows for different permisions as to what the API can access https://developer.spotify.com/documentation/general/guides/scopes/#streaming \n# Scope uses split() method on string so you can just add scopes to a single string with a space between them for multiple scopes\nscope = \"user-library-read user-read-currently-playing user-modify-playback-state\"\nclient_id=config('SPOTIPY_CLIENT_ID')\nclient_secret=config('SPOTIPY_CLIENT_SECRET')\nredirect_uri=config('SPOTIPY_REDIRECT_URI')\n\n# Instantiating Spotify API object and passing appropriate perameters \nsp = spotipy.Spotify(auth_manager=SpotifyOAuth(scope=scope, client_id=client_id,\n client_secret=client_secret, redirect_uri=redirect_uri))\n\ndef get_saved_list():\n '''Retreive current top 20 saved songs and print arist and song name'''\n results = sp.current_user_saved_tracks()\n for idx, item in enumerate(results['items']):\n track = item['track']\n print(idx, track['artists'][0]['name'], \" – \", track['name'])\n\ndef get_current_song():\n '''Retreive current playing song and printing the artist and song name'''\n results = sp.current_user_playing_track()\n track = results['item']\n print(track['artists'][0]['name'], ' - ', f\"'{track['name']}'\")\n\ndef set_song_random():\n '''Sets current playback song to a random song from saved list(top 20)'''\n uri = get_random_song()\n sp.add_to_queue(uri=uri)\n sp.next_track()\n\ndef get_random_song():\n '''Returns random song ID from top 20 recent saved songs'''\n results = sp.current_user_saved_tracks()\n song_id = results['items'][random.randint(0,19)]['track']['id']\n return song_id\n\ndef current_track_time():\n ''' Returns tuple (progress_ms, duration_ms) of current track '''\n results = sp.current_user_playing_track()\n with open('song_data.json', 'w') as file:\n json.dump(results, file, indent=4)\n return (results['progress_ms'], results['item']['duration_ms'])\n\ndef ms_to_time(ms):\n ''' Convers ms to (minute:seconds) fromat as str '''\n total_seconds = ms//1000\n minutes = total_seconds//60\n seconds = total_seconds % 60\n return f'{minutes}:{seconds}'\n\ndef seek_back(seconds=10):\n '''' Seek will go back the an amount of seconds based on \"seconds\" parameter'''\n seconds_in_ms = seconds * 1000\n ms = current_track_time()[0] - seconds_in_ms\n sp.seek_track(ms)\n\nif __name__ == '__main__':\n globals()[sys.argv[1]]()"
},
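A small formatting flaw in spotify_api.py above: ms_to_time builds the string with f'{minutes}:{seconds}', so 185000 ms comes out as '3:5' instead of '3:05'. A minimal zero-padded sketch of the same conversion (standalone, no Spotify session required):

def ms_to_time(ms):
    '''Convert milliseconds to a zero-padded minutes:seconds string.'''
    total_seconds = ms // 1000
    minutes, seconds = divmod(total_seconds, 60)
    return '{}:{:02d}'.format(minutes, seconds)

assert ms_to_time(185000) == '3:05'  # the original returns '3:5' here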
{
"alpha_fraction": 0.7198795080184937,
"alphanum_fraction": 0.7259036302566528,
"avg_line_length": 29.090909957885742,
"blob_id": "04863fde343e44fc22dfe224782b5735fad62844",
"content_id": "c13b13857d219dff584ac7151f8b28da52adf9d8",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 332,
"license_type": "permissive",
"max_line_length": 71,
"num_lines": 11,
"path": "/README.md",
"repo_name": "PWalis/Music-Segmenter",
"src_encoding": "UTF-8",
"text": "# Music-Rewind-Anytime\n## Looks at a users listening history and creates an up to date rewind \n### Features\n- Listening time\n - average time listend per day, week, month,\n - Min time listend monthly\n - Max time listend monthly\n- Top 5 genres\n- Most listend song\n- Exploration score\n- Top 5 Artists for selected time frame\n\n"
},
{
"alpha_fraction": 0.6864988803863525,
"alphanum_fraction": 0.6864988803863525,
"avg_line_length": 23.33333396911621,
"blob_id": "9d0de197be82a0deb4efd622503f053b21db9138",
"content_id": "9131b632eca7329bdbbfb9bcabecdf3b70ef9762",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 437,
"license_type": "permissive",
"max_line_length": 72,
"num_lines": 18,
"path": "/python/app.py",
"repo_name": "PWalis/Music-Segmenter",
"src_encoding": "UTF-8",
"text": "from flask import Flask\nfrom spotify_api import set_song_random, seek_back\n\napp = Flask(__name__)\n\[email protected]('/')\ndef hello_world():\n return 'Hello, world!'\n\[email protected]('/random')\ndef random_song():\n set_song_random()\n return 'You are now listening to a random song from your saved list'\n\[email protected]('/back/<seconds>')\ndef back_seconds(seconds):\n seek_back(int(seconds))\n return f'You gone back in time {seconds} seconds'"
}
] | 3 |
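The /back/<seconds> route in this repository's app.py can be smoke-tested without a browser through Flask's built-in test client. A sketch, assuming the module is importable as app and that seek_back is stubbed out so no Spotify account is contacted (the stub is mine, not part of the repository; importing app also runs spotify_api's OAuth setup, so the decouple config values must be present):

import app

app.seek_back = lambda seconds: None  # stub out the real Spotify seek

client = app.app.test_client()
response = client.get('/back/15')
print(response.status_code)             # expect 200
print(response.get_data(as_text=True))  # 'You gone back in time 15 seconds'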
pmendoza94/poke | https://github.com/pmendoza94/poke | d2ea1287d706e16091cae6dd103ab61a23d00c34 | a7d7d7d5171a84ed6dccb18b2384222724ca958e | 1bc5dc21ca761bb6261b9620eae8c129b02210fb | refs/heads/master | 2021-01-01T19:28:19.585999 | 2017-07-28T01:55:03 | 2017-07-28T01:55:03 | 98,596,432 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6407563090324402,
"alphanum_fraction": 0.6418067216873169,
"avg_line_length": 38.66666793823242,
"blob_id": "7dcdf7af833c462a9cc0730d6a6423b07c8c2fb0",
"content_id": "7aff5ffedf3dbe3a990025e00a2178b34d75c76e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 952,
"license_type": "no_license",
"max_line_length": 142,
"num_lines": 24,
"path": "/apps/second_app/models.py",
"repo_name": "pmendoza94/poke",
"src_encoding": "UTF-8",
"text": "from __future__ import unicode_literals\nfrom django.db import models\nfrom ..first_app.models import User\nfrom django.contrib import messages\n\n# Create your models here.\nclass PokeManager(models.Manager):\n def createPoker(self, postData):\n results = {'status': True, 'poke': None}\n poke = []\n if results['status'] == True:\n print postData(['user_id']), '***********'\n userInt = int(postData['user_id'])\n user = User.objects.get(id = userInt)\n results['poke'] = Poke.objects.create(name = postData['name'], alias = postData['alias'], email = postData['email'], poker = user)\n return results\n\nclass Poke(models.Model):\n poke = models.ManyToManyField(User, related_name = 'poke')\n number_of_pokes = models.IntegerField(default = 0)\n created_at = models.DateTimeField(auto_now_add = True)\n updated_at = models.DateTimeField(auto_now = True)\n\n objects = PokeManager()\n"
},
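The manager above passes name, alias, and email to Poke.objects.create, but the Poke model defines only a many-to-many poke field and a number_of_pokes counter, so that call cannot succeed. A sketch of a create helper consistent with the model as written (the method name and the intent are my assumptions; it presumes the same imports as the file above):

# Hypothetical repair sketch for PokeManager, matching the Poke model's actual fields.
class PokeManager(models.Manager):
    def create_poke(self, post_data):
        user = User.objects.get(id=int(post_data['user_id']))
        poke = self.create(number_of_pokes=1)  # counter starts at the first poke
        poke.poke.add(user)                    # M2M links are attached after save
        return {'status': True, 'poke': poke}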
{
"alpha_fraction": 0.5064516067504883,
"alphanum_fraction": 0.5645161271095276,
"avg_line_length": 23.799999237060547,
"blob_id": "54d7c528b04a4bf5a65af6b2fe1361e39dc36c16",
"content_id": "ab343d03ec35a4d3db0417cd9e586c5c697e1b72",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 620,
"license_type": "no_license",
"max_line_length": 83,
"num_lines": 25,
"path": "/apps/second_app/migrations/0002_auto_20170728_0122.py",
"repo_name": "pmendoza94/poke",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.10 on 2017-07-28 01:22\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('first_app', '0002_auto_20170727_2148'),\n ('second_app', '0001_initial'),\n ]\n\n operations = [\n migrations.RemoveField(\n model_name='poke',\n name='user_id',\n ),\n migrations.AddField(\n model_name='poke',\n name='poke',\n field=models.ManyToManyField(related_name='poke', to='first_app.User'),\n ),\n ]\n"
},
{
"alpha_fraction": 0.5853253602981567,
"alphanum_fraction": 0.5936254858970642,
"avg_line_length": 37.61538314819336,
"blob_id": "86c4bda6dd83137db85edf3ef4f0f89fa17c5bee",
"content_id": "47c44cbad5b9e77b8d70cc11b49663bec23ddc0d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3012,
"license_type": "no_license",
"max_line_length": 147,
"num_lines": 78,
"path": "/apps/first_app/models.py",
"repo_name": "pmendoza94/poke",
"src_encoding": "UTF-8",
"text": "from __future__ import unicode_literals\nfrom django.db import models\nimport re\nimport bcrypt\n# Create your models here.\nclass UserManager(models.Manager):\n def registerVal(self, postData):\n results = {'status': True, 'errors': []}\n user = []\n\n if not postData['name'] or len(postData['name']) < 3:\n results['status'] = False\n results['errors'].append('Name needs to be longer than 2 characters.')\n\n if not postData['alias'] or len(postData['alias']) < 3:\n results['status'] = False\n results['errors'].append('Alias name needs to be longer than 2 characters.')\n\n if not postData['email'] or len(postData['email']) < 5 or not re.match(r'[^@]+@[^@]+\\.[^@]+', postData['email']):\n results['status'] = False\n results['errors'].append('Email is invalid.')\n\n if not postData['password'] or len(postData['password']) < 8 != postData['con_password']:\n results['status'] = False\n results['errors'].append('Password does not match. Please try again.')\n\n if results['status'] == True:\n user = User.objects.filter(email = postData['email'])\n\n if len(user) != 0:\n results['status'] = False\n results['errors'].append('User already exists. Please try another email.')\n\n print results['status']\n print results['errors']\n return results\n\n\n def loginVal(self, postData):\n results = {'status':True, 'errors': [], 'user': None}\n if len(postData['email']) < 3:\n results['status'] = False\n results['errors'].append('Something went wrong. Double check everything.')\n\n else:\n user = User.objects.filter(email = postData['email'])\n\n if len(user) <= 0:\n results['status'] = False\n results['errors'].append('Something went wrong. Double check everything.')\n\n elif len(postData['password']) < 8 or postData['password'] != user[0].password:\n results['status'] = False\n results['errors'].append('Something went wrong. Double check everything.')\n\n else:\n results['user'] = user[0]\n\n print results['status']\n print results['errors']\n return results\n\n def createUser(self, postData):\n p_hash = bcrypt.hashpw(postData['password'].encode(), bcrypt.gensalt())\n user = User.objects. create(name = postData['name'], alias = postData['alias'], email = postData['email'], password = postData['password'])\n return user\n\n\nclass User(models.Model):\n name = models.CharField(max_length = 100)\n alias = models.CharField(max_length = 100)\n email = models.CharField(max_length = 100)\n password = models.CharField(max_length = 100)\n birth_date = models.DateField(max_length = 8, null = True)\n created_at = models.DateTimeField(auto_now_add = True)\n updated_at = models.DateTimeField(auto_now = True)\n\n objects = UserManager()\n"
},
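Two lines in the file above deserve a second look. `len(postData['password']) < 8 != postData['con_password']` is a chained comparison, so Python evaluates it as len(...) < 8 and 8 != con_password and the two password fields are never compared to each other; and createUser computes p_hash with bcrypt but then stores the plaintext password. A sketch of the validation as presumably intended (the helper name is illustrative; a matching bcrypt store/verify sketch appears after user.py of the database_systems_project record later in this file, which repeats the unused-hash pattern):

# Sketch of the presumably intended password check, not the author's code.
def password_errors(post_data):
    errors = []
    if not post_data['password'] or len(post_data['password']) < 8:
        errors.append('Password needs to be at least 8 characters.')
    elif post_data['password'] != post_data['con_password']:
        errors.append('Password does not match. Please try again.')
    return errors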
{
"alpha_fraction": 0.63080894947052,
"alphanum_fraction": 0.63080894947052,
"avg_line_length": 28.794872283935547,
"blob_id": "8424550b1c4ef67637df3829f6d5869da16618f0",
"content_id": "28c585a0e473f4d17447519dbbb1c9c80b2a8d52",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1162,
"license_type": "no_license",
"max_line_length": 59,
"num_lines": 39,
"path": "/apps/first_app/views.py",
"repo_name": "pmendoza94/poke",
"src_encoding": "UTF-8",
"text": "from django.shortcuts import render, redirect, HttpResponse\nfrom django.contrib import messages\nfrom .models import User\n\n\n# Create your views here.\ndef index(request):\n print '*** index ***'\n # User.objects.all().delete()\n return render(request, 'first_app/index.html')\n\ndef register(request):\n print '*** register ***'\n results = User.objects.registerVal(request.POST)\n if results['status'] == False:\n for error in results['errors']:\n messages.error(request, error)\n else:\n user = User.objects.createUser(request.POST)\n messages.success(request, 'User has been created!')\n\n return redirect('/')\n\ndef login(request):\n print '*** login ***'\n results = User.objects.loginVal(request.POST)\n if results['status'] == False:\n for error in results['errors']:\n messages.error(request, error)\n return redirect('/')\n else:\n request.session['name'] = results['user'].name\n request.session['user_id'] = results['user'].id\n return redirect('/pokes')\n\ndef logout(request):\n print '*** logout ***'\n request.session['user_id']=None\n return redirect('/')\n"
},
{
"alpha_fraction": 0.6600000262260437,
"alphanum_fraction": 0.6600000262260437,
"avg_line_length": 24,
"blob_id": "accba465c55031299dd1ef16ef77eed778a7d8b8",
"content_id": "8b48e7f25509ca0a520e4dfdaf24760118235fb5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 200,
"license_type": "no_license",
"max_line_length": 53,
"num_lines": 8,
"path": "/apps/second_app/urls.py",
"repo_name": "pmendoza94/poke",
"src_encoding": "UTF-8",
"text": "from django.conf.urls import url\nfrom . import views\n\nurlpatterns = [\n url(r'^pokes$', views.pokes),\n url(r'^poke_proccesser$', views.poke_proccesser),\n url(r'^add_poke$', views.add_poke),\n]\n"
},
{
"alpha_fraction": 0.5797752737998962,
"alphanum_fraction": 0.582022488117218,
"avg_line_length": 27.404254913330078,
"blob_id": "5fd7b2084c30c39facf322007a397edb29a3359c",
"content_id": "7e0b8168b2e9d8265e27def92e2cf3e463a75995",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1335,
"license_type": "no_license",
"max_line_length": 125,
"num_lines": 47,
"path": "/apps/second_app/views.py",
"repo_name": "pmendoza94/poke",
"src_encoding": "UTF-8",
"text": "from django.shortcuts import render, redirect, HttpResponse\nfrom ..first_app.models import User\nfrom .models import Poke\n\n# Create your views here.\ndef pokes(request):\n print '*** pokes ***'\n if not request.session['user_id']:\n return redirect('/')\n try:\n request.session['poke']\n except:\n request.session['poke'] = 0\n\n try:\n request.session['activities']\n except:\n request.session['activities'] = []\n context = {\n 'users': User.objects.exclude(id = request.session['user_id']),\n 'pokes': Poke.objects.all(),\n }\n return render(request, 'second_app/poke.html', context)\n\ndef poke_proccesser(request):\n print '*** poke_proccesser ***'\n if not request.session['user_id']:\n return redirect('/')\n\n try:\n request.session['activities'].insert(0,{{request.session.name}} + 'poked you' + str(poke.number_of_pokes) + 'times.')\n print request.session['activities']\n return redirect('/pokes')\n except:\n return redirect('/pokes')\n\ndef add_poke(request):\n print '********** add_poke *******'\n try:\n print '***********'\n poke = Poke.objects.get(id = id)\n poke.number_of_pokes += 1\n poke.save()\n print poke\n return redirect('/pokes')\n except:\n return redirect('/pokes')\n"
},
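Both handlers above always fall into their except branches: poke_proccesser references a poke variable that is never bound (and, before the fix above, embedded Jinja-style braces in Python source), and add_poke looks up Poke.objects.get(id = id) where id is only the Python builtin. A sketch of what add_poke likely intends, assuming the poke's primary key arrives as a GET parameter named id (that parameter name is my guess, not the repository's):

# Hypothetical repair sketch for add_poke; assumes the same imports as the file above.
def add_poke(request):
    poke_id = request.GET.get('id')  # assumed parameter name
    if poke_id is None:
        return redirect('/pokes')
    poke = Poke.objects.get(id=int(poke_id))
    poke.number_of_pokes += 1
    poke.save()
    return redirect('/pokes')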
{
"alpha_fraction": 0.5442478060722351,
"alphanum_fraction": 0.5907079577445984,
"avg_line_length": 21.600000381469727,
"blob_id": "e5a8c1e99f69a665043e98f8cdac0843e6bd8dd1",
"content_id": "e133dc16ce2f647922665ad76efaffe914c35f17",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 452,
"license_type": "no_license",
"max_line_length": 60,
"num_lines": 20,
"path": "/apps/first_app/migrations/0002_auto_20170727_2148.py",
"repo_name": "pmendoza94/poke",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.10 on 2017-07-27 21:48\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('first_app', '0001_initial'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='user',\n name='birth_date',\n field=models.DateField(max_length=8, null=True),\n ),\n ]\n"
}
] | 7 |
turtlecode/RadioButtonDesktopApplication-Python-Pyqt5 | https://github.com/turtlecode/RadioButtonDesktopApplication-Python-Pyqt5 | ff19e93c5afe73a6291d51d6caf4e07d461079db | 51d090166999b9d7c23c32936f75ba9647cc5a3e | 35c00601a68d6550d2b1170a913c347404faa823 | refs/heads/main | 2023-07-01T08:01:07.256454 | 2021-07-31T15:58:44 | 2021-07-31T15:58:44 | 391,400,953 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6659478545188904,
"alphanum_fraction": 0.6939815878868103,
"avg_line_length": 51.58762741088867,
"blob_id": "5ae45d788a76da51db816e06e58624624bc4d535",
"content_id": "3eedaa091491d2fdfc2ba6168c45e7375971e23a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5101,
"license_type": "no_license",
"max_line_length": 75,
"num_lines": 97,
"path": "/radiobutton.py",
"repo_name": "turtlecode/RadioButtonDesktopApplication-Python-Pyqt5",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n\n# Form implementation generated from reading ui file 'radiobutton.ui'\n#\n# Created by: PyQt5 UI code generator 5.15.4\n#\n# WARNING: Any manual changes made to this file will be lost when pyuic5 is\n# run again. Do not edit this file unless you know what you are doing.\n\n\nfrom PyQt5 import QtCore, QtGui, QtWidgets\n\n\nclass Ui_MainWindow(object):\n def setupUi(self, MainWindow):\n MainWindow.setObjectName(\"MainWindow\")\n MainWindow.resize(424, 800)\n self.centralwidget = QtWidgets.QWidget(MainWindow)\n self.centralwidget.setObjectName(\"centralwidget\")\n self.country_box = QtWidgets.QGroupBox(self.centralwidget)\n self.country_box.setGeometry(QtCore.QRect(70, 80, 141, 231))\n self.country_box.setObjectName(\"country_box\")\n self.gridLayoutWidget = QtWidgets.QWidget(self.country_box)\n self.gridLayoutWidget.setGeometry(QtCore.QRect(20, 20, 101, 191))\n self.gridLayoutWidget.setObjectName(\"gridLayoutWidget\")\n self.gridLayout = QtWidgets.QGridLayout(self.gridLayoutWidget)\n self.gridLayout.setContentsMargins(0, 0, 0, 0)\n self.gridLayout.setObjectName(\"gridLayout\")\n self.england = QtWidgets.QRadioButton(self.gridLayoutWidget)\n self.england.setObjectName(\"england\")\n self.gridLayout.addWidget(self.england, 1, 0, 1, 1)\n self.turkey = QtWidgets.QRadioButton(self.gridLayoutWidget)\n self.turkey.setObjectName(\"turkey\")\n self.gridLayout.addWidget(self.turkey, 0, 0, 1, 1)\n self.france = QtWidgets.QRadioButton(self.gridLayoutWidget)\n self.france.setObjectName(\"france\")\n self.gridLayout.addWidget(self.france, 3, 0, 1, 1)\n self.germany = QtWidgets.QRadioButton(self.gridLayoutWidget)\n self.germany.setObjectName(\"germany\")\n self.gridLayout.addWidget(self.germany, 2, 0, 1, 1)\n self.job_group = QtWidgets.QGroupBox(self.centralwidget)\n self.job_group.setGeometry(QtCore.QRect(230, 80, 141, 231))\n self.job_group.setObjectName(\"job_group\")\n self.gridLayoutWidget_2 = QtWidgets.QWidget(self.job_group)\n self.gridLayoutWidget_2.setGeometry(QtCore.QRect(20, 20, 101, 191))\n self.gridLayoutWidget_2.setObjectName(\"gridLayoutWidget_2\")\n self.gridLayout_2 = QtWidgets.QGridLayout(self.gridLayoutWidget_2)\n self.gridLayout_2.setContentsMargins(0, 0, 0, 0)\n self.gridLayout_2.setObjectName(\"gridLayout_2\")\n self.police = QtWidgets.QRadioButton(self.gridLayoutWidget_2)\n self.police.setObjectName(\"police\")\n self.gridLayout_2.addWidget(self.police, 3, 0, 1, 1)\n self.carpenter = QtWidgets.QRadioButton(self.gridLayoutWidget_2)\n self.carpenter.setObjectName(\"carpenter\")\n self.gridLayout_2.addWidget(self.carpenter, 0, 0, 1, 1)\n self.doctor = QtWidgets.QRadioButton(self.gridLayoutWidget_2)\n self.doctor.setObjectName(\"doctor\")\n self.gridLayout_2.addWidget(self.doctor, 1, 0, 1, 1)\n self.teacher = QtWidgets.QRadioButton(self.gridLayoutWidget_2)\n self.teacher.setObjectName(\"teacher\")\n self.gridLayout_2.addWidget(self.teacher, 2, 0, 1, 1)\n self.get_selected = QtWidgets.QPushButton(self.centralwidget)\n self.get_selected.setGeometry(QtCore.QRect(150, 330, 121, 51))\n self.get_selected.setObjectName(\"get_selected\")\n self.label = QtWidgets.QLabel(self.centralwidget)\n self.label.setGeometry(QtCore.QRect(20, 390, 381, 111))\n font = QtGui.QFont()\n font.setPointSize(28)\n self.label.setFont(font)\n self.label.setObjectName(\"label\")\n MainWindow.setCentralWidget(self.centralwidget)\n self.menubar = QtWidgets.QMenuBar(MainWindow)\n self.menubar.setGeometry(QtCore.QRect(0, 0, 424, 21))\n self.menubar.setObjectName(\"menubar\")\n 
MainWindow.setMenuBar(self.menubar)\n self.statusbar = QtWidgets.QStatusBar(MainWindow)\n self.statusbar.setObjectName(\"statusbar\")\n MainWindow.setStatusBar(self.statusbar)\n\n self.retranslateUi(MainWindow)\n QtCore.QMetaObject.connectSlotsByName(MainWindow)\n\n def retranslateUi(self, MainWindow):\n _translate = QtCore.QCoreApplication.translate\n MainWindow.setWindowTitle(_translate(\"MainWindow\", \"MainWindow\"))\n self.country_box.setTitle(_translate(\"MainWindow\", \"Country\"))\n self.england.setText(_translate(\"MainWindow\", \"England\"))\n self.turkey.setText(_translate(\"MainWindow\", \"Turkey\"))\n self.france.setText(_translate(\"MainWindow\", \"France\"))\n self.germany.setText(_translate(\"MainWindow\", \"Germany\"))\n self.job_group.setTitle(_translate(\"MainWindow\", \"Job\"))\n self.police.setText(_translate(\"MainWindow\", \"Police\"))\n self.carpenter.setText(_translate(\"MainWindow\", \"Carpenter\"))\n self.doctor.setText(_translate(\"MainWindow\", \"Doctor\"))\n self.teacher.setText(_translate(\"MainWindow\", \"Teacher\"))\n self.get_selected.setText(_translate(\"MainWindow\", \"Get\"))\n self.label.setText(_translate(\"MainWindow\", \"TextLabel\"))\n"
},
{
"alpha_fraction": 0.6173669695854187,
"alphanum_fraction": 0.6179271936416626,
"avg_line_length": 29.79310417175293,
"blob_id": "2ef671f0aee5171a666c5e8ca97d1394602ae317",
"content_id": "a874b6f95ea48d97ee8881956f02b26fd7f6d2cc",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1785,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 58,
"path": "/radiobutton_lesson8.py",
"repo_name": "turtlecode/RadioButtonDesktopApplication-Python-Pyqt5",
"src_encoding": "UTF-8",
"text": "import sys\nfrom PyQt5 import QtWidgets\nfrom radiobutton import Ui_MainWindow\n\nclass my_app(QtWidgets.QMainWindow):\n def __init__(self):\n super(my_app, self).__init__()\n self.ui = Ui_MainWindow()\n self.ui.setupUi(self)\n\n self.ui.turkey.toggled.connect(self.country_onclick)\n self.ui.england.toggled.connect(self.country_onclick)\n self.ui.germany.toggled.connect(self.country_onclick)\n self.ui.france.toggled.connect(self.country_onclick)\n\n self.ui.carpenter.toggled.connect(self.job_onclick)\n self.ui.doctor.toggled.connect(self.job_onclick)\n self.ui.teacher.toggled.connect(self.job_onclick)\n self.ui.police.toggled.connect(self.job_onclick)\n\n self.ui.get_selected.clicked.connect(self.get_selected)\n\n\n def job_onclick(self):\n rb = self.sender()\n if rb.isChecked():\n print(rb.text())\n\n\n def country_onclick(self):\n rb = self.sender()\n if rb.isChecked():\n print(rb.text())\n\n def get_selected(self):\n text_country = ''\n text_job = ''\n full_text = ''\n items_country = self.ui.country_box.findChildren(QtWidgets.QRadioButton)\n for item_country in items_country:\n if item_country.isChecked():\n text_country = \"You are from \" + item_country.text() + '\\n'\n items_job = self.ui.job_group.findChildren(QtWidgets.QRadioButton)\n for item_job in items_job:\n if item_job.isChecked():\n text_job = \"Your job is \" + item_job.text()\n full_text = text_country + text_job\n\n self.ui.label.setText(full_text)\n\n\ndef create_app():\n app = QtWidgets.QApplication(sys.argv)\n win = my_app()\n win.show()\n sys.exit(app.exec_())\n\ncreate_app()"
}
] | 2 |
aidarbek/database_systems_project | https://github.com/aidarbek/database_systems_project | 90f0189912c5b6b2e2d981bd6d25621369b67bc8 | 8544b87b8eafd36c417ac0881e5d0c352407ab94 | e1a6c7b38ccb2012562a07279530d058711fa116 | refs/heads/master | 2021-08-19T01:54:06.865115 | 2017-11-24T11:30:55 | 2017-11-24T11:30:55 | 111,911,017 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6072507500648499,
"alphanum_fraction": 0.6465256810188293,
"avg_line_length": 16.864864349365234,
"blob_id": "70cba46eac1d104e6bbc73186a649865185874ed",
"content_id": "3dba6d0ed0923e3b65f97b04f8469e995393ad59",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 662,
"license_type": "no_license",
"max_line_length": 70,
"num_lines": 37,
"path": "/benchmark.py",
"repo_name": "aidarbek/database_systems_project",
"src_encoding": "UTF-8",
"text": "import user\n\nimport time\n\nimport neo\n\nimport tweet\n\nsum_neo = 0.0\nsum_sql = 0.0\n\nstart_id = 1\nend_id = 1001\n\nn = (end_id - start_id) * 1.0\n\nfor i in range(start_id, end_id):\n\thandle = \"a\" + str(i)\n\n\t# Benchmark SQL\n\tstart_sql = time.clock()\n\tdata = tweet.get(handle = handle, page=\"feed\", limit = 10000)\n\tsql_time = time.clock() - start_sql\n\n\t# Benchmark Neo4j\n\tstart_neo = time.clock()\n\tneo.get_feed(handle)\n\tneo_time = time.clock() - start_neo\n\n\tif i == start_id:\n\t\tcontinue\n\n\tsum_sql += sql_time\n\tsum_neo += neo_time\n\nprint(\"SQL average feed time for 1000 records: \" + str(sum_sql / n))\nprint(\"Neo4j average feed time for 1000 records: \" + str(sum_neo / n))\n\n"
},
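Two measurement details in benchmark.py above skew the reported averages: the loop collects end_id - start_id - 1 samples (the first iteration is skipped as a warm-up) yet divides by n = end_id - start_id, and time.clock() is deprecated and was removed in Python 3.8. A sketch of the same measurement with a denominator that matches the samples kept (fetch_fn stands in for tweet.get or neo.get_feed):

import time

def average_seconds(fetch_fn, handles, warmup=1):
    '''Average wall-clock seconds per call, discarding warm-up runs.'''
    samples = []
    for i, handle in enumerate(handles):
        start = time.perf_counter()
        fetch_fn(handle)
        elapsed = time.perf_counter() - start
        if i >= warmup:  # keep the original intent of skipping the first run
            samples.append(elapsed)
    return sum(samples) / len(samples)  # denominator matches samples kept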
{
"alpha_fraction": 0.6543008089065552,
"alphanum_fraction": 0.6584833860397339,
"avg_line_length": 30.4228572845459,
"blob_id": "827d5f49ea12cdb12cf56f7d4262c4b6a417611c",
"content_id": "2def16b241d10709b4cc21520d953b1f29bac658",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5499,
"license_type": "no_license",
"max_line_length": 182,
"num_lines": 175,
"path": "/tweet.py",
"repo_name": "aidarbek/database_systems_project",
"src_encoding": "UTF-8",
"text": "\n#!/usr/bin/python\n\nimport MySQLdb\nimport config\n\ndef isOwner(handle, tweet_id):\n\tdb = MySQLdb.connect(config.DB[\"host\"], config.DB[\"user\"], config.DB[\"password\"], config.DB[\"db\"])\n\n\tcursor = db.cursor()\n\n\tcursor.execute(\"SELECT Creator FROM tweet WHERE tweet_id={} AND Creator='{}'\".format(tweet_id, handle))\n\t\t\n\tresult = cursor.fetchone()\n\t\t\n\tdb.close()\n\tif result is None:\n\t\treturn False\t\n\treturn True\n\ndef add(handle, text, files):\n\tdb = MySQLdb.connect(config.DB[\"host\"], config.DB[\"user\"], config.DB[\"password\"], config.DB[\"db\"])\n\n\tcursor = db.cursor()\n\n\ttry:\n\t\tcursor.execute(\"INSERT INTO tweet(Creator, content) VALUES('{}', '{}')\".format(handle, text))\n\t\ttweet_id = cursor.lastrowid\n\t\tdb.commit()\n\texcept Exception as e:\n\t\tprint(str(e))\n\t\tdb.close()\n\t\treturn None\n\ttry:\n\t\tif len(files) > 0:\n\t\t\tfor f in files:\n\t\t\t\tcursor.execute(\"INSERT INTO attach_list(tweet, file) VALUES({}, {})\".format(tweet_id, f))\n\t\t\tdb.commit()\n\texcept Exception as e:\n\t\tprint(str(e))\n\t\tdb.close()\n\t\treturn None\n\tdb.close()\n\treturn tweet_id\n\ndef likes(user, tweet):\n\tdb = MySQLdb.connect(config.DB[\"host\"], config.DB[\"user\"], config.DB[\"password\"], config.DB[\"db\"])\n\n\tcursor = db.cursor()\n\n\ttry:\n\t\tcursor.execute(\"INSERT INTO likes(handle, tweet_id) VALUES('{}', {})\".format(user, tweet))\n\t\tdb.commit()\n\texcept:\n\t\tdb.close()\n\t\traise Exception(\"Couldn't insert\")\n\tdb.close()\ndef delete(tweet):\n\tdb = MySQLdb.connect(config.DB[\"host\"], config.DB[\"user\"], config.DB[\"password\"], config.DB[\"db\"])\n\n\tcursor = db.cursor()\n\n\ttry:\n\t\tcursor.execute(\"DELETE FROM attach_list WHERE tweet = {}\".format(tweet))\n\t\tcursor.execute(\"DELETE FROM likes WHERE tweet_id = {}\".format(tweet))\n\t\tcursor.execute(\"DELETE FROM tweet WHERE tweet_id = {}\".format(tweet))\n\t\tdb.commit()\n\texcept Exception as e:\n\t\tprint(str(e))\n\t\traise Exception(\"Couldn't delete\")\n\tdb.close()\ndef unlikes(user, tweet):\n\tdb = MySQLdb.connect(config.DB[\"host\"], config.DB[\"user\"], config.DB[\"password\"], config.DB[\"db\"])\n\n\tcursor = db.cursor()\n\ttry:\n\t\tcursor.execute(\"DELETE FROM likes WHERE handle = '{}' AND tweet_id = {}\".format(user, tweet))\n\t\tdb.commit()\n\texcept:\n\t\tdb.close()\n\t\traise Exception(\"Couldn't unlike\")\n\tdb.close()\ndef get(handle, page=\"feed\", limit = 10, last = None, current_user = None):\n\t\"\"\"\n\t\thandle - handle of the user. 
Depending on \"page\" parameter it can be:\n\t\t\t1) Handle of user, whose feed we would like to get\n\t\t\t2) Handle of user, whose tweets we would like to get\n\t\tpage - (\"feed\" or \"user\" or \"search\") - get user feed, user tweets or search respectively\n\t\tlimit - limit number of tweets\n\t\tlast - ID of the last tweet\n\t\"\"\"\n\n\tdb = MySQLdb.connect(config.DB[\"host\"], config.DB[\"user\"], config.DB[\"password\"], config.DB[\"db\"])\n\n\tcursor = db.cursor()\n \t\n \tadditional_condition = \"\"\n\n \tif last is not None:\n \t\t# Necessary to continue the tweet\n \t\tadditional_condition = \"tweet.tweet_id < {} AND \".format(last)\n\n\tif page == \"feed\":\n\t\t# User news feed\n\t\tcursor.execute(\"\"\"SELECT tweet.tweet_id,content, Created, Creator, \n\t\t\tCOUNT(likes.handle) FROM tweet \n\t\t\t\tLEFT JOIN likes ON likes.tweet_id = tweet.tweet_id \n\t\t\tWHERE {} (\n\t\t\t\t\ttweet.Creator IN \n\t\t\t\t\t(SELECT to_user FROM follows WHERE from_user='{}') \n\t\t\t\t\t\tOR \n\t\t\t\t\ttweet.Creator = '{}'\n\t\t\t\t) \n\t\t\tGROUP BY tweet.tweet_id\n\t\t\tORDER BY tweet.tweet_id DESC LIMIT {}\"\"\".format(additional_condition, handle, handle, limit))\n\telif page == \"user\":\n\t\t# Tweets of particular user only\n\t\tcursor.execute(\"\"\"SELECT tweet.tweet_id,content, Created, Creator, \n\t\t\tCOUNT(likes.handle) FROM tweet \n\t\t\t\tLEFT JOIN likes ON likes.tweet_id = tweet.tweet_id \n\t\t\tWHERE {} Creator = '{}' \n\t\t\tGROUP BY tweet.tweet_id\n\t\t\tORDER BY tweet_id DESC LIMIT {}\"\"\".format(additional_condition, handle, limit))\n\telif page == \"search\":\n\t\t# Tweets of particular user only\n\t\tcursor.execute(\"\"\"SELECT tweet.tweet_id,content, Created, Creator, \n\t\t\tCOUNT(likes.handle) FROM tweet \n\t\t\t\tLEFT JOIN likes ON likes.tweet_id = tweet.tweet_id \n\t\t\tWHERE {} content LIKE '%{}%' \n\t\t\tGROUP BY tweet.tweet_id\n\t\t\tORDER BY tweet_id DESC LIMIT {}\"\"\".format(additional_condition, handle, limit))\n\t\n\tresults = cursor.fetchall()\n\tdata = []\n\tfor row in results:\n\t\trow_data = {}\n\t\trow_data[\"tweet_id\"] = row[0]\n\t\trow_data[\"content\"] = row[1]\n\t\trow_data[\"Created\"] = row[2].strftime('%Y-%m-%d %H:%M:%S')\n\t\trow_data[\"Creator\"] = row[3]\n\t\trow_data[\"likes\"] = row[4]\n\n\t\tcursor.execute(\"SELECT file.file_url, file.file_type FROM file INNER JOIN attach_list ON attach_list.file = file.file_id WHERE attach_list.tweet = {}\".format(row_data[\"tweet_id\"]))\n\t\tfiles = cursor.fetchall()\n\t\trow_data[\"files\"] = [{\"file_url\": f[0], \"file_type\": f[1]} for f in files]\n\t\trow_data[\"liked\"] = False\n\n\t\tif current_user is not None:\n\t\t\tcursor.execute(\"SELECT * FROM likes WHERE tweet_id = {} AND handle = '{}' \".format(row_data[\"tweet_id\"], current_user))\n\t\t\tliked = cursor.fetchone()\n\t\t\tif liked is not None:\n\t\t\t\trow_data[\"liked\"] = True\n\t\tdata.append(row_data)\n\tdb.close()\n\treturn data\n\ndef addFile(data):\n\tdb = MySQLdb.connect(config.DB[\"host\"], config.DB[\"user\"], config.DB[\"password\"], config.DB[\"db\"])\n\n\tcursor = db.cursor()\n\n\ttry:\n\t\tcursor.execute(\"INSERT INTO file(file_url, file_type) VALUES('{}', '{}')\".format(data[\"file_url\"], data[\"file_type\"]))\n\t\tdb.commit()\n\t\tcursor.execute(\"SELECT file_id FROM file ORDER BY file_id DESC LIMIT 1\")\n\t\tdata[\"file_id\"] = cursor.fetchone()[0]\n\texcept Exception as e:\n\t\traise e\n\tdb.close()\n\treturn data\n\nif __name__ == '__main__':\n\t#add(\"aidarbek1\", \"New tweet\", 
[2,3])\n\tdelete(1139)\n\t#print(get(\"hello\", \"search\", 10))\n\tpass"
},
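Every query in tweet.py above interpolates user input with str.format, which breaks on quotes inside tweet text and leaves the SQL open to injection. MySQLdb escapes values itself when they are passed separately with %s placeholders; a minimal sketch of add rewritten that way (same table and columns as above):

import MySQLdb
import config

def add(handle, text):
    db = MySQLdb.connect(config.DB["host"], config.DB["user"],
                         config.DB["password"], config.DB["db"])
    cursor = db.cursor()
    # Values travel separately from the SQL, so quotes in `text` are escaped safely.
    cursor.execute("INSERT INTO tweet(Creator, content) VALUES(%s, %s)",
                   (handle, text))
    tweet_id = cursor.lastrowid
    db.commit()
    db.close()
    return tweet_id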
{
"alpha_fraction": 0.6206185817718506,
"alphanum_fraction": 0.6489690542221069,
"avg_line_length": 20.065217971801758,
"blob_id": "4fc4d24d8d4d2c3f482eeddf4594db31ab7277ae",
"content_id": "0e8148e8aab9cb8b607e00d7acf8c82a1659b283",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1940,
"license_type": "no_license",
"max_line_length": 84,
"num_lines": 92,
"path": "/fill.py",
"repo_name": "aidarbek/database_systems_project",
"src_encoding": "UTF-8",
"text": "import user\n\nimport time\n\nimport neo\n\nimport tweet\n\nsum_neo = 0.0\nsum_sql = 0.0\n\nstart_id = 1\nend_id = 1001\n\nn = (end_id - start_id) * 1.0\n\nfor i in range(start_id, end_id):\n\thandle = \"a\" + str(i)\n\n\t# Benchmark SQL\n\tstart_sql = time.clock()\n\tuser.register(handle, handle, handle, handle, handle)\n\tsql_time = time.clock() - start_sql\n\n\t# Benchmark Neo4j\n\tstart_neo = time.clock()\n\tneo.insert_user(handle)\n\tneo_time = time.clock() - start_neo\n\n\tif i == start_id:\n\t\tcontinue\n\n\tsum_sql += sql_time\n\tsum_neo += neo_time\n\nprint(\"SQL average user adding time of 1000 records: \" + str(sum_sql / n))\nprint(\"Neo4j average user adding time of 1000 records: \" + str(sum_neo / n))\nsum_neo = 0.0\nsum_sql = 0.0\nfor i in range(start_id, end_id):\n\tfor j in range(start_id, end_id):\n\n\t\tif i == j:\n\t\t\tcontinue\n\t\thandle1 = \"a\" + str(i)\n\t\thandle2 = \"a\" + str(j)\n\n\t\t# Benchmark SQL\n\t\tstart_sql = time.clock()\n\t\tuser.follow(handle1, handle2)\n\t\tsql_time = time.clock() - start_sql\n\n\t\t# Benchmark Neo4j\n\t\tstart_neo = time.clock()\n\t\tneo.insert_relation(handle1, handle2)\n\t\tneo_time = time.clock() - start_neo\n\n\t\tif i == start_id and j == start_id:\n\t\t\tcontinue\n\n\t\tsum_sql += sql_time\n\t\tsum_neo += neo_time\n\nprint(\"SQL average relationship adding time of 1000 records: \" + str(sum_sql / n))\nprint(\"Neo4j average relationship adding time of 1000 records: \" + str(sum_neo / n))\n\nsum_neo = 0.0\nsum_sql = 0.0\n\nfor i in range(start_id, end_id):\n\thandle = \"a\" + str(i)\n\ttext = \"What a good day!\"\n\ttweet_id = i\n\n\t# Benchmark SQL\n\tstart_sql = time.clock()\n\ttweet.add(handle, text, \"\")\n\tsql_time = time.clock() - start_sql\n\n\t# Benchmark Neo4j\n\tstart_neo = time.clock()\n\tneo.add_tweet(handle, text, tweet_id)\n\tneo_time = time.clock() - start_neo\n\t\n\tif i == start_id:\n\t\tcontinue\n\n\tsum_sql += sql_time\n\tsum_neo += neo_time\n\nprint(\"SQL average tweet adding time of 1000 records: \" + str(sum_sql / n))\nprint(\"Neo4j average tweet adding time of 1000 records: \" + str(sum_neo / n))\n\n\n"
},
{
"alpha_fraction": 0.488095223903656,
"alphanum_fraction": 0.488095223903656,
"avg_line_length": 13,
"blob_id": "dc1e220eaaabf1fbf36b5dd2e10eb5e6ea7544c4",
"content_id": "ee98b90c1d9049ff6986d3762ba4158a7066a20b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 84,
"license_type": "no_license",
"max_line_length": 21,
"num_lines": 6,
"path": "/config.py",
"repo_name": "aidarbek/database_systems_project",
"src_encoding": "UTF-8",
"text": "\nDB = {\n\t\"host\": \"localhost\",\n\t\"user\": \"root\",\n\t\"password\": \"\",\n\t\"db\": \"db_course\"\n}"
},
{
"alpha_fraction": 0.642630398273468,
"alphanum_fraction": 0.6433106660842896,
"avg_line_length": 26.911392211914062,
"blob_id": "c78bd26c435e71d3ff804efe53ee0aab859d424b",
"content_id": "2501ed732ca62c76d67410f36960019735cbd467",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4410,
"license_type": "no_license",
"max_line_length": 174,
"num_lines": 158,
"path": "/user.py",
"repo_name": "aidarbek/database_systems_project",
"src_encoding": "UTF-8",
"text": "\n#!/usr/bin/python\n\nimport MySQLdb\nimport config\n\nclass User:\n\n\tdef __init__(self, login = None, password = None):\n\t\t\"\"\"\n\t\t\tClass constructor can become also authorization function\n\t\t\"\"\"\n\t\tself.handle = None\n\t\tself.auth = False\n\n\t\tif login != None:\n\t\t\tif self.__exists(login):\n\t\t\t\tself.handle = login\n\t\t\telse:\n\t\t\t\tself.handle = None\n\n\t\t\tif password != None:\n\t\t\t\tif self.__login(password):\n\t\t\t\t\tself.auth = True\n\t\t\t\telse:\n\t\t\t\t\tself.auth = False\n\n\tdef isAuth(self):\n\t\treturn self.auth\n\t\n\tdef setAuth(self):\n\t\tself.auth = True\n\n\tdef getHandle(self):\n\t\treturn self.handle\n\n\tdef getInfo(self):\n\t\tdb = MySQLdb.connect(config.DB[\"host\"], config.DB[\"user\"], config.DB[\"password\"], config.DB[\"db\"])\n\n\t\tcursor = db.cursor()\n\n\t\tcursor.execute(\"SELECT handle, full_name, email, phone_number, photo_url FROM user WHERE handle='{}'\".format(self.handle))\n\t\tdata = {}\n\t\tdata[\"handle\"], data[\"full_name\"], data[\"email\"], data[\"phone\"], data[\"photo_url\"] = cursor.fetchone()\n\t\t\n\t\tcursor.execute(\"SELECT COUNT(*) FROM follows WHERE from_user='{}'\".format(self.handle))\n\t\tdata[\"following\"] = cursor.fetchone()[0]\n\n\t\tcursor.execute(\"SELECT COUNT(*) FROM follows WHERE to_user='{}'\".format(self.handle))\n\t\tdata[\"followers\"] = cursor.fetchone()[0]\n\n\t\tdb.close()\n\t\treturn data\n\tdef followedBy(self, handle):\n\t\tif handle is None:\n\t\t\treturn False\n\t\tdb = MySQLdb.connect(config.DB[\"host\"], config.DB[\"user\"], config.DB[\"password\"], config.DB[\"db\"])\n\n\t\tcursor = db.cursor()\n\n\t\tcursor.execute(\"SELECT from_user FROM follows WHERE from_user='{}' AND to_user='{}'\".format(handle, self.handle))\n\t\tfollows = cursor.fetchone()\n\t\tdb.close()\n\t\tif follows is not None:\n\t\t\treturn True\n\t\treturn False\n\tdef update(self, columns, values):\n\t\tset_string = \"\"\n\n\t\tif len(columns) != len(values):\n\t\t\traise Exception(\"Columns do not match values!\")\n\n\t\tset_string = \",\".join([columns[i] + \"='\" + values[i]+\"'\" for i in range(len(columns))])\n\n\t\tdb = MySQLdb.connect(config.DB[\"host\"], config.DB[\"user\"], config.DB[\"password\"], config.DB[\"db\"])\n\n\t\tcursor = db.cursor()\n\t\tcursor.execute(\"UPDATE user SET {} WHERE handle = '{}'\".format(set_string, self.handle))\n\t\tdb.commit()\n\t\tdb.close()\n\tdef __exists(self, handle):\n\n\t\tdb = MySQLdb.connect(config.DB[\"host\"], config.DB[\"user\"], config.DB[\"password\"], config.DB[\"db\"])\n\n\t\tcursor = db.cursor()\n\n\t\tcursor.execute(\"SELECT handle FROM user WHERE handle='{}'\".format(handle))\n\t\t\n\t\thandle = cursor.fetchone()\n\t\t\n\t\tdb.close()\n\t\tif handle is None:\n\t\t\treturn False\n\t\treturn True\n\n\tdef register(self, handle, email, full_name, phone, password):\n\t\tdb = MySQLdb.connect(config.DB[\"host\"], config.DB[\"user\"], config.DB[\"password\"], config.DB[\"db\"])\n\n\t\tcursor = db.cursor()\n\n\t\ttry:\n\t\t\tcursor.execute(\"INSERT INTO user(handle, email, full_name, phone_number, password) VALUES('{}', '{}', '{}', '{}', '{}')\".format(handle, email, full_name, phone, password))\n\t\t\tdb.commit()\n\t\texcept:\n\t\t\tdb.close()\n\t\t\traise Exception(\"Couldn't create user\")\n\t\tdb.close()\n\n\tdef __login(self, password):\n\t\tdb = MySQLdb.connect(config.DB[\"host\"], config.DB[\"user\"], config.DB[\"password\"], config.DB[\"db\"])\n\n\t\tcursor = db.cursor()\n\n\t\tcursor.execute(\"SELECT handle FROM user WHERE handle='{}' AND 
password='{}'\".format(self.handle, password))\n\t\t\n\t\thandle = cursor.fetchone()\n\t\t\n\t\tdb.close()\n\t\tif handle is None:\n\t\t\treturn False\t\n\t\treturn True\n\n\tdef follow(self, to_user):\n\t\tdb = MySQLdb.connect(config.DB[\"host\"], config.DB[\"user\"], config.DB[\"password\"], config.DB[\"db\"])\n\n\t\tcursor = db.cursor()\n\n\t\ttry:\n\t\t\tif self.isAuth():\n\t\t\t\tcursor.execute(\"INSERT INTO follows(from_user, to_user) VALUES('{}', '{}')\".format(self.handle, to_user))\n\t\t\t\tcursor.execute(\"INSERT INTO notification(content, url, handle) VALUES('@{} user followed you', '/{}', '{}')\".format(self.handle, self.handle, to_user))\n\t\t\t\tdb.commit()\n\t\t\telse:\n\t\t\t\traise Exception(\"\")\n\t\texcept:\n\t\t\tdb.close()\n\t\t\traise Exception(\"Couldn't insert\")\n\t\tdb.close()\n\n\tdef unfollow(self, to_user):\n\t\tdb = MySQLdb.connect(config.DB[\"host\"], config.DB[\"user\"], config.DB[\"password\"], config.DB[\"db\"])\n\n\t\tcursor = db.cursor()\n\t\ttry:\n\t\t\tif self.isAuth():\n\t\t\t\tcursor.execute(\"DELETE FROM follows WHERE from_user = '{}' AND to_user = '{}'\".format(self.handle, to_user))\n\t\t\t\tdb.commit()\n\t\t\telse:\n\t\t\t\traise Exception(\"\")\n\t\texcept:\n\t\t\tdb.close()\n\t\t\traise Exception(\"Couldn't unfollow\")\n\t\tdb.close()\n\n\nif __name__ == '__main__':\n\tuser = User(\"aidarbek1\", \"qwerty\")\n\tuser.update([\"full_name\"], [\"Aidarbek Suleimenov\"])\n\tpass"
},
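As in the poke repository earlier in this file, register above computes p_hash with bcrypt and then stores the plaintext password, and __login compares plaintext inside the SQL string, so the bcrypt import does no work. A sketch of the intended pairing as standalone helpers (hash at registration, verify locally at login; the helper names are mine):

import bcrypt

def make_password_hash(plain_password):
    # Store this value in user.password instead of the plaintext.
    return bcrypt.hashpw(plain_password.encode(), bcrypt.gensalt())

def check_password(plain_password, stored_hash):
    # Fetch stored_hash by handle alone, then compare off the database.
    return bcrypt.checkpw(plain_password.encode(), stored_hash)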
{
"alpha_fraction": 0.5856031179428101,
"alphanum_fraction": 0.5946822166442871,
"avg_line_length": 26.070175170898438,
"blob_id": "632dbfd9805ab20216a6460126b0d5528853d5ff",
"content_id": "e5d21822ae0c1b6d4d3fd1c45d236349a23ae81f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1542,
"license_type": "no_license",
"max_line_length": 162,
"num_lines": 57,
"path": "/neo.py",
"repo_name": "aidarbek/database_systems_project",
"src_encoding": "UTF-8",
"text": "\"\"\"\nMATCH (a:User),(b:User)\nWHERE a.handle = 'a1' AND b.handle = 'a2'\nCREATE (a)-[r:FOLLOWS]->(b)\nRETURN r\n\"\"\"\nquery = \"\"\"\n\t\tMATCH (person {name: \"Keanu Reeves\"})-[:ACTED_IN]->(movie)<-[:ACTED_IN]-(guy)\n\t\tRETURN person.name, guy.name, movie.title;\n\t\t\"\"\"\n\n\nfrom neo4j.v1 import GraphDatabase, basic_auth\n\ndriver = GraphDatabase.driver(\"bolt://localhost:7687\", auth=basic_auth(\"neo4j\", \"neo4j\"))\n\ndef insert_user(handle):\n\tsession = driver.session()\n\tquery = \"CREATE (n:User {handle: \\\"\"+ handle + \"\\\", email: \\\"\"+ handle + \"\\\",full_name: \\\"\"+ handle + \"\\\",phone: \\\"\"+ handle + \"\\\",password: \\\"\"+ handle + \"\\\"})\"\n\tresult = session.run(query)\n\tsession.close()\n\ndef insert_relation(handle1, handle2):\n\tsession = driver.session()\n\tquery = \"\"\"\n\t\t\tMATCH (a:User),(b:User)\n\t\t\tWHERE a.handle = '\"\"\"+handle1+\"\"\"' AND b.handle = '\"\"\"+handle2+\"\"\"'\n\t\t\tCREATE (a)-[r:FOLLOWS]->(b)\n\t\t\tRETURN r\n\t\"\"\"\n\tresult = session.run(query)\n\tsession.close()\n\ndef add_tweet(handle, tweet, tweet_id):\n\tsession = driver.session()\n\t\n\tquery = \"CREATE (n:Tweet {tweet_id: \\\"\"+str(tweet_id)+\"\\\", text: \\\"\"+ tweet + \"\\\"})\"\n\tresult = session.run(query)\n\n\tquery = \"\"\"\n\t\t\tMATCH (a:User),(b:Tweet)\n\t\t\tWHERE a.handle = '\"\"\"+handle+\"\"\"' AND b.tweet_id = '\"\"\"+str(tweet_id)+\"\"\"'\n\t\t\tCREATE (a)-[r:TWEETS]->(b)\n\t\t\tRETURN r\n\t\"\"\"\n\tresult = session.run(query)\n\t\n\tsession.close()\n\ndef get_feed(handle):\n\tsession = driver.session()\n\tquery = \"\"\"\n\t\t\tMATCH (person {handle: \\\"\"\"\" +handle+ \"\"\"\\\"})-[:FOLLOWS]->(guy)-[:TWEETS]->(tweet)\n\t\t\tRETURN tweet;\n\t\"\"\"\n\tresult = session.run(query)\n\tsession.close()"
},
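The Cypher in neo.py above is assembled by string concatenation, so a quote in a handle or tweet text breaks the query. The same neo4j driver accepts a parameter dictionary as the second argument to session.run; a sketch of insert_relation using it (labels and relationship type as above; the $param syntax assumes a Neo4j 3.x server, which this bolt:// setup targets):

def insert_relation(handle1, handle2):
    session = driver.session()
    query = (
        "MATCH (a:User), (b:User) "
        "WHERE a.handle = $h1 AND b.handle = $h2 "
        "CREATE (a)-[r:FOLLOWS]->(b) RETURN r"
    )
    session.run(query, {"h1": handle1, "h2": handle2})  # values never touch the query text
    session.close()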
{
"alpha_fraction": 0.6129058599472046,
"alphanum_fraction": 0.6197461485862732,
"avg_line_length": 26.208520889282227,
"blob_id": "d681450f372c7d3d399eec980330281e42176b74",
"content_id": "a4939219779622ee556c373c836ee54334870973",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 12134,
"license_type": "no_license",
"max_line_length": 137,
"num_lines": 446,
"path": "/app.py",
"repo_name": "aidarbek/database_systems_project",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python\n# -*- coding: utf-8 -*-\nfrom flask import Flask, Response\nfrom flask import request\nfrom flask import abort, redirect, url_for\nfrom flask import render_template\nimport os\nimport json\nfrom flask import jsonify, send_from_directory\nfrom flask.ext.cors import CORS, cross_origin\nimport notification\nfrom werkzeug.routing import BaseConverter\n\nfrom user import User\nimport tweet\nfrom werkzeug.utils import secure_filename\n\nUPLOAD_FOLDER = 'uploads'\nALLOWED_EXTENSIONS = set(['png', 'jpg', 'jpeg', 'gif'])\n\napp = Flask(__name__, static_url_path='/uploads')\n\napp.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER\n\nclass RegexConverter(BaseConverter):\n\tdef __init__(self, url_map, *items):\n\t\tsuper(RegexConverter, self).__init__(url_map)\n\t\tself.regex = items[0]\napp.url_map.converters['regex'] = RegexConverter\n\napp.secret_key = \"Key\"\napp.debug = True\n\n\ndef allowed_file(filename):\n return '.' in filename and \\\n filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS\n\[email protected]('/uploads/<path>')\ndef send_img(path):\n return send_from_directory(app.config['UPLOAD_FOLDER'],\n path)\n\[email protected]('/uploads/open-iconic/font/css/<path>')\ndef send_css(path):\n return send_from_directory(app.config['UPLOAD_FOLDER'] + \"/open-iconic/font/css\",\n path)\[email protected]('/uploads/open-iconic/font/fonts/<path>')\ndef send_fonts(path):\n return send_from_directory(app.config['UPLOAD_FOLDER'] + \"/open-iconic/font/fonts\",\n path)\n\[email protected]('/upload', methods=['GET', 'POST'])\ndef upload_file():\n if request.method == 'POST':\n # check if the post request has the file part\n if 'file' not in request.files:\n flash('No file part')\n return \"Nothing\"\n file = request.files['file']\n # if user does not select file, browser also\n # submit a empty part without filename\n if file.filename == '':\n flash('No selected file')\n return \"No filename\"\n if file and allowed_file(file.filename):\n filename = secure_filename(file.filename)\n file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))\n data = {}\n data[\"file_url\"] = \"/uploads/\" + filename\n data[\"file_type\"] = file.filename.rsplit('.', 1)[1].lower()\n\n data = tweet.addFile(data)\n\n p = json.dumps(data)\n\n resp = Response(response=p,\n status=200,\n mimetype=\"text/plain\")\n return resp\n return '''\n <!doctype html>\n <title>Upload new File</title>\n <h1>Upload new File</h1>\n <form method=post enctype=multipart/form-data>\n <p><input type=file name=file>\n <input type=submit value=Upload>\n </form>\n '''\n\[email protected](\"/\", methods=['GET'])\ndef index():\n\t\n\thandle = request.cookies.get('handle')\n\tif handle is not None:\n\t\tuser = User(handle)\n\t\tuser.setAuth()\n\t\tuser_data = user.getInfo()\n\t\treturn render_template(\"feed.html\", handle = handle, full_name = user_data[\"full_name\"], email = user_data[\"email\"])\n\treturn render_template(\"index.html\")\n\[email protected](\"/search\", methods=['GET'])\ndef search_controller():\n\t\n\thandle = request.cookies.get('handle')\n\n\tsearch = request.args.get('q')\n\n\tif handle is not None:\n\t\tuser = User(handle)\n\t\tuser.setAuth()\n\t\tuser_data = user.getInfo()\n\t\treturn render_template(\"search.html\", handle = handle, full_name = user_data[\"full_name\"], email = user_data[\"email\"], search = search)\n\treturn render_template(\"index.html\")\n\[email protected](\"/\", methods=['POST'])\ndef register():\n\thandle = str(request.form[\"handle\"].encode('utf-8'))\n\temail = 
str(request.form[\"email\"].encode('utf-8'))\n\tfull_name = str(request.form[\"full_name\"].encode('utf-8'))\n\tphone = str(request.form[\"phone\"].encode('utf-8'))\n\tpassword = str(request.form[\"password\"].encode('utf-8'))\n\n\tdata = {\"success\": \"User have been registered\"}\n\t\n\tuser = User()\n\n\ttry:\n\t\tuser.register(handle, email, full_name, phone, password)\n\texcept:\n\t\tdata = {\"error\": \"Couldn't register user! Check handle or email\"}\n\tp = json.dumps(data)\n\tresp = Response(response=p,\n status=200,\n mimetype=\"text/plain\")\n\treturn resp\n\[email protected](\"/login\", methods=['POST'])\ndef login():\n\t\n\ttmp_handle = request.cookies.get('handle')\n\n\thandle = request.form[\"handle\"]\n\n\tpassword = request.form[\"password\"]\n\n\tdata = {\"success\": \"You logged in\"}\n\t\n\tuser = User(handle, password)\n\n\tif not user.isAuth():\n\t\thandle = None\n\t\tdata = {\"error\": \"Couldn't log in user!\"}\n\telse:\n\t\tdata = {\"success\": \"You logged in!\"}\n\n\tp = json.dumps(data)\n\tresp = Response(response=p,\n status=200,\n mimetype=\"text/plain\")\n\t\n\tif handle is not None:\n\t\tresp.set_cookie(\"handle\", handle)\n\t\n\treturn resp\n\[email protected](\"/tweet\", methods=['POST'])\ndef add_tweet():\n\t\n\thandle = request.cookies.get('handle')\n\t\n\tdata = {}\n\n\tif handle is not None:\n\t\ttext = request.form[\"text\"]\n\t\tfiles = request.form[\"files\"] # Comma separated list of file_id\n\t\tif files == \"\":\n\t\t\tfiles = []\n\t\telse:\n\t\t\tfiles = files.split(\",\")\n\n\t\ttweet_id = tweet.add(handle, text, files)\n\n\t\tif tweet_id is None:\n\t\t\tdata[\"error\"] = \"Error occured while adding the tweet\"\n\t\telse:\n\t\t\tdata[\"success\"] = \"Tweet added\"\n\n\tp = json.dumps(data)\n\tresp = Response(response=p,\n status=200,\n mimetype=\"text/plain\")\n\t\n\tif handle is not None:\n\t\tresp.set_cookie(\"handle\", handle)\n\t\n\treturn resp\n\[email protected](\"/follow\", methods=['POST'])\ndef follow():\n\tfrom_handle = request.cookies.get('handle')\n\tto_handle = request.form[\"handle\"]\n\tdata = {\"error\": \"You must log in!\"}\n\tif from_handle is not None:\n\t\tuser = User(from_handle)\n\t\tuser.setAuth()\n\n\t\ttry:\n\t\t\tdata = {\"success\": \"You followed the user!\"}\n\t\t\tuser.follow(to_handle)\n\t\texcept:\n\t\t\ttry:\n\t\t\t\tuser.unfollow(to_handle)\n\t\t\t\tdata = {\"success\": \"You unfollowed the user!\"}\n\t\t\texcept:\n\t\t\t\tdata = {\"error\": \"Couldn't follow the user\"}\n\tp = json.dumps(data)\n\tresp = Response(response=p,\n status=200,\n mimetype=\"text/plain\")\n\treturn resp\n\[email protected](\"/notification/count\", methods=['GET'])\ndef notification_count():\n\thandle = request.cookies.get('handle')\n\tdata = {\"count\": 0}\n\tprint(handle)\n\tif handle is not None:\n\t\ttry:\n\t\t\tdata[\"count\"] = notification.count(handle)\n\t\texcept Exception as e:\n\t\t\tprint(str(e))\n\t\t\tdata[\"count\"] = 0\n\tp = json.dumps(data)\n\tresp = Response(response=p,\n status=200,\n mimetype=\"text/plain\")\n\treturn resp\n\[email protected](\"/notification\", methods=['GET'])\ndef notification_page():\n\thandle = request.cookies.get('handle')\n\tdata = {\"notifications\": []}\n\tif handle is not None:\n\t\ttry:\n\t\t\tdata[\"notifications\"] = notification.get(handle)\n\t\texcept:\n\t\t\tdata[\"notifications\"] = []\n\n\t\tuser = User(handle)\n\t\tuser.setAuth()\n\t\tuser_data = user.getInfo()\n\t\treturn render_template(\"notification.html\", handle = handle, full_name = user_data[\"full_name\"], email = 
user_data[\"email\"],\n\t\t\tnotifications = data[\"notifications\"])\n\telse:\n\t\treturn render_template(\"index.html\")\n\[email protected](\"/notification\", methods=['POST'])\ndef notification_get():\n\thandle = request.cookies.get('handle')\n\tdata = {\"notifications\": []}\n\tif handle is not None:\n\t\ttry:\n\t\t\tdata[\"notifications\"] = notification.get(handle)\n\t\texcept:\n\t\t\tdata[\"notifications\"] = []\n\tp = json.dumps(data)\n\tresp = Response(response=p,\n status=200,\n mimetype=\"text/plain\")\n\treturn resp\n\n\[email protected](\"/like\", methods=['POST'])\ndef like():\n\thandle = request.cookies.get('handle')\n\ttweet_id = request.form[\"tweet_id\"]\n\tdata = {\"success\": \"You liked tweet!\"}\n\tif handle is not None:\n\t\ttry:\n\t\t\ttweet.likes(handle, tweet_id)\n\t\texcept:\n\t\t\ttry:\n\t\t\t\ttweet.unlikes(handle, tweet_id)\n\t\t\t\tdata = {\"success\": \"You unliked the tweet!\"}\n\t\t\texcept:\n\t\t\t\tdata = {\"error\": \"Couldn't unlike the tweet\"}\n\tp = json.dumps(data)\n\tresp = Response(response=p,\n status=200,\n mimetype=\"text/plain\")\n\treturn resp\n\n\n\[email protected](\"/tweet/delete\", methods=['POST'])\ndef tweet_delete():\n\thandle = request.cookies.get('handle')\n\ttweet_id = request.form[\"tweet_id\"]\n\tdata = {\"success\": \"You delete tweet!\"}\n\tif handle is not None:\n\t\ttry:\n\t\t\tif tweet.isOwner(handle, tweet_id):\n\t\t\t\ttweet.delete(tweet_id)\n\t\texcept Exception as e:\n\t\t\tprint(str(e))\n\t\t\tdata = {\"error\": \"Couldn't unlike the tweet\"}\n\tp = json.dumps(data)\n\tresp = Response(response=p,\n status=200,\n mimetype=\"text/plain\")\n\treturn resp\n\[email protected](\"/tweet/feed\", methods=['GET'])\ndef feed():\n\thandle = request.cookies.get('handle')\n\n\tlast = request.args.get('last')\n\tdata = {}\n\tif handle is not None:\n\t\tdata = tweet.get(handle, \"feed\", 10, last, handle)\n\tp = json.dumps(data)\n\tresp = Response(response=p,\n status=200,\n mimetype=\"text/plain\")\n\treturn resp\n\[email protected](\"/tweet/search\", methods=['GET'])\ndef search():\n\thandle = request.cookies.get('handle')\n\n\tlast = request.args.get('last')\n\tsearch = request.args.get('q')\n\t\n\tprint(handle)\n\tprint(search)\n\n\tdata = {}\n\tif handle is not None:\n\t\tdata = tweet.get(search, \"search\", 10, last, handle)\n\tp = json.dumps(data)\n\tresp = Response(response=p,\n status=200,\n mimetype=\"text/plain\")\n\treturn resp\n\[email protected](\"/logout\", methods=['GET'])\ndef logout():\n\thandle = request.cookies.get('handle')\n\tp = \"\"\n\tif handle is not None:\n\t\tp = \"You logged out\"\n\theaders = {\n 'Location': '/'\n }\n\tresp = redirect(url_for(\"index\"))\n\tif handle is not None:\n\t\tresp.set_cookie(\"handle\", '', expires=0)\n\t\n\treturn resp\n\treturn redirect(url_for(\"index\"))\n\[email protected]('/user/<handle>', methods=['GET'])\ndef profile(handle):\n\tuser = User(handle)\n\tdata = {}\n\t\n\tcurrent_user = request.cookies.get('handle')\n\tlast = request.args.get('last')\n\n\tif user.getHandle() is not None:\n\n\t\tdata = tweet.get(handle, \"user\", 10, last, current_user)\n\t\n\t\tp = json.dumps(data)\n\n\t\tresp = Response(response=p,\n\t status=200,\n\t mimetype=\"text/plain\")\n\telse:\n\t\tresp = Response(response=\"User not found\",\n\t status=404,\n\t mimetype=\"text/plain\")\n\treturn resp\n\[email protected]('/settings', methods=['GET', 'POST'])\ndef settings():\n\t\n\tcurrent_user = request.cookies.get('handle')\n\tif current_user is not None:\n\t\tauthorised = 
True\n\telse:\n\t\tauthorised = False\n\t\treturn redirect(url_for(\"index\"))\n\t\n\tuser = User(current_user)\n\n\tif request.method == 'POST':\n\t\temail = str(request.form[\"email\"].encode('utf-8'))\n\t\tfull_name = str(request.form[\"full_name\"].encode('utf-8'))\n\t\tphone = str(request.form[\"phone\"].encode('utf-8'))\n\t\tphoto_url = str(request.form[\"photo_url\"].encode('utf-8'))\n\t\tuser.update([\"email\", \"full_name\", \"phone_number\", \"photo_url\"], \n\t\t\t[email, full_name, phone, photo_url])\n\t\n\tdata = user.getInfo()\n\tif user.getHandle() is not None:\n\n\t\tif data[\"photo_url\"] == \"\":\n\t\t\tdata[\"photo_url\"] = \"http://via.placeholder.com/200x150\"\n\n\t\tresp = render_template(\"settings.html\", data = data, handle = current_user)\n\telse:\n\t\tresp = Response(response=\"User not found\",\n\t status=404,\n\t mimetype=\"text/plain\")\n\treturn resp\n\[email protected]('/<handle>', methods=['GET'])\ndef profile_page(handle):\n\tuser = User(handle)\n\tdata = {}\n\t\n\tcurrent_user = request.cookies.get('handle')\n\tif current_user is not None:\n\t\tauthorised = True\n\telse:\n\t\tauthorised = False\n\tif user.getHandle() is not None:\n\n\t\tdata = user.getInfo()\n\n\t\tif data[\"photo_url\"] == \"\":\n\t\t\tdata[\"photo_url\"] = \"http://via.placeholder.com/200x150\"\n\n\t\tresp = render_template(\"user.html\", handle=data[\"handle\"], full_name=data[\"full_name\"], \n\t\t\tfollowers = data[\"followers\"], following = data[\"following\"], photo_url = data[\"photo_url\"], current_user = current_user,\n\t\t\tauthorised = authorised, followed = user.followedBy(current_user), current_user_page = current_user == data[\"handle\"])\n\telse:\n\t\tresp = Response(response=\"User not found\",\n\t status=404,\n\t mimetype=\"text/plain\")\n\treturn resp\n\nif __name__ == \"__main__\":\n app.run(threaded=True)"
},
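app.py above trusts a bare handle cookie on every request, which any client can mint for any user. Since the app already sets app.secret_key, Flask's signed session cookie is available at no extra cost; a sketch of the pattern (standalone, with the route bodies reduced to just the cookie handling):

from flask import Flask, request, session

demo = Flask(__name__)
demo.secret_key = "Key"  # same key the app above sets

@demo.route("/login", methods=["POST"])
def login():
    # ...after the password check succeeds...
    session["handle"] = request.form["handle"]  # signed and tamper-evident
    return "ok"

@demo.route("/whoami")
def whoami():
    return session.get("handle", "anonymous")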
{
"alpha_fraction": 0.6362799406051636,
"alphanum_fraction": 0.6464088559150696,
"avg_line_length": 24.279069900512695,
"blob_id": "82d3e2183002aaf9276062fa66f417524b5957f5",
"content_id": "2f873db9e0a855deaa45c8bd3c0ba71b7680d05b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1086,
"license_type": "no_license",
"max_line_length": 127,
"num_lines": 43,
"path": "/notification.py",
"repo_name": "aidarbek/database_systems_project",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python\n\nimport MySQLdb\nimport config\n\ndef get(handle):\n\tdb = MySQLdb.connect(config.DB[\"host\"], config.DB[\"user\"], config.DB[\"password\"], config.DB[\"db\"])\n\n\tcursor = db.cursor()\n\n\tcursor.execute(\"SELECT `id`, `content`, `url` FROM `notification` WHERE `handle`='{}' AND `read` = 0 LIMIT 10\".format(handle))\n\t\t\n\tresult = cursor.fetchall()\n\n\tdata = []\n\tnotifications = []\n\tfor row in result:\n\t\trow_data = {}\n\t\trow_data[\"content\"] = row[1]\n\t\trow_data[\"url\"] = row[2]\n\t\tdata.append(row_data)\n\t\tnotifications.append(str(row[0]))\n\tif len(notifications) > 0:\n\t\tcursor.execute(\"UPDATE `notification` SET `read` = 1 WHERE `id` IN({})\".format(\",\".join(notifications)))\n\t\tdb.commit()\n\n\tdb.close()\n\treturn data\ndef count(handle):\n\tdb = MySQLdb.connect(config.DB[\"host\"], config.DB[\"user\"], config.DB[\"password\"], config.DB[\"db\"])\n\n\tcursor = db.cursor()\n\n\tcursor.execute(\"SELECT COUNT(`id`) FROM `notification` WHERE `handle`='{}' AND `read` = 0 \".format(handle))\n\t\t\n\tresult = cursor.fetchone()[0]\n\n\tdb.close()\n\n\treturn result\nif __name__ == '__main__':\n\tprint(count(\"aidarbek1\"))\n\tpass"
},
{
"alpha_fraction": 0.804347813129425,
"alphanum_fraction": 0.804347813129425,
"avg_line_length": 45,
"blob_id": "d94918fd73ca173f6458ae4945a5c9aac9dfb167",
"content_id": "3d42fcda4dd43697fc48f1cc9770fb4e78a904a9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 138,
"license_type": "no_license",
"max_line_length": 109,
"num_lines": 3,
"path": "/README.md",
"repo_name": "aidarbek/database_systems_project",
"src_encoding": "UTF-8",
"text": "# database_systems_project\n\nAddition of data and time evaluation to DB is in fill.py. Benchmarking of querying dataset is in benchmark.py\n"
}
] | 9 |
excamera/LambdaInfraAutoTools | https://github.com/excamera/LambdaInfraAutoTools | 1e47dce486b283fd5d403dc73bd269f8bfa4aeae | c752f97c1e8c041138cbf928e2b9f6237f6fb864 | bd2c15e632222f6c401dad86dadaa1a3cd4fbdbe | refs/heads/master | 2020-12-14T18:51:52.715897 | 2016-04-29T21:03:58 | 2016-04-29T21:03:58 | 56,997,318 | 0 | 1 | null | null | null | null | null | [
{
"alpha_fraction": 0.8064516186714172,
"alphanum_fraction": 0.8064516186714172,
"avg_line_length": 30,
"blob_id": "6562cfa902ae4a37170c76946d7e3223b268a3e8",
"content_id": "df60820ed864a174845926c24c907de5366ec607",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Makefile",
"length_bytes": 31,
"license_type": "no_license",
"max_line_length": 30,
"num_lines": 1,
"path": "/examples/Makefile.am",
"repo_name": "excamera/LambdaInfraAutoTools",
"src_encoding": "UTF-8",
"text": "# Makefile to compile examples\n"
},
{
"alpha_fraction": 0.7024952173233032,
"alphanum_fraction": 0.7024952173233032,
"avg_line_length": 23.809524536132812,
"blob_id": "0697b66510f959bad44bd7fce4e8eb275d20de01",
"content_id": "823add97f0ad25366ee89e9a3133fbb34d209630",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Makefile",
"length_bytes": 521,
"license_type": "no_license",
"max_line_length": 43,
"num_lines": 21,
"path": "/src/Makefile.am",
"repo_name": "excamera/LambdaInfraAutoTools",
"src_encoding": "UTF-8",
"text": "# Makefile to compile source code\n#\n# Add the sub-directories of src dir\n# in this option separated by a space\n#\n\nbin_PROGRAMS = helloworld\nhelloworlddir = src\nhelloworld_LDFLAGS = -static\nhelloworld_SOURCES = helloworld.c\nhelloworld_PROGRAMS = helloworld\nnoinst_LIBRARIES = libhelloworld.a\nlibhelloworld_a_SOURCES = helloworld.c\n\nbuild: helloworld\nlambdainstall: build\n\t./setuplambda.sh\nlambdaupdate: build\n\t./updatelambda.sh\ncheck: build\n\tpython ./test_invoke.py\n"
},
{
"alpha_fraction": 0.8615384697914124,
"alphanum_fraction": 0.8615384697914124,
"avg_line_length": 15.25,
"blob_id": "d4a0ca8f155c771d5b109bf171b4b642c2ad8aeb",
"content_id": "db680692341eecb2ee0db3e3383a1b2dfa3cda0b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 65,
"license_type": "no_license",
"max_line_length": 21,
"num_lines": 4,
"path": "/clean.sh",
"repo_name": "excamera/LambdaInfraAutoTools",
"src_encoding": "UTF-8",
"text": "make mostlyclean\nmake clean\nmake distclean\nmake maintainer-clean\n"
},
{
"alpha_fraction": 0.7253446578979492,
"alphanum_fraction": 0.7476140260696411,
"avg_line_length": 20.930233001708984,
"blob_id": "3f2791e50a72651ba8beb15a6b02fc46d6d934b4",
"content_id": "b2e73f2600ad8a3630ba4bdd5b45aed8f2c54a54",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "M4Sugar",
"length_bytes": 943,
"license_type": "no_license",
"max_line_length": 78,
"num_lines": 43,
"path": "/configure.ac",
"repo_name": "excamera/LambdaInfraAutoTools",
"src_encoding": "UTF-8",
"text": "# Process this file with autoconf to to produce a configure script\n# \n# Layout referred from https://www.gnu.org/software/autoconf/manual/\n# autoconf-2.61/html_node/configure_002eac-Layout.html#configure_002eac-Layout\n#\n# Heavily inspired from Keith's repo : https://github.com/keithw/sourdough\n#\n\nAC_PREREQ([2.68])\nAC_INIT([Pipelines], [0.01], [[email protected]])\nAM_INIT_AUTOMAKE([foreign subdir-objects])\nAC_CONFIG_SRCDIR([src/helloworld.c])\nAC_CONFIG_HEADERS([config.h])\n\n# Add CXXFLAGS\nCXX11_FLAGS=\"-std=c++11 -pthread\"\nNEEDED_CXXFLAGS=\"-Wall\"\nAC_SUBST([CXX11_FLAGS])\nAC_SUBST([NEEDED_CXXFLAGS])\n\n# Checks for programs\nLT_INIT\nLT_LANG([C++])\nAC_PROG_CXX\nAC_PROG_CC\nAC_PROG_RANLIB\n\n# Checks for libraries\n\n# Checks for header files\n\n# Checks for types\n\n# Checks for structures\n\n# Checks for compiler characteristics\n\n# Checks for library functions\n\n# Checks for system services\n\nAC_CONFIG_FILES([Makefile src/Makefile examples/Makefile])\nAC_OUTPUT\n"
},
{
"alpha_fraction": 0.7142857313156128,
"alphanum_fraction": 0.7142857313156128,
"avg_line_length": 20,
"blob_id": "6ac7d4e7fa6f17c733dd5526e3e3bf6dd41cc191",
"content_id": "77d85879f64a90a34929fa707d3cee8c13d40a41",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Makefile",
"length_bytes": 21,
"license_type": "no_license",
"max_line_length": 20,
"num_lines": 1,
"path": "/script/Makefile.am",
"repo_name": "excamera/LambdaInfraAutoTools",
"src_encoding": "UTF-8",
"text": "bin_SCRIPTS = run.sh\n"
},
{
"alpha_fraction": 0.5479854941368103,
"alphanum_fraction": 0.57718425989151,
"avg_line_length": 46.494625091552734,
"blob_id": "20c53949d0d70f8719000c7af17ff44a4889a2f6",
"content_id": "ad290ca084231d038f4a7266afb5538e6813933b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 4418,
"license_type": "no_license",
"max_line_length": 131,
"num_lines": 93,
"path": "/SampleOutput.md",
"repo_name": "excamera/LambdaInfraAutoTools",
"src_encoding": "UTF-8",
"text": "rbhalera@vm132:~$ source ~/.bashrc\nrbhalera@vm132:~$ cd Pipelines/\nrbhalera@vm132:~/Pipelines$ make clean\nMaking clean in src\nmake[1]: Entering directory `/home/rbhalera/Pipelines/src'\n rm -f helloworld\n rm -f helloworld\nrm -rf .libs _libs\ntest -z \"libhelloworld.a\" || rm -f libhelloworld.a\nrm -f *.o\nrm -f *.lo\nmake[1]: Leaving directory `/home/rbhalera/Pipelines/src'\nMaking clean in examples\nmake[1]: Entering directory `/home/rbhalera/Pipelines/examples'\nrm -rf .libs _libs\nrm -f *.lo\nmake[1]: Leaving directory `/home/rbhalera/Pipelines/examples'\nmake[1]: Entering directory `/home/rbhalera/Pipelines'\nrm -rf .libs _libs\nrm -f *.lo\nmake[1]: Leaving directory `/home/rbhalera/Pipelines'\nrbhalera@vm132:~/Pipelines$ make build\ncd src && make build\nmake[1]: Entering directory `/home/rbhalera/Pipelines/src'\ndepbase=`echo helloworld.o | sed 's|[^/]*$|.deps/&|;s|\\.o$||'`;\\\n\tgcc -DHAVE_CONFIG_H -I. -I.. -g -O2 -MT helloworld.o -MD -MP -MF $depbase.Tpo -c -o helloworld.o helloworld.c &&\\\n\tmv -f $depbase.Tpo $depbase.Po\n/bin/bash ../libtool --tag=CC --mode=link gcc -g -O2 -static -o helloworld helloworld.o \nlibtool: link: gcc -g -O2 -o helloworld helloworld.o \nmake[1]: Leaving directory `/home/rbhalera/Pipelines/src'\nrbhalera@vm132:~/Pipelines$ make lambdainstall\ncd src && make lambdainstall\nmake[1]: Entering directory `/home/rbhalera/Pipelines/src'\n./setuplambda.sh\nupdating: helloworld (deflated 68%)\nupdating: handler.py (deflated 33%)\n\nA client error (ResourceConflictException) occurred when calling the CreateFunction operation: Function already exist: Helloworldv1\nmake[1]: *** [lambdainstall] Error 255\nmake[1]: Leaving directory `/home/rbhalera/Pipelines/src'\nmake: *** [lambdainstall] Error 2\nrbhalera@vm132:~/Pipelines$ make lambdaupdate\ncd src && make lambdaupdate\nmake[1]: Entering directory `/home/rbhalera/Pipelines/src'\n./updatelambda.sh\nupdating: helloworld (deflated 68%)\nupdating: handler.py (deflated 33%)\n-----------------------------------\n| CreateBucket |\n+-----------+---------------------+\n| Location | /lambdazipsbucket |\n+-----------+---------------------+\nupload: ./helloworld.zip to s3://lambdazipsbucket/helloworld.zip\n---------------------------------------------------------------------------------\n| UpdateFunctionCode |\n+--------------+----------------------------------------------------------------+\n| CodeSha256 | JXsiMoYpVuqdqrgCi4Kq1YS9+jKcm5+p1SutfgeRUig= |\n| CodeSize | 4101 |\n| Description | |\n| FunctionArn | arn:aws:lambda:eu-west-1:751324341733:function:Helloworldv1 |\n| FunctionName| Helloworldv1 |\n| Handler | handler.handler |\n| LastModified| 2016-04-28T08:32:39.967+0000 |\n| MemorySize | 128 |\n| Role | arn:aws:iam::751324341733:role/lambda |\n| Runtime | python2.7 |\n| Timeout | 3 |\n| Version | $LATEST |\n+--------------+----------------------------------------------------------------+\nmake[1]: Leaving directory `/home/rbhalera/Pipelines/src'\nrbhalera@vm132:~/Pipelines$ make check\nMaking check in src\nmake[1]: Entering directory `/home/rbhalera/Pipelines/src'\nrm -f libhelloworld.a\nar cru libhelloworld.a helloworld.o \nranlib libhelloworld.a\npython ./test_invoke.py\nOutput : Hello World\nInvocation Success\nmake[1]: Leaving directory `/home/rbhalera/Pipelines/src'\nMaking check in examples\nmake[1]: Entering directory `/home/rbhalera/Pipelines/examples'\nmake[1]: Nothing to be done for `check'.\nmake[1]: Leaving directory `/home/rbhalera/Pipelines/examples'\nmake[1]: Entering directory 
`/home/rbhalera/Pipelines'\nmake[1]: Leaving directory `/home/rbhalera/Pipelines'\ncd src && make check\nmake[1]: Entering directory `/home/rbhalera/Pipelines/src'\npython ./test_invoke.py\nOutput : Hello World\nInvocation Success\nmake[1]: Leaving directory `/home/rbhalera/Pipelines/src'\nrbhalera@vm132:~/Pipelines$ \n"
},
{
"alpha_fraction": 0.7451612949371338,
"alphanum_fraction": 0.7612903118133545,
"avg_line_length": 50.66666793823242,
"blob_id": "61b97baf3d63587531542f29e05c85e90b2a3dab",
"content_id": "5ae234d9b738b5c7aac4baa86fb047677463bf08",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 310,
"license_type": "no_license",
"max_line_length": 154,
"num_lines": 6,
"path": "/src/setuplambda.sh",
"repo_name": "excamera/LambdaInfraAutoTools",
"src_encoding": "UTF-8",
"text": "#!/bin/bash\n\nzip helloworld.zip helloworld handler.py\n# aws s3api create-bucket --bucket lambdazipsbucket\n# aws s3 cp helloworld.zip s3://lambdazipsbucket\naws lambda create-function --function-name ${LAMBDA_NAME} --runtime python2.7 --role ${ROLE} --handler handler.handler --zip-file fileb://./helloworld.zip\n"
},
{
"alpha_fraction": 0.6320754885673523,
"alphanum_fraction": 0.6320754885673523,
"avg_line_length": 25.5,
"blob_id": "a8cca795e5e8fed54b17cfe1f74b22757bf6956a",
"content_id": "89fbc2187876e503e52caa2057795713ade42bcb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 212,
"license_type": "no_license",
"max_line_length": 45,
"num_lines": 8,
"path": "/src/handler.py",
"repo_name": "excamera/LambdaInfraAutoTools",
"src_encoding": "UTF-8",
"text": "import subprocess as sp\n\ndef handler(event, context):\n try:\n\t output = sp.check_output(\"./helloworld\")\n except sp.CalledProcessError as e:\n output = e.output\n\treturn {'output':output}\n"
},
{
"alpha_fraction": 0.7193877696990967,
"alphanum_fraction": 0.7193877696990967,
"avg_line_length": 18.600000381469727,
"blob_id": "a78e850f7624d902e0f63e35685b9839ae67a4da",
"content_id": "4f7fcbab366fcd1e6b8ad68bc32b547f65c38776",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 784,
"license_type": "no_license",
"max_line_length": 81,
"num_lines": 40,
"path": "/README.md",
"repo_name": "excamera/LambdaInfraAutoTools",
"src_encoding": "UTF-8",
"text": "# Pipelines\nPipeline support for excamera\n\nDependencies:\n\n\tautomake, autoconf & awscli\n\t$ (Linux) : sudo apt-get install autoconf\n\t$ pip install awscli\n\t$ (OS X) : brew install autoconf && brew install automake\n\nEnviroment Variables:\n\nYou have to set the following variables\n\n\t\"AWS_ACCESS_KEY_ID\", \n\t\"AWS_SECRET_ACCESS_KEY\",\n\t\"ROLE\" - This is the ARN of the role that the lambda will assume when running.\n\t\"LAMBDA_NAME\" - The name of the lambda\n\nYou need to have a adminuser profile in ~/.aws/credentials\n\nTo build:\n\n\t$ ./autogen.sh\n\t$ ./configure\n\t$ make clean\n\t$ make build\n\t$ make lambdainstall\n\t$ make check\n\nTo update:\n\t\n\t$ make build\n\t$ make updateinstall\n\t$ make check\n\nTo clean tmp files before commiting to git:\n\n\t$ ./clean.sh\nFile \"SampleOutput.md\" has a sample execution\n"
},
{
"alpha_fraction": 0.6877192854881287,
"alphanum_fraction": 0.6877192854881287,
"avg_line_length": 20.923076629638672,
"blob_id": "51e8625bf23bde9dbac7dffa6e4b9c3723e03c86",
"content_id": "8d75a8e26427d18e92e6b325f6986cfc5e7484d5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Makefile",
"length_bytes": 285,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 13,
"path": "/Makefile.am",
"repo_name": "excamera/LambdaInfraAutoTools",
"src_encoding": "UTF-8",
"text": "SUBDIRS = src examples\n\nbuild: config.h\n\tcd src && $(MAKE) $(AM_MAKEFLAGS) build\n\nlambdainstall: config.h\n\tcd src && $(MAKE) $(AM_MAKEFLAGS) lambdainstall\n\nlambdaupdate: config.h\n\tcd src && $(MAKE) $(AM_MAKEFLAGS) lambdaupdate\n\ncheck: config.h\n\tcd src && $(MAKE) $(AM_MAKEFLAGS) check\n"
},
{
"alpha_fraction": 0.7423076629638672,
"alphanum_fraction": 0.7538461685180664,
"avg_line_length": 41,
"blob_id": "25e063cd2078ad2c1c1841b2f31edb6c5822cd44",
"content_id": "8c492435de67fa1ba668ceb9f21c720a3fee685b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 260,
"license_type": "no_license",
"max_line_length": 98,
"num_lines": 6,
"path": "/src/updatelambda.sh",
"repo_name": "excamera/LambdaInfraAutoTools",
"src_encoding": "UTF-8",
"text": "#!/bin/bash\n\nzip helloworld.zip helloworld handler.py\n#aws s3api create-bucket --bucket lambdazipsbucket\n#aws s3 cp helloworld.zip s3://lambdazipsbucket\naws lambda update-function-code --function-name ${LAMBDA_NAME} --zip-file fileb://./helloworld.zip\n\n\n\n\n\n\n\n\n"
}
] | 11 |
lsx732005068/Test | https://github.com/lsx732005068/Test | bc115f964ad8ca763c39d055c2354d742274a59e | 1fdf0be13ff886383797905870ab7ed2beb476a5 | e444df4ac53636ca710611da3e289d636ef02d5f | refs/heads/master | 2021-05-24T09:42:31.882415 | 2020-08-28T06:43:40 | 2020-08-28T06:43:40 | 253,501,849 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.592792809009552,
"alphanum_fraction": 0.6216216087341309,
"avg_line_length": 28.83333396911621,
"blob_id": "64dd621d806e1dde937963914e286a56b0c009d2",
"content_id": "fa8118e796a2d195a8cdc644c6d8d6b549fa67aa",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 573,
"license_type": "no_license",
"max_line_length": 72,
"num_lines": 18,
"path": "/web自动化/loginTest/models/function.py",
"repo_name": "lsx732005068/Test",
"src_encoding": "UTF-8",
"text": "import os\r\nimport time\r\n\r\nfrom selenium import webdriver\r\n\r\n# 封装截图的操作方法\r\ndef insert_img(driver, file_name):\r\n base_dir = os.path.dirname(os.path.dirname(__file__))\r\n base_dir = str(base_dir).replace(\"\\\\\", \"/\")\r\n currentTime = time.strftime(\"%Y-%m-%d %H-%M-%S\")\r\n filePath = base_dir + \"/result/img/\" + currentTime + \"_\" + file_name\r\n driver.get_screenshot_as_file(filePath)\r\n\r\nif __name__ == '__main__':\r\n\t\tdriver = webdriver.Firefox()\r\n\t\tdriver.get(\"http://192.168.105.117:8090/login\")\r\n\t\tinsert_img(driver, 'login.png')\r\n\t\tdriver.quit()\r\n"
},
{
"alpha_fraction": 0.500587522983551,
"alphanum_fraction": 0.5099882483482361,
"avg_line_length": 26.779661178588867,
"blob_id": "2c3f8c360461376f118fd84e6ac363a6ea449fdb",
"content_id": "dfa18e5608f59f2eb3e9c8e4dca278219c098511",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1912,
"license_type": "no_license",
"max_line_length": 63,
"num_lines": 59,
"path": "/面试准备/读取excel数据导入数据库脚本.py",
"repo_name": "lsx732005068/Test",
"src_encoding": "UTF-8",
"text": "import json\r\n\r\nimport openpyxl as openpyxl\r\nimport pymssql\r\nimport pymysql\r\nimport pandas\r\nimport xlrd\r\n\r\n\r\nclass importData:\r\n # 连接数据库\r\n def connsql(self, dateList):\r\n # 本地数据库\r\n connect = pymysql.connect(\r\n host='localhost',\r\n user='root',\r\n passwd='123456',\r\n db='study',\r\n port=3306)\r\n if connect:\r\n print('连接成功')\r\n # 创建一个游标对象,python里的sql都要通过cursor来执行\r\n cursor = connect.cursor()\r\n # 执行sql语句\r\n sql = \"insert into student(Sno, Sname) values (%s, %s)\"\r\n try:\r\n cursor.executemany(sql, dateList) # sql执行\r\n connect.commit() # 提交到数据库\r\n except Exception as e: # 获取报错信息\r\n print(e)\r\n cursor.close() # 关闭游标\r\n connect.close() # 关闭连接\r\n\r\n def readExcel(self, path):\r\n book = xlrd.open_workbook(path)\r\n sheet1 = book.sheets()[0]\r\n report_name = sheet1.row_values(0) # 获取报表名称行数据\r\n row_num = sheet1.nrows # 获取总行数\r\n report_name = sheet1.ncols # 获取总列数\r\n\r\n list = []\r\n # 循环每一行数据\r\n for i in range(1, row_num):\r\n row = sheet1.row_values(i) # 获取行数据\r\n number = \"\".join(row[0].split()) #序号\r\n # print(\"\".join(row[0].split()))\r\n name = \"\".join(row[1]) #姓名\r\n # print(\"\".join(row[1].split()))\r\n myTuple = (number, name)\r\n list.append(myTuple)\r\n print(list)\r\n self.connsql(list)\r\n\r\nif __name__ == '__main__':\r\n path = \"D:/SVN/未来招投标平台/test/T_OpenBidInfo.xlsx\"\r\n importData = importData()\r\n conn = importData.readExcel(path)\r\n # 假数据测试\r\n # importData.connsql()\r\n\r\n\r\n"
},
{
"alpha_fraction": 0.6443965435028076,
"alphanum_fraction": 0.6443965435028076,
"avg_line_length": 33.53845977783203,
"blob_id": "aa0072e51bd313ab3be2dc7a21687450e7469de9",
"content_id": "601431a1397007afeec8882832834c56f5114155",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 472,
"license_type": "no_license",
"max_line_length": 89,
"num_lines": 13,
"path": "/web自动化/loginTest/testCase/executeTestCase.py",
"repo_name": "lsx732005068/Test",
"src_encoding": "UTF-8",
"text": "import os\r\nimport unittest\r\n\r\nfrom HTMLTestRunner import HTMLTestRunner\r\n\r\nif __name__ == '__main__':\r\n fileName = 'report.html'\r\n test_dir = os.path.dirname(__file__)\r\n file_Path = os.path.dirname(os.path.dirname(__file__)) + \"/result/report/\" + fileName\r\n print(file_Path)\r\n discover = unittest.defaultTestLoader.discover(test_dir, '*test.py')\r\n with open(file_Path, 'wb') as f:\r\n HTMLTestRunner(stream=f, title='测试报告').run(discover)\r\n\r\n"
},
{
"alpha_fraction": 0.5864834189414978,
"alphanum_fraction": 0.6231386065483093,
"avg_line_length": 27.965517044067383,
"blob_id": "ebc10371507d9dccceefe645b3b5e7c843828100",
"content_id": "6123a29d1502b522045196d8bf186ca12563094e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 929,
"license_type": "no_license",
"max_line_length": 65,
"num_lines": 29,
"path": "/web自动化/loginTest/testCase/loginPsd_test.py",
"repo_name": "lsx732005068/Test",
"src_encoding": "UTF-8",
"text": "import unittest\r\nfrom time import sleep\r\n\r\nfrom models import myunit, function\r\nfrom page_obj.loginPsw_Page import login\r\n\r\n\r\nclass loginTest(myunit.MyTest):\r\n # 测试用户登录\r\n def user_login_verify(self, username=\"\", password=\"\"):\r\n login(self.driver).user_login(username, password)\r\n\r\n # 用户名密码正确\r\n def test_login1(self):\r\n self.user_login_verify('18351952332', '1996xh729')\r\n sleep(3)\r\n po = login(self.driver)\r\n self.assertEqual(po.login_user_success(), u'18351952332')\r\n function.insert_img(self.driver, \"user_pwd_true.png\")\r\n\r\n # 用户名密码为空登录\r\n def test_login2(self):\r\n self.user_login_verify()\r\n po = login(self.driver)\r\n self.assertEqual(po.login_errot_hint(), u'请输入手机号!')\r\n function.insert_img(self.driver, \"user_pawd_empty.png\")\r\n\r\nif __name__ == '__main__':\r\n unittest.main()\r\n\r\n\r\n"
},
{
"alpha_fraction": 0.5679405331611633,
"alphanum_fraction": 0.5796178579330444,
"avg_line_length": 33.50943374633789,
"blob_id": "5e67367dd3eaac15dd0d90128a0fde0a2a870469",
"content_id": "e15a59a4493fb4a88ac6a732ecd271f868f780d8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2068,
"license_type": "no_license",
"max_line_length": 106,
"num_lines": 53,
"path": "/web自动化/loginTest/page_obj/base.py",
"repo_name": "lsx732005068/Test",
"src_encoding": "UTF-8",
"text": "\r\n# 页面基础类,用于所有页面的继承\r\nfrom selenium.webdriver.support.wait import WebDriverWait\r\nfrom selenium.webdriver.support import expected_conditions as EC\r\n\r\nclass Page(object):\r\n base_url = \"http://192.168.105.117:8090\"\r\n def __init__(self, selenium_driver, base_url = base_url, parent = None):\r\n self.base_url = base_url\r\n self.driver = selenium_driver\r\n self.timeout = 30\r\n self.parent = parent\r\n\r\n def _open(self, url):\r\n url = self.base_url + url\r\n self.driver.get(url)\r\n assert self.on_page(), 'Did not land on %s' % url\r\n\r\n def open(self):\r\n self._open(self.url)\r\n\r\n def on_page(self):\r\n return self.driver.current_url == (self.base_url + self.url)\r\n\r\n def find_element(self, *loc):\r\n try:\r\n # 确保所有元素是可见的\r\n # 注意:以下入参为元祖的元素,python存在这种特性,就是将入参放在元组里。\r\n # WebDriverWait(self.driver,10).until(lambda driver: driver.find_element(*loc).is_displayed())\r\n # 注意:以下入参本身是元组,不需要加*\r\n WebDriverWait(self.driver, 10).until(EC.visibility_of_element_located(loc))\r\n return self.driver.find_element(*loc)\r\n except:\r\n print(\"%s页面中未能找到%s元素\" %(self,loc))\r\n\r\n def find_elements(self, *loc):\r\n return self.driver.find_elements(*loc)\r\n\r\n def script(self, src):\r\n return self.driver.execute_script(src)\r\n\r\n def switch_frame(self, loc):\r\n return self.driver.swith_to_frame(loc)\r\n\r\n def send_keys(self, loc, value, clear_first=True, click_first=True):\r\n try:\r\n loc = getattr(self, \"_%s\" % loc)\r\n if click_first:\r\n self.find_element(*loc).click()\r\n if clear_first:\r\n self.find_element(*loc).clear()\r\n self.find_element(*loc).send_keys(value)\r\n except ArithmeticError:\r\n print(u\"%s 页面中未能找到 %s 元素\" % (self, loc))\r\n"
},
{
"alpha_fraction": 0.49448123574256897,
"alphanum_fraction": 0.5165562629699707,
"avg_line_length": 19.571428298950195,
"blob_id": "ed5d4fed8578b453fddcbedc5cdd53032dc3c8b4",
"content_id": "4768ef285a79322549ca7aed406c1f1542304452",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 471,
"license_type": "no_license",
"max_line_length": 63,
"num_lines": 21,
"path": "/web自动化/loginTest/practise/test.py",
"repo_name": "lsx732005068/Test",
"src_encoding": "UTF-8",
"text": "import time\r\n\r\n\r\ndef foo(param1, *param2):\r\n print(param1)\r\n print(param2)\r\n\r\ndef foo2(bar, lee):\r\n print(bar, lee)\r\n print(\"%s页面中未能找到%s 元素\" % (\"self\", \"loc\"))\r\n\r\n currentTime = time.strftime(\"%Y-%m-%d %H:%M:%S\")\r\n filePath = \"/result/img/\" + currentTime + \"_\" + \"login.png\"\r\n filePath = \"/result/img/\" + currentTime + filePath\r\n print(filePath)\r\n\r\n\r\nif __name__ == '__main__':\r\n foo(1, 2)\r\n a = [1,2]\r\n foo2(*a)\r\n"
},
{
"alpha_fraction": 0.6522655487060547,
"alphanum_fraction": 0.6975764036178589,
"avg_line_length": 33,
"blob_id": "95b29e2d08688850797f901c386dbf017f349379",
"content_id": "c9a4a9cd44555cf5fe7486074a2739c5ca2f88ce",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 949,
"license_type": "no_license",
"max_line_length": 108,
"num_lines": 27,
"path": "/web自动化/loginTest/practise/test02.py",
"repo_name": "lsx732005068/Test",
"src_encoding": "UTF-8",
"text": "from time import sleep\r\n\r\nfrom selenium import webdriver\r\nfrom selenium.webdriver import ActionChains\r\n\r\ndriver = webdriver.Firefox()\r\ndriver.get('http://192.168.105.117:8090/login')\r\nsleep(3)\r\nurl = driver.current_url\r\nprint(url)\r\n\r\n# driver.implicitly_wait(10)\r\n# driver.maximize_window()\r\n# driver.find_element_by_css_selector('.BlackColor___1F88e').click()\r\n# driver.find_element_by_css_selector('#normal_login_accountNo').send_keys('18351952332')\r\n# driver.find_element_by_css_selector('#normal_login_password').send_keys('1996xh729')\r\n#\r\n# driver.find_element_by_css_selector('button[class^=\"ant-btn\"]').click()\r\n# sleep(3)\r\n# name = driver.find_element_by_xpath('//*[@id=\"root\"]/section/header/div[3]/div[2]/div/span[2]')\r\n# ActionChains(driver).move_to_element(name).perform()\r\n# sleep(5)\r\n# phone = driver.find_element_by_xpath('/html/body/div[2]/div/div/div/div[2]/div/div[2]/div[2]/div[2]').text\r\n#\r\n# print(phone)\r\n\r\ndriver.quit()\r\n\r\n\r\n"
},
{
"alpha_fraction": 0.6113397479057312,
"alphanum_fraction": 0.6232281923294067,
"avg_line_length": 31.58461570739746,
"blob_id": "757ce8b55b7d2994b202279f404d604b800d4673",
"content_id": "e5ce685c3ecec4f39d8d488ab49f4327f85d7d43",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2491,
"license_type": "no_license",
"max_line_length": 105,
"num_lines": 65,
"path": "/web自动化/loginTest/page_obj/loginPsw_Page.py",
"repo_name": "lsx732005068/Test",
"src_encoding": "UTF-8",
"text": "# 创建登录页面对象,对用户登录页面上的用户名/密码输入框、登录按钮和\r\n# 提示信息等元素的定位进行封装。\r\nfrom time import sleep\r\nfrom selenium.webdriver import ActionChains\r\nfrom selenium.webdriver.common.by import By\r\nfrom page_obj.base import Page\r\n\r\n\r\nclass login(Page):\r\n # 用户登录界面\r\n url = '/login'\r\n # 定位用户名\r\n login_username_loc = (By.ID, 'normal_login_accountNo')\r\n # 定位密码框\r\n login_password_loc = (By.ID, 'normal_login_password')\r\n # 登录按钮的定位\r\n login_button_loc = (By.CSS_SELECTOR, 'button[class^=\"ant-btn\"]')\r\n # 登录报错信息的定位\r\n login_error_loc = (By.XPATH, '//*[@id=\"normal_login\"]/div[1]/div/div[2]/div')\r\n # 登录成功用户名信息\r\n login_name_success_loc = (By.XPATH, '//*[@id=\"root\"]/section/header/div[3]/div[2]/div/span[2]')\r\n\r\n # 登录成功手机号\r\n login_phone_success_loc = (By.XPATH, '/html/body/div[2]/div/div/div/div[2]/div/div[2]/div[2]/div[2]')\r\n\r\n # 选择密码登录\r\n def login_ByPsd(self):\r\n self.driver.find_element_by_css_selector('.BlackColor___1F88e').click()\r\n\r\n # 用户名输入\r\n def login_username(self, username):\r\n self.find_element(*self.login_username_loc).clear()\r\n self.find_element(*self.login_username_loc).send_keys(username)\r\n\r\n # 密码输入\r\n def login_password(self, password):\r\n self.find_element(*self.login_password_loc).clear()\r\n self.find_element(*self.login_password_loc).send_keys(password)\r\n\r\n # 点击登录按钮\r\n def login_button(self):\r\n self.find_element(*self.login_button_loc).click()\r\n\r\n # 统一密码登录入口\r\n def user_login(self, username='18351952332', password='1996xh729'):\r\n self.open()\r\n self.login_ByPsd()\r\n self.login_username(username)\r\n self.login_password(password)\r\n self.login_button()\r\n sleep(3)\r\n\r\n # 登录错误提示信息\r\n def login_errot_hint(self):\r\n return self.find_element(*self.login_error_loc).text\r\n\r\n # 登录成功提示信息\r\n def login_user_success(self):\r\n # 鼠标悬停在用户名上\r\n name = self.find_element(*self.login_name_success_loc)\r\n ActionChains(self.driver).move_to_element(name).perform()\r\n sleep(2)\r\n username = self.find_element(*self.login_phone_success_loc).text\r\n username = username.strip('账号:')\r\n return username\r\n\r\n\r\n"
},
{
"alpha_fraction": 0.5043859481811523,
"alphanum_fraction": 0.5745614171028137,
"avg_line_length": 18.545454025268555,
"blob_id": "1328fd22303dada1d99e24d38f5c3528e645a26a",
"content_id": "c31ee0b70d8209fdb67d5694595a8a8e6cd95d9b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 238,
"license_type": "no_license",
"max_line_length": 47,
"num_lines": 11,
"path": "/web自动化/loginTest/models/driver.py",
"repo_name": "lsx732005068/Test",
"src_encoding": "UTF-8",
"text": "from selenium import webdriver\r\n\r\n# 打开浏览器\r\ndef browser():\r\n driver = webdriver.Firefox()\r\n return driver\r\n\r\nif __name__ == '__main__':\r\n dr = browser()\r\n dr.get('http://192.168.105.117:8090/login')\r\n dr.quit()\r\n\r\n"
},
{
"alpha_fraction": 0.6477987170219421,
"alphanum_fraction": 0.6838193535804749,
"avg_line_length": 35.212764739990234,
"blob_id": "669162ca240acef31008836c7e4c63edb9fb9875",
"content_id": "589a5ab8f9faa57c78cc1f63019f2f01de9c6bf4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1797,
"license_type": "no_license",
"max_line_length": 113,
"num_lines": 47,
"path": "/面试准备/getData.py",
"repo_name": "lsx732005068/Test",
"src_encoding": "UTF-8",
"text": "# coding = utf-8\r\nfrom selenium import webdriver\r\nfrom selenium.common.exceptions import NoSuchElementException\r\nimport time\r\nimport xlwt\r\n\r\n\r\ndriver = webdriver.Ie()\r\ndriver.implicitly_wait(15)\r\ndriver.get(\"http://221.226.86.137/nanjingjianshe/JSGCZtbMis/login.aspx\");\r\ndriver.maximize_window()\r\ndriver.find_element_by_id(\"txtUserName\").send_keys(\"\")\r\ndriver.find_element_by_id(\"txtPwd\").send_keys(\"future123\")\r\ndriver.find_element_by_id(\"Imagebutton1\").click()\r\ndriver.get(\"http://221.226.86.137/nanjingjianshe/ZHManageMis/Pages/Jsgc_ChaXun/TouBiaoFile.aspx\");\r\ndriver.find_element_by_id(\"ctl00_ContentPlaceHolder1_btnSearch\").click()\r\ntime.sleep(3)\r\ndriver.find_element_by_id(\"ctl00_ContentPlaceHolder1_DanWeiName\").send_keys(\"中皓\")\r\ndriver.find_element_by_id(\"ctl00_ContentPlaceHolder1_btnOK\").click()\r\ntime.sleep(15)\r\n\r\n# 创建工作簿\r\nwbk = xlwt.Workbook(encoding='utf=8', style_compression=0)\r\n# 创建工作表\r\nsheet = wbk.add_sheet('tbData', cell_overwrite_ok=True)\r\n\r\ncount = 0\r\nwhile count < 2:\r\n table_tr_list = driver.find_element_by_xpath(\"//*[@id=\\\"ctl00_ContentPlaceHolder1_Datagrid1\\\"]/tbody\").\\\r\n find_elements_by_tag_name(\"tr\")\r\n for r, tr in enumerate(table_tr_list):\r\n table_td_list = tr.find_elements_by_tag_name('td')\r\n for c, td in enumerate(table_td_list):\r\n sheet.write(r+(count*21), c, td.text)\r\n if count == 0:\r\n nextPage = driver.find_element_by_id(\"ctl00_ContentPlaceHolder1_Pager\").\\\r\n find_element_by_css_selector(\"img[src=\\\"http://221.226.86.137/nanjingjianshe/images/page/nextn.gif\\\"]\")\r\n nextPage.click()\r\n count += 1\r\n time.sleep(8)\r\n\r\n# 保存该文件,文件必须存在\r\nwbk.save(r'E:\\testExcel\\toubiaoData.xls')\r\ndriver.quit()\r\n\r\n# if __name__ == '__main__':\r\n# getData()\r\n"
}
] | 10 |
brunolorente/ogc-route-client | https://github.com/brunolorente/ogc-route-client | aebf3e84127fb64573689420de725575399acbb9 | e1b3fe764c88f3af72345974a5c1b00e29e99f71 | 946be8d7590832ea2d38765ec033a8d79eaeb5fc | refs/heads/master | 2023-08-28T19:57:12.421067 | 2021-10-27T05:03:45 | 2021-10-27T05:03:45 | 416,238,169 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6175742745399475,
"alphanum_fraction": 0.6175742745399475,
"avg_line_length": 25.09677505493164,
"blob_id": "c236922e1def4eeb44f5647b45e382d47fe52fb2",
"content_id": "4ba28877523045a78ca5685e278f1452c5ae66fb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 808,
"license_type": "no_license",
"max_line_length": 78,
"num_lines": 31,
"path": "/helpers.py",
"repo_name": "brunolorente/ogc-route-client",
"src_encoding": "UTF-8",
"text": "import requests\nfrom pprint import pprint\n\n'''\n HELPERS\n'''\ndef get_api_name(landing_page):\n try:\n api_response = requests.get(url = landing_page, params = {'f':'json'})\n json_api_response = api_response.json()\n except requests.ConnectionError as exception:\n return False\n\n return json_api_response[\"title\"]\n\ndef get_routes(landing_page):\n routes = []\n url = landing_page+'/routes'\n\n try:\n api_response = requests.get(url = url)\n json_api_response = api_response.json()\n except requests.ConnectionError as exception:\n return False\n \n for route in json_api_response[\"links\"]:\n if route[\"rel\"] == \"item\":\n element = dict(href=route[\"href\"], title=route[\"title\"])\n routes.append(element)\n\n return routes"
},
{
"alpha_fraction": 0.6510600447654724,
"alphanum_fraction": 0.6528268456459045,
"avg_line_length": 32.574256896972656,
"blob_id": "7f1287b195a0408fea279170a711c0e28b4483d0",
"content_id": "6541e12506155a2a6f98599afec2fe2012750fb4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3396,
"license_type": "no_license",
"max_line_length": 88,
"num_lines": 101,
"path": "/app.py",
"repo_name": "brunolorente/ogc-route-client",
"src_encoding": "UTF-8",
"text": "import json\nimport requests\nfrom pprint import pp, pprint\nfrom helpers import get_routes, get_api_name\n\nfrom flask import Flask, render_template, request, url_for\napp = Flask(__name__)\n\nAPI_BASE_URL = 'https://dp21.skymantics.com/rimac'\nAPI_NAME = get_api_name(API_BASE_URL)\nDEFAULT_ZOOM = 12\nDEFAULT_CENTER = [0,0]\nTILESERVER_URL = 'https://tile.openstreetmap.org/{z}/{x}/{y}.png'\n\[email protected]('/')\ndef index():\n ROUTES = get_routes(API_BASE_URL)\n if not request.root_url:\n # this assumes that the 'index' view function handles the path '/'\n request.root_url = url_for('index', _external=True)\n return render_template(\n 'index.html', \n tileserver=TILESERVER_URL,\n routes=ROUTES,\n name=API_NAME,\n zoom=DEFAULT_ZOOM,\n center=DEFAULT_CENTER\n )\n\[email protected]('/route', defaults={\n\n})\ndef get_route():\n if (request.args.get('waypoints') != '' or request.args.get('waypoints') != None):\n waypoints_from_request = request.args.get('waypoints')\n else:\n waypoints_from_request = None\n\n if (request.args.get('route_name') != '' or request.args.get('route_name') != None):\n route_name_from_request = request.args.get('route_name')\n else:\n route_name_from_request = None\n\n # Get the waypoints from the request\n waypoints = waypoints_from_request\n # Get the route name from the request\n route_name = route_name_from_request\n\n # Set the API resource url\n URL = API_BASE_URL+\"/routes\"\n params = {\n 'waypoints':json.loads(waypoints),\n 'name': route_name,\n }\n \n # Optional params \n if (request.args.get('max_height') != '' or request.args.get('max_height') != None):\n max_height_from_request = request.args.get('max_height')\n params['maxHeight'] = max_height_from_request\n\n if (request.args.get('max_width') != '' or request.args.get('max_width') != None):\n max_width_from_request = request.args.get('max_width')\n params['maxWeight'] = max_width_from_request\n\n if (request.args.get('preference') != '' or request.args.get('preference') != None):\n preference_from_request = request.args.get('preference')\n params['preference'] = preference_from_request\n\n # sending get request and saving the response as response object\n api_response = requests.post(url = URL, json = params)\n # extracting data in json format\n json_api_response = api_response.json()\n # Get features \n json_fearures_list = json_api_response[\"features\"]\n # Parsing to string\n features_list = json.dumps(json_fearures_list)\n # Returning string\n return features_list\n\[email protected]('/all')\ndef all_routes():\n json_routes_list = get_routes(API_BASE_URL)\n # Parsing to string\n routes_list = json.dumps(json_routes_list)\n # Returning string\n return routes_list\n\[email protected]('/route/named')\ndef named_route():\n route_id = request.args.get('route_link')\n target_url = API_BASE_URL+'/routes/'+route_id\n # sending get request and saving the response as response object\n api_response = requests.get(url = target_url)\n # extracting data in json format\n json_api_response = api_response.json()\n # Get features \n json_fearures_list = json_api_response[\"features\"]\n # Parsing to string\n features_list = json.dumps(json_fearures_list)\n # Returning string\n return features_list\n \n"
}
] | 2 |
petrblaho/cfme_tests | https://github.com/petrblaho/cfme_tests | f676e310e80c151730c1dc41f0cb64f5fb2ae288 | a8d2034b697a5e067f5aab45fe309f6317f880e8 | 2bb9d75f1eb8728caadad579c473ce25d9839cb4 | refs/heads/master | 2020-12-28T23:24:38.002141 | 2015-05-04T10:37:25 | 2015-05-04T10:37:25 | null | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.6864343881607056,
"alphanum_fraction": 0.6923646926879883,
"avg_line_length": 27.70212745666504,
"blob_id": "98c2f25db67b7f8dc7e5eebcef3f07423b93dcb7",
"content_id": "4a15600b5f74ffad8f59f685d3e7ec2f61454825",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1349,
"license_type": "no_license",
"max_line_length": 85,
"num_lines": 47,
"path": "/cfme/tests/cloud/test_tag_cloud.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "import pytest\n\nfrom cfme.web_ui import Quadicon, mixins\nfrom cfme.configure.configuration import Category, Tag\nfrom utils.providers import setup_a_provider\nfrom utils.randomness import generate_lowercase_random_string, generate_random_string\n\n\[email protected](scope=\"module\")\ndef setup_first_cloud_provider():\n setup_a_provider(prov_class=\"cloud\", validate=True, check_existing=True)\n\n\[email protected]_fixture(scope=\"module\")\ndef category():\n cg = Category(name=generate_lowercase_random_string(size=8),\n description=generate_random_string(size=32),\n display_name=generate_random_string(size=32))\n cg.create()\n yield cg\n cg.delete()\n\n\[email protected]_fixture(scope=\"module\")\ndef tag(category):\n tag = Tag(name=generate_lowercase_random_string(size=8),\n display_name=generate_random_string(size=32),\n category=category)\n tag.create()\n yield tag\n tag.delete()\n\n\ndef test_tag_provider(setup_first_cloud_provider, tag):\n \"\"\"Add a tag to a provider\n \"\"\"\n pytest.sel.force_navigate('clouds_providers')\n Quadicon.select_first_quad()\n mixins.add_tag(tag)\n\n\ndef test_tag_vm(setup_first_cloud_provider, tag):\n \"\"\"Add a tag to a vm\n \"\"\"\n pytest.sel.force_navigate('clouds_instances_by_provider')\n Quadicon.select_first_quad()\n mixins.add_tag(tag)\n"
},
{
"alpha_fraction": 0.7692307829856873,
"alphanum_fraction": 0.7692307829856873,
"avg_line_length": 29.66666603088379,
"blob_id": "00e222bdfef0d6f0e7575f3d9fbdbbc5e4935ed7",
"content_id": "ab1286214dfc5418b20bc553a8239665e4f3e9dd",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Ruby",
"length_bytes": 91,
"license_type": "no_license",
"max_line_length": 54,
"num_lines": 3,
"path": "/scripts/data/coverage/Gemfile.dev.rb",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "# goes into the rails root, next to the other Gemfiles\ngem \"simplecov\"\ngem \"simplecov-rcov\""
},
{
"alpha_fraction": 0.6908777952194214,
"alphanum_fraction": 0.6943201422691345,
"avg_line_length": 39.91549301147461,
"blob_id": "a032f7e31130223daf3574fc84c1d890ce024d68",
"content_id": "ae03b19038ea5046d8324158582bb67568327027",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 8715,
"license_type": "no_license",
"max_line_length": 100,
"num_lines": 213,
"path": "/cfme/tests/control/test_compliance.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\nimport re\n\nimport diaper\nimport pytest\n\nfrom cfme.configure.configuration import VMAnalysisProfile\nfrom cfme.control.explorer import (\n Action, VMCompliancePolicy, VMControlPolicy, VMCondition, PolicyProfile)\nfrom cfme.exceptions import VmNotFoundViaIP\nfrom cfme.infrastructure.virtual_machines import Vm\nfrom cfme.web_ui import flash, toolbar\nfrom fixtures.pytest_store import store\nfrom utils import testgen, version\nfrom utils.appliance import Appliance, provision_appliance\nfrom utils.log import logger\nfrom utils.randomness import generate_random_string\nfrom utils.update import update\nfrom utils.wait import wait_for\n\nPREFIX = \"test_compliance_\"\n\npytestmark = [\n # TODO: Problems with fleecing configuration - revisit later\n pytest.mark.ignore_stream(\"upstream\"),\n pytest.mark.meta(server_roles=[\"+automate\", \"+smartstate\", \"+smartproxy\"]),\n pytest.mark.usefixtures(\"provider_type\"),\n pytest.mark.uncollectif(lambda provider_type: provider_type in {\"scvmm\"}),\n]\n\n\ndef pytest_generate_tests(metafunc):\n argnames, argvalues, idlist = testgen.infra_providers(\n metafunc, \"vm_analysis\", require_fields=True)\n testgen.parametrize(metafunc, argnames, argvalues, ids=idlist, scope=\"module\")\n\n\ndef wait_for_ssa_enabled():\n wait_for(\n lambda: not toolbar.is_greyed('Configuration', 'Perform SmartState Analysis'),\n delay=10, handle_exception=True, num_sec=600, fail_func=lambda: toolbar.select(\"Reload\"))\n\n\[email protected]_fixture(scope=\"module\")\ndef compliance_vm(request, provider_key, provider_crud):\n try:\n ip_addr = re.findall(r'[0-9]+(?:\\.[0-9]+){3}', store.base_url)[0]\n appl_name = provider_crud.get_mgmt_system().get_vm_name_from_ip(ip_addr)\n appliance = Appliance(provider_key, appl_name)\n logger.info(\n \"The tested appliance ({}) is already on this provider ({}) so reusing it.\".format(\n appl_name, provider_key))\n appliance.configure_fleecing()\n vm = Vm(appl_name, provider_crud)\n except VmNotFoundViaIP:\n logger.info(\"Provisioning a new appliance on provider {}.\".format(provider_key))\n appliance = provision_appliance(\n vm_name_prefix=PREFIX + \"host_\",\n version=str(version.current_version()),\n provider_name=provider_key)\n request.addfinalizer(lambda: diaper(appliance.destroy))\n appliance.configure(setup_fleece=True)\n vm = Vm(appliance.vm_name, provider_crud)\n # Do the final touches\n with appliance.ipapp(browser_steal=True) as appl:\n appl.set_session_timeout(86400)\n provider_crud.refresh_provider_relationships()\n vm.wait_to_appear()\n vm.load_details()\n wait_for_ssa_enabled()\n yield vm\n\n\[email protected]_fixture(scope=\"module\")\ndef analysis_profile(compliance_vm):\n rand = generate_random_string()\n ap = VMAnalysisProfile(\n name=\"ap-{}\".format(rand), description=\"ap-desc-{}\".format(rand), files=[],\n categories=[\"check_software\"])\n with ap:\n yield ap\n\n\[email protected](scope=\"module\")\ndef fleecing_vm(\n request, compliance_vm, vm_analysis, provider_mgmt, provider_key, provider_crud,\n analysis_profile):\n logger.info(\"Provisioning an appliance for fleecing on {}\".format(provider_key))\n # TODO: When we get something smaller, use it!\n appliance = provision_appliance(\n vm_name_prefix=PREFIX + \"for_fleece_\",\n version=str(version.current_version()),\n provider_name=provider_key)\n request.addfinalizer(lambda: diaper(appliance.destroy))\n logger.info(\"Appliance {} provisioned\".format(appliance.vm_name))\n vm = Vm(appliance.vm_name, provider_crud)\n 
provider_crud.refresh_provider_relationships()\n vm.wait_to_appear()\n # Assign the analysis profile\n action = Action(\n \"Assign analysis profile {}\".format(analysis_profile.name),\n \"Assign Profile to Analysis Task\",\n dict(analysis_profile=analysis_profile.name))\n action.create()\n request.addfinalizer(action.delete)\n policy = VMControlPolicy(\"Analysis profile policy {}\".format(generate_random_string()))\n policy.create()\n request.addfinalizer(policy.delete)\n policy.assign_actions_to_event(\"VM Analysis Start\", action)\n analysis_pp = PolicyProfile(\n \"Analysis profile PP {}\".format(generate_random_string()),\n policies=[policy])\n analysis_pp.create()\n request.addfinalizer(analysis_pp.delete)\n vm.assign_policy_profiles(analysis_pp.description)\n request.addfinalizer(lambda: vm.unassign_policy_profiles(analysis_pp.description))\n return vm\n\n\ndef do_scan(vm):\n if vm.rediscover_if_analysis_data_present():\n # policy profile assignment is lost so reassign\n vm.assign_policy_profiles(*vm._assigned_pp)\n\n def _scan():\n return vm.get_detail(properties=(\"Lifecycle\", \"Last Analyzed\")).lower()\n original = _scan()\n vm.smartstate_scan(cancel=False, from_details=True)\n flash.assert_message_contain(\"Smart State Analysis initiated\")\n logger.info(\"Scan initiated\")\n wait_for(\n lambda: _scan() != original,\n num_sec=600, delay=5, fail_func=lambda: toolbar.select(\"Reload\"))\n logger.info(\"Scan finished\")\n\n\ndef test_check_package_presence(request, fleecing_vm, ssh_client, vm_analysis, analysis_profile):\n \"\"\"This test checks compliance by presence of a certain cfme-appliance package which is expected\n to be present on an appliance.\"\"\"\n # TODO: If we step out from provisioning a full appliance for fleecing, this might need revisit\n condition = VMCondition(\n \"Compliance testing condition {}\".format(generate_random_string(size=8)),\n expression=(\"fill_find(field=VM and Instance.Guest Applications : Name, \"\n \"skey=STARTS WITH, value=cfme-appliance, check=Check Count, ckey= = , cvalue=1)\")\n )\n request.addfinalizer(lambda: diaper(condition.delete))\n policy = VMCompliancePolicy(\"Compliance {}\".format(generate_random_string(size=8)))\n request.addfinalizer(lambda: diaper(policy.delete))\n policy.create()\n policy.assign_conditions(condition)\n profile = PolicyProfile(\n \"Compliance PP {}\".format(generate_random_string(size=8)),\n policies=[policy]\n )\n request.addfinalizer(lambda: diaper(profile.delete))\n profile.create()\n fleecing_vm.assign_policy_profiles(profile.description)\n request.addfinalizer(lambda: fleecing_vm.unassign_policy_profiles(profile.description))\n\n with update(analysis_profile):\n analysis_profile.categories = [\n \"check_services\", \"check_accounts\", \"check_software\", \"check_vmconfig\", \"check_system\"]\n\n do_scan(fleecing_vm)\n assert fleecing_vm.check_compliance_and_wait()\n\n\n##\n# File presence fleecing\[email protected](scope=\"function\")\ndef check_file_name():\n return \"/root/{}\".format(generate_random_string())\n\n\ndef test_check_files(request, fleecing_vm, ssh_client, check_file_name, analysis_profile):\n \"\"\"This test checks presence and contents of a certain file. 
First the non-compliance is\n enforced by not having the file, then the compliance is checked against existing file and\n it is expected to be compliant.\n \"\"\"\n contents = generate_random_string(size=12)\n condition = VMCondition(\n \"Compliance testing condition {}\".format(generate_random_string(size=8)),\n expression=(\"fill_find(VM and Instance.Files : Name, \"\n \"=, {}, Check Any, Contents, INCLUDES, {})\".format(check_file_name, contents))\n )\n request.addfinalizer(lambda: diaper(condition.delete))\n policy = VMCompliancePolicy(\"Compliance {}\".format(generate_random_string(size=8)))\n request.addfinalizer(lambda: diaper(policy.delete))\n policy.create()\n policy.assign_conditions(condition)\n profile = PolicyProfile(\n \"Compliance PP {}\".format(generate_random_string(size=8)),\n policies=[policy]\n )\n request.addfinalizer(lambda: diaper(profile.delete))\n profile.create()\n fleecing_vm.assign_policy_profiles(profile.description)\n request.addfinalizer(lambda: fleecing_vm.unassign_policy_profiles(profile.description))\n\n with update(analysis_profile):\n analysis_profile.files = [(\"/root/*\", True)]\n analysis_profile.categories = [\n \"check_services\", \"check_accounts\", \"check_software\", \"check_vmconfig\", \"check_system\"]\n\n # Non-compliant\n ssh_client.run_command(\"rm -f {}\".format(check_file_name))\n do_scan(fleecing_vm)\n assert not fleecing_vm.check_compliance_and_wait()\n\n # Compliant\n ssh_client.run_command(\"echo {} > {}\".format(contents, check_file_name))\n do_scan(fleecing_vm)\n assert fleecing_vm.check_compliance_and_wait()\n"
},
{
"alpha_fraction": 0.635604977607727,
"alphanum_fraction": 0.6447446942329407,
"avg_line_length": 37.128787994384766,
"blob_id": "1739f0281fa939d76ce004953601efb8ad09ee89",
"content_id": "980f67c26623708c60b11c56683e335953763fef",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5033,
"license_type": "no_license",
"max_line_length": 99,
"num_lines": 132,
"path": "/conftest.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "\"\"\"\nTop-level conftest.py does a couple of things:\n\n1) Add cfme_pages repo to the sys.path automatically\n2) Load a number of plugins and fixtures automatically\n\"\"\"\nfrom pkgutil import iter_modules\n\nimport pytest\nimport requests\n\nimport cfme.fixtures\nimport fixtures\nimport markers\nimport metaplugins\nfrom cfme.fixtures.rdb import Rdb\nfrom fixtures.pytest_store import store\nfrom utils.log import logger\nfrom utils.path import data_path\nfrom utils.net import net_check\nfrom utils.ssh import SSHClient\nfrom utils.version import current_version\nfrom utils.wait import TimedOutError\n\n\nclass _AppliancePoliceException(Exception):\n def __init__(self, message, port, *args, **kwargs):\n super(_AppliancePoliceException, self).__init__(message, port, *args, **kwargs)\n self.message = message\n self.port = port\n\n\[email protected]\ndef pytest_addoption(parser):\n # Create the cfme option group for use in other plugins\n parser.getgroup('cfme', 'cfme: options related to cfme/miq appliances')\n yield\n\n\[email protected](scope=\"session\", autouse=True)\ndef set_session_timeout():\n store.current_appliance.set_session_timeout(86400)\n\n\[email protected](scope=\"session\", autouse=True)\ndef set_default_domain():\n if current_version() < \"5.3\":\n return # Domains are not in 5.2.x and lower\n ssh_client = SSHClient()\n # The command ignores the case when the Default domain is not present (: true)\n result = ssh_client.run_rails_command(\n \"\\\"d = MiqAeDomain.where :name => 'Default'; puts (d) ? d.first.enabled : true\\\"\")\n if result.output.lower().strip() != \"true\":\n # Re-enable the domain\n ssh_client.run_rails_command(\n \"\\\"d = MiqAeDomain.where :name => 'Default'; d = d.first; d.enabled = true; d.save!\\\"\")\n\n\[email protected](scope=\"session\", autouse=True)\ndef fix_merkyl_workaround():\n \"\"\"Workaround around merkyl not opening an iptables port for communication\"\"\"\n ssh_client = SSHClient()\n if ssh_client.run_command('test -f /etc/init.d/merkyl').rc == 0:\n logger.info('Rudely overwriting merkyl init.d on appliance;')\n local_file = data_path.join(\"bundles\").join(\"merkyl\").join(\"merkyl\")\n remote_file = \"/etc/init.d/merkyl\"\n ssh_client.put_file(local_file.strpath, remote_file)\n ssh_client.run_command(\"service merkyl restart\")\n\n\[email protected](autouse=True, scope=\"function\")\ndef appliance_police():\n if not store.slave_manager:\n return\n try:\n ports = {'ssh': 22, 'https': 443, 'postgres': 5432}\n port_results = {pn: net_check(pp, force=True) for pn, pp in ports.items()}\n for port, result in port_results.items():\n if not result:\n raise _AppliancePoliceException('Port {} was not contactable'.format(port), port)\n status_code = requests.get(store.current_appliance.url, verify=False,\n timeout=60).status_code\n if status_code != 200:\n raise _AppliancePoliceException('Status code was {}, should be 200'.format(\n status_code), port)\n return\n except _AppliancePoliceException as e:\n # special handling for known failure conditions\n if e.port == 443:\n # if the web ui worker merely crashed, give it 15 minutes\n # to come back up\n try:\n store.current_appliance.wait_for_web_ui(900)\n return\n except TimedOutError:\n # the UI didn't come back up after 15 minutes, and is\n # probably frozen; kill it and restart\n # fortunately we already check SSH is working...\n store.current_appliance.restart_evm_service(900, rude=True)\n\n # take another shot at letting the web UI come up\n try:\n 
store.current_appliance.wait_for_web_ui(900)\n return\n except TimedOutError:\n # so much for that\n pass\n e_message = e.message\n except Exception as e:\n e_message = e.args[0]\n\n # Regardles of the exception raised, we didn't return anywhere above\n # time to call a human\n msg = 'Help! My appliance {} crashed with: {}'.format(store.current_appliance.url, e_message)\n store.slave_manager.message(msg)\n Rdb(msg).set_trace(**{\n 'subject': 'RDB Breakpoint: Appliance failure',\n 'recipients': ['[email protected]', '[email protected]'],\n })\n store.slave_manager.message('Resuming testing following remote debugging')\n\n\ndef _pytest_plugins_generator(*extension_pkgs):\n # Finds all submodules in pytest extension packages and loads them\n for extension_pkg in extension_pkgs:\n path = extension_pkg.__path__\n prefix = '%s.' % extension_pkg.__name__\n for importer, modname, is_package in iter_modules(path, prefix):\n yield modname\n\npytest_plugins = tuple(_pytest_plugins_generator(fixtures, markers, cfme.fixtures, metaplugins))\ncollect_ignore = [\"tests/scenarios\"]\n"
},
{
"alpha_fraction": 0.6596009731292725,
"alphanum_fraction": 0.6613466143608093,
"avg_line_length": 34.486724853515625,
"blob_id": "87751ef5cee2ac1ee67dc6a99d531437a79a2440",
"content_id": "f5a79508ef3b5e10580ce14bbae77f5e304bb491",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4010,
"license_type": "no_license",
"max_line_length": 98,
"num_lines": 113,
"path": "/scripts/template_tester.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n\"\"\"Template tester script, used to test and mark template as usable.\n\nget:\n Export bash vars to be eval'd for template testing with the jenkins runner\n\nlatest:\n Export bash vars to be eval'd for getting the latest usable template\n\nmark:\n Mark a template as tested and, if it passes, usable.\n\n\"\"\"\nimport os\nimport sys\n\nfrom utils import trackerbot\nfrom utils.log import logger\n\n\ndef get(api):\n try:\n template, provider_key, stream = trackerbot.templates_to_test(api, limit=1)[0]\n except (IndexError, TypeError):\n # No untested providertemplates, all is well\n return 0\n\n # Print envvar exports to be eval'd\n export(\n appliance_template=template,\n provider_key=provider_key,\n stream=stream\n )\n\n\ndef latest(api, stream, provider_key=None):\n try:\n if provider_key:\n prov = api.provider(provider_key).get()\n res = prov['latest_templates'][stream]\n else:\n res = api.group(stream).get()\n except IndexError:\n # No templates in stream\n return 1\n\n export(\n appliance_template=res['latest_template'],\n provider_keys=' '.join(res['latest_template_providers'])\n )\n\n\ndef export(**env_vars):\n for varname, value in env_vars.items():\n print 'export %s=\"%s\";' % (varname, value)\n print \"# to import these into your bash environment: eval $(%s)\" % ' '.join(sys.argv)\n\n\ndef mark(api, provider_key, template, usable, diagnose):\n # set some defaults\n diagnosis = None\n build_number = None\n if not usable:\n build_number = os.environ.get('BUILD_NUMBER', None)\n if diagnose:\n # diagnose will return None on a usable appliance, so don't bother\n from utils.appliance import IPAppliance\n ipa = IPAppliance()\n diagnosis = ipa.diagnose_evm_failure()\n if diagnosis:\n logger.error('Appliance failed: {}'.format(diagnosis.split(os.linesep)[0]))\n trackerbot.mark_provider_template(api, provider_key, template, tested=True, usable=usable,\n diagnosis=diagnosis, build_number=build_number)\n\n\ndef retest(api, provider_key, template):\n trackerbot.mark_provider_template(api, provider_key, template, tested=False)\n\nif __name__ == '__main__':\n parser = trackerbot.cmdline_parser()\n subs = parser.add_subparsers(title='commands', dest='command')\n\n parse_get = subs.add_parser('get', help='get a template to test')\n parse_get.set_defaults(func=get)\n\n parse_latest = subs.add_parser('latest', help='get the latest usable template for a provider')\n parse_latest.set_defaults(func=latest)\n parse_latest.add_argument('stream', help='template stream (e.g. 
upstream, downstream-52z')\n parse_latest.add_argument('provider_key', nargs='?', default=None)\n\n parse_mark = subs.add_parser('mark', help='mark a tested template')\n parse_mark.set_defaults(func=mark)\n parse_mark.add_argument('provider_key')\n parse_mark.add_argument('template')\n parse_mark.add_argument('-n', '--not-usable', dest='usable', action='store_false',\n default=True, help='mark template as not usable (templates are marked usable by default')\n parse_mark.add_argument('-d', '--diagnose', dest='diagnose', action='store_true',\n default=False, help='attempt to diagnose an unusable template and submit the result')\n\n parse_retest = subs.add_parser('retest', help='flag a tested template for retesting')\n parse_retest.set_defaults(func=retest)\n parse_retest.add_argument('provider_key')\n parse_retest.add_argument('template')\n\n args = parser.parse_args()\n api = trackerbot.api(args.trackerbot_url)\n func_map = {\n get: lambda: get(api),\n latest: lambda: latest(api, args.stream, args.provider_key),\n mark: lambda: mark(api, args.provider_key, args.template, args.usable, args.diagnose),\n retest: lambda: retest(api, args.provider_key, args.template),\n }\n sys.exit(func_map[args.func]())\n"
},
{
"alpha_fraction": 0.6627393364906311,
"alphanum_fraction": 0.6656848192214966,
"avg_line_length": 27.29166603088379,
"blob_id": "eb67553a36e801f558ae666202c55a91e238e2ac",
"content_id": "f329f6f0f5f3bcd32c431159a3be3c218653baaa",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 679,
"license_type": "no_license",
"max_line_length": 81,
"num_lines": 24,
"path": "/cfme/tests/cloud/test_tenant.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "import pytest\n\nfrom cfme.cloud.tenant import Tenant\nfrom utils import testgen\nfrom utils.randomness import generate_random_string\n\npytest_generate_tests = testgen.generate(testgen.provider_by_type, ['openstack'],\n scope='module')\n\n\[email protected]\ndef tenant(provider_key):\n return Tenant(name=generate_random_string(size=8),\n description=generate_random_string(size=8),\n provider_key=provider_key)\n\n\ndef test_tenant(provider_mgmt, tenant, provider_key):\n \"\"\" Tests tenant (currently disabled)\n\n Metadata:\n test_flag: tenant\n \"\"\"\n print tenant.name, tenant.description, provider_key\n"
},
{
"alpha_fraction": 0.5934139490127563,
"alphanum_fraction": 0.5971102118492126,
"avg_line_length": 28.760000228881836,
"blob_id": "1d6a102b2e5ea0d15b010b1999a09c1a00338f53",
"content_id": "f2bdfde0fdbfe93ef74bdfdb7b38a6d2707d747b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2976,
"license_type": "no_license",
"max_line_length": 78,
"num_lines": 100,
"path": "/sprout/sprout/__init__.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\nfrom __future__ import absolute_import\nfrom contextlib import contextmanager\ntry:\n import cPickle as pickle\nexcept ImportError:\n import pickle\nfrom .celery import app as celery_app\nassert celery_app\n\nfrom django.core.cache import cache\nfrom sprout import settings\nfrom redis import StrictRedis\nfrom utils.wait import wait_for\n\nredis_client = StrictRedis(**settings.GENERAL_REDIS)\n\n\nCRITICAL_SECTION_LOCK_TIME = 60\n\n\n@contextmanager\ndef critical_section(name):\n wait_for(\n cache.add,\n [\"lock-{}\".format(name), 'true', CRITICAL_SECTION_LOCK_TIME],\n delay=0.3, num_sec=2 * CRITICAL_SECTION_LOCK_TIME)\n try:\n yield\n finally:\n cache.delete(\"lock-{}\".format(name))\n\n\nclass RedisWrapper(object):\n LOCK_EXPIRE = 60\n\n def __init__(self, client):\n self.client = client\n\n def _set(self, key, value, *args, **kwargs):\n return self.client.set(str(key), pickle.dumps(value), *args, **kwargs)\n\n def _get(self, key, *args, **kwargs):\n default = kwargs.pop(\"default\", None)\n result = self.client.get(str(key), *args, **kwargs)\n if result is None:\n return default\n return pickle.loads(result)\n\n @contextmanager\n def atomic(self):\n wait_for(\n cache.add,\n [\"redis-atomic\", 'true', self.LOCK_EXPIRE],\n delay=0.3, num_sec=2 * self.LOCK_EXPIRE)\n try:\n yield self\n finally:\n cache.delete(\"redis-atomic\")\n\n def set(self, key, value, *args, **kwargs):\n with self.atomic():\n return self._set(key, value, *args, **kwargs)\n\n def get(self, key, *args, **kwargs):\n with self.atomic():\n return self._get(key, *args, **kwargs)\n\n def delete(self, key, *args, **kwargs):\n with self.atomic():\n return self.client.delete(key, *args, **kwargs)\n\n @contextmanager\n def appliances_ignored_when_renaming(self, *appliances):\n with self.atomic() as client:\n ignored_appliances = client._get(\"renaming_appliances\")\n if ignored_appliances is None:\n ignored_appliances = set([])\n for appliance in appliances:\n ignored_appliances.add(appliance)\n client._set(\"renaming_appliances\", ignored_appliances)\n yield\n with self.atomic() as client:\n ignored_appliances = client._get(\"renaming_appliances\")\n if ignored_appliances is None:\n ignored_appliances = set([])\n for appliance in appliances:\n try:\n ignored_appliances.remove(appliance)\n except KeyError:\n # Something worng happened, ignore\n pass\n client._set(\"renaming_appliances\", ignored_appliances)\n\n @property\n def renaming_appliances(self):\n return self.get(\"renaming_appliances\") or set([])\n\n\nredis = RedisWrapper(redis_client)\n"
},
{
"alpha_fraction": 0.5432825684547424,
"alphanum_fraction": 0.5441715717315674,
"avg_line_length": 41.24882507324219,
"blob_id": "ddb552ddd0fdc485694d9e2b9295e45ca2bd816a",
"content_id": "a46bdb08bdbc265c180d74aca5c04cd91e04f656",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 8999,
"license_type": "no_license",
"max_line_length": 77,
"num_lines": 213,
"path": "/cfme/tests/automate/test_service_dialog.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "import pytest\nimport utils.randomness as rand\nfrom utils.update import update\nfrom utils import error, version\nfrom cfme.automate.service_dialogs import ServiceDialog\n\n\npytestmark = [pytest.mark.usefixtures(\"logged_in\")]\n\n\ndef test_create_service_dialog():\n element_data = {\n 'ele_label': \"ele_\" + rand.generate_random_string(),\n 'ele_name': rand.generate_random_string(),\n 'ele_desc': rand.generate_random_string(),\n 'choose_type': \"Text Box\",\n 'default_text_box': \"Default text\"\n }\n dialog = ServiceDialog(label=rand.generate_random_string(),\n description=\"my dialog\", submit=True, cancel=True,\n tab_label=\"tab_\" + rand.generate_random_string(),\n tab_desc=\"my tab desc\",\n box_label=\"box_\" + rand.generate_random_string(),\n box_desc=\"my box desc\")\n dialog.create(element_data)\n\n\ndef test_update_service_dialog():\n element_data = {\n 'ele_label': \"ele_\" + rand.generate_random_string(),\n 'ele_name': rand.generate_random_string(),\n 'ele_desc': rand.generate_random_string(),\n 'choose_type': \"Text Box\",\n 'default_text_box': \"Default text\"\n }\n dialog = ServiceDialog(label=rand.generate_random_string(),\n description=\"my dialog\", submit=True, cancel=True,\n tab_label=\"tab_\" + rand.generate_random_string(),\n tab_desc=\"my tab desc\",\n box_label=\"box_\" + rand.generate_random_string(),\n box_desc=\"my box desc\")\n dialog.create(element_data)\n with update(dialog):\n dialog.description = \"my edited description\"\n\n\ndef test_delete_service_dialog():\n element_data = {\n 'ele_label': \"ele_\" + rand.generate_random_string(),\n 'ele_name': rand.generate_random_string(),\n 'ele_desc': rand.generate_random_string(),\n 'choose_type': \"Text Box\",\n 'default_text_box': \"Default text\"\n }\n dialog = ServiceDialog(label=rand.generate_random_string(),\n description=\"my dialog\", submit=True, cancel=True,\n tab_label=\"tab_\" + rand.generate_random_string(),\n tab_desc=\"my tab desc\",\n box_label=\"box_\" + rand.generate_random_string(),\n box_desc=\"my box desc\")\n dialog.create(element_data)\n dialog.delete()\n\n\ndef test_service_dialog_duplicate_name():\n element_data = {\n 'ele_label': \"ele_\" + rand.generate_random_string(),\n 'ele_name': rand.generate_random_string(),\n 'ele_desc': rand.generate_random_string(),\n 'choose_type': \"Text Box\",\n 'default_text_box': \"Default text\"\n }\n dialog = ServiceDialog(label=rand.generate_random_string(),\n description=\"my dialog\", submit=True, cancel=True,\n tab_label=\"tab_\" + rand.generate_random_string(),\n tab_desc=\"my tab desc\",\n box_label=\"box_\" + rand.generate_random_string(),\n box_desc=\"my box desc\")\n dialog.create(element_data)\n error_msg = version.pick({\n version.LOWEST: \"Dialog Label has already been taken\",\n '5.3': \"Label has already been taken\"\n })\n with error.expected(error_msg):\n dialog.create(element_data)\n\n\ndef test_checkbox_dialog_element():\n element_data = {\n 'ele_label': \"ele_\" + rand.generate_random_string(),\n 'ele_name': rand.generate_random_string(),\n 'ele_desc': rand.generate_random_string(),\n 'choose_type': \"Check Box\",\n 'default_text_box': True,\n 'field_required': True\n }\n dialog = ServiceDialog(label=rand.generate_random_string(),\n description=\"my dialog\", submit=True, cancel=True,\n tab_label=\"tab_\" + rand.generate_random_string(),\n tab_desc=\"my tab desc\",\n box_label=\"box_\" + rand.generate_random_string(),\n box_desc=\"my box desc\")\n dialog.create(element_data)\n\n\ndef test_datecontrol_dialog_element():\n 
element_data = {\n 'ele_label': \"ele_\" + rand.generate_random_string(),\n 'ele_name': rand.generate_random_string(),\n 'ele_desc': rand.generate_random_string(),\n 'choose_type': \"Date Control\",\n 'field_past_dates': True\n }\n dialog = ServiceDialog(label=rand.generate_random_string(),\n description=\"my dialog\", submit=True, cancel=True,\n tab_label=\"tab_\" + rand.generate_random_string(),\n tab_desc=\"my tab desc\",\n box_label=\"box_\" + rand.generate_random_string(),\n box_desc=\"my box desc\")\n dialog.create(element_data)\n\n\ndef test_dropdownlist_dialog_element():\n element_data = {\n 'ele_label': \"ele_\" + rand.generate_random_string(),\n 'ele_name': rand.generate_random_string(),\n 'ele_desc': rand.generate_random_string(),\n 'choose_type': \"Drop Down List\"\n }\n dialog = ServiceDialog(label=rand.generate_random_string(),\n description=\"my dialog\", submit=True, cancel=True,\n tab_label=\"tab_\" + rand.generate_random_string(),\n tab_desc=\"my tab desc\",\n box_label=\"box_\" + rand.generate_random_string(),\n box_desc=\"my box desc\")\n dialog.create(element_data)\n\n\ndef test_radiobutton_dialog_element():\n element_data = {\n 'ele_label': \"ele_\" + rand.generate_random_string(),\n 'ele_name': rand.generate_random_string(),\n 'ele_desc': rand.generate_random_string(),\n 'choose_type': \"Radio Button\"\n }\n dialog = ServiceDialog(label=rand.generate_random_string(),\n description=\"my dialog\", submit=True, cancel=True,\n tab_label=\"tab_\" + rand.generate_random_string(),\n tab_desc=\"my tab desc\",\n box_label=\"box_\" + rand.generate_random_string(),\n box_desc=\"my box desc\")\n dialog.create(element_data)\n\n\ndef test_tagcontrol_dialog_element():\n element_data = {\n 'ele_label': \"ele_\" + rand.generate_random_string(),\n 'ele_name': rand.generate_random_string(),\n 'ele_desc': rand.generate_random_string(),\n 'choose_type': \"Tag Control\",\n 'field_category': \"Service Level\",\n 'field_required': True\n }\n dialog = ServiceDialog(label=rand.generate_random_string(),\n description=\"my dialog\", submit=True, cancel=True,\n tab_label=\"tab_\" + rand.generate_random_string(),\n tab_desc=\"my tab desc\",\n box_label=\"box_\" + rand.generate_random_string(),\n box_desc=\"my box desc\")\n dialog.create(element_data)\n\n\ndef test_textareabox_dialog_element():\n element_data = {\n 'ele_label': \"ele_\" + rand.generate_random_string(),\n 'ele_name': rand.generate_random_string(),\n 'ele_desc': rand.generate_random_string(),\n 'choose_type': \"Text Area Box\",\n 'field_required': True\n }\n dialog = ServiceDialog(label=rand.generate_random_string(),\n description=\"my dialog\", submit=True, cancel=True,\n tab_label=\"tab_\" + rand.generate_random_string(),\n tab_desc=\"my tab desc\",\n box_label=\"box_\" + rand.generate_random_string(),\n box_desc=\"my box desc\")\n dialog.create(element_data)\n\n\ndef test_reorder_elements():\n element_1_data = {\n 'ele_label': \"ele_\" + rand.generate_random_string(),\n 'ele_name': rand.generate_random_string(),\n 'ele_desc': rand.generate_random_string(),\n 'choose_type': \"Text Box\",\n 'default_text_box': \"Default text\"\n }\n element_2_data = {\n 'ele_label': \"ele_\" + rand.generate_random_string(),\n 'ele_name': rand.generate_random_string(),\n 'ele_desc': rand.generate_random_string(),\n 'choose_type': \"Check Box\",\n 'default_text_box': True,\n 'field_required': True\n }\n dialog = ServiceDialog(label=rand.generate_random_string(),\n description=\"my dialog\", submit=True, cancel=True,\n tab_label=\"tab_\" + 
rand.generate_random_string(),\n tab_desc=\"my tab desc\",\n box_label=\"box_\" + rand.generate_random_string(),\n box_desc=\"my box desc\")\n dialog.create(element_1_data, element_2_data)\n dialog.reorder_elements(dialog.box_label, element_1_data, element_2_data)\n"
},
{
"alpha_fraction": 0.593307375907898,
"alphanum_fraction": 0.5950779318809509,
"avg_line_length": 36.157894134521484,
"blob_id": "d5d1acfe150d42c33696b28abc2652079afa1ff8",
"content_id": "3025f0a1138099574ae33bf23a894a9139fff94f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5648,
"license_type": "no_license",
"max_line_length": 97,
"num_lines": 152,
"path": "/cfme/services/catalogs/myservice.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "from functools import partial\nfrom cfme import web_ui as ui\nfrom cfme.fixtures import pytest_selenium as sel\nfrom cfme.web_ui import accordion, menu, flash, Quadicon, Region, Form, fill, form_buttons\nfrom cfme.web_ui import toolbar as tb\nfrom utils.update import Updateable\nfrom utils.wait import wait_for\n\nlifecycle_btn = partial(tb.select, \"Lifecycle\")\nreload_func = partial(tb.select, \"Reload current display\")\nmy_service_tree = partial(accordion.tree, \"Services\")\ndetails_page = Region(infoblock_type='detail')\ncfg_btn = partial(tb.select, \"Configuration\")\npolicy_btn = partial(tb.select, \"Policy\")\n\n\nretirement_form = Form(\n fields=[\n ('retirement_date', ui.Calendar('miq_date_1')),\n ('retirement_warning', ui.Select(\"select#retirement_warn\"))\n ])\n\nedit_service_form = Form(\n fields=[\n (\"name\", ui.Input(\"name\")),\n (\"description\", ui.Input(\"description\"))\n ])\n\nset_ownership_form = Form(\n fields=[\n (\"select_owner\", ui.Select(\"select#user_name\")),\n (\"select_group\", ui.Select(\"select#group_name\"))\n ])\n\nedit_tags_form = Form(\n fields=[\n (\"select_value\", ui.Select(\"select#tag_add\"))\n ])\n\nmenu.nav.add_branch(\n 'my_services',\n {\n 'service':\n [\n lambda ctx: my_service_tree('All Services', ctx['service_name']),\n {\n 'retire_service_on_date': menu.nav.partial(lifecycle_btn, \"Set Retirement Date\"),\n 'edit_service': menu.nav.partial(cfg_btn, \"Edit this Service\"),\n 'service_set_ownership': menu.nav.partial(cfg_btn, \"Set Ownership\"),\n 'service_edit_tags': menu.nav.partial(policy_btn, \"Edit Tags\")\n }\n ]\n }\n)\n\n\nclass MyService(Updateable):\n \"\"\"Create,Edit and Delete Button Groups\n\n Args:\n service_name: The name of service to retire.\n vm_name: Name of vm in the service.\n retirement_date: Date to retire service.\n \"\"\"\n\n def __init__(self, service_name, vm_name):\n self.service_name = service_name\n self.vm_name = vm_name\n\n def get_detail(self, properties=None):\n \"\"\" Gets details from the details infoblock\n\n Args:\n *ident: An InfoBlock title, followed by the Key name\n e.g. 
\"Relationships\", \"Images\"\n Returns: A string representing the contents of the InfoBlock's value.\n \"\"\"\n return details_page.infoblock.text(*properties)\n\n def retire(self):\n sel.force_navigate('service',\n context={'service_name': self.service_name})\n lifecycle_btn(\"Retire this Service\", invokes_alert=True)\n sel.handle_alert()\n flash.assert_success_message('Retirement initiated for 1 Service from the CFME Database')\n wait_time_min = 1\n quadicon = Quadicon(self.vm_name, \"vm\")\n sel.click(quadicon)\n detail_t = (\"Power Management\", \"Power State\")\n wait_for(\n lambda: self.get_detail(properties=detail_t) == \"off\",\n fail_func=reload_func,\n num_sec=wait_time_min * 120,\n message=\"wait for service to retire\"\n )\n assert(self.get_detail(properties=detail_t) == \"off\")\n\n def retire_on_date(self, retirement_date):\n sel.force_navigate('retire_service_on_date',\n context={'service_name': self.service_name})\n fill(retirement_form, {'retirement_date': retirement_date},\n action=form_buttons.save)\n wait_time_min = 1\n quadicon = Quadicon(self.vm_name, \"vm\")\n sel.click(quadicon)\n detail_t = (\"Power Management\", \"Power State\")\n wait_for(\n lambda: self.get_detail(properties=detail_t) == \"off\",\n fail_func=reload_func,\n num_sec=wait_time_min * 120,\n message=\"wait for service to retire\"\n )\n assert(self.get_detail(properties=detail_t) == \"off\")\n\n def update(self, name, description):\n sel.force_navigate('edit_service',\n context={'service_name': self.service_name})\n edited_name = self.service_name + \"_\" + name\n fill(edit_service_form, {'name': edited_name,\n 'description': description},\n action=form_buttons.save)\n flash.assert_success_message('Service \"{}\" was saved'.format(edited_name))\n\n def delete(self, name):\n sel.force_navigate('service',\n context={'service_name': name})\n cfg_btn(\"Remove Service from the VMDB\", invokes_alert=True)\n sel.handle_alert()\n flash.assert_success_message('Service \"{}\": Delete successful'.format(name))\n\n def set_ownership(self, owner, group):\n sel.force_navigate('service_set_ownership',\n context={'service_name': self.service_name})\n fill(set_ownership_form, {'select_owner': owner,\n 'select_group': group},\n action=form_buttons.save)\n flash.assert_success_message('Ownership saved for selected Service')\n\n def edit_tags(self, value):\n sel.force_navigate('service_edit_tags',\n context={'service_name': self.service_name})\n fill(edit_tags_form, {'select_value': value},\n action=form_buttons.save)\n flash.assert_success_message('Tag edits were successfully saved')\n\n def check_vm_add(self, add_vm_name):\n sel.force_navigate('service',\n context={'service_name': self.service_name})\n\n quadicon = Quadicon(add_vm_name, \"vm\")\n sel.click(quadicon)\n flash.assert_no_errors()\n"
},
{
"alpha_fraction": 0.6698084473609924,
"alphanum_fraction": 0.6813620924949646,
"avg_line_length": 32.22222137451172,
"blob_id": "e6c44c29989d12fd6ad901a806528636adf5659d",
"content_id": "e6c9d40efc4d0025df95c218c0efe4269b578cb6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3289,
"license_type": "no_license",
"max_line_length": 83,
"num_lines": 99,
"path": "/cfme/tests/infrastructure/test_customization_template.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "import pytest\n\nfrom cfme.infrastructure import pxe\nfrom utils import error\nfrom utils.blockers import BZ\nfrom utils.randomness import generate_random_string\nfrom utils.update import update\n\npytestmark = [pytest.mark.usefixtures(\"logged_in\")]\n\n\ndef test_customization_template_crud():\n \"\"\"Basic CRUD test for customization templates.\"\"\"\n template_crud = pxe.CustomizationTemplate(\n name=generate_random_string(size=8),\n description=generate_random_string(size=16),\n image_type='RHEL-6',\n script_type='Kickstart',\n script_data='Testing the script')\n\n template_crud.create()\n with update(template_crud):\n template_crud.name = template_crud.name + \"_update\"\n template_crud.delete(cancel=False)\n\n\ndef test_name_required_error_validation():\n \"\"\"Test to validate name in customization templates.\"\"\"\n template_name = pxe.CustomizationTemplate(\n name=None,\n description=generate_random_string(size=16),\n image_type='RHEL-6',\n script_type='Kickstart',\n script_data='Testing the script')\n\n with error.expected('Name is required'):\n template_name.create()\n\n\ndef test_type_required_error_validation():\n \"\"\"Test to validate type in customization templates.\"\"\"\n template_name = pxe.CustomizationTemplate(\n name=generate_random_string(size=8),\n description=generate_random_string(size=16),\n image_type='RHEL-6',\n script_type='<Choose>',\n script_data='Testing the script')\n\n with error.expected('Type is required'):\n template_name.create()\n\n\ndef test_pxe_image_type_required_error_validation():\n \"\"\"Test to validate pxe image type in customization templates.\"\"\"\n template_name = pxe.CustomizationTemplate(\n name=generate_random_string(size=8),\n description=generate_random_string(size=16),\n image_type='<Choose>',\n script_type='Kickstart',\n script_data='Testing the script')\n\n with error.expected(\"Pxe_image_type can't be blank\"):\n template_name.create()\n\n\[email protected](\n blockers=[\n BZ(1092951, ignore_bugs=[1083198])\n ]\n)\ndef test_duplicate_name_error_validation():\n \"\"\"Test to validate duplication in customization templates.\"\"\"\n template_name = pxe.CustomizationTemplate(\n name=generate_random_string(size=8),\n description=generate_random_string(size=16),\n image_type='RHEL-6',\n script_type='Kickstart',\n script_data='Testing the script')\n\n template_name.create()\n with error.expected('Name has already been taken'):\n template_name.create()\n template_name.delete(cancel=False)\n\n\[email protected](message='http://cfme-tests.readthedocs.org/guides/gotchas.html#'\n 'selenium-is-not-clicking-on-the-element-it-says-it-is')\ndef test_name_max_character_validation():\n \"\"\"Test to validate name with maximum characters in customization templates.\"\"\"\n template_name = pxe.CustomizationTemplate(\n name=generate_random_string(size=256),\n description=generate_random_string(size=16),\n image_type='RHEL-6',\n script_type='Kickstart',\n script_data='Testing the script')\n\n with error.expected('Name is required'):\n template_name.create()\n template_name.delete(cancel=False)\n"
},
{
"alpha_fraction": 0.6888349652290344,
"alphanum_fraction": 0.7038834691047668,
"avg_line_length": 30.676923751831055,
"blob_id": "5983b86ca2ea25ea1b21dff1b84e5e7d4f21871c",
"content_id": "c3eb071fdbc1fa64869be0dd9c3f4e6274667695",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2060,
"license_type": "no_license",
"max_line_length": 97,
"num_lines": 65,
"path": "/cfme/tests/automate/test_namespace.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "\n# -*- coding: utf-8 -*-\n# pylint: disable=E1101\n# pylint: disable=W0621\nimport pytest\n\nfrom cfme.automate.explorer import Namespace\nfrom utils.providers import setup_a_provider\nfrom utils.randomness import generate_random_string\nfrom utils.update import update\nfrom utils import version\nimport utils.error as error\nimport cfme.tests.configure.test_access_control as tac\nimport cfme.tests.automate as ta\n\npytestmark = [pytest.mark.usefixtures(\"logged_in\")]\n\n\[email protected](\n scope=\"function\",\n params=[ta.a_namespace, ta.a_namespace_with_path])\ndef namespace(request):\n # don't test with existing paths on upstream (there aren't any)\n if request.param is ta.a_namespace_with_path and version.current_version() == version.LATEST:\n pytest.skip(\"don't test with existing paths on upstream (there aren't any)\")\n return request.param()\n\n\[email protected]\ndef setup_single_provider():\n setup_a_provider()\n\n\ndef test_namespace_crud(namespace):\n namespace.create()\n old_name = namespace.name\n with update(namespace):\n namespace.name = generate_random_string(8)\n with update(namespace):\n namespace.name = old_name\n namespace.delete()\n assert not namespace.exists()\n\n\ndef test_add_delete_namespace_nested(namespace):\n namespace.create()\n nested_ns = Namespace(name=\"Nested\", parent=namespace)\n nested_ns.create()\n namespace.delete()\n assert not nested_ns.exists()\n\n\[email protected](blockers=[1136518])\ndef test_duplicate_namespace_disallowed(namespace):\n namespace.create()\n with error.expected(\"Name has already been taken\"):\n namespace.create()\n\n\n# provider needed as workaround for bz1035399\[email protected](blockers=[1140331])\ndef test_permissions_namespace_crud(setup_single_provider):\n \"\"\" Tests that a namespace can be manipulated only with the right permissions\"\"\"\n tac.single_task_permission_test([['Automate', 'Explorer']],\n {'Namespace CRUD':\n lambda: test_namespace_crud(ta.a_namespace())})\n"
},
{
"alpha_fraction": 0.7101123332977295,
"alphanum_fraction": 0.7161048650741577,
"avg_line_length": 27.404254913330078,
"blob_id": "ad0fb2d47d652223c306bda3054f4d928264e9ad",
"content_id": "20248dbe8cabbe6270c8b3f9bd7b8334077343cd",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1335,
"license_type": "no_license",
"max_line_length": 59,
"num_lines": 47,
"path": "/cfme/tests/automate/test_instance.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "import pytest\nfrom utils.randomness import generate_random_string\nfrom utils.update import update\nimport utils.error as error\nimport cfme.tests.automate as ta\n\npytestmark = [pytest.mark.usefixtures(\"logged_in\")]\n\n\[email protected](scope='module')\ndef make_class(request):\n return ta.make_class(request=request)\n\n\[email protected](scope=\"function\")\ndef an_instance(request, make_class):\n return ta.an_instance(make_class, request=request)\n\n\ndef test_instance_crud(an_instance):\n an_instance.create()\n origname = an_instance.name\n with update(an_instance):\n an_instance.name = generate_random_string(8)\n an_instance.description = \"updated\"\n with update(an_instance):\n an_instance.name = origname\n an_instance.delete()\n assert not an_instance.exists()\n\n\ndef test_duplicate_disallowed(an_instance):\n an_instance.create()\n with error.expected(\"Name has already been taken\"):\n an_instance.create()\n\n\[email protected](blockers=[1148541])\ndef test_display_name_unset_from_ui(request, an_instance):\n an_instance.create()\n request.addfinalizer(an_instance.delete)\n with update(an_instance):\n an_instance.display_name = generate_random_string()\n assert an_instance.exists\n with update(an_instance):\n an_instance.display_name = \"\"\n assert an_instance.exists\n"
},
{
"alpha_fraction": 0.6720400452613831,
"alphanum_fraction": 0.6764869093894958,
"avg_line_length": 31.709091186523438,
"blob_id": "224ce81d46ed8d24fd4ef5591325f4e20e6622cf",
"content_id": "3f62bc6c517d767f1cc5f8881fba429e22968b40",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1799,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 55,
"path": "/cfme/tests/configure/test_zones.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\nimport pytest\n\nimport cfme.web_ui.flash as flash\nimport cfme.configure.configuration as conf\nfrom utils.randomness import generate_random_string\nfrom utils.update import update\n\n\[email protected]\ndef test_zone_crud(soft_assert):\n zone = conf.Zone(\n name=generate_random_string(size=5),\n description=generate_random_string(size=8))\n # CREATE\n zone.create()\n soft_assert(zone.exists, \"The zone {} does not exist!\".format(\n zone.description\n ))\n # UPDATE\n old_desc = zone.description\n with update(zone):\n zone.description = generate_random_string(size=8)\n soft_assert(zone.exists and (old_desc != zone.description),\n \"The zone {} was not updated!\".format(\n zone.description\n ))\n # DELETE\n zone.delete()\n soft_assert(not zone.exists, \"The zone {} exists!\".format(\n zone.description\n ))\n\n\ndef test_zone_add_cancel_validation():\n zone = conf.Zone(\n name=generate_random_string(size=5),\n description=generate_random_string(size=8))\n zone.create(cancel=True)\n flash.assert_message_match('Add of new Miq Zone was cancelled by the user')\n\n\ndef test_zone_change_appliance_zone(request):\n \"\"\" Tests that an appliance can be changed to another Zone \"\"\"\n zone = conf.Zone(\n name=generate_random_string(size=5),\n description=generate_random_string(size=8))\n request.addfinalizer(zone.delete)\n request.addfinalizer(conf.BasicInformation(appliance_zone=\"default\").update)\n zone.create()\n basic_info = conf.BasicInformation(appliance_zone=zone.name)\n basic_info.update()\n assert zone.description == conf.server_zone_description()\n basic_info = conf.BasicInformation(appliance_zone=\"default\")\n basic_info.update()\n"
},
{
"alpha_fraction": 0.5890339612960815,
"alphanum_fraction": 0.6010443568229675,
"avg_line_length": 34.030487060546875,
"blob_id": "0febca4798f8efb74f2b9c5674e880f10ebd1a1d",
"content_id": "bb5390425f78dc7cadc5985397a2c672a61543d6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5745,
"license_type": "no_license",
"max_line_length": 102,
"num_lines": 164,
"path": "/scripts/harden_security.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python2\n\n\"\"\"SSH into a running appliance and configure security.\n\nConfigures security on appliance(s) according to this document:\nhttps://access.redhat.com/articles/1124753\n\nWorks for single appliance and distributed appliance configurations.\nIn distributed configurations, provide the hostname of the replication\nparent first, and then provide the hostnames of any child appliances using\nthe '-c' flag.\n\nExample usage:\n Configure security for a single appliance:\n\n configure_security.py 10.0.0.1\n\n Configure security for distributed appliance set:\n\n # replication parent: 10.0.0.1\n # replication child: 10.0.0.2\n # replication child: 10.0.0.3\n configure_security.py 10.0.0.1 -c 10.0.0.2 -c 10.0.0.3\n\n\"\"\"\n\nimport argparse\nimport re\nimport socket\nimport sys\n\nfrom utils.conf import credentials\nfrom utils.randomness import generate_random_string\nfrom utils.ssh import SSHClient\nfrom utils.wait import wait_for\n\n\ndef main():\n parser = argparse.ArgumentParser(epilog=__doc__,\n formatter_class=argparse.RawDescriptionHelpFormatter)\n parser.add_argument('appliance',\n help='hostname or ip address of parent appliance')\n parser.add_argument('-c', action='append', dest='children',\n help='hostname or ip address of child appliance')\n args = parser.parse_args()\n print \"Appliance: \" + args.appliance\n if args.children:\n for child in args.children:\n print \"Child: \" + child\n\n local_key_name = \"v2_key_\" + generate_random_string()\n\n ssh_creds = {\n 'username': credentials['ssh']['username'],\n 'password': credentials['ssh']['password'],\n }\n\n def is_ssh_running(address):\n s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n result = s.connect_ex((address, 22))\n return result == 0\n\n def generate_key(address):\n with SSHClient(hostname=address, **ssh_creds) as client:\n print 'Connecting to Appliance...'\n status, out = client.run_command(\n 'ruby /var/www/miq/vmdb/tools/fix_auth.rb --key --verbose')\n if status != 0:\n print 'Creating new encryption key failed.'\n print out\n sys.exit(1)\n else:\n print 'New encryption key created.'\n if args.children:\n # Only copy locally if needed for child appliances\n client.get_file('/var/www/miq/vmdb/certs/v2_key',\n local_key_name)\n\n def update_db_yaml(address):\n with SSHClient(hostname=address, **ssh_creds) as client:\n client.run_command('cd /var/www/miq/vmdb')\n status, out = client.run_rails_command(\n '\\'puts MiqPassword.encrypt(\"smartvm\");\\'')\n if status != 0:\n print 'Retrieving encrypted db password failed on %s' % address\n sys.exit(1)\n else:\n encrypted_pass = out\n status, out = client.run_command(\n ('cd /var/www/miq/vmdb; '\n 'sed -i.`date +%m-%d-%Y` \"s/password:'\n ' .*/password: {}/g\" config/database.yml'.format(re.escape(encrypted_pass))))\n if status != 0:\n print 'Updating database.yml failed on %s' % address\n print out\n sys.exit(1)\n else:\n print 'Updating database.yml succeeded on %s' % address\n\n def update_password(address):\n with SSHClient(hostname=address, **ssh_creds) as client:\n status, out = client.run_command(\n 'ruby /var/www/miq/vmdb/tools/fix_auth.rb --hostname localhost --password smartvm')\n if status != 0:\n print 'Updating DB password failed on %s' % address\n print out\n sys.exit(1)\n else:\n print 'DB password updated on %s' % address\n\n def put_key(address):\n print 'copying key to %s' % address\n with SSHClient(hostname=address, **ssh_creds) as client:\n client.put_file(local_key_name, '/var/www/miq/vmdb/certs/v2_key')\n\n def 
restart_appliance(address):\n print 'Restarting evmserverd on %s' % address\n with SSHClient(hostname=address, **ssh_creds) as client:\n status, out = client.run_command('service evmserverd restart')\n if status != 0:\n print \"Restarting evmserverd failed on %s\" % address\n sys.exit(1)\n else:\n print \"Restarting succeeded on %s\" % address\n\n # make sure ssh is ready on each appliance\n wait_for(func=is_ssh_running, func_args=[args.appliance], delay=10, num_sec=600)\n\n # generate key on master appliance\n generate_key(args.appliance)\n update_db_yaml(args.appliance)\n\n # copy to other appliances\n if args.children:\n for child in args.children:\n wait_for(func=is_ssh_running, func_args=[child], delay=10, num_sec=600)\n put_key(child)\n update_db_yaml(child)\n\n # restart master appliance (and children, if provided)\n restart_appliance(args.appliance)\n if args.children:\n for child in args.children:\n restart_appliance(child)\n print \"Appliance(s) restarted with new key in place.\"\n\n # update encrypted passwords in each database-owning appliance.\n\n update_password(args.appliance)\n if args.children:\n for child in args.children:\n update_password(child)\n\n # Restart again!\n restart_appliance(args.appliance)\n if args.children:\n for child in args.children:\n restart_appliance(child)\n\n print \"Done!\"\n\n\nif __name__ == '__main__':\n sys.exit(main())\n"
},
{
"alpha_fraction": 0.6639854907989502,
"alphanum_fraction": 0.6650233268737793,
"avg_line_length": 35.358489990234375,
"blob_id": "c1f84706d32f0a436c14a12e4b263d55d0291de9",
"content_id": "1d529b4fcfc3d90fd80ad50432cf1ae703ca9d93",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3854,
"license_type": "no_license",
"max_line_length": 100,
"num_lines": 106,
"path": "/cfme/fixtures/smtp.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n\"\"\"This module provides a fixture useful for checking the e-mails arrived.\n\nMain use is of fixture :py:meth:`smtp_test`, which is function scoped. There is also\na :py:meth:`smtp_test_module` fixture for which the smtp_test is just a function-scoped wrapper\nto speed things up. The base of all this is the session-scoped _smtp_test_session that keeps care\nabout the collector.\n\"\"\"\nimport pytest\nimport signal\nimport subprocess\nimport time\n\nfrom cfme.configure import configuration\nfrom fixtures.artifactor_plugin import art_client, get_test_idents\nfrom utils.conf import env\nfrom utils.log import create_logger\nfrom utils.net import random_port, my_ip_address, net_check_remote\nfrom utils.path import scripts_path\nfrom utils.smtp_collector_client import SMTPCollectorClient\n\n\nlogger = create_logger('emails')\n\n\[email protected](scope=\"function\")\ndef smtp_test(request):\n \"\"\"Fixture, which prepares the appliance for e-mail capturing tests\n\n Returns: :py:class:`util.smtp_collector_client.SMTPCollectorClient` instance.\n \"\"\"\n logger.info(\"Preparing start for e-mail collector\")\n ports = env.get(\"mail_collector\", {}).get(\"ports\", {})\n mail_server_port = ports.get(\"smtp\", None) or random_port()\n mail_query_port = ports.get(\"json\", None) or random_port()\n my_ip = my_ip_address()\n logger.info(\"Mind that it needs ports {} and {} open\".format(mail_query_port, mail_server_port))\n smtp_conf = configuration.SMTPSettings(\n host=my_ip,\n port=mail_server_port,\n auth=\"none\",\n )\n smtp_conf.update()\n server_filename = scripts_path.join('smtp_collector.py').strpath\n server_command = server_filename + \" --smtp-port {} --query-port {}\".format(\n mail_server_port,\n mail_query_port\n )\n logger.info(\"Starting mail collector {}\".format(server_command))\n collector = None\n\n def _finalize():\n if collector is None:\n return\n logger.info(\"Sending KeyboardInterrupt to collector\")\n collector.send_signal(signal.SIGINT)\n time.sleep(2)\n if collector.poll() is None:\n logger.info(\"Sending SIGTERM to collector\")\n collector.send_signal(signal.SIGTERM)\n time.sleep(5)\n if collector.poll() is None:\n logger.info(\"Sending SIGKILL to collector\")\n collector.send_signal(signal.SIGKILL)\n collector.wait()\n logger.info(\"Collector finished\")\n collector = subprocess.Popen(server_command, shell=True)\n request.addfinalizer(_finalize)\n logger.info(\"Collector pid {}\".format(collector.pid))\n logger.info(\"Waiting for collector to become alive.\")\n time.sleep(3)\n assert collector.poll() is None, \"Collector has died. 
Something must be blocking selected ports\"\n logger.info(\"Collector alive\")\n query_port_open = net_check_remote(mail_query_port, my_ip, force=True)\n server_port_open = net_check_remote(mail_server_port, my_ip, force=True)\n assert query_port_open and server_port_open,\\\n 'Ports {} and {} on the machine executing the tests are closed.\\n'\\\n 'The ports are randomly chosen -> turn firewall off.'\\\n .format(mail_query_port, mail_server_port)\n client = SMTPCollectorClient(\n my_ip,\n mail_query_port\n )\n client.set_test_name(request.node.name)\n client.clear_database()\n return client\n\n\[email protected]\ndef pytest_runtest_call(item):\n try:\n yield\n finally:\n if \"smtp_test\" not in item.funcargs:\n return\n\n name, location = get_test_idents(item)\n\n art_client.fire_hook(\n \"filedump\",\n test_name=name,\n test_location=location,\n filename=\"emails.html\",\n contents=item.funcargs[\"smtp_test\"].get_html_report(),\n fd_ident=\"emails\"\n )\n"
},
{
"alpha_fraction": 0.6674124002456665,
"alphanum_fraction": 0.673378050327301,
"avg_line_length": 23.83333396911621,
"blob_id": "d99c1851cb6d54d42be7da145e3a45f88adf6d19",
"content_id": "ea5daba286b7869807d570f4a64022bc5eaf0563",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1341,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 54,
"path": "/cfme/tests/automate/test_method.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "import pytest\nfrom cfme.automate.explorer import Namespace, Class, Method\nfrom utils.randomness import generate_random_string\nfrom utils.update import update\nimport utils.error as error\n\n\npytestmark = [pytest.mark.usefixtures(\"logged_in\")]\n\n\ndef _make_namespace():\n name = generate_random_string(8)\n description = generate_random_string(32)\n ns = Namespace(name=name, description=description)\n ns.create()\n return ns\n\n\ndef _make_class():\n name = generate_random_string(8)\n description = generate_random_string(32)\n cls = Class(name=name, description=description, namespace=_make_namespace())\n cls.create()\n return cls\n\n\[email protected](scope='module')\ndef a_class():\n return _make_class()\n\n\[email protected]\ndef a_method(a_class):\n return Method(name=generate_random_string(8),\n data=\"foo.bar()\",\n cls=a_class)\n\n\ndef test_method_crud(a_method):\n a_method.create()\n origname = a_method.name\n with update(a_method):\n a_method.name = generate_random_string(8)\n a_method.data = \"bar\"\n with update(a_method):\n a_method.name = origname\n a_method.delete()\n assert not a_method.exists()\n\n\ndef test_duplicate_method_disallowed(a_method):\n a_method.create()\n with error.expected(\"Name has already been taken\"):\n a_method.create()\n"
},
{
"alpha_fraction": 0.6312780976295471,
"alphanum_fraction": 0.6392405033111572,
"avg_line_length": 28.329341888427734,
"blob_id": "6249ff46ce47b645d05a77175f86fd8d58d38e29",
"content_id": "6ded105bf58bcf0a00a7a79920dbcabfcc630720",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4898,
"license_type": "no_license",
"max_line_length": 98,
"num_lines": 167,
"path": "/cfme/tests/intelligence/reports/test_crud.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\nimport pytest\nimport yaml\n\nfrom cfme.fixtures import pytest_selenium as sel\nfrom cfme.intelligence.reports.dashboards import Dashboard\nfrom cfme.intelligence.reports.reports import CustomReport\nfrom cfme.intelligence.reports.schedules import Schedule\nfrom cfme.intelligence.reports.widgets import ChartWidget, MenuWidget, ReportWidget, RSSFeedWidget\nfrom utils.path import data_path\nfrom utils.randomness import generate_random_string\nfrom utils.update import update\n\n\nreport_crud_dir = data_path.join(\"reports_crud\")\nschedules_crud_dir = data_path.join(\"schedules_crud\")\n\n\ndef crud_files_reports():\n result = []\n if not report_crud_dir.exists():\n report_crud_dir.mkdir()\n for file in report_crud_dir.listdir():\n if file.isfile() and file.basename.endswith(\".yaml\"):\n result.append(file.basename)\n return result\n\n\ndef crud_files_schedules():\n result = []\n if not schedules_crud_dir.exists():\n schedules_crud_dir.mkdir()\n for file in schedules_crud_dir.listdir():\n if file.isfile() and file.basename.endswith(\".yaml\"):\n result.append(file.basename)\n return result\n\n\[email protected](params=crud_files_reports())\ndef custom_report(request):\n with report_crud_dir.join(request.param).open(mode=\"r\") as rep_yaml:\n return CustomReport(**yaml.load(rep_yaml))\n\n\[email protected](params=crud_files_schedules())\ndef schedule(request):\n with schedules_crud_dir.join(request.param).open(mode=\"r\") as rep_yaml:\n data = yaml.load(rep_yaml)\n name = data.pop(\"name\")\n description = data.pop(\"description\")\n filter = data.pop(\"filter\")\n return Schedule(name, description, filter, **data)\n\n\ndef test_custom_report_crud(custom_report):\n custom_report.create()\n with update(custom_report):\n custom_report.title += generate_random_string()\n custom_report.queue(wait_for_finish=True)\n for report in custom_report.get_saved_reports():\n report.data # touch the results\n custom_report.delete()\n\n\[email protected](blockers=[1202412])\ndef test_schedule_crud(schedule):\n schedule.create()\n with update(schedule):\n schedule.description = \"badger badger badger\"\n schedule.queue(wait_for_finish=True)\n schedule.delete()\n\n\[email protected](blockers=[1209945])\ndef test_menuwidget_crud():\n w = MenuWidget(\n generate_random_string(),\n description=generate_random_string(),\n active=True,\n shortcuts={\n \"Services / Catalogs\": generate_random_string(),\n \"Clouds / Providers\": generate_random_string(),\n },\n visibility=[\"<By Role>\", sel.ByText(\"EvmRole-administrator\")]\n )\n w.create()\n with update(w):\n w.active = False\n w.delete()\n\n\[email protected](blockers=[1209945])\ndef test_reportwidget_crud():\n w = ReportWidget(\n generate_random_string(),\n description=generate_random_string(),\n active=True,\n filter=[\"Events\", \"Operations\", \"Operations VMs Powered On/Off for Last Week\"],\n columns=[\"VM Name\", \"Message\"],\n rows=\"10\",\n timer={\"run\": \"Hourly\", \"hours\": \"Hour\"},\n visibility=[\"<By Role>\", sel.ByText(\"EvmRole-administrator\")]\n )\n w.create()\n with update(w):\n w.active = False\n w.delete()\n\n\[email protected](blockers=[1209945])\ndef test_chartwidget_crud():\n w = ChartWidget(\n generate_random_string(),\n description=generate_random_string(),\n active=True,\n filter=\"Configuration Management/Virtual Machines/Vendor and Guest OS\",\n timer={\"run\": \"Hourly\", \"hours\": \"Hour\"},\n visibility=[\"<By Role>\", sel.ByText(\"EvmRole-administrator\")]\n )\n w.create()\n with update(w):\n 
w.active = False\n w.delete()\n\n\[email protected](blockers=[1209945])\ndef test_rssfeedwidget_crud():\n w = RSSFeedWidget(\n generate_random_string(),\n description=generate_random_string(),\n active=True,\n type=\"Internal\",\n feed=\"Administrative Events\",\n rows=\"8\",\n visibility=[\"<By Role>\", sel.ByText(\"EvmRole-administrator\")]\n )\n w.create()\n # Basic update\n with update(w):\n w.active = False\n # Different feed type\n with update(w):\n w.type = \"External\"\n w.external = \"SlashDot\"\n # and custom address\n with update(w):\n w.type = \"External\"\n w.external = \"http://rss.example.com/\"\n w.delete()\n\n\ndef test_dashboard_crud():\n d = Dashboard(\n generate_random_string(),\n \"EvmGroup-administrator\",\n generate_random_string(),\n locked=False,\n widgets=[\"Top CPU Consumers (weekly)\", \"Vendor and Guest OS Chart\"]\n )\n d.create()\n with update(d):\n d.locked = True\n with update(d):\n d.locked = False\n with update(d):\n d.widgets = \"Top Storage Consumers\"\n d.delete()\n"
},
{
"alpha_fraction": 0.6427379250526428,
"alphanum_fraction": 0.6577629446983337,
"avg_line_length": 32.27777862548828,
"blob_id": "61753eeebac5852d87e9bb6592f21c2ec60ee871",
"content_id": "f63b0f3f16dc2470893f4f3f8513068cf9d70a5d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 599,
"license_type": "no_license",
"max_line_length": 97,
"num_lines": 18,
"path": "/cfme/tests/configure/test_about_links.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n\nfrom cfme.fixtures import pytest_selenium as sel\nfrom cfme.configure import about\nimport pytest\nimport requests\n\n\ndef test_about_links():\n sel.force_navigate('about')\n for link_key, link_loc in about.product_assistance.locators.items():\n href = sel.get_attribute(link_loc, 'href')\n try:\n resp = requests.head(href, verify=False, timeout=20)\n except (requests.Timeout, requests.ConnectionError) as ex:\n pytest.fail(ex.message)\n\n assert 200 <= resp.status_code < 400, \"Unable to access '{}' ({})\".format(link_key, href)\n"
},
{
"alpha_fraction": 0.7083595395088196,
"alphanum_fraction": 0.7133877873420715,
"avg_line_length": 33.58695602416992,
"blob_id": "8c85ff436ca856e1d3a4434f284436d0c6cf74f1",
"content_id": "c12f2b0909fa5a18a2c298b1981bed09bee1bfa7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1591,
"license_type": "no_license",
"max_line_length": 83,
"num_lines": 46,
"path": "/cfme/tests/automate/test_provisioning_dialogs.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\nimport pytest\n\nfrom cfme.automate import provisioning_dialogs\nfrom utils.randomness import generate_random_string\nfrom utils.update import update\n\n\[email protected]_fixture(scope=\"function\")\ndef dialog():\n dlg = provisioning_dialogs.ProvisioningDialog(\n provisioning_dialogs.ProvisioningDialog.VM_PROVISION,\n name=generate_random_string(),\n description=generate_random_string()\n )\n yield dlg\n if dlg.exists:\n dlg.delete()\n\n\ndef test_provisioning_dialog_crud(dialog):\n dialog.create()\n assert dialog.exists\n with update(dialog):\n dialog.name = generate_random_string()\n dialog.description = generate_random_string()\n assert dialog.exists\n dialog.change_type(provisioning_dialogs.ProvisioningDialog.HOST_PROVISION)\n assert dialog.exists\n dialog.delete()\n assert not dialog.exists\n\nsort_by_params = []\nfor nav_loc, name in provisioning_dialogs.ProvisioningDialog.ALLOWED_TYPES:\n sort_by_params.append((nav_loc, \"Name\", \"ascending\"))\n sort_by_params.append((nav_loc, \"Name\", \"descending\"))\n sort_by_params.append((nav_loc, \"Description\", \"ascending\"))\n sort_by_params.append((nav_loc, \"Description\", \"descending\"))\n\n\[email protected](blockers=[1096388])\[email protected]((\"nav_loc\", \"by\", \"order\"), sort_by_params)\ndef test_provisioning_dialogs_sorting(nav_loc, by, order):\n pytest.sel.force_navigate(\"{}_dialogs\".format(nav_loc))\n provisioning_dialogs.dialog_table.sort_by(by, order)\n # When we can get the same comparing function as the PGSQL DB has, we can check\n"
},
{
"alpha_fraction": 0.6082327961921692,
"alphanum_fraction": 0.612727701663971,
"avg_line_length": 32.5476188659668,
"blob_id": "7530de7c42f014923d98f3599437353a2c0de675",
"content_id": "abf17619a0d0fbfb2285253d006d3a32c462c779",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4227,
"license_type": "no_license",
"max_line_length": 100,
"num_lines": 126,
"path": "/utils/randomness.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\nimport fauxfactory\nimport random\nimport sys\n\n\ndef generate_random_int(max=sys.maxint):\n max = int(max)\n return fauxfactory.gen_integer(0, max)\n\n\ndef generate_random_local_ip():\n return \"10.{}.{}.{}\".format(\n generate_random_int(255), generate_random_int(255), generate_random_int(255))\n\n\ndef generate_random_string(size=8):\n size = int(size)\n\n return fauxfactory.gen_string(\"alphanumeric\", size)\n\n\ndef generate_lowercase_random_string(size=8):\n return generate_random_string(size).lower()\n\n\ndef generate_random_uuid_as_str():\n return fauxfactory.gen_uuid()\n\n\ndef pick(from_where, n, quiet=True):\n \"\"\"Picks `n` elements randomly from source iterable.\n\n Will be converted during processing so no side effects\n\n Args:\n from_where: Source iterable.\n n: How many elements to pick\n quiet: Whether raise the exception about n bigger than len(from_where) or not. Default True.\n Returns: n-length list with randomly picked elements from `from_where`\n \"\"\"\n if len(from_where) < n:\n # We want more\n if not quiet:\n raise ValueError(\"Less elements in from_where than you want!\")\n else:\n return list(from_where)\n elif len(from_where) == n:\n # We want all\n return list(from_where)\n # Random picking\n result = []\n from_where = list(from_where) # to prevent side effects\n while len(result) < n:\n index = random.choice(range(len(from_where)))\n result.append(from_where.pop(index))\n return result\n\n\nclass RandomizeValues(object):\n _randomizers = {\n 'random_int': generate_random_int,\n 'random_str': generate_random_string,\n 'random_uuid': generate_random_uuid_as_str,\n }\n\n @classmethod\n def from_dict(cls, d):\n \"\"\"Load a dictionary with randomizable values and randomize them\n\n Targeted at dicts produced from loading YAML, so it doesn't try to\n handle more than basic types (str, tuple, list, set, dict)\n\n Allowable dict values to randomize (remember to quote these in YAML):\n\n - {random_int}: Becomes an int between 0 and maxint, inclusive\n - {random_int:max}: Becomes an int between 0 and \"max\",\n inclusive\n - {random_str}: Becomes a string of numbers and letters,\n length 8\n - {random_str:length}: Becomes a string of numbers and\n letters, length \"length\"\n - {random_uuid}: Becomes a completely random uuid\n\n Returns a modified dict with randomize values\n\n \"\"\"\n return {k: cls._randomize_item(v) for k, v in d.items()}\n\n @classmethod\n def _randomize_item(cls, item):\n # Go through the most common types deserialized from yaml\n # pass them back through RandomizeValues as needed until\n # there are concrete things to randomize\n if isinstance(item, dict):\n return cls.from_dict(item)\n elif isinstance(item, tuple):\n return tuple(cls._randomize_item(x) for x in item)\n elif isinstance(item, list):\n return [cls._randomize_item(x) for x in item]\n elif isinstance(item, set):\n return set([cls._randomize_item(x) for x in item])\n elif isinstance(item, basestring) and item.startswith('{random_'):\n # Concreteness! 
Try to parse out the randomness case and\n # possible argument to the randomizer\n # '{key:arg}' should become 'key' and 'arg'; if no arg, arg is None\n try:\n key, arg = item.strip('{}').split(':', 1)\n except ValueError:\n key, arg = item.strip('{}'), None\n else:\n # No idea what this is, return it\n return item\n\n if key in cls._randomizers:\n # If the case actually exists, call its randomizer\n randomizer = cls._randomizers[key]\n if arg:\n random_value = randomizer(arg)\n else:\n random_value = randomizer()\n return str(random_value)\n else:\n # randomizer was tripped, but no matching randomizers found\n # in _randomizers, just return what was there\n return item\n"
},
{
"alpha_fraction": 0.6042719483375549,
"alphanum_fraction": 0.6082667708396912,
"avg_line_length": 37.09316635131836,
"blob_id": "e4d416b19e2fff6dd2237df8d8bf330fd5aa1449",
"content_id": "d945c67b3514e85ec935aa4888cc830132c112ac",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 12266,
"license_type": "no_license",
"max_line_length": 99,
"num_lines": 322,
"path": "/cfme/tests/cloud/test_provisioning.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "# These tests don't work at the moment, due to the security_groups multi select not working\n# in selenium (the group is selected then immediately reset)\nimport pytest\n\nfrom textwrap import dedent\n\nfrom cfme.automate import explorer as automate\nfrom cfme.cloud.instance import instance_factory\nfrom cfme.cloud.provider import OpenStackProvider\nfrom cfme.fixtures import pytest_selenium as sel\nfrom utils import testgen\nfrom utils.randomness import generate_random_string\nfrom utils.update import update\nfrom utils.wait import wait_for\n\npytestmark = [pytest.mark.meta(server_roles=\"+automate\")]\n\n\ndef pytest_generate_tests(metafunc):\n # Filter out providers without templates defined\n argnames, argvalues, idlist = testgen.cloud_providers(metafunc, 'provisioning')\n\n new_argvalues = []\n new_idlist = []\n for i, argvalue_tuple in enumerate(argvalues):\n args = dict(zip(argnames, argvalue_tuple))\n if not args['provisioning']:\n # Don't know what type of instance to provision, move on\n continue\n\n # required keys should be a subset of the dict keys set\n if not {'image'}.issubset(args['provisioning'].viewkeys()):\n # Need image for image -> instance provisioning\n continue\n\n if metafunc.function in {\n test_provision_from_template_with_attached_disks, test_provision_with_boot_volume,\n test_provision_with_additional_volume} \\\n and args['provider_type'] != 'openstack':\n continue\n\n new_idlist.append(idlist[i])\n new_argvalues.append([args[argname] for argname in argnames])\n\n testgen.parametrize(metafunc, argnames, new_argvalues, ids=new_idlist, scope=\"module\")\n\n\[email protected](scope=\"function\")\ndef vm_name(request, provider_mgmt):\n vm_name = 'test_image_prov_%s' % generate_random_string()\n return vm_name\n\n\ndef test_provision_from_template(request, setup_provider, provider_crud, provisioning, vm_name):\n \"\"\" Tests instance provision from template\n\n Metadata:\n test_flag: provision\n \"\"\"\n image = provisioning['image']['name']\n note = ('Testing provisioning from image %s to vm %s on provider %s' %\n (image, vm_name, provider_crud.key))\n\n instance = instance_factory(vm_name, provider_crud, image)\n\n request.addfinalizer(instance.delete_from_provider)\n\n inst_args = {\n 'email': '[email protected]',\n 'first_name': 'Image',\n 'last_name': 'Provisioner',\n 'notes': note,\n 'instance_type': provisioning['instance_type'],\n 'availability_zone': provisioning['availability_zone'],\n 'security_groups': [provisioning['security_group']],\n 'guest_keypair': provisioning['guest_keypair']\n }\n\n if isinstance(provider_crud, OpenStackProvider):\n inst_args['cloud_network'] = provisioning['cloud_network']\n\n sel.force_navigate(\"clouds_instances_by_provider\")\n instance.create(**inst_args)\n\n\nVOLUME_METHOD = (\"\"\"\nprov = $evm.root[\"miq_provision\"]\nprov.set_option(\n :clone_options,\n {:block_device_mapping => [%s]})\n\"\"\")\n\nONE_FIELD = \"\"\"{:volume_id => \"%s\", :device_name => \"%s\"}\"\"\"\n\n\[email protected](scope=\"module\")\ndef default_domain_enabled():\n dom = automate.Domain.default\n if dom is not None:\n if not dom.is_enabled:\n with update(dom):\n dom.enabled = True\n\n\n# Not collected for EC2 in generate_tests above\[email protected](blockers=[1152737])\[email protected](\"disks\", [1, 2])\ndef test_provision_from_template_with_attached_disks(\n request, setup_provider, provider_crud, provisioning, vm_name, provider_mgmt, disks,\n soft_assert, provider_type, default_domain_enabled):\n \"\"\" Tests provisioning from a 
template and attaching disks\n\n Metadata:\n test_flag: provision\n \"\"\"\n\n image = provisioning['image']['name']\n note = ('Testing provisioning from image %s to vm %s on provider %s' %\n (image, vm_name, provider_crud.key))\n\n DEVICE_NAME = \"/dev/sd{}\"\n device_mapping = []\n\n with provider_mgmt.with_volumes(1, n=disks) as volumes:\n for i, volume in enumerate(volumes):\n device_mapping.append((volume, DEVICE_NAME.format(chr(ord(\"b\") + i))))\n # Set up automate\n cls = automate.Class(\n name=\"Methods\",\n namespace=automate.Namespace.make_path(\"Cloud\", \"VM\", \"Provisioning\", \"StateMachines\"))\n method = automate.Method(\n name=\"openstack_PreProvision\",\n cls=cls)\n with update(method):\n disk_mapping = []\n for mapping in device_mapping:\n disk_mapping.append(ONE_FIELD % mapping)\n method.data = VOLUME_METHOD % \", \".join(disk_mapping)\n\n def _finish_method():\n with update(method):\n method.data = \"\"\"prov = $evm.root[\"miq_provision\"]\"\"\"\n request.addfinalizer(_finish_method)\n instance = instance_factory(vm_name, provider_crud, image)\n request.addfinalizer(instance.delete_from_provider)\n inst_args = {\n 'email': '[email protected]',\n 'first_name': 'Image',\n 'last_name': 'Provisioner',\n 'notes': note,\n 'instance_type': provisioning['instance_type'],\n 'availability_zone': provisioning['availability_zone'],\n 'security_groups': [provisioning['security_group']],\n 'guest_keypair': provisioning['guest_keypair']\n }\n\n if isinstance(provider_crud, OpenStackProvider):\n inst_args['cloud_network'] = provisioning['cloud_network']\n\n sel.force_navigate(\"clouds_instances_by_provider\")\n instance.create(**inst_args)\n\n for volume_id in volumes:\n soft_assert(vm_name in provider_mgmt.volume_attachments(volume_id))\n for volume, device in device_mapping:\n soft_assert(provider_mgmt.volume_attachments(volume)[vm_name] == device)\n instance.delete_from_provider() # To make it possible to delete the volume\n wait_for(lambda: not instance.does_vm_exist_on_provider(), num_sec=180, delay=5)\n\n\n# Not collected for EC2 in generate_tests above\[email protected](blockers=[1160342])\ndef test_provision_with_boot_volume(\n request, setup_provider, provider_crud, provisioning, vm_name, provider_mgmt, soft_assert,\n provider_type, default_domain_enabled):\n \"\"\" Tests provisioning from a template and attaching one booting volume.\n\n Metadata:\n test_flag: provision, volumes\n \"\"\"\n\n image = provisioning['image']['name']\n note = ('Testing provisioning from image %s to vm %s on provider %s' %\n (image, vm_name, provider_crud.key))\n\n with provider_mgmt.with_volume(1) as volume:\n # Set up automate\n cls = automate.Class(\n name=\"Methods\",\n namespace=automate.Namespace.make_path(\"Cloud\", \"VM\", \"Provisioning\", \"StateMachines\"))\n method = automate.Method(\n name=\"openstack_CustomizeRequest\",\n cls=cls)\n with update(method):\n method.data = dedent('''\\\n $evm.root[\"miq_provision\"].set_option(\n :clone_options, {\n :image_ref => nil,\n :block_device_mapping_v2 => [{\n :boot_index => 0,\n :uuid => \"%s\",\n :device_name => \"vda\",\n :source_type => \"volume\",\n :destination_type => \"volume\",\n :delete_on_termination => false\n }]\n }\n )\n ''' % (volume, ))\n\n def _finish_method():\n with update(method):\n method.data = \"\"\"prov = $evm.root[\"miq_provision\"]\"\"\"\n request.addfinalizer(_finish_method)\n instance = instance_factory(vm_name, provider_crud, image)\n request.addfinalizer(instance.delete_from_provider)\n inst_args = {\n 'email': 
'[email protected]',\n 'first_name': 'Image',\n 'last_name': 'Provisioner',\n 'notes': note,\n 'instance_type': provisioning['instance_type'],\n 'availability_zone': provisioning['availability_zone'],\n 'security_groups': [provisioning['security_group']],\n 'guest_keypair': provisioning['guest_keypair']\n }\n\n if isinstance(provider_crud, OpenStackProvider):\n inst_args['cloud_network'] = provisioning['cloud_network']\n\n sel.force_navigate(\"clouds_instances_by_provider\")\n instance.create(**inst_args)\n\n soft_assert(vm_name in provider_mgmt.volume_attachments(volume))\n soft_assert(provider_mgmt.volume_attachments(volume)[vm_name] == \"vda\")\n instance.delete_from_provider() # To make it possible to delete the volume\n wait_for(lambda: not instance.does_vm_exist_on_provider(), num_sec=180, delay=5)\n\n\n# Not collected for EC2 in generate_tests above\[email protected](blockers=[1186413])\ndef test_provision_with_additional_volume(\n request, setup_provider, provider_crud, provisioning, vm_name, provider_mgmt, soft_assert,\n provider_type, default_domain_enabled, provider_data):\n \"\"\" Tests provisioning with setting specific image from AE and then also making it create and\n attach an additional 3G volume.\n\n Metadata:\n test_flag: provision, volumes\n \"\"\"\n\n image = provisioning['image']['name']\n note = ('Testing provisioning from image %s to vm %s on provider %s' %\n (image, vm_name, provider_crud.key))\n\n # Set up automate\n cls = automate.Class(\n name=\"Methods\",\n namespace=automate.Namespace.make_path(\"Cloud\", \"VM\", \"Provisioning\", \"StateMachines\"))\n method = automate.Method(\n name=\"openstack_CustomizeRequest\",\n cls=cls)\n try:\n image_id = provider_mgmt.get_template_id(provider_data[\"small_template\"])\n except KeyError:\n pytest.skip(\"No small_template in provider adta!\")\n with update(method):\n method.data = dedent('''\\\n $evm.root[\"miq_provision\"].set_option(\n :clone_options, {\n :image_ref => nil,\n :block_device_mapping_v2 => [{\n :boot_index => 0,\n :uuid => \"%s\",\n :device_name => \"vda\",\n :source_type => \"image\",\n :destination_type => \"volume\",\n :volume_size => 3,\n :delete_on_termination => false\n }]\n }\n )\n ''' % (image_id, ))\n\n def _finish_method():\n with update(method):\n method.data = \"\"\"prov = $evm.root[\"miq_provision\"]\"\"\"\n request.addfinalizer(_finish_method)\n instance = instance_factory(vm_name, provider_crud, image)\n request.addfinalizer(instance.delete_from_provider)\n inst_args = {\n 'email': '[email protected]',\n 'first_name': 'Image',\n 'last_name': 'Provisioner',\n 'notes': note,\n 'instance_type': provisioning['instance_type'],\n 'availability_zone': provisioning['availability_zone'],\n 'security_groups': [provisioning['security_group']],\n 'guest_keypair': provisioning['guest_keypair']\n }\n\n if isinstance(provider_crud, OpenStackProvider):\n inst_args['cloud_network'] = provisioning['cloud_network']\n\n sel.force_navigate(\"clouds_instances_by_provider\")\n instance.create(**inst_args)\n\n prov_instance = provider_mgmt._find_instance_by_name(vm_name)\n try:\n assert hasattr(prov_instance, 'os-extended-volumes:volumes_attached')\n volumes_attached = getattr(prov_instance, 'os-extended-volumes:volumes_attached')\n assert len(volumes_attached) == 1\n volume_id = volumes_attached[0][\"id\"]\n assert provider_mgmt.volume_exists(volume_id)\n volume = provider_mgmt.get_volume(volume_id)\n assert volume.size == 3\n finally:\n instance.delete_from_provider()\n wait_for(lambda: not 
instance.does_vm_exist_on_provider(), num_sec=180, delay=5)\n        if \"volume_id\" in locals():\n            if provider_mgmt.volume_exists(volume_id):\n                provider_mgmt.delete_volume(volume_id)\n"
},
{
"alpha_fraction": 0.6857010126113892,
"alphanum_fraction": 0.6908078193664551,
"avg_line_length": 29.77142906188965,
"blob_id": "16f228d20cd738f8e7c68492e8d3be372596b179",
"content_id": "f9db00719c3b2efce9c78800926af06c60e8ee0e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2154,
"license_type": "no_license",
"max_line_length": 86,
"num_lines": 70,
"path": "/cfme/tests/infrastructure/test_vm_discovery.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "import pytest\nimport time\nfrom cfme.exceptions import CFMEException\nfrom cfme.infrastructure import virtual_machines\nfrom cfme.infrastructure.provider import SCVMMProvider\nfrom utils import testgen\nfrom utils.randomness import generate_random_string\nfrom utils.wait import TimedOutError\n\n\ndef pytest_generate_tests(metafunc):\n # Filter out providers without provisioning data or hosts defined\n argnames, argvalues, idlist = testgen.infra_providers(metafunc)\n testgen.parametrize(metafunc, argnames, argvalues, ids=idlist, scope=\"module\")\n\n\[email protected](scope=\"class\")\ndef vm_name():\n return \"test_dscvry_\" + generate_random_string()\n\n\ndef if_scvmm_refresh_provider(provider):\n # No eventing from SCVMM so force a relationship refresh\n if isinstance(provider, SCVMMProvider):\n provider.refresh_provider_relationships()\n\n\ndef wait_for_vm_state_changes(vm, timeout=600):\n\n count = 0\n while count < timeout:\n try:\n quadicon = vm.find_quadicon(refresh=True)\n if quadicon.state is 'Archived':\n return True\n elif quadicon.state is 'Orphaned':\n raise CFMEException(\"VM should be Archived but it is Orphaned now.\")\n except:\n pass\n time.sleep(15)\n count += 15\n if count > timeout:\n raise CFMEException(\"VM should be Archived but it is Orphaned now.\")\n\n\ndef test_vm_discovery(request, setup_provider, provider_crud, provider_mgmt, vm_name):\n \"\"\"\n Tests whether cfme will discover a vm change\n (add/delete) without being manually refreshed.\n\n Metadata:\n test_flag: discovery\n \"\"\"\n vm = virtual_machines.Vm(vm_name, provider_crud)\n\n def _cleanup():\n vm.delete_from_provider()\n if_scvmm_refresh_provider(provider_crud)\n\n request.addfinalizer(_cleanup)\n vm.create_on_provider()\n if_scvmm_refresh_provider(provider_crud)\n\n try:\n vm.wait_to_appear(timeout=600, load_details=False)\n except TimedOutError:\n pytest.fail(\"VM was not found in CFME\")\n vm.delete_from_provider()\n if_scvmm_refresh_provider(provider_crud)\n wait_for_vm_state_changes(vm)\n"
},
{
"alpha_fraction": 0.6306027770042419,
"alphanum_fraction": 0.6311179995536804,
"avg_line_length": 31.082645416259766,
"blob_id": "ed1bda70c5aff38ad647efb95a0973c16b3f4dcb",
"content_id": "e283f78824f52d1133f19c08a61a071d98d0b20f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3882,
"license_type": "no_license",
"max_line_length": 97,
"num_lines": 121,
"path": "/cfme/tests/services/test_dynamicdd_dialogelement.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "import pytest\n\nfrom cfme.services.catalogs.catalog_item import CatalogItem\nfrom cfme.automate.service_dialogs import ServiceDialog\nfrom cfme.services.catalogs.catalog import Catalog\nfrom cfme.services.catalogs.service_catalogs import ServiceCatalogs\nfrom cfme.automate.explorer import Domain, Namespace, Class, Method, Instance\nfrom cfme.web_ui import flash\nfrom utils.randomness import generate_random_string\nimport utils.randomness as rand\n\npytestmark = [\n pytest.mark.usefixtures(\"logged_in\"),\n pytest.mark.long_running,\n pytest.mark.ignore_stream(\"5.2\"),\n pytest.mark.ignore_stream(\"upstream\"),\n pytest.mark.meta(server_roles=\"+automate\")\n]\n\nitem_name = generate_random_string()\n\nMETHOD_TORSO = \"\"\"\n# Method for logging\ndef log(level, message)\n @method = 'Service Dialog Provider Select'\n $evm.log(level, \"#{@method} - #{message}\")\nend\n\n# Start Here\nlog(:info, \" - Listing Root Object Attributes:\") if @debug\n$evm.root.attributes.sort.each { |k, v| $evm.log('info', \"#{@method} - \\t#{k}: #{v}\") if @debug }\nlog(:info, \"===========================================\") if @debug\n\n dialog_field = $evm.object\n dialog_field['data_type'] = 'string'\n dialog_field['required'] = 'true'\n dialog_field['sort_by'] = 'value'\n list = []\n list << ['item_value', 'item_description']\n dialog_field['values'] = list\n\"\"\"\n\n\[email protected]_fixture(scope=\"function\")\ndef dialog(copy_instance, create_method):\n dialog = \"dialog_\" + generate_random_string()\n element_data = dict(\n ele_label=\"ele_\" + rand.generate_random_string(),\n ele_name=rand.generate_random_string(),\n ele_desc=\"my ele desc\",\n choose_type=\"Drop Down Dynamic List\",\n field_entry_point=\"InspectMe\",\n field_show_refresh_button=True\n )\n service_dialog = ServiceDialog(label=dialog, description=\"my dialog\",\n submit=True, cancel=True,\n tab_label=\"tab_\" + rand.generate_random_string(), tab_desc=\"my tab desc\",\n box_label=\"box_\" + rand.generate_random_string(), box_desc=\"my box desc\")\n service_dialog.create(element_data)\n flash.assert_success_message('Dialog \"%s\" was added' % dialog)\n yield dialog\n\n\[email protected]_fixture(scope=\"function\")\ndef catalog():\n cat_name = \"cat_\" + generate_random_string()\n catalog = Catalog(name=cat_name,\n description=\"my catalog\")\n catalog.create()\n yield catalog\n\n\[email protected](scope=\"function\")\ndef copy_domain(request):\n domain = Domain(name=\"new_domain\", enabled=True)\n domain.create()\n request.addfinalizer(lambda: domain.delete() if domain.exists() else None)\n return domain\n\n\[email protected](scope=\"function\")\ndef create_method(request, copy_domain):\n method = Method(\n name=\"InspectMe\",\n data=METHOD_TORSO,\n cls=Class(\n name=\"Request\",\n namespace=Namespace(\n name=\"System\",\n parent=copy_domain\n )\n )\n )\n method.create()\n return method\n\n\[email protected](scope=\"function\")\ndef copy_instance(request, copy_domain):\n miq_domain = Domain(name=\"ManageIQ (Locked)\", enabled=True)\n instance = Instance(\n name=\"InspectMe\",\n cls=Class(\n name=\"Request\",\n namespace=Namespace(\n name=\"System\",\n parent=miq_domain\n )\n )\n )\n instance.copy_to(copy_domain)\n\n\ndef test_dynamicdropdown_dialog(dialog, catalog):\n item_name = generate_random_string()\n catalog_item = CatalogItem(item_type=\"Generic\", name=item_name,\n description=\"my catalog\", display_in=True, catalog=catalog.name,\n dialog=dialog)\n catalog_item.create()\n service_catalogs = 
ServiceCatalogs(\"service_name\")\n service_catalogs.order(catalog_item.catalog, catalog_item)\n"
},
{
"alpha_fraction": 0.7147595286369324,
"alphanum_fraction": 0.7761194109916687,
"avg_line_length": 66.11111450195312,
"blob_id": "657f1aaf45602841b76ab73dcbdfe1f3f5d36ce0",
"content_id": "8046ab9a268f8ded4672895ac93fcd140bd43b87",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Dockerfile",
"length_bytes": 603,
"license_type": "no_license",
"max_line_length": 225,
"num_lines": 9,
"path": "/scripts/dockerbot/pytestbase/Dockerfile",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "FROM fedora:20\nRUN yum install -y gcc postgresql-devel libxml2-devel libxslt-devel zeromq3-devel git nano python-pip python-devel gnupg gnupg2\nRUN git clone https://github.com/RedHatQE/cfme_tests.git \nRUN pip install -U -r /cfme_tests/requirements.txt\nRUN yum -y install https://kojipkgs.fedoraproject.org//packages/git/2.1.0/1.fc21/x86_64/git-2.1.0-1.fc21.x86_64.rpm git.rpm https://kojipkgs.fedoraproject.org//packages/git/2.1.0/1.fc21/noarch/perl-Git-2.1.0-1.fc21.noarch.rpm\nADD setup.sh /setup.sh\nADD post_result.py /post_result.py\nADD get_keys.py /get_keys.py\nADD verify_commit.py /verify_commit.py"
},
{
"alpha_fraction": 0.5787625312805176,
"alphanum_fraction": 0.5816889405250549,
"avg_line_length": 37.83116912841797,
"blob_id": "a8ba095d9f90c45bfc98c3cf5e81fa0e8bd43e73",
"content_id": "6c4c3755e65d0b393893a2ffb94d1a55575baf3a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 11960,
"license_type": "no_license",
"max_line_length": 97,
"num_lines": 308,
"path": "/cfme/web_ui/menu.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "import ui_navigate as nav\n\nfrom cfme.fixtures import pytest_selenium as sel\nfrom selenium.common.exceptions import NoSuchElementException\nfrom selenium.webdriver.common.action_chains import ActionChains\nfrom utils import version, classproperty\nfrom utils.wait import wait_for\n\n\nclass Loc(object):\n\n @classproperty\n def toplevel_tabs_loc(cls):\n return version.pick({version.LOWEST: '//div[@class=\"navbar\"]/ul',\n '5.4': '//nav[contains(@class, \"navbar\")]/div/ul[@id=\"maintab\"]'})\n\n @classproperty\n def toplevel_loc(cls):\n return version.pick({version.LOWEST: ('//div[@class=\"navbar\"]/ul/li'\n '/a[normalize-space(.)=\"{}\"]'),\n '5.4': cls.toplevel_tabs_loc + ('/li/a[normalize-space(.)=\"{}\"'\n 'and (contains(@class, \"visible-lg\"))]')})\n\n @classproperty\n def secondlevel_first_item_loc(cls):\n return version.pick({version.LOWEST: ('//div[@class=\"navbar\"]/ul/li'\n '/a[normalize-space(.)=\"{}\"]/../ul/li[1]/a'),\n '5.4': cls.toplevel_tabs_loc + '/li/a[normalize-space(.)=\"{}\"]/../ul/li[1]/a'})\n\n @classproperty\n def inactive_box_loc(cls):\n return version.pick({version.LOWEST: (\"//ul[@id='maintab']//\"\n \"ul[contains(@class, 'inactive')]\"),\n '5.4': \"//ul[@id='maintab']//ul[contains(@class, 'inactive')]\"})\n\n @classproperty\n def a(cls):\n return version.pick({version.LOWEST: \"./a\",\n '5.4': \"./a[contains(@class, 'visible-lg')]\"})\n\n\ndef any_box_displayed():\n \"\"\"Checks whether any of the not-currently-selected toplevel items is hovered (active).\n\n First part of the condition is for the 5.3+ pop-up, second is for 5.2.\n \"\"\"\n return version.pick({\n version.LOWEST:\n lambda: sel.is_displayed(\"//a[contains(@class, 'maintab_active')]\", _no_deeper=True),\n \"5.3\":\n lambda: any(map(\n lambda e: sel.is_displayed(e, _no_deeper=True),\n sel.elements(Loc.inactive_box_loc))),\n \"5.4\":\n lambda: sel.is_displayed(\n \"//li[contains(@class, 'dropdown') and contains(@class, 'open')]\", _no_deeper=True)\n })()\n\n\ndef get_top_level_element(title):\n \"\"\"Returns the ``li`` element representing the menu item in top-level menu.\"\"\"\n return sel.element((Loc.toplevel_loc + \"/..\").format(title))\n\n\ndef open_top_level(title):\n \"\"\"Opens the section.\"\"\"\n sel.raw_click(sel.element(Loc.a, root=get_top_level_element(title)))\n\n\ndef get_second_level_element(top_level_el, title):\n \"\"\"Returns the ``li`` element representing the menu item in second-level menu.\"\"\"\n return sel.element(\"./ul/li/a[normalize-space(.)='{}']/..\".format(title), root=top_level_el)\n\n\ndef open_second_level(top_level_element, title):\n \"\"\"Click on second-level menu.\"\"\"\n second = get_second_level_element(top_level_element, title)\n sel.raw_click(sel.element(\"./a\", root=second))\n\n\ndef get_current_toplevel_name():\n \"\"\"Returns text of the currently selected top level menu item.\"\"\"\n get_rid_of_the_menu_box()\n return sel.text(\n version.pick({\n \"5.4\": \"//ul[@id='maintab']/li[not(contains(@class, 'drop'))]/a[2]\",\n \"5.3\": \"//ul[@id='maintab']/li[not(contains(@class, 'in'))]/a\",\n version.LOWEST: \"//ul[@id='maintab']/li/ul[not(contains(@style, 'none'))]/../a\"\n })).encode(\"utf-8\").strip()\n\n\ndef get_rid_of_the_menu_box():\n \"\"\"Moves the mouse pointer away from the menu location and waits for the popups to hide.\"\"\"\n ActionChains(sel.browser()).move_to_element(sel.element(\"#tP\")).perform()\n wait_for(lambda: not any_box_displayed(), num_sec=10, delay=0.1, message=\"menu box\")\n\n# Dictionary of (nav destination name, 
section title) section tuples\n# Keys are toplevel sections (the main tabs), values are a supertuple of secondlevel sections\nsections = {\n ('cloud_intelligence', 'Cloud Intelligence'): (\n ('dashboard', 'Dashboard'),\n ('reports', 'Reports'),\n ('chargeback', 'Chargeback'),\n ('timelines', 'Timelines'),\n ('rss', 'RSS')\n ),\n ('services', 'Services'): (\n ('my_services', 'My Services'),\n ('services_catalogs', 'Catalogs'),\n ('services_workloads', 'Workloads'),\n ('services_requests', 'Requests')\n ),\n ('clouds', 'Clouds'): (\n ('clouds_providers', 'Providers'),\n ('clouds_availability_zones', 'Availability Zones'),\n ('clouds_tenants', 'Tenants'),\n ('clouds_flavors', 'Flavors'),\n ('clouds_security_groups', 'Security Groups'),\n ('clouds_instances', 'Instances'),\n ('clouds_stacks', 'Stacks')\n ),\n ('infrastructure', 'Infrastructure'): (\n ('infrastructure_providers', 'Providers'),\n ('infrastructure_clusters', 'Clusters'),\n ('infrastructure_hosts', 'Hosts'),\n ('infrastructure_virtual_machines', 'Virtual Machines'),\n ('infrastructure_resource_pools', 'Resource Pools'),\n ('infrastructure_datastores', 'Datastores'),\n ('infrastructure_repositories', 'Repositories'),\n ('infrastructure_pxe', 'PXE'),\n ('infrastructure_requests', 'Requests'),\n ('infrastructure_config_management', 'Configuration Management')\n ),\n ('storage', 'Storage'): (\n ('filers', 'Filers'),\n ('volumes', 'Volumes'),\n ('luns', 'LUNs'),\n ('file_shares', 'File Shares'),\n ('storage_managers', 'Storage Managers')\n ),\n ('control', 'Control'): (\n ('control_explorer', 'Explorer'),\n ('control_simulation', 'Simulation'),\n ('control_import_export', 'Import / Export'),\n ('control_log', 'Log')\n ),\n ('automate', 'Automate'): (\n ('automate_explorer', 'Explorer'),\n ('automate_simulation', 'Simulation'),\n ('automate_customization', 'Customization'),\n ('automate_import_export', 'Import / Export'),\n ('automate_log', 'Log'),\n ('automate_requests', 'Requests')\n ),\n ('optimize', 'Optimize'): (\n ('utilization', 'Utilization'),\n ('planning', 'Planning'),\n ('bottlenecks', 'Bottlenecks')\n ),\n ('configure', 'Configure'): (\n ('my_settings', 'My Settings'),\n ('tasks', 'Tasks'),\n ('configuration', 'Configuration'),\n ('smartproxies', 'SmartProxies'),\n ('about', 'About')\n )\n}\n\n\ndef is_page_active(toplevel, secondlevel=None):\n try:\n if get_current_toplevel_name() != toplevel:\n return False\n except NoSuchElementException:\n return False\n if secondlevel:\n try:\n sel.element(version.pick({\n \"5.4\": (\"//nav[contains(@class, 'navbar')]//ul/li[@class='active']\"\n \"/a[normalize-space(.)='{}']/..\".format(secondlevel)),\n version.LOWEST: (\"//div[@class='navbar']//ul/li[@class='active']\"\n \"/a[normalize-space(.)='{}']/..\".format(secondlevel))\n }))\n except NoSuchElementException:\n return False\n return True\n\n\ndef nav_to_fn(toplevel, secondlevel=None):\n def f(_):\n if not is_page_active(toplevel):\n try:\n # Try to circumvent the issue on fir\n get_rid_of_the_menu_box()\n open_top_level(toplevel)\n get_rid_of_the_menu_box()\n if get_current_toplevel_name() != toplevel:\n # Infrastructure / Requests workaround\n sel.move_to_element(get_top_level_element(toplevel))\n # Using pure move_to_element to not move the mouse anywhere else\n # So in this case, we move the mouse to the first item of the second level\n ActionChains(sel.browser())\\\n .move_to_element(sel.element(Loc.secondlevel_first_item_loc.format(\n toplevel)))\\\n .click()\\\n .perform()\n get_rid_of_the_menu_box()\n # Now when we went 
directly to the first item, everything should just work\n                tl = get_current_toplevel_name()\n                if tl != toplevel:\n                    raise Exception(\"Navigation screwed! (wanted {}, got {})\".format(toplevel,\n                                                                                     tl))\n            except NoSuchElementException:\n                if visible_toplevel_tabs():  # Target menu is missing\n                    raise\n                else:\n                    return  # no menu at all, assume single permission\n\n        # Can't do this currently because silly menu traps us\n        # if is_page_active(toplevel, secondlevel):\n        #     return\n        if secondlevel is not None:\n            get_rid_of_the_menu_box()\n            open_second_level(get_top_level_element(toplevel), secondlevel)\n        get_rid_of_the_menu_box()\n    return f\n\n\ndef reverse_lookup(toplevel_path, secondlevel_path=None):\n    \"\"\"Reverse lookup for navigation destinations defined in this module, based on menu text\n\n    Usage:\n\n        # Returns 'clouds'\n        reverse_lookup('Clouds')\n\n        # Returns 'clouds_providers'\n        reverse_lookup('Clouds', 'Providers')\n\n        # Returns 'automate_import_export'\n        reverse_lookup('Automate', 'Import / Export')\n\n    Note:\n\n        It may be tempting to use this when you don't know the name of a page, e.g.:\n\n        go_to(reverse_lookup('Infrastructure', 'Providers'))\n\n        Don't do that; use the nav tree name.\n\n    \"\"\"\n    if secondlevel_path:\n        menu_path = '%s/%s' % (toplevel_path, secondlevel_path)\n    else:\n        menu_path = toplevel_path\n\n    for (toplevel_dest, toplevel), secondlevels in sections.items():\n        if menu_path == toplevel:\n            return toplevel_dest\n        for secondlevel_dest, secondlevel in secondlevels:\n            if menu_path == '%s/%s' % (toplevel, secondlevel):\n                return secondlevel_dest\n\n\ndef visible_toplevel_tabs():\n    menu_names = []\n    ele = version.pick({\n        \"5.4\": 'li/a[2]',\n        version.LOWEST: 'li/a'})\n    for menu_elem in sel.elements(ele, root=Loc.toplevel_tabs_loc):\n        menu_names.append(sel.text(menu_elem))\n    return menu_names\n\n\ndef visible_pages():\n    \"\"\"Return a list of all the currently visible top- and second-level menu pages\n\n    Mainly useful for RBAC testing\n\n    \"\"\"\n    # Gather up all the visible toplevel tabs\n    menu_names = visible_toplevel_tabs()\n\n    # Now go from tab to tab and pull the secondlevel names from the visible links\n    displayed_menus = []\n    for menu_name in menu_names:\n        menu_elem = sel.element(Loc.toplevel_loc.format(menu_name))\n        sel.move_to_element(menu_elem)\n        for submenu_elem in sel.elements('../ul/li/a', root=menu_elem):\n            displayed_menus.append((menu_name, sel.text(submenu_elem)))\n\n    # Do reverse lookups so we can compare to the list of nav destinations for this group\n    return sorted([reverse_lookup(*displayed) for displayed in displayed_menus])\n\n# Construct the nav tree based on sections\n_branches = dict()\n# The main tab destination is usually the first secondlevel page in that tab\n# Since this is redundant, it's arguable whether the toplevel tabs should be\n# nav destinations at all; they're included here \"just in case\". The toplevel\n# and secondlevel destinations exist at the same level of nav_tree because the\n# secondlevel destinations don't depend on the toplevel nav taking place to reach\n# their destination.\nfor (toplevel_dest, toplevel), secondlevels in sections.items():\n    for secondlevel_dest, secondlevel in secondlevels:\n        _branches[secondlevel_dest] = nav_to_fn(toplevel, secondlevel)\n    _branches[toplevel_dest] = [nav_to_fn(toplevel, None), {}]\n\nnav.add_branch('toplevel', _branches)\n"
},
{
"alpha_fraction": 0.6314111948013306,
"alphanum_fraction": 0.6386554837226868,
"avg_line_length": 33.16831588745117,
"blob_id": "0b49eaf169d2cbe5ba32b106899f1aef8c8ce37f",
"content_id": "44a481f714917034c86ea7a4d030888225e35555",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3451,
"license_type": "no_license",
"max_line_length": 94,
"num_lines": 101,
"path": "/cfme/tests/services/test_catalog_item.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "import pytest\nfrom cfme.web_ui import flash\nfrom cfme.services.catalogs.catalog_item import CatalogItem\nfrom cfme.automate.service_dialogs import ServiceDialog\nfrom cfme.services.catalogs.catalog import Catalog\nfrom utils.randomness import generate_random_string\nfrom utils import error\nfrom utils.update import update\nimport cfme.tests.configure.test_access_control as tac\nimport utils.randomness as rand\n\npytestmark = [pytest.mark.usefixtures(\"logged_in\"),\n pytest.mark.ignore_stream(\"5.2\")]\n\n\[email protected]_fixture(scope=\"function\")\ndef dialog():\n dialog = \"dialog_\" + generate_random_string()\n\n element_data = dict(\n ele_label=\"ele_\" + rand.generate_random_string(),\n ele_name=rand.generate_random_string(),\n ele_desc=\"my ele desc\",\n choose_type=\"Text Box\",\n default_text_box=\"default value\"\n )\n\n service_dialog = ServiceDialog(label=dialog, description=\"my dialog\",\n submit=True, cancel=True,\n tab_label=\"tab_\" + rand.generate_random_string(),\n tab_desc=\"my tab desc\",\n box_label=\"box_\" + rand.generate_random_string(),\n box_desc=\"my box desc\")\n service_dialog.create(element_data)\n flash.assert_success_message('Dialog \"%s\" was added' % dialog)\n yield dialog\n\n\[email protected]_fixture(scope=\"function\")\ndef catalog():\n catalog = \"cat_\" + generate_random_string()\n cat = Catalog(name=catalog,\n description=\"my catalog\")\n cat.create()\n yield catalog\n\n\[email protected]_fixture(scope=\"function\")\ndef catalog_item(dialog, catalog):\n catalog_item = CatalogItem(item_type=\"Generic\",\n name=generate_random_string(),\n description=\"my catalog\", display_in=True,\n catalog=catalog, dialog=dialog)\n yield catalog_item\n\n\ndef test_create_catalog_item(catalog_item):\n catalog_item.create()\n flash.assert_success_message('Service Catalog Item \"%s\" was added' %\n catalog_item.name)\n\n\ndef test_update_catalog_item(catalog_item):\n catalog_item.create()\n with update(catalog_item):\n catalog_item.description = \"my edited description\"\n\n\ndef test_delete_catalog_item(catalog_item):\n catalog_item.create()\n catalog_item.delete()\n\n\ndef test_add_button_group(catalog_item):\n catalog_item.create()\n catalog_item.add_button_group()\n\n\ndef test_add_button(catalog_item):\n catalog_item.create()\n catalog_item.add_button()\n\n\ndef test_edit_tags(catalog_item):\n catalog_item.create()\n catalog_item.edit_tags(\"Cost Center 001\")\n\n\[email protected](message='downstream - https://bugzilla.redhat.com/show_bug.cgi?id=996789 ;'\n 'upstream - https://bugzilla.redhat.com/show_bug.cgi?id=1092651')\ndef test_catalog_item_duplicate_name(catalog_item):\n catalog_item.create()\n with error.expected(\"Name has already been taken\"):\n catalog_item.create()\n\n\[email protected](blockers=[1130301])\ndef test_permissions_catalog_item_add(setup_cloud_providers, catalog_item):\n \"\"\" Tests that a catalog can be added only with the right permissions\"\"\"\n tac.single_task_permission_test([['Services', 'Catalogs Explorer', 'Catalog Items']],\n {'Add Catalog Item': catalog_item.create})\n"
},
{
"alpha_fraction": 0.6700873970985413,
"alphanum_fraction": 0.6754086017608643,
"avg_line_length": 31.481481552124023,
"blob_id": "0c8a23287f5a7b324c9dd7fa9bb7eaf63fed6d55",
"content_id": "e3545532c9af0a0e9884604c7ed032d7e538c8e6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2631,
"license_type": "no_license",
"max_line_length": 84,
"num_lines": 81,
"path": "/utils/tests/test_randomness.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "# -*-coding: utf-8\nimport pytest\nfrom utils import randomness\n\npytestmark = [\n pytest.mark.nondestructive,\n pytest.mark.skip_selenium,\n]\n\n\ndef test_generate_random_string_noargs():\n random_string = randomness.generate_random_string()\n # 8 is the default length\n assert len(random_string) == 8\n\n\ndef test_generate_random_string_args():\n length = 16\n random_string = randomness.generate_random_string(length)\n assert len(random_string) == length\n\n\ndef test_generate_random_int_noargs():\n # maxint is the default max, so no need to check against it\n random_int = randomness.generate_random_int()\n assert random_int > 0\n\n\ndef test_generate_random_int_args():\n maxvalue = 1\n random_int = randomness.generate_random_int(maxvalue)\n assert 0 <= random_int <= maxvalue\n\n\ndef test_generate_random_uuid():\n \"\"\"Not sure if there's a better test than a string of normal uuid length (36)\"\"\"\n uuid = randomness.generate_random_uuid_as_str()\n assert len(uuid) == 36\n assert isinstance(uuid, basestring)\n\n\ndef test_randomness_fixtures(random_uuid_as_string, random_string):\n \"\"\"Make sure the fixtures work as intended\"\"\"\n assert len(random_uuid_as_string) == 36\n assert isinstance(random_uuid_as_string, basestring)\n assert isinstance(random_string, basestring)\n\n\[email protected](scope=\"class\")\ndef random_stash(request):\n request.cls.before = {\n 'str': '{random_str}',\n 'tuple': ('{random_str}',),\n 'list': ['{random_str}'],\n 'set': set(['{random_str}']),\n 'notrandom': '{random_thisisabogusrandomizer}',\n }\n request.cls.after = randomness.RandomizeValues.from_dict(request.cls.before)\n request.cls.again = randomness.RandomizeValues.from_dict(request.cls.before)\n\n\[email protected](\"random_stash\")\nclass TestRandomizeValues(object):\n def test_randomizevalues(self):\n # These should be different in the two dicts\n assert self.after['str'] != self.before['str']\n assert self.after['tuple'] != self.before['tuple']\n assert self.after['list'] != self.before['list']\n assert self.after['set'] != self.before['set']\n\n def test_randomizevalues_type(self):\n \"\"\"Object type should still be dict\"\"\"\n assert isinstance(self.after, type(self.before))\n\n def test_randomizevalues_bogus_randomizer(self):\n \"\"\"Unmatched randomizer shouldn't change\"\"\"\n assert self.after['notrandom'] == self.before['notrandom']\n\n def test_randomizevalues_again(self):\n \"\"\"If we generate the dict again, it should be newly randomized\"\"\"\n assert self.after != self.again\n"
},
{
"alpha_fraction": 0.7058200836181641,
"alphanum_fraction": 0.7089946866035461,
"avg_line_length": 32.75,
"blob_id": "19780f1810e03d759f15eb9ac20813fdc58be7b7",
"content_id": "3907bace3fbfcdc0ae5b0dd56b7af04b1769a935",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 945,
"license_type": "no_license",
"max_line_length": 94,
"num_lines": 28,
"path": "/cfme/tests/integration/test_ipa_external.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\nimport pytest\n\nfrom cfme.configure.access_control import Group, User\nfrom cfme import login\nfrom utils.conf import cfme_data\nfrom utils.providers import setup_a_provider\n\n\[email protected](scope=\"module\")\ndef setup_first_provider():\n setup_a_provider(validate=True, check_existing=True)\n\n\[email protected]_stream(\"5.2\") # Old version can't do IPA\ndef test_external_auth_ipa(request, setup_first_provider, configure_external_auth_ipa_module):\n try:\n data = cfme_data.get(\"ipa_test\", {})\n except KeyError:\n pytest.skip(\"No ipa_test section in yaml\")\n group = Group(description='cfme', role=\"EvmRole-user\")\n request.addfinalizer(group.delete)\n group.create()\n user = User(name=data[\"fullname\"])\n request.addfinalizer(user.delete)\n request.addfinalizer(login.login_admin)\n login.login(data[\"username\"], data[\"password\"])\n assert login.current_full_name() == data[\"fullname\"]\n"
},
{
"alpha_fraction": 0.6136667132377625,
"alphanum_fraction": 0.620585024356842,
"avg_line_length": 32.76639175415039,
"blob_id": "7ad08a87fbd510f8c72311924246edf748ee80b6",
"content_id": "61ca85a1ec848bea9803bf0113e526b1fb134631",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 8239,
"license_type": "no_license",
"max_line_length": 97,
"num_lines": 244,
"path": "/cfme/tests/configure/test_register_appliance.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n\nimport pytest\nimport re\n\nfrom cfme.configure import red_hat_updates\nfrom cfme.web_ui import InfoBlock, flash\nfrom utils import conf, error, version\nfrom utils.blockers import BZ\nfrom utils.ssh import SSHClient\nfrom utils.testgen import parametrize\nfrom utils.wait import wait_for\n\n\nREG_METHODS = ('rhsm', 'sat5', 'sat6')\n\n\"\"\"\nTests RHSM, Sat5 and Sat6 registration and checks result over ssh\n(update is not performed - it is non-destructive).\n\nFor setup, see test_update_appliances.py (red_hat_updates section in cfme_data yaml).\n\nThese tests do not check registration results in the web UI, only through SSH.\n\"\"\"\n\n\ndef pytest_generate_tests(metafunc):\n argnames = ['reg_method', 'reg_data', 'proxy_url', 'proxy_creds']\n argvalues = []\n idlist = []\n\n stream = version.current_stream()\n try:\n all_reg_data = conf.cfme_data.get('redhat_updates', {})['streams'][stream]\n except KeyError:\n pytest.mark.uncollect(metafunc.function)\n return\n\n if 'reg_method' in metafunc.fixturenames:\n for reg_method in REG_METHODS:\n # We cannot validate against Satellite 5\n if metafunc.function.__name__ == 'test_rh_creds_validation' and reg_method == 'sat5':\n continue\n\n reg_data = all_reg_data.get(reg_method, None)\n if not reg_data or not reg_data.get('test_registration', False):\n continue\n\n proxy_data = conf.cfme_data['redhat_updates'].get('http_proxy', False)\n if proxy_data and reg_data.get('use_http_proxy', False):\n proxy_url = proxy_data['url']\n proxy_creds_key = proxy_data['credentials']\n proxy_creds = conf.credentials[proxy_creds_key]\n argval = [reg_method, reg_data, proxy_url, proxy_creds]\n argid = '{}-{}'.format(reg_method, 'proxy_on')\n idlist.append(argid)\n argvalues.append(argval)\n\n argval = [reg_method, reg_data, None, None]\n argid = '{}-{}'.format(reg_method, 'proxy_off')\n idlist.append(argid)\n argvalues.append(argval)\n parametrize(metafunc, argnames, argvalues, ids=idlist, scope=\"module\")\n\n\n# We must make sure that org ID is unset (because of BZ#1048997)\[email protected]\ndef unset_org_id():\n try:\n red_hat_updates.update_registration(\n service='sat5',\n url=\"http://not.used.for.reg/XMLRPC\",\n username='not_used_for_reg',\n password='not_used_for_reg',\n organization=''\n )\n except Exception as ex:\n # Did this happen because the save button was dimmed?\n try:\n # If so, its fine - just return\n if red_hat_updates.form_buttons.save.is_dimmed:\n return\n except:\n # And if we cant access the save button\n pass\n # Something else happened so return the original exception\n raise ex\n\n\ndef rhsm_unregister():\n with SSHClient() as ssh:\n ssh.run_command('subscription-manager remove --all')\n ssh.run_command('subscription-manager unregister')\n ssh.run_command('subscription-manager clean')\n\n\ndef sat5_unregister():\n with SSHClient() as ssh:\n ssh.run_command('rm -f /etc/sysconfig/rhn/systemid')\n\n\ndef sat6_unregister():\n with SSHClient() as ssh:\n ssh.run_command('subscription-manager remove --all')\n ssh.run_command('subscription-manager unregister')\n ssh.run_command('subscription-manager clean')\n ssh.run_command('mv -f /etc/rhsm/rhsm.conf.kat-backup /etc/rhsm/rhsm.conf')\n ssh.run_command('rpm -qa | grep katello-ca-consumer | xargs rpm -e')\n\n\ndef is_registration_complete(used_repo_or_channel):\n with SSHClient() as ssh:\n ret, out = ssh.run_command('yum repolist enabled')\n # Check that the specified (or default) repo (can be multiple, separated by a space)\n # is enabled and that there are 
packages available\n for repo_or_channel in used_repo_or_channel.split(' '):\n if (repo_or_channel not in out) or (not re.search(r'repolist: [^0]', out)):\n return False\n return True\n\n\[email protected]_stream(\"upstream\")\[email protected](\n blockers=[\n BZ(1198111, unblock=lambda reg_method: reg_method not in {'rhsm', 'sat6'})\n ]\n)\ndef test_rh_creds_validation(request, unset_org_id,\n reg_method, reg_data, proxy_url, proxy_creds):\n if reg_method in ('rhsm', 'sat6'):\n repo_or_channel = reg_data.get('enable_repo', None)\n else:\n repo_or_channel = reg_data.get('add_channel', None)\n\n if not repo_or_channel:\n set_default_repo = True\n else:\n set_default_repo = False\n\n if proxy_url:\n use_proxy = True\n proxy_username = proxy_creds['username']\n proxy_password = proxy_creds['password']\n else:\n use_proxy = False\n proxy_url = None\n proxy_username = None\n proxy_password = None\n\n red_hat_updates.update_registration(\n service=reg_method,\n url=reg_data['url'],\n username=conf.credentials[reg_method]['username'],\n password=conf.credentials[reg_method]['password'],\n repo_name=repo_or_channel,\n organization=reg_data.get('organization', None),\n use_proxy=use_proxy,\n proxy_url=proxy_url,\n proxy_username=proxy_username,\n proxy_password=proxy_password,\n set_default_repository=set_default_repo,\n cancel=True\n )\n\n\[email protected]_stream(\"upstream\")\[email protected](\n blockers=[\n BZ(1102724, unblock=lambda proxy_url: proxy_url is None),\n # Sat6 requires validation to register\n BZ(1198111, unblock=lambda reg_method: reg_method != 'sat6')\n ]\n)\ndef test_rh_registration(request, unset_org_id,\n reg_method, reg_data, proxy_url, proxy_creds):\n\n if reg_method in ('rhsm', 'sat6'):\n repo_or_channel = reg_data.get('enable_repo', None)\n else:\n repo_or_channel = reg_data.get('add_channel', None)\n\n if not repo_or_channel:\n set_default_repo = True\n else:\n set_default_repo = False\n\n if proxy_url:\n use_proxy = True\n proxy_username = proxy_creds['username']\n proxy_password = proxy_creds['password']\n else:\n use_proxy = False\n proxy_url = None\n proxy_username = None\n proxy_password = None\n\n red_hat_updates.update_registration(\n service=reg_method,\n url=reg_data['url'],\n username=conf.credentials[reg_method]['username'],\n password=conf.credentials[reg_method]['password'],\n repo_name=repo_or_channel,\n organization=reg_data.get('organization', None),\n use_proxy=use_proxy,\n proxy_url=proxy_url,\n proxy_username=proxy_username,\n proxy_password=proxy_password,\n set_default_repository=set_default_repo,\n # Satellite 6 registration requires validation to be able to choose organization\n validate=False if reg_method != 'sat6' else True\n )\n\n used_repo_or_channel = InfoBlock('Red Hat Software Updates', 'Update Repository').text\n\n red_hat_updates.register_appliances() # Register all\n\n if reg_method == 'rhsm':\n request.addfinalizer(rhsm_unregister)\n elif reg_method == 'sat5':\n request.addfinalizer(sat5_unregister)\n else:\n request.addfinalizer(sat6_unregister)\n\n wait_for(\n func=is_registration_complete,\n func_args=[used_repo_or_channel],\n delay=40,\n num_sec=400,\n fail_func=red_hat_updates.register_appliances\n )\n\n\[email protected]_stream(\"upstream\")\ndef test_sat5_incorrect_url_format_check(request, unset_org_id):\n # Check that we weren't allowed to save the data\n with error.expected('No matching flash message'):\n red_hat_updates.update_registration(\n service=\"sat5\",\n url=\"url.not.matching.format.example.com\",\n 
username=\"not_used\",\n password=\"not_used\"\n )\n # Confirm that it was the Sat5 url check that blocked it\n flash.assert_message_contain(\"https://server.example.com/XMLRPC\")\n"
},
{
"alpha_fraction": 0.609087347984314,
"alphanum_fraction": 0.6122209429740906,
"avg_line_length": 26.45161247253418,
"blob_id": "852306e0b4f6d6dae59aa374c5801ad6686483ae",
"content_id": "b892844878171c1dc8bad188cc04197974272fcd",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2553,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 93,
"path": "/cfme/tests/intelligence/reports/test_report_chargeback.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\nimport pytest\nimport cfme.web_ui.flash as flash\n\nfrom cfme.intelligence.reports.reports import CustomReport\nfrom utils.providers import setup_a_provider\nfrom utils.randomness import generate_random_string\nfrom utils.log import logger\n\n\[email protected](scope=\"module\")\ndef setup_first_provider():\n setup_a_provider(validate=True, check_existing=True)\n\n\ndef _cleanup_report(report):\n\n try:\n\n logger.info('Cleaning up report {}'.format(report.menu_name))\n report.delete()\n\n except:\n\n logger.warning('Failed to clean up report {}'.\n format(report.menu_name))\n\n\n# These tests are meant to catch issues such as BZ 1203022\ndef test_charge_report_filter_owner(setup_first_provider, request):\n \"\"\"Tests creation of chargeback report that is filtered by owner\n\n \"\"\"\n\n report = CustomReport(\n menu_name=generate_random_string(),\n title=generate_random_string(),\n base_report_on=\"Chargebacks\",\n report_fields=[\n \"Network I/O Used\",\n \"Network I/O Used Cost\",\n \"Storage Used\",\n \"Storage Used Cost\",\n \"Disk I/O Used\",\n \"Disk I/O Used Cost\",\n \"Owner\",\n \"Total Cost\",\n ],\n filter_show_costs=\"Owner\",\n filter_owner=\"Administrator\",\n )\n report.create()\n\n def cleanup_report(report):\n return lambda: _cleanup_report(report)\n\n request.addfinalizer(cleanup_report(report))\n\n flash.assert_message_match('Report \"{}\" was added'.format(report.menu_name))\n report.queue(wait_for_finish=True)\n\n\ndef test_charge_report_filter_tag(setup_first_provider, request):\n \"\"\"Tests creation of chargeback report that is filtered by tag\n\n \"\"\"\n\n report = CustomReport(\n menu_name=generate_random_string(),\n title=generate_random_string(),\n base_report_on=\"Chargebacks\",\n report_fields=[\n \"CPU Used\",\n \"CPU Used Cost\",\n \"Memory Used\",\n \"Memory Used Cost\",\n \"Owner\",\n \"vCPUs Allocated Cost\",\n \"Total Cost\",\n ],\n filter_show_costs=\"My Company Tag\",\n filter_tag_cat=\"Location\",\n filter_tag_value=\"Chicago\",\n )\n report.create()\n\n def cleanup_report(report):\n return lambda: _cleanup_report(report)\n\n request.addfinalizer(cleanup_report(report))\n\n flash.assert_message_match('Report \"{}\" was added'.format(report.menu_name))\n report.queue(wait_for_finish=True)\n"
},
{
"alpha_fraction": 0.656222403049469,
"alphanum_fraction": 0.6568843722343445,
"avg_line_length": 29.01324462890625,
"blob_id": "9706634aae946ec2aaf7e2a74fd049012436a102",
"content_id": "119980b7b2ab9e8aa84daf71bc3ba002bdd9e9ef",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4532,
"license_type": "no_license",
"max_line_length": 98,
"num_lines": 151,
"path": "/cfme/intelligence/reports/menus.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n\"\"\"Module handling report menus contents\"\"\"\nfrom contextlib import contextmanager\n\nfrom cfme.fixtures import pytest_selenium as sel\nfrom cfme.intelligence.reports.ui_elements import FolderManager\nfrom cfme.web_ui import Region, Tree, accordion, form_buttons, menu\nfrom cfme.web_ui.multibox import MultiBoxSelect\nfrom utils import version\nfrom utils.log import logger\n\nmenu.nav.add_branch(\n \"reports\",\n {\n \"report_menus_group\":\n lambda ctx: accordion.tree(\"Edit Report Menus\", \"All EVM Groups\", ctx[\"group\"])\n }\n)\n\nreports_tree = Tree({\n \"5.3\": \"//div[@id='menu_roles_treebox']/ul\",\n version.LOWEST: \"//div[@id='treebox']//table[not(tbody/tr/td[@class='hiddenRow'])]\"\n})\n\nmanager = FolderManager(\"//div[@id='folder_lists']/table\")\nreport_select = MultiBoxSelect(\n \"//select[@id='available_reports']\",\n \"//select[@id='selected_reports']\",\n \"//a[@title='Move selected reports right']/img\",\n \"//a[@title='Move selected reports left']/img\",\n)\n\nbuttons = Region(locators=dict(\n commit=\"//a[@title='Commit report management changes']/img\",\n discard=\"//a[@title='Discard report management changes']/img\",\n))\n\ndefault_button = form_buttons.FormButton(\"Reset All menus to CFME defaults\")\n\n\ndef get_folders(group):\n \"\"\"Returns list of folders for given user group.\n\n Args:\n group: User group to check.\n \"\"\"\n sel.force_navigate(\"report_menus_group\", context={\"group\": group})\n reports_tree.click_path(\"Top Level\")\n return manager.fields\n\n\ndef get_subfolders(group, folder):\n \"\"\"Returns list of sub-folders for given user group and folder.\n\n Args:\n group: User group to check.\n folder: Folder to read.\n \"\"\"\n sel.force_navigate(\"report_menus_group\", context={\"group\": group})\n reports_tree.click_path(\"Top Level\", folder)\n return manager.fields\n\n\ndef add_folder(group, folder):\n \"\"\"Adds a folder under top-level.\n\n Args:\n group: User group.\n folder: Name of the new folder.\n \"\"\"\n with manage_folder() as top_level:\n top_level.add(folder)\n\n\ndef add_subfolder(group, folder, subfolder):\n \"\"\"Adds a subfolder under specified folder.\n\n Args:\n group: User group.\n folder: Name of the folder.\n subfolder: Name of the new subdfolder.\n \"\"\"\n with manage_folder(folder) as fldr:\n fldr.add(subfolder)\n\n\ndef reset_to_default(group):\n \"\"\"Clicks the `Default` button.\n\n Args:\n group: Group to set to Default\n \"\"\"\n sel.force_navigate(\"report_menus_group\", context={\"group\": group})\n sel.click(default_button)\n sel.click(form_buttons.save)\n\n\n@contextmanager\ndef manage_folder(group, folder=None):\n \"\"\"Context manager to use when modifying the folder contents.\n\n Args:\n group: User group.\n folder: Which folder to manage. 
If None, top-level will be managed.\n    Returns: Context-managed :py:class:`cfme.intelligence.reports.ui_elements.FolderManager` inst.\n    \"\"\"\n    sel.force_navigate(\"report_menus_group\", context={\"group\": group})\n    if folder is None:\n        reports_tree.click_path(\"Top Level\")\n    else:\n        reports_tree.click_path(\"Top Level\", folder)\n    try:\n        yield manager\n    except FolderManager._BailOut:\n        logger.info(\"Discarding editing modifications on {}\".format(str(repr(manager))))\n        manager.discard()\n    except:\n        # In case of any exception, nothing will be saved\n        manager.discard()\n        raise  # And reraise the exception\n    else:\n        # If no exception happens, save!\n        manager.commit()\n        form_buttons.save()\n\n\n@contextmanager\ndef manage_subfolder(group, folder, subfolder):\n    \"\"\"Context manager to use when modifying the subfolder contents.\n\n    Args:\n        group: User group.\n        folder: Parent folder name.\n        subfolder: Subfolder name to manage.\n    Returns: Context-managed :py:class:`cfme.intelligence.reports.ui_elements.FolderManager` inst.\n    \"\"\"\n    sel.force_navigate(\"report_menus_group\", context={\"group\": group})\n    reports_tree.click_path(\"Top Level\", folder, subfolder)\n    try:\n        yield report_select\n    except FolderManager._BailOut:\n        logger.info(\"Discarding editing modifications on {}\".format(str(repr(manager))))\n        manager.discard()\n    except:\n        # In case of any exception, nothing will be saved\n        manager.discard()\n        raise  # And reraise the exception\n    else:\n        # If no exception happens, save!\n        manager.commit()\n        form_buttons.save()\n"
},
{
"alpha_fraction": 0.6723549365997314,
"alphanum_fraction": 0.6769055724143982,
"avg_line_length": 26.904762268066406,
"blob_id": "cf09fba4336d47a23fd2c15b174f6bff3c2ddccb",
"content_id": "23d20c7bd53f32d75bb1d520ed68c87681735cb2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1758,
"license_type": "no_license",
"max_line_length": 86,
"num_lines": 63,
"path": "/cfme/tests/infrastructure/test_tag_infrastructure.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "import pytest\n\nfrom cfme.web_ui import Quadicon, mixins\nfrom cfme.configure.configuration import Category, Tag\nfrom utils import providers\nfrom utils.randomness import generate_lowercase_random_string, generate_random_string\n\n\[email protected](scope=\"module\")\ndef setup_first_provider():\n providers.setup_a_provider(prov_class=\"infra\", validate=True, check_existing=True)\n\n\npytestmark = [\n pytest.mark.parametrize(\"location\", [\n \"infrastructure_providers\",\n \"infrastructure_clusters\",\n \"infrastructure_hosts\",\n \"infrastructure_datastores\",\n \"infra_vms\",\n \"infra_templates\",\n ]),\n pytest.mark.usefixtures(\"setup_first_provider\")\n]\n\n\[email protected]_fixture(scope=\"module\")\ndef category():\n cg = Category(name=generate_lowercase_random_string(size=8),\n description=generate_random_string(size=32),\n display_name=generate_random_string(size=32))\n cg.create()\n yield cg\n cg.delete()\n\n\[email protected]_fixture(scope=\"module\")\ndef tag(category):\n tag = Tag(name=generate_lowercase_random_string(size=8),\n display_name=generate_random_string(size=32),\n category=category)\n tag.create()\n yield tag\n tag.delete()\n\n\ndef test_tag_infra_item_through_selecting(location, tag):\n \"\"\"Add a tag to a infra item\n \"\"\"\n pytest.sel.force_navigate(location)\n Quadicon.select_first_quad()\n mixins.add_tag(tag)\n Quadicon.select_first_quad() # It goes back to the list view.\n mixins.remove_tag(tag)\n\n\ndef test_tag_infra_item_through_details(location, tag):\n \"\"\"Add a tag to a infra item\n \"\"\"\n pytest.sel.force_navigate(location)\n pytest.sel.click(Quadicon.first())\n mixins.add_tag(tag)\n mixins.remove_tag(tag)\n"
},
{
"alpha_fraction": 0.669959306716919,
"alphanum_fraction": 0.6728646159172058,
"avg_line_length": 28.672412872314453,
"blob_id": "1d0c70fe88c3f0c3f38664f0fb14f92fbec9b848",
"content_id": "d13c9df0863e829feb6096191740207f8fcb0422",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1721,
"license_type": "no_license",
"max_line_length": 61,
"num_lines": 58,
"path": "/cfme/tests/infrastructure/test_system_image_type.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "import pytest\n\nfrom cfme.infrastructure import pxe\nimport utils.error as error\nfrom utils.randomness import generate_random_string\nfrom utils.update import update\n\npytestmark = [pytest.mark.usefixtures(\"logged_in\")]\n\n\ndef test_system_image_type_crud():\n \"\"\"\n Tests a System Image Type using CRUD operations.\n \"\"\"\n sys_image_type = pxe.SystemImageType(\n name=generate_random_string(size=8),\n provision_type='Vm')\n sys_image_type.create()\n with update(sys_image_type):\n sys_image_type.name = sys_image_type.name + \"_update\"\n sys_image_type.delete(cancel=False)\n\n\ndef test_duplicate_name_error_validation():\n \"\"\"\n Tests a System Image for duplicate name.\n \"\"\"\n sys_image_type = pxe.SystemImageType(\n name=generate_random_string(size=8),\n provision_type='Vm')\n sys_image_type.create()\n with error.expected('Name has already been taken'):\n sys_image_type.create()\n sys_image_type.delete(cancel=False)\n\n\ndef test_name_required_error_validation():\n \"\"\"\n Tests a System Image with no name.\n \"\"\"\n sys_image_type = pxe.SystemImageType(\n name=None,\n provision_type='Vm')\n with error.expected('Name is required'):\n sys_image_type.create()\n\n# Commenting the maximum charater validation due to\n# http://cfme-tests.readthedocs.org/guides/gotchas.html#\n# selenium-is-not-clicking-on-the-element-it-says-it-is\n# def test_name_max_character_validation():\n# \"\"\"\n# Tests a System Image name with max characters.\n# \"\"\"\n# sys_image_type = pxe.SystemImageType(\n# name=generate_random_string(size=256),\n# provision_type='Vm')\n# sys_image_type.create()\n# sys_image_type.delete(cancel=False)\n"
},
{
"alpha_fraction": 0.6497290134429932,
"alphanum_fraction": 0.6510840058326721,
"avg_line_length": 34.14285659790039,
"blob_id": "d7f1fc2cbc4e469082720aa7e3203e236e3a14c0",
"content_id": "7494a0bdbb5daafa269675208800f496d9fe58b8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1476,
"license_type": "no_license",
"max_line_length": 85,
"num_lines": 42,
"path": "/cfme/cloud/stack.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "import ui_navigate as nav\nimport cfme.fixtures.pytest_selenium as sel\nfrom cfme.web_ui import Quadicon, Region, SplitTable\nfrom utils.pretty import Pretty\n\ndetails_page = Region(infoblock_type='detail')\noutput_table = SplitTable(\n ('//*[@id=\"list_grid\"]//table[contains(@class, \"hdr\")]/tbody', 1),\n ('//*[@id=\"list_grid\"]//table[contains(@class, \"obj\")]/tbody', 1)\n)\n\nnav.add_branch(\n 'clouds_stacks', {\n 'clouds_stack':\n lambda ctx: sel.click(Quadicon(ctx['stack'].name, 'stack'))\n }\n)\n\n\nclass Stack(Pretty):\n pretty_attrs = ['name']\n\n def __init__(self, name=None):\n self.name = name\n\n def nav_to_security_group_link(self):\n sel.force_navigate('clouds_stack', context={'stack': self})\n sel.click(details_page.infoblock.element(\"Relationships\", \"Security Groups\"))\n\n def nav_to_parameters_link(self):\n sel.force_navigate('clouds_stack', context={'stack': self})\n sel.click(details_page.infoblock.element(\"Relationships\", \"Parameters\"))\n\n def nav_to_output_link(self):\n sel.force_navigate('clouds_stack', context={'stack': self})\n sel.click(details_page.infoblock.element(\"Relationships\", \"Outputs\"))\n cells = {'Key': \"WebsiteURL\"}\n output_table.click_rows_by_cells(cells, \"Key\", True)\n\n def nav_to_resources_link(self):\n sel.force_navigate('clouds_stack', context={'stack': self})\n sel.click(details_page.infoblock.element(\"Relationships\", \"Resources\"))\n"
},
{
"alpha_fraction": 0.5574073791503906,
"alphanum_fraction": 0.5604938268661499,
"avg_line_length": 36.67441940307617,
"blob_id": "125b27e20690253cb515d1528ad8abcf906c6205",
"content_id": "cd46e04a1b0931be73fad208802e25cfdb626655",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3240,
"license_type": "no_license",
"max_line_length": 97,
"num_lines": 86,
"path": "/cfme/tests/intelligence/reports/test_report_corresponds.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\nimport pytest\n\nimport utils\nfrom cfme.intelligence.reports.reports import CustomReport\nfrom utils import version\nfrom utils.providers import provider_factory_by_name, setup_a_provider\nfrom utils.randomness import generate_random_string, pick\n\n\[email protected](scope=\"module\")\ndef setup_first_provider():\n setup_a_provider(validate=True, check_existing=True)\n\n\[email protected]_fixture(scope=\"function\")\ndef report_vms(setup_first_provider):\n report = CustomReport(\n menu_name=generate_random_string(),\n title=generate_random_string(),\n base_report_on=\"Virtual Machines\",\n report_fields=[\n version.pick({\n version.LOWEST: \"Provider : Name\",\n \"5.3\": \"Cloud/Infrastructure Provider : Name\",\n }),\n \"Cluster : Name\",\n \"Datastore : Name\",\n \"Hardware : Number of CPUs\",\n \"Hardware : RAM\",\n \"Host : Name\",\n \"Name\",\n ]\n )\n report.create()\n report.queue(wait_for_finish=True)\n yield pick(\n filter(\n lambda i: len(i[\n version.pick({\n version.LOWEST: \"Provider : Name\",\n \"5.3\": \"Cloud/Infrastructure Provider Name\",\n })\n ].strip()) > 0,\n list(report.get_saved_reports()[0].data.rows)), 2)\n report.delete()\n\n\ndef test_custom_vm_report(soft_assert, report_vms):\n for row in report_vms:\n if row[\"Name\"].startswith(\"test_\"):\n continue # Might disappear meanwhile\n provider_name = row[version.pick({\n version.LOWEST: \"Provider : Name\",\n \"5.3\": \"Cloud/Infrastructure Provider Name\",\n })]\n provider = provider_factory_by_name(provider_name)\n provider_hosts_and_ips = utils.net.resolve_ips(provider.list_host())\n provider_datastores = provider.list_datastore()\n provider_clusters = provider.list_cluster()\n soft_assert(provider.does_vm_exist(row[\"Name\"]), \"VM {} does not exist in {}!\".format(\n row[\"Name\"], provider_name\n ))\n if row[\"Cluster Name\"]:\n soft_assert(\n row[\"Cluster Name\"] in provider_clusters,\n \"Cluster {} not found in {}!\".format(row[\"Cluster Name\"], str(provider_clusters))\n )\n if row[\"Datastore Name\"]:\n soft_assert(\n row[\"Datastore Name\"] in provider_datastores,\n \"Datastore {} not found in {}!\".format(\n row[\"Datastore Name\"], str(provider_datastores))\n )\n # Because of mixing long and short host names, we have to use both-directional `in` op.\n if row[\"Host Name\"]:\n found = False\n possible_ips_or_hosts = utils.net.resolve_ips((row[\"Host Name\"], ))\n for possible_ip_or_host in possible_ips_or_hosts:\n for host_ip in provider_hosts_and_ips:\n if possible_ip_or_host in host_ip or host_ip in possible_ip_or_host:\n found = True\n soft_assert(\n found,\n \"Host {} not found in {}!\".format(possible_ips_or_hosts, provider_hosts_and_ips)\n )\n"
},
{
"alpha_fraction": 0.5669338703155518,
"alphanum_fraction": 0.5700681209564209,
"avg_line_length": 28.573974609375,
"blob_id": "7389f91864785c73abe66a223bfaee3df30fe77e",
"content_id": "c3f052017f3ed68709447723126927268ce6d346",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 16591,
"license_type": "no_license",
"max_line_length": 99,
"num_lines": 561,
"path": "/cfme/web_ui/expression_editor.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n\"\"\" The expression editor present in some locations of CFME.\n\n\"\"\"\nfrom functools import partial\nfrom selenium.common.exceptions import NoSuchElementException\nfrom multimethods import singledispatch\nfrom utils.wait import wait_for, TimedOutError\nimport cfme.fixtures.pytest_selenium as sel\nfrom cfme.web_ui import Anything, Calendar, Form, Input, Region, Select, fill\nimport re\nimport sys\nimport types\nfrom utils.pretty import Pretty\n\n\ndef _make_button(title):\n return \"//span[not(contains(@style,'none'))]//img[@alt='%s']\"\\\n % title\n\n\ndef _root():\n return sel.element(\"//div[@id='exp_editor_div']\")\n\n\ndef _atom_root():\n return sel.element(\"./div[@id='exp_atom_editor_div']\", root=_root())\n\n\ndef _expressions_root():\n return sel.element(\"./fieldset/div\", root=_root())\n\n\n###\n# Buttons container\nbuttons = Region(\n locators=dict(\n commit=\"//img[@alt='Commit expression element changes']\",\n discard=\"//img[@alt='Discard expression element changes']\",\n remove=\"//span[not(contains(@style, 'none'))]//img[@alt='Remove this expression element']\",\n NOT=\"//span[not(contains(@style, 'none'))]\" +\n \"//img[@alt='Wrap this expression element with a NOT']\",\n OR=\"//span[not(contains(@style, 'none'))]//img[@alt='OR with a new expression element']\",\n AND=\"//span[not(contains(@style, 'none'))]//img[@alt='AND with a new expression element']\",\n redo=\"//img[@alt='Redo']\",\n undo=\"//img[@alt='Undo']\",\n select_specific=\"//img[@alt='Click to change to a specific Date/Time format']\",\n select_relative=\"//img[@alt='Click to change to a relative Date/Time format']\",\n )\n)\n\n\n###\n# Buttons for operationg the expression concatenation\n#\ndef click_undo():\n sel.click(buttons.undo)\n\n\ndef click_redo():\n sel.click(buttons.redo)\n\n\ndef click_and():\n sel.click(buttons.AND)\n\n\ndef click_or():\n sel.click(buttons.OR)\n\n\ndef click_not():\n sel.click(buttons.NOT)\n\n\ndef click_remove():\n sel.click(buttons.remove)\n\n\n###\n# Buttons for operating the atomic expressions\n#\ndef click_commit():\n sel.click(buttons.commit)\n\n\ndef click_discard():\n sel.click(buttons.discard)\n\n\n###\n# Functions for operating the selection of the expressions\n#\ndef select_first_expression():\n \"\"\" There is always at least one (???), so no checking of bounds.\n\n \"\"\"\n sel.click(sel.elements(\"//a[contains(@id,'exp_')]\", root=_expressions_root())[0])\n\n\ndef select_expression_by_text(text):\n sel.click(\n sel.element(\n \"//a[contains(@id,'exp_')][contains(normalize-space(text()),'%s')]\" % text,\n root=_expressions_root()\n )\n )\n\n\ndef no_expression_present():\n els = sel.elements(\"//a[contains(@id,'exp_')]\", root=_expressions_root())\n if len(els) > 1:\n return False\n return els[0].text.strip() == \"???\"\n\n\ndef any_expression_present():\n return not no_expression_present()\n\n\ndef is_editing():\n try:\n sel.element(\n \"//a[contains(@id,'exp_')][contains(normalize-space(text()),'???')]\",\n root=_expressions_root()\n )\n return True\n except NoSuchElementException:\n return False\n\n\ndef delete_whole_expression():\n while any_expression_present():\n select_first_expression()\n click_remove()\n\n\ndef get_expression_as_text():\n \"\"\" Returns whole expression as represented visually.\n\n \"\"\"\n return sel.text(\"//div[@id='exp_editor_div']/fieldset/div\").encode(\"utf-8\").strip()\n\n\n###\n# Form handling\n#\nfield_form = Form(\n fields=[\n (\"type\", Select(\"select#chosen_typ\")),\n (\"field\", 
Select(\"select#chosen_field\")),\n (\"key\", Select(\"select#chosen_key\")),\n (\"value\", Input(\"chosen_value\")),\n (\"user_input\", Input(\"user_input\")),\n ]\n)\n\nfield_date_form = Form(\n fields=[\n (\"dropdown_select\", Select(\"select#chosen_from_1\")),\n (\"input_select_date\", Calendar(\"miq_date_1_0\")),\n (\"input_select_time\", Select(\"select#miq_time_1_0\"))\n ]\n)\n\ncount_form = Form(\n fields=[\n (\"type\", Select(\"select#chosen_typ\")),\n (\"count\", Select(\"select#chosen_count\")),\n (\"key\", Select(\"select#chosen_key\")),\n (\"value\", Input(\"chosen_value\")),\n (\"user_input\", Input(\"user_input\")),\n ]\n)\n\ntag_form = Form(\n fields=[\n (\"type\", Select(\"select#chosen_typ\")),\n (\"tag\", Select(\"select#chosen_tag\")),\n (\"value\", Select(\"#chosen_value\")),\n (\"user_input\", Input(\"user_input\")),\n ]\n)\n\nfind_form = Form(\n fields=[\n (\"type\", Select(\"select#chosen_typ\")),\n (\"field\", Select(\"select#chosen_field\")),\n (\"skey\", Select(\"select#chosen_skey\")),\n (\"value\", \"#chosen_value\"),\n (\"check\", Select(\"select#chosen_check\")),\n (\"cfield\", Select(\"select#chosen_cfield\")),\n (\"ckey\", Select(\"select#chosen_ckey\")),\n (\"cvalue\", Input(\"chosen_cvalue\")),\n ]\n)\n\nregistry_form = Form(\n fields=[\n (\"type\", Select(\"select#chosen_typ\")),\n (\"key\", Input(\"chosen_regkey\")),\n (\"value\", Input(\"chosen_regval\")),\n (\"operation\", Select(\"#chosen_key\")),\n (\"contents\", Input(\"chosen_value\")),\n ]\n)\n\ndate_switch_buttons = Region(\n locators=dict(\n to_relative=\"//img[@alt='Click to change to a relative Date/Time format']\",\n to_specific=\"//img[@alt='Click to change to a specific Date/Time format']\"\n )\n)\n\ndate_specific_form = Form(\n fields=[\n (\"date\", Input(\"miq_date_1_0\")),\n (\"time\", Input(\"miq_time_1_0\")),\n ]\n)\n\ndate_relative_form = Form(\n fields=[\n (\"from\", Select(\"select#chosen_from_1\")),\n (\"through\", Select(\"select#chosen_through_1\")),\n ]\n)\n\n\n###\n# Fill commands\n#\ndef fill_count(count=None, key=None, value=None):\n \"\"\" Fills the 'Count of' type of form.\n\n If the value is unspecified and we are in the advanced search form (user input), the user_input\n checkbox will be checked if the value is None.\n\n Args:\n count: Name of the field to compare (Host.VMs, ...).\n key: Operation to do (=, <, >=, ...).\n value: Value to check against.\n Returns: See :py:func:`cfme.web_ui.fill`.\n \"\"\"\n fill(\n count_form,\n dict(\n type=\"Count of\",\n count=count,\n key=key,\n value=value,\n ),\n )\n # In case of advanced search box\n if sel.is_displayed(field_form.user_input):\n user_input = value is None\n else:\n user_input = None\n fill(field_form.user_input, user_input)\n sel.click(buttons.commit)\n\n\ndef fill_tag(tag=None, value=None):\n \"\"\" Fills the 'Tag' type of form.\n\n Args:\n tag: Name of the field to compare.\n value: Value to check against.\n Returns: See :py:func:`cfme.web_ui.fill`.\n \"\"\"\n fill(\n tag_form,\n dict(\n type=\"Tag\",\n tag=tag,\n value=value,\n ),\n )\n # In case of advanced search box\n if sel.is_displayed(field_form.user_input):\n user_input = value is None\n else:\n user_input = None\n fill(field_form.user_input, user_input)\n sel.click(buttons.commit)\n\n\ndef fill_registry(key=None, value=None, operation=None, contents=None):\n \"\"\" Fills the 'Registry' type of form.\"\"\"\n return fill(\n registry_form,\n dict(\n type=\"Registry\",\n key=key,\n value=value,\n operation=operation,\n contents=contents,\n ),\n 
action=buttons.commit\n )\n\n\ndef fill_find(field=None, skey=None, value=None, check=None, cfield=None, ckey=None, cvalue=None):\n fill(\n find_form,\n dict(\n type=\"Find\",\n field=field,\n skey=skey,\n value=value,\n check=check,\n cfield=cfield,\n ckey=ckey,\n cvalue=cvalue,))\n sel.click(buttons.commit)\n\n\ndef fill_field(field=None, key=None, value=None):\n \"\"\" Fills the 'Field' type of form.\n\n Args:\n tag: Name of the field to compare (Host.VMs, ...).\n key: Operation to do (=, <, >=, IS NULL, ...).\n value: Value to check against.\n Returns: See :py:func:`cfme.web_ui.fill`.\n \"\"\"\n field_norm = field.strip().lower()\n if \"date updated\" in field_norm or \"date created\" in field_norm or \"boot time\" in field_norm:\n no_date = False\n else:\n no_date = True\n fill(\n field_form,\n dict(\n type=\"Field\",\n field=field,\n key=key,\n value=value if no_date else None,\n ),\n )\n # In case of advanced search box\n if sel.is_displayed(field_form.user_input):\n user_input = value is None\n else:\n user_input = None\n fill(field_form.user_input, user_input)\n if not no_date:\n # Flip the right part of form\n if isinstance(value, basestring) and not re.match(r\"^[0-9]{2}/[0-9]{2}/[0-9]{4}$\", value):\n if not sel.is_displayed(field_date_form.dropdown_select):\n sel.click(date_switch_buttons.to_relative)\n fill(field_date_form, {\"dropdown_select\": value})\n sel.click(buttons.commit)\n else:\n # Specific selection\n if not sel.is_displayed(field_date_form.input_select_date):\n sel.click(date_switch_buttons.to_specific)\n if (isinstance(value, tuple) or isinstance(value, list)) and len(value) == 2:\n date, time = value\n elif isinstance(value, basestring): # is in correct format mm/dd/yyyy\n # Date only (for now)\n date = value[:]\n time = None\n else:\n raise TypeError(\"fill_field expects a 2-tuple (date, time) or string with date\")\n # TODO datetime.datetime support\n fill(field_date_form.input_select_date, date)\n # Try waiting a little bit for time field\n # If we don't wait, committing the expression will glitch\n try:\n wait_for(lambda: sel.is_displayed(field_date_form.input_select_time), num_sec=6)\n # It appeared, so if the time is to be set, we will set it (passing None glitches)\n if time:\n fill(field_date_form.input_select_time, time)\n except TimedOutError:\n # Did not appear, ignore that\n pass\n finally:\n # And finally, commit the expression :)\n sel.click(buttons.commit)\n else:\n sel.click(buttons.commit)\n\n\n###\n# Processor for YAML commands\n#\n_banned_commands = {\"get_func\", \"run_commands\", \"dsl_parse\", \"create_program_from_dsl\"}\n\n\ndef get_func(name):\n \"\"\" Return callable from this module by its name.\n\n Args:\n name: Name of the variable containing the callable.\n Returns: Callable from this module\n \"\"\"\n assert name not in _banned_commands, \"Command '%s' is not permitted!\" % name\n assert not name.startswith(\"_\"), \"Command '%s' is private!\" % name\n try:\n func = getattr(sys.modules[__name__], name)\n except AttributeError:\n raise NameError(\"Could not find function %s to operate the editor!\" % name)\n try:\n func.__call__\n return func\n except AttributeError:\n raise NameError(\"%s is not callable!\" % name)\n\n\ndef run_commands(command_list, clear_expression=True):\n \"\"\" Run commands from the command list.\n\n Command list syntax:\n .. 
code-block:: python\n\n        [\n            \"function1\",  # no args\n            \"function2\",  # dtto\n            {\"fill_fields\": {\"field1\": \"value\", \"field2\": \"value\"}},  # Passes kwargs\n            {\"do_other_things\": [1,2,3]}  # Passes args\n        ]\n\n    In YAML:\n    .. code-block:: yaml\n\n        - function1\n        - function2\n        -\n            fill_fields:\n                field1: value\n                field2: value\n        -\n            do_other_things:\n            - 1\n            - 2\n            - 3\n\n    Args:\n        command_list: :py:class:`list` object of the commands\n        clear_expression: Whether to clear the expression before entering new one (default `True`)\n    \"\"\"\n    assert isinstance(command_list, list) or isinstance(command_list, tuple)\n    step_list = []\n    for command in command_list:\n        if isinstance(command, basestring):\n            # Single command, no params\n            step_list.append(get_func(command))\n        elif isinstance(command, dict):\n            for key, value in command.iteritems():\n                func = get_func(key)\n                args = []\n                kwargs = {}\n                if isinstance(value, list) or isinstance(value, tuple):\n                    args.extend(value)\n                elif isinstance(value, dict):\n                    kwargs.update(value)\n                else:\n                    raise Exception(\"I use '%s' type here!\" % type(value).__name__)\n                step_list.append(partial(func, *args, **kwargs))\n        else:\n            raise Exception(\"I cannot process '%s' type here!\" % type(command).__name__)\n    if clear_expression:\n        delete_whole_expression()\n    for step in step_list:\n        step()\n\n\n@singledispatch\ndef create_program(source):\n    \"\"\" Wrong call\n\n    \"\"\"\n    raise TypeError(\"Program code wrong! You must specify string (DSL), command list or None!\")\n\n\n@create_program.method(basestring)\ndef _create_program_from_dsl(dsl_program):\n    \"\"\" Simple DSL to fill the expression editor.\n\n    Syntax:\n        DSL consists of statements. Statements are separated with newline or ;.\n        Each statement is a single function call. Functions are called in this module.\n        Function without parameters can be called like this:\n            function\n        or\n            function()\n\n        If the function has some parameters, you have to choose whether they are kwargs or args.\n        DSL has no string literals, so if you want to call a function with classic parameters:\n            function(parameter one, parameter two, you cannot use comma)\n        And with kwargs:\n            function(username=John Doe, password=top secret)\n        You cannot split the statement to multiple lines as the DSL is regexp-based.\n\n    Args:\n        dsl_program: Source string with the program.\n    Returns: Callable, which fills the expression.\n    \"\"\"\n    SIMPLE_CALL = r\"^[a-z_A-Z][a-z_A-Z0-9]*$\"\n    ARGS_CALL = r\"^(?P<name>[a-z_A-Z][a-z_A-Z0-9]*)\\((?P<args>.*)\\)$\"\n    KWARG = r\"^[^=]+=.*$\"\n    command_list = []\n    for i, line in enumerate([x.strip() for x in re.split(r\"\\n|;\", dsl_program)]):\n        if len(line) == 0:\n            continue\n        elif re.match(SIMPLE_CALL, line):\n            command_list.append(line)\n            continue\n        args_match = re.match(ARGS_CALL, line)\n        if not args_match:\n            raise SyntaxError(\"Could not resolve statement `%s' on line %d\" % (line, i))\n        fname = args_match.groupdict()[\"name\"]\n        args = [x.strip() for x in args_match.groupdict()[\"args\"].split(\",\")]\n        if len(args) > 0 and len(args[0]) > 0:\n            if re.match(KWARG, args[0]):\n                # kwargs\n                kwargs = dict([map(lambda x: x.strip(), x.split(\"=\", 1)) for x in args])\n                command_list.append({fname: kwargs})\n            else:\n                # Args\n                command_list.append({fname: [None if arg == \"/None/\" else arg for arg in args]})\n        else:\n            command_list.append(fname)\n    return create_program(command_list)\n\n\n@create_program.method(list)\n@create_program.method(tuple)\ndef _create_program_from_list(command_list):\n    \"\"\" Create function which fills the 
expression from the command list.\n\n    Args:\n        command_list: Command list for :py:func:`run_program`\n    Returns: Callable, which fills the expression.\n    \"\"\"\n    return partial(run_commands, command_list)\n\n\n@create_program.method(types.NoneType)\ndef _create_program_from_none(none):\n    return lambda: none\n\n\nclass Expression(Pretty):\n    \"\"\"This class enables to embed the expression in a Form.\n\n    Args:\n        show_func: Function to call to show the expression if there are more of them.\n    \"\"\"\n    pretty_attrs = ['show_func']\n\n    def __init__(self, show_func=lambda: None):\n        self.show_func = show_func\n\n\n@fill.method((Expression, Anything))\ndef _fill_expression(e, p):\n    e.show_func()\n    prog = create_program(p)\n    prog()\n"
},
{
"alpha_fraction": 0.6589303612709045,
"alphanum_fraction": 0.6750757098197937,
"avg_line_length": 28.147058486938477,
"blob_id": "c11c58e074305b0a25c3fb193bb9aac5e5548cc3",
"content_id": "a8baeed602c4e08dd30128f6d6b7ec031b018469",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 991,
"license_type": "no_license",
"max_line_length": 75,
"num_lines": 34,
"path": "/cfme/tests/storage/test_storage_crud.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\nimport pytest\n\nfrom cfme.storage.managers import StorageManager\nfrom utils.randomness import generate_random_string\nfrom utils.update import update\nfrom utils.version import current_version\n\npytestmark = [pytest.mark.usefixtures(\"use_storage\")]\n\n\[email protected](lambda: not current_version().is_in_series(\"5.2\"))\ndef test_storage_manager_crud(request):\n sm = StorageManager(\n name=generate_random_string(),\n type=StorageManager.NETAPP_RS,\n hostname=generate_random_string(),\n ip=\"127.0.0.250\",\n port=\"12345\",\n credentials=StorageManager.Credential(\n username=\"test\",\n password=\"pw\"\n )\n\n )\n request.addfinalizer(lambda: sm.delete() if sm.exists else None)\n assert not sm.exists\n sm.create(validate=False)\n assert sm.exists\n with update(sm, validate=False):\n sm.hostname = generate_random_string()\n assert sm.exists\n sm.delete()\n assert not sm.exists\n"
},
{
"alpha_fraction": 0.6837607026100159,
"alphanum_fraction": 0.6858974099159241,
"avg_line_length": 28.25,
"blob_id": "d8e5783f4910d29c8054d27b84938561c0412b57",
"content_id": "fea2115aef066329a52f3642e7358c221e1ab984",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 468,
"license_type": "no_license",
"max_line_length": 62,
"num_lines": 16,
"path": "/fixtures/widgets.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\nimport pytest\n\nfrom cfme.dashboard import Widget\nfrom cfme.intelligence.reports import widgets\n\n\[email protected](scope=\"session\")\ndef widgets_generated(any_provider_session):\n pytest.sel.force_navigate(\"dashboard\")\n widget_list = []\n for widget in Widget.all():\n widget_list.append((widget.name, widget.content_type))\n for w_name, w_type in widget_list:\n w = widgets.Widget.detect(w_type, w_name)\n w.generate()\n"
},
{
"alpha_fraction": 0.6646207571029663,
"alphanum_fraction": 0.6773344874382019,
"avg_line_length": 27.160493850708008,
"blob_id": "78a0b10ded113c1c9acc27ebdcde585c36c4d501",
"content_id": "c6a1d21c0e9be1156180fc31ed2a12a8a29cdf77",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2281,
"license_type": "no_license",
"max_line_length": 84,
"num_lines": 81,
"path": "/cfme/tests/automate/test_class.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "import pytest\nfrom cfme.automate.explorer import Class\nfrom utils.randomness import generate_random_string\nfrom utils.update import update\nimport utils.error as error\nimport cfme.tests.automate as ta\n\n\npytestmark = [pytest.mark.usefixtures(\"logged_in\")]\n\n\[email protected](scope=\"module\")\ndef make_namespace(request):\n return ta.make_namespace(request=request)\n\n\[email protected](scope=\"function\")\ndef a_class(make_namespace):\n return ta.a_class(make_namespace)\n\n\ndef test_class_crud(a_class):\n a_class.create()\n orig = a_class.description\n with update(a_class):\n a_class.description = 'edited'\n with update(a_class):\n a_class.description = orig\n a_class.delete()\n assert not a_class.exists()\n\n\ndef test_schema_crud(a_class):\n a_class.create()\n f1 = Class.SchemaField(name='foo')\n f2 = Class.SchemaField(name='bar')\n f3 = Class.SchemaField(name='baz')\n a_class.edit_schema(add_fields=(f1, f2))\n a_class.edit_schema(remove_fields=(f1,), add_fields=(f3,))\n\n\n# The inheritance box has been removed from the UI until it is implemented properly,\n# see 1138859\n#\n# def test_add_class_inherited(a_class):\n# subclass = Class(name=generate_random_string(8),\n# namespace=a_class.namespace,\n# description=\"subclass\",\n# inherits_from=a_class)\n# a_class.create()\n# subclass.create()\n\n\ndef test_duplicate_class_disallowed(a_class):\n a_class.create()\n with error.expected(\"Name has already been taken\"):\n a_class.create()\n\n\ndef test_same_class_name_different_namespace(make_namespace):\n other_namespace = ta.make_namespace()\n name = generate_random_string(8)\n cls1 = Class(name=name, namespace=make_namespace)\n cls2 = Class(name=name, namespace=other_namespace)\n cls1.create()\n cls2.create()\n # delete one and check the other still exists\n cls1.delete()\n assert cls2.exists()\n\n\[email protected](blockers=[1148541])\ndef test_display_name_unset_from_ui(request, a_class):\n a_class.create()\n request.addfinalizer(a_class.delete)\n with update(a_class):\n a_class.display_name = generate_random_string()\n assert a_class.exists\n with update(a_class):\n a_class.display_name = \"\"\n assert a_class.exists\n"
},
{
"alpha_fraction": 0.5959404706954956,
"alphanum_fraction": 0.6059539914131165,
"avg_line_length": 32.89908218383789,
"blob_id": "69450c31c06749aea426d9f6d4ec1be4647b9006",
"content_id": "b5232489566ed500cb37a75cce9752a1a4f8c5e1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3695,
"license_type": "no_license",
"max_line_length": 99,
"num_lines": 109,
"path": "/cfme/tests/configure/test_docs.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\nimport pytest\nimport re\nimport requests\ntry:\n # Faster, C-ext\n from cStringIO import StringIO\nexcept ImportError:\n # Slower, pure python\n from StringIO import StringIO\nfrom PyPDF2 import PdfFileReader\n\nfrom cfme.configure.about import product_assistance as about\nfrom utils import version\n\n\[email protected](scope=\"module\")\ndef guides():\n return [loc for loc in about.locators.iterkeys() if loc.endswith(\"_guide\")]\n\n\[email protected](scope=\"session\")\ndef docs_info():\n return [\n 'Control',\n 'Lifecycle and Automation',\n 'Quick Start',\n 'Settings And Operations',\n 'Insight',\n 'Integration Services'\n ]\n\n\ndef test_links(guides, soft_assert):\n \"\"\"Test whether the PDF documents are present.\"\"\"\n pytest.sel.force_navigate(\"about\")\n for link in guides:\n locator = getattr(about, link)\n url = pytest.sel.get_attribute(locator, \"href\")\n soft_assert(\n requests.head(url, verify=False).status_code == 200,\n \"'{}' is not accessible\".format(pytest.sel.text(locator).encode(\"utf-8\").strip())\n )\n\n\[email protected](blockers=[1145326, \"GH#ManageIQ/manageiq:2246\"])\ndef test_contents(guides, soft_assert):\n \"\"\"Test contents of each document.\"\"\"\n pytest.sel.force_navigate(\"about\")\n precomp_noguide = re.compile(\"(.*) Guide\")\n for link in guides:\n locator = getattr(about, link)\n url = pytest.sel.get_attribute(locator, \"href\")\n data = requests.get(url, verify=False)\n pdf = PdfFileReader(StringIO(data.content))\n pdf_info = pdf.getDocumentInfo()\n pdf_title_low = pdf_info[\"/Title\"].lower()\n # don't include the word 'guide'\n title_text_low = precomp_noguide.search(pytest.sel.text(locator)).group(1).lower()\n\n cur_ver = version.current_version()\n expected = [title_text_low]\n if cur_ver == version.LATEST:\n expected.append('manageiq')\n else:\n expected.append('cloudforms')\n expected.append('{}.{}'.format(cur_ver.version[0], cur_ver.version[1]))\n\n for exp_str in expected:\n soft_assert(exp_str in pdf_title_low, \"{} not in {}\".format(exp_str, pdf_title_low))\n\n\[email protected](blockers=[1026939])\ndef test_info(guides, soft_assert):\n pytest.sel.force_navigate(\"about\")\n for link in guides:\n l_a = getattr(about, link)\n # l_icon also implicitly checks for the icon url == text url\n l_icon = lambda: pytest.sel.element(\n \"../a[contains(@href, '{}')]/img\".format(\n pytest.sel.get_attribute(l_a, \"href\").rsplit(\"/\", 1)[-1]\n ),\n root=l_a\n )\n l_icon_a = lambda: pytest.sel.element(\"..\", root=l_icon)\n soft_assert(\n pytest.sel.get_attribute(l_icon, \"alt\") == pytest.sel.get_attribute(l_icon_a, \"title\"),\n \"Icon alt attr should match icon title attr ({})\".format(pytest.sel.text(l_a))\n )\n soft_assert(\n pytest.sel.get_attribute(l_icon_a, \"href\") == pytest.sel.get_attribute(l_a, \"href\"),\n \"Icon url should match text url ({})\".format(pytest.sel.text(l_a))\n )\n\n\[email protected](blockers=[1026946])\ndef test_all_docs_present(guides, docs_info):\n pytest.sel.force_navigate(\"about\")\n docs_list = list(docs_info)\n for link in guides:\n for doc in docs_list:\n if doc.lower() in pytest.sel.text(getattr(about, link)).lower():\n break\n else:\n continue\n docs_list.remove(doc)\n assert len(docs_list) == 0, \"All documents should be available ({} are missing)\".format(\n \", \".join(docs_list)\n )\n"
},
{
"alpha_fraction": 0.5892981290817261,
"alphanum_fraction": 0.5920778512954712,
"avg_line_length": 33.261905670166016,
"blob_id": "7f0f4a5db22244163face40cb2c207d0a53245ca",
"content_id": "9ee9a317bf7fa43c6ad00065ce6338ce10845ac5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1439,
"license_type": "no_license",
"max_line_length": 89,
"num_lines": 42,
"path": "/cfme/web_ui/mixins.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "from cfme.fixtures import pytest_selenium as sel\nfrom cfme.web_ui import fill, Form, Select, Table, toolbar, form_buttons, flash\n\ntag_form = Form(\n fields=[\n ('category', Select('//select[@id=\"tag_cat\"]')),\n ('tag', Select('//select[@id=\"tag_add\"]'))\n ])\n\ntag_table = Table(\"//div[@id='assignments_div']//table\")\n\n\ndef add_tag(tag, single_value=False):\n toolbar.select('Policy', 'Edit Tags')\n if isinstance(tag, (list, tuple)):\n fill_d = {\n \"category\": tag[0] if not single_value else \"{} *\".format(tag[0]),\n \"tag\": tag[1]\n }\n else:\n fill_d = {\"tag\": tag.display_name}\n if tag.category.single_value:\n fill_d[\"category\"] = \"{} *\".format(tag.category.display_name)\n else:\n fill_d[\"category\"] = tag.category.display_name\n fill(tag_form, fill_d)\n form_buttons.save()\n flash.assert_success_message('Tag edits were successfully saved')\n\n\ndef remove_tag(tag):\n toolbar.select('Policy', 'Edit Tags')\n if isinstance(tag, (tuple, list)):\n category, tag_name = tag\n else:\n category = tag.category.display_name\n tag_name = tag.display_name\n row = tag_table.find_row_by_cells({'category': category, 'assigned_value': tag_name},\n partial_check=True)\n sel.click(row[0])\n form_buttons.save()\n flash.assert_success_message('Tag edits were successfully saved')\n"
},
{
"alpha_fraction": 0.6019629240036011,
"alphanum_fraction": 0.6052344441413879,
"avg_line_length": 40.681819915771484,
"blob_id": "8fec6038590c6cc15bc382efa4edf57ae9f87290",
"content_id": "b19c3b8bd9fe6ba68016f51617e9bc5fe5e2af3d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 917,
"license_type": "no_license",
"max_line_length": 99,
"num_lines": 22,
"path": "/cfme/configure/about.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\nfrom cfme.web_ui import Region\nfrom utils import version\n\nproduct_assistance = Region(\n locators={\n 'quick_start_guide': \"//a[normalize-space(.)='Quick Start Guide']\",\n 'insight_guide': \"//a[normalize-space(.)='Insight Guide']\",\n 'control_guide': \"//a[normalize-space(.)='Control Guide']\",\n 'lifecycle_and_automation_guide':\n \"//a[normalize-space(.)='Lifecycle and Automation Guide']\",\n 'integrate_guide': {\n version.LOWEST: \"//a[normalize-space(.)='Integrate Guide']\",\n '5.3': \"//a[normalize-space(.)='Integration Services Guide']\"\n },\n 'settings_and_operations_guide': \"//a[normalize-space(.)='Settings and Operations Guide']\",\n 'red_hat_customer_portal': \"//a[normalize-space(.)='Red Hat Customer Portal']\"\n },\n title='About',\n identifying_loc='quick_start_guide',\n infoblock_type=\"form\"\n)\n"
},
{
"alpha_fraction": 0.6935483813285828,
"alphanum_fraction": 0.7008797526359558,
"avg_line_length": 18.485713958740234,
"blob_id": "91a97ec8738ebc8017fce42263ff34e4e1d07740",
"content_id": "ebd93d350b7d707a35c14232c9d6647277dae71f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1364,
"license_type": "no_license",
"max_line_length": 99,
"num_lines": 70,
"path": "/fixtures/parallelizer/parallelizer_tester.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "\"\"\"parallelizer tester\n\nUseful to make sure tests are being parallelized properly, and then reported correctly.\n\nThis file is named specially to prevent being picked up by py.test's default collector, and should\nnot be run during a normal test run.\n\n\"\"\"\nimport random\nfrom time import sleep\n\nimport pytest\n\npytestmark = pytest.mark.usefixtures('param', 'wait')\n\n\ndef pytest_generate_tests(metafunc):\n # Starts at 10 for vane reason: Artifactor report does a naive sort, so 10 comes before 1\n ids = [i + 10 for i in xrange(20)]\n random.shuffle(ids)\n argvalues = [[v] for v in ids]\n metafunc.parametrize(['param'], argvalues, ids=ids, scope='module')\n\n\[email protected]\ndef wait():\n # Add some randomness to make sure reports are getting mixed up like they would in a \"real\" run\n sleep(random.random() * 5)\n\n\[email protected]\ndef setup_fail():\n raise Exception('I failed to setup!')\n\n\[email protected]_fixture\ndef teardown_fail():\n yield\n raise Exception('I failed to teardown!')\n\n\ndef test_passes():\n pass\n\n\ndef test_fails():\n raise Exception('I failed!')\n\n\[email protected]\ndef test_xfails():\n raise Exception('I failed!')\n\n\[email protected]\ndef test_xpasses():\n pass\n\n\ndef test_fails_setup(setup_fail):\n pass\n\n\ndef test_fails_teardown(teardown_fail):\n pass\n\n\[email protected]('True')\ndef test_skipped():\n pass\n"
},
{
"alpha_fraction": 0.6748595237731934,
"alphanum_fraction": 0.6774344444274902,
"avg_line_length": 32.375,
"blob_id": "5f6bae5e3556b9ae3741522bdf260f78b0fffcea",
"content_id": "d91287dd2e26f1ca4be6e780620e8e257966a7b3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4272,
"license_type": "no_license",
"max_line_length": 100,
"num_lines": 128,
"path": "/cfme/tests/services/test_operations.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n\"\"\"Tests checking for link access from outside.\"\"\"\nimport pytest\n\nimport cfme.provisioning\nfrom cfme.fixtures import pytest_selenium as sel\nfrom cfme.login import login_admin\nfrom cfme.provisioning import provisioning_form\nfrom cfme.services import requests\nfrom cfme.web_ui import flash\nfrom utils.browser import browser\nfrom utils.providers import setup_a_provider\nfrom utils.randomness import generate_random_string\nfrom utils.wait import wait_for\nfrom fixtures.pytest_store import store\n\n\npytestmark = [\n pytest.mark.meta(server_roles=\"-automate\"), # To prevent the provisioning itself.\n]\n\n\[email protected](scope=\"module\")\ndef provider():\n return setup_a_provider(\"infra\")\n\n\[email protected](scope=\"module\")\ndef provider_data(provider):\n return provider.get_yaml_data()\n\n\[email protected](scope=\"module\")\ndef provisioning(provider_data):\n return provider_data.get(\"provisioning\", {})\n\n\[email protected](scope=\"module\")\ndef template_name(provisioning):\n return provisioning.get(\"template\")\n\n\[email protected](scope=\"module\")\ndef vm_name():\n return generate_random_string(size=16)\n\n\[email protected]_fixture(scope=\"module\")\ndef generated_request(provider, provider_data, provisioning, template_name, vm_name):\n \"\"\"Creates a provision request, that is not automatically approved, and returns the search data.\n\n After finishing the test, request should be automatically deleted.\n\n Slightly modified code from :py:module:`cfme.tests.infrastructure.test_provisioning`\n \"\"\"\n first_name = generate_random_string()\n last_name = generate_random_string()\n notes = generate_random_string()\n e_mail = \"{}@{}.test\".format(first_name, last_name)\n host, datastore = map(provisioning.get, ('host', 'datastore'))\n pytest.sel.force_navigate('infrastructure_provision_vms', context={\n 'provider': provider,\n 'template_name': template_name,\n })\n\n provisioning_data = {\n 'email': e_mail,\n 'first_name': first_name,\n 'last_name': last_name,\n 'notes': notes,\n 'vm_name': vm_name,\n 'host_name': {'name': [host]},\n 'datastore_name': {'name': [datastore]},\n 'num_vms': \"10\", # so it won't get auto-approved\n }\n\n # Same thing, different names. 
:\\\n if provider_data[\"type\"] == 'rhevm':\n provisioning_data['provision_type'] = 'Native Clone'\n elif provider_data[\"type\"] == 'virtualcenter':\n provisioning_data['provision_type'] = 'VMware'\n\n try:\n provisioning_data['vlan'] = provisioning['vlan']\n except KeyError:\n # provisioning['vlan'] is required for rhevm provisioning\n if provider_data[\"type\"] == 'rhevm':\n raise pytest.fail('rhevm requires a vlan value in provisioning info')\n\n provisioning_form.fill(provisioning_data)\n pytest.sel.click(provisioning_form.submit_button)\n flash.assert_no_errors()\n request_cells = {\n \"Description\": \"Provision from [{}] to [{}###]\".format(template_name, vm_name),\n }\n yield request_cells\n\n browser().get(store.base_url)\n login_admin()\n\n requests.delete_request(request_cells)\n flash.assert_no_errors()\n\n\ndef test_services_request_direct_url(generated_request):\n \"\"\"Go to the request page, save the url and try to access it directly.\"\"\"\n assert requests.go_to_request(generated_request), \"could not find the request!\"\n request_url = sel.current_url()\n sel.get(sel.base_url()) # I need to flip it with something different here\n sel.get(request_url) # Ok, direct access now.\n wait_for(\n lambda: sel.is_displayed(\".brand\"),\n num_sec=20,\n message=\"wait for a CFME page appear\",\n delay=0.5\n )\n\n\ndef test_copy_request(request, generated_request, vm_name, template_name):\n \"\"\"Check if request gets properly copied.\"\"\"\n new_vm_name = generate_random_string(size=16)\n cfme.provisioning.copy_request_by_vm_and_template_name(\n vm_name, template_name, {\"vm_name\": new_vm_name}, multi=True)\n request.addfinalizer(lambda: requests.delete_request({\n \"Description\": \"Provision from [{}] to [{}###]\".format(template_name, new_vm_name),\n }))\n assert cfme.provisioning.go_to_request_by_vm_and_template_name(\n new_vm_name, template_name, multi=True)\n"
},
{
"alpha_fraction": 0.5581395626068115,
"alphanum_fraction": 0.5601915121078491,
"avg_line_length": 29.45833396911621,
"blob_id": "72be028fe5b6eb986951c9aa4dd6beb0d4c74537",
"content_id": "94369404c224b9f3ec7fab308b5d2825e0833f33",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1462,
"license_type": "no_license",
"max_line_length": 82,
"num_lines": 48,
"path": "/cfme/services/catalogs/service_catalogs.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\nfrom functools import partial\n\nimport cfme.fixtures.pytest_selenium as sel\nfrom cfme.web_ui import accordion, flash, menu, form_buttons\nfrom utils.update import Updateable\nfrom utils.pretty import Pretty\nfrom utils import version\n\norder_button = {\n version.LOWEST: \"//img[@title='Order this Service']\",\n '5.4': \"//button[@title='Order this Service']\"\n}\naccordion_tree = partial(accordion.tree, \"Service Catalogs\")\n\nmenu.nav.add_branch(\n 'services_catalogs',\n {\n 'service_catalogs':\n [\n lambda _: accordion.click('Service Catalogs'),\n {\n 'service_catalog':\n [\n lambda ctx: accordion_tree(\n 'All Services', ctx['catalog'], ctx['catalog_item'].name),\n {\n 'order_service_catalog': lambda _: sel.click(order_button)\n }\n ]\n }\n ]\n }\n)\n\n\nclass ServiceCatalogs(Updateable, Pretty):\n pretty_attrs = ['service_name']\n\n def __init__(self, service_name=None):\n self.service_name = service_name\n\n def order(self, catalog, catalog_item):\n sel.force_navigate('order_service_catalog',\n context={'catalog': catalog,\n 'catalog_item': catalog_item})\n sel.click(form_buttons.submit)\n flash.assert_success_message(\"Order Request was Submitted\")\n"
},
{
"alpha_fraction": 0.6224046349525452,
"alphanum_fraction": 0.633510410785675,
"avg_line_length": 27.369863510131836,
"blob_id": "695ea3fda23bfa597882981f5c9862d2e99232db",
"content_id": "66cf0f1bd1e5f50d33734629e3d7634166de217a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2071,
"license_type": "no_license",
"max_line_length": 87,
"num_lines": 73,
"path": "/cfme/tests/configure/test_timeprofile.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n\nimport cfme.configure.settings as st\nimport utils.error as error\nimport utils.randomness as random\nfrom utils.update import update\n\n\ndef new_timeprofile():\n return st.Timeprofile(description='time_profile' + random.generate_random_string(),\n scope='Current User',\n days=True,\n hours=True,\n timezone=\"(GMT-10:00) Hawaii\")\n\n\ndef test_timeprofile_crud():\n timeprofile = new_timeprofile()\n timeprofile.create()\n with update(timeprofile):\n timeprofile.scope = 'All Users'\n copied_timeprofile = timeprofile.copy()\n copied_timeprofile.delete()\n timeprofile.delete()\n\n\ndef test_timeprofile_duplicate_name():\n nt = new_timeprofile()\n nt.create()\n msg = \"Error during 'add': Validation failed: Description has already been taken\"\n with error.expected(msg):\n nt.create()\n nt. delete()\n\n\ndef test_timeprofile_name_max_character_validation():\n tp = st.Timeprofile(\n description=random.generate_random_string(size=50),\n scope='Current User',\n timezone=\"(GMT-10:00) Hawaii\")\n tp.create()\n tp.delete()\n\n\ndef test_days_required_error_validation():\n tp = st.Timeprofile(\n description='time_profile' + random.generate_random_string(),\n scope='Current User',\n timezone=\"(GMT-10:00) Hawaii\",\n days=False,\n hours=True)\n with error.expected(\"At least one Day must be selected\"):\n tp.create()\n\n\ndef test_hours_required_error_validation():\n tp = st.Timeprofile(\n description='time_profile' + random.generate_random_string(),\n scope='Current User',\n timezone=\"(GMT-10:00) Hawaii\",\n days=True,\n hours=False)\n with error.expected(\"At least one Hour must be selected\"):\n tp.create()\n\n\ndef test_description_required_error_validation():\n tp = st.Timeprofile(\n description=None,\n scope='Current User',\n timezone=\"(GMT-10:00) Hawaii\")\n with error.expected(\"Description is required\"):\n tp.create()\n"
},
{
"alpha_fraction": 0.636617124080658,
"alphanum_fraction": 0.6524163484573364,
"avg_line_length": 33.709678649902344,
"blob_id": "f174307b754bef2894eff3893e9c73507dcc0c35",
"content_id": "5c7e61b7cd622420442cd620cbcbeb5cffaaa19f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1076,
"license_type": "no_license",
"max_line_length": 95,
"num_lines": 31,
"path": "/cfme/tests/configure/test_session_timeout.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\nimport pytest\n\nimport time\n\nfrom cfme.configure.configuration import AuthSetting\nfrom utils.browser import ensure_browser_open, quit\nfrom utils.wait import wait_for\n\n\ndef test_session_timeout(request):\n \"\"\"Sets the timeout to shortest possible time and waits if it really times out.\"\"\"\n @request.addfinalizer # Wow, why we did not figure this out before?!\n def _finalize():\n quit()\n ensure_browser_open()\n AuthSetting.set_session_timeout(hours=\"24\", minutes=\"0\")\n\n AuthSetting.set_session_timeout(hours=\"0\", minutes=\"5\")\n # Wait 10 minutes\n time.sleep(10 * 60)\n # Try getting timeout\n # I had to use wait_for because on 5.4 and upstream builds it made weird errors\n wait_for(\n lambda: pytest.sel.elements(\n \"//div[(@id='flash_div' or @id='login_div') and contains(normalize-space(.), \"\n \"'Session was timed out due to inactivity')]\"),\n num_sec=60,\n delay=5,\n fail_func=lambda: pytest.sel.click(\"//a[normalize-space(text())='Cloud Intelligence']\")\n )\n"
},
{
"alpha_fraction": 0.6506137847900391,
"alphanum_fraction": 0.6553352475166321,
"avg_line_length": 30.611940383911133,
"blob_id": "f01f6ae2bc6e5f30db6b3b3055ef680e60d0833d",
"content_id": "6315d14b791148674bb4d07e110d65fa702833ed",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2118,
"license_type": "no_license",
"max_line_length": 99,
"num_lines": 67,
"path": "/cfme/tests/test_utilization.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "import pytest\nfrom utils import db\nfrom utils import providers\nfrom utils import testgen\nfrom utils import conf\nimport time\nfrom cfme.configure.configuration import candu\n\npytestmark = [\n pytest.mark.meta(\n server_roles=\"+ems_metrics_coordinator +ems_metrics_collector +ems_metrics_processor\")\n]\n\npytest_generate_tests = testgen.generate(testgen.provider_by_type, None)\n\n\[email protected]_fixture(scope=\"module\")\ndef enable_candu():\n try:\n candu.enable_all()\n yield\n finally:\n candu.disable_all()\n\n\n# blow away all providers when done - collecting metrics for all of them is\n# too much\[email protected]\ndef handle_provider(provider_key):\n providers.clear_providers()\n providers.setup_provider(provider_key)\n\n\ndef test_metrics_collection(handle_provider, provider_key, provider_crud, enable_candu):\n \"\"\"check the db is gathering collection data for the given provider\n\n Metadata:\n test_flag: metrics_collection\n \"\"\"\n metrics_tbl = db.cfmedb()['metrics']\n mgmt_systems_tbl = db.cfmedb()['ext_management_systems']\n\n # the id for the provider we're testing\n mgmt_system_id = db.cfmedb().session.query(mgmt_systems_tbl).filter(\n mgmt_systems_tbl.name == conf.cfme_data.get('management_systems', {})[provider_key]['name']\n ).first().id\n\n start_time = time.time()\n metric_count = 0\n timeout = 900.0 # 15 min\n while time.time() < start_time + timeout:\n last_metric_count = metric_count\n print \"name: %s, id: %s, metrics: %s\" % (provider_key,\n mgmt_system_id, metric_count)\n # count all the metrics for the provider we're testing\n metric_count = db.cfmedb().session.query(metrics_tbl).filter(\n metrics_tbl.parent_ems_id == mgmt_system_id\n ).count()\n\n # collection is working if increasing\n if metric_count > last_metric_count and last_metric_count > 0:\n return\n else:\n time.sleep(15)\n\n if time.time() > start_time + timeout:\n raise Exception(\"Timed out waiting for metrics to be collected\")\n"
},
{
"alpha_fraction": 0.6756756901741028,
"alphanum_fraction": 0.6885457038879395,
"avg_line_length": 30.079999923706055,
"blob_id": "8a7cbda8365b4077819d9361529180b6e1419710",
"content_id": "8ceb962febcd738e41ad8be8456f24c0bd337acd",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 777,
"license_type": "no_license",
"max_line_length": 85,
"num_lines": 25,
"path": "/cfme/tests/configure/test_tag.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "import pytest\n\nfrom cfme.configure.configuration import Category, Tag\nfrom utils.update import update\nfrom utils.randomness import generate_lowercase_random_string, generate_random_string\n\n\[email protected]_fixture\ndef category():\n cg = Category(name=generate_lowercase_random_string(size=8),\n description=generate_random_string(size=32),\n display_name=generate_random_string(size=32))\n cg.create()\n yield cg\n cg.delete()\n\n\ndef test_tag_crud(category):\n tag = Tag(name=generate_lowercase_random_string(size=8),\n display_name=generate_random_string(size=32),\n category=category)\n tag.create()\n with update(tag):\n tag.display_name = generate_random_string(size=32)\n tag.delete(cancel=False)\n"
},
{
"alpha_fraction": 0.629763126373291,
"alphanum_fraction": 0.6395468711853027,
"avg_line_length": 25.243244171142578,
"blob_id": "ba1c24746eb07a04765054d16b6758d2b110abee",
"content_id": "4d27673d4b64b34b45be82d595f7de3be3bb7ff1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1942,
"license_type": "no_license",
"max_line_length": 76,
"num_lines": 74,
"path": "/cfme/tests/automate/__init__.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "from utils.randomness import generate_random_string\nfrom cfme.automate.explorer import Namespace, Class, Instance, Domain\nfrom utils import version\n\n\[email protected]\ndef a_domain():\n return None\n\n\n@a_domain.method('5.3')\ndef a_domain_53():\n return Domain(name=generate_random_string(8),\n description=generate_random_string(32),\n enabled=True)\n\n\ndef make_domain(request=None):\n d = a_domain()\n if d:\n d.create()\n if request is not None:\n request.addfinalizer(d.delete)\n return d\n\n\ndef a_namespace(domain=None, request=None):\n if not domain:\n domain = make_domain(request=request)\n return Namespace(name=generate_random_string(8),\n description=generate_random_string(32),\n parent=domain)\n\n\ndef a_namespace_with_path(domain=None, request=None):\n name = generate_random_string(8)\n if not domain:\n domain = make_domain(request=request)\n\n n = Namespace.make_path('Factory', 'StateMachines', name, domain=domain)\n n.description = generate_random_string(32)\n return n\n\n\ndef make_namespace(request=None):\n ns = a_namespace(request=request)\n ns.create()\n if request is not None:\n request.addfinalizer(ns.delete)\n return ns\n\n\ndef a_class(ns=None, request=None):\n if not ns:\n ns = make_namespace(request=request)\n return Class(name=generate_random_string(8),\n description=generate_random_string(32),\n namespace=ns)\n\n\ndef make_class(ns=None, request=None):\n cls = a_class(ns, request=request)\n cls.create()\n if request is not None:\n request.addfinalizer(cls.delete)\n return cls\n\n\ndef an_instance(cls=None, request=None):\n if not cls:\n cls = make_class(request=request)\n return Instance(name=generate_random_string(8),\n description=generate_random_string(32),\n cls=cls)\n"
},
{
"alpha_fraction": 0.6308503746986389,
"alphanum_fraction": 0.6321687698364258,
"avg_line_length": 35.55421829223633,
"blob_id": "7835b4503d1d57d4ea8cbd1d959f79319581e58b",
"content_id": "0c22ed7edc16d48ccb6ed24d7f35bf4159c961c9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3034,
"license_type": "no_license",
"max_line_length": 98,
"num_lines": 83,
"path": "/cfme/tests/intelligence/reports/test_widgets.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n\"\"\"\"\"\"\nimport pytest\n\nfrom cfme.fixtures import pytest_selenium as sel\nfrom cfme.dashboard import Widget\nfrom cfme.intelligence.reports.widgets import MenuWidget, ReportWidget, RSSFeedWidget, ChartWidget\nfrom cfme.intelligence.reports.dashboards import DefaultDashboard\nfrom cfme.web_ui import toolbar\nfrom utils.randomness import generate_random_string\nfrom utils.update import update\n\n\[email protected](scope=\"module\")\ndef default_widgets():\n sel.force_navigate(\"reports_default_dashboard_edit\")\n return DefaultDashboard.form.widgets.selected_items\n\n\[email protected](scope=\"module\")\ndef dashboard(default_widgets):\n return DefaultDashboard(widgets=default_widgets)\n\n\[email protected](scope=\"function\")\ndef custom_widgets(request):\n ws = [\n MenuWidget(\n generate_random_string(),\n description=generate_random_string(),\n active=True,\n shortcuts={\n \"Services / Catalogs\": generate_random_string(),\n \"Clouds / Providers\": generate_random_string(),\n },\n visibility=\"<To All Users>\"),\n ReportWidget(\n generate_random_string(),\n description=generate_random_string(),\n active=True,\n filter=[\"Events\", \"Operations\", \"Operations VMs Powered On/Off for Last Week\"],\n columns=[\"VM Name\", \"Message\"],\n rows=\"10\",\n timer={\"run\": \"Hourly\", \"hours\": \"Hour\"},\n visibility=\"<To All Users>\"),\n ChartWidget(\n generate_random_string(),\n description=generate_random_string(),\n active=True,\n filter=\"Configuration Management/Virtual Machines/Vendor and Guest OS\",\n timer={\"run\": \"Hourly\", \"hours\": \"Hour\"},\n visibility=\"<To All Users>\"),\n RSSFeedWidget(\n generate_random_string(),\n description=generate_random_string(),\n active=True,\n type=\"Internal\",\n feed=\"Administrative Events\",\n rows=\"8\",\n visibility=\"<To All Users>\"),\n ]\n map(lambda w: w.create(), ws) # create all widgets\n request.addfinalizer(lambda: map(lambda w: w.delete(), ws)) # Delete them after test\n return ws\n\n\ndef test_widgets_on_dashboard(request, dashboard, default_widgets, custom_widgets, soft_assert):\n with update(dashboard):\n dashboard.widgets = map(lambda w: w.title, custom_widgets)\n\n def _finalize():\n with update(dashboard):\n dashboard.widgets = default_widgets\n request.addfinalizer(_finalize)\n sel.force_navigate(\"dashboard\")\n toolbar.select(\"Reset Dashboard Widgets to the defaults\", invokes_alert=True)\n sel.handle_alert(False)\n soft_assert(len(Widget.all()) == len(custom_widgets), \"Count of the widgets differ\")\n for custom_w in custom_widgets:\n try:\n Widget.by_name(custom_w.title)\n except NameError:\n soft_assert(False, \"Widget {} not found on dashboard\".format(custom_w.title))\n"
},
{
"alpha_fraction": 0.6211734414100647,
"alphanum_fraction": 0.6231778264045715,
"avg_line_length": 30.722543716430664,
"blob_id": "d7ae5fdde0a28cce8c8a3bbccb587fe25d0010ef",
"content_id": "7e1a478b0979361eb07512d747b3cbe9673d3209",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5488,
"license_type": "no_license",
"max_line_length": 100,
"num_lines": 173,
"path": "/cfme/infrastructure/repositories.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "\"\"\"Infrastructure / Repositories\"\"\"\n\nfrom functools import partial\n\nimport cfme.fixtures.pytest_selenium as sel\nimport cfme.web_ui.flash as flash\nfrom cfme.web_ui import menu\nimport cfme.web_ui.toolbar as tb\n\n\nfrom cfme.web_ui import Region, Form, Input, SplitCheckboxTable, fill, form_buttons\nfrom cfme.web_ui.form_buttons import FormButton\nfrom cfme.web_ui.paginator import pages\nfrom utils.update import Updateable\nfrom utils.pretty import Pretty\nfrom utils.version import LOWEST\n\nrepo_list = SplitCheckboxTable(\n (\"//div[@id='list_grid']/div[1]//tbody\", 1),\n (\"//div[@id='list_grid']/div[2]//tbody\", 1),\n header_checkbox_locator=\"#masterToggle\"\n)\n\ndetails_page = Region(infoblock_type='detail')\n\nform = Form(\n fields=[\n ('name', Input('repo_name')),\n ('path', Input('repo_path')),\n ]\n)\n\nadd_btn = {\n LOWEST: FormButton('Add this Repository'),\n # wonky upstream locator\n '5.4': '//button[.=\"Add\"]'\n}\nsave_btn = {\n LOWEST: form_buttons.save,\n '5.4': '//button[.=\"Save\"]'\n}\ncfg_btn = partial(tb.select, 'Configuration')\npol_btn = partial(tb.select, 'Policy')\n\n\ndef _repo_row(name):\n for page in pages():\n row = repo_list.find_row('Name', name)\n if row:\n return row\n else:\n raise Exception('row not found for repo {}'.format(name))\n\n\ndef _repo_nav_fn(context):\n repo = context['repository']\n sel.click(_repo_row(repo.name)[1])\n sel.wait_for_element(repo._detail_page_identifying_loc)\n\n\ndef _check_repo(name, callback=None):\n sel.check(sel.element('.//img', root=_repo_row(name)[0]))\n if callback:\n return callback()\n\n\nmenu.nav.add_branch(\n 'infrastructure_repositories', {\n 'infrastructure_repository_new': lambda _: cfg_btn('Add a new Repository'),\n 'infrastructure_repository_edit': lambda ctx: _check_repo(ctx['repository'].name,\n lambda: cfg_btn('Edit the Selected Repository')),\n 'infrastructure_repository': [\n _repo_nav_fn, {\n 'infrastructure_repository_policy_assignment': lambda _: pol_btn('Manage Policies'),\n 'infrastructure_repository_policy_tags': lambda _: pol_btn('Edit Tags'),\n }\n ]\n }\n)\n\n\nclass Repository(Updateable, Pretty):\n \"\"\"\n Model of an infrastructure repository in cfme.\n\n Args:\n name: Name of the repository host\n path: UNC path to the repository share\n\n Usage:\n\n myrepo = Repository(name='vmware', path='//hostname/path/to/share')\n myrepo.create()\n\n \"\"\"\n pretty_attrs = ['name', 'path']\n\n def __init__(self, name=None, path=None):\n self.name = name\n self.path = path\n self._detail_page_identifying_loc = \"//h1[contains(., '{}')]\".format(self.name)\n\n def _submit(self, cancel, submit_button):\n if cancel:\n sel.click(form_buttons.cancel)\n # sel.wait_for_element(page.configuration_btn)\n else:\n sel.click(submit_button)\n flash.assert_no_errors()\n\n def create(self, cancel=False, validate_credentials=False):\n \"\"\"\n Creates a repository in the UI\n\n Args:\n cancel (boolean): Whether to cancel out of the creation. The cancel is done\n after all the information present in the Host has been filled in the UI.\n validate_credentials (boolean): Whether to validate credentials - if True and the\n credentials are invalid, an error will be raised.\n \"\"\"\n sel.force_navigate('infrastructure_repository_new')\n fill(form, vars(self))\n self._submit(cancel, add_btn)\n\n def update(self, updates, cancel=False, validate_credentials=False):\n \"\"\"\n Updates a repository in the UI. 
Better to use utils.update.update context\n manager than call this directly.\n\n Args:\n updates (dict): fields that are changing.\n cancel (boolean): whether to cancel out of the update.\n \"\"\"\n sel.force_navigate('infrastructure_repository_edit', context={'repository': self})\n fill(form, updates)\n self._submit(cancel, save_btn)\n\n def delete(self, cancel=False):\n \"\"\"\n Deletes a repository from CFME\n\n Args:\n cancel: Whether to cancel the deletion, defaults to False\n \"\"\"\n if self.exists:\n sel.force_navigate('infrastructure_repository', context={'repository': self})\n cfg_btn('Remove from the VMDB', invokes_alert=True)\n sel.handle_alert(cancel=cancel)\n\n def get_detail(self, *ident):\n \"\"\" Gets details from the details infoblock\n\n The function first ensures that we are on the detail page for the specific repository.\n\n Args:\n *ident: An InfoBlock title, followed by the Key name, e.g. \"Relationships\", \"Images\"\n Returns: A string representing the contents of the InfoBlock's value.\n \"\"\"\n if not self._on_detail_page():\n sel.force_navigate('infrastructure_repository', context={'repository': self})\n return details_page.infoblock.text(*ident)\n\n def _on_detail_page(self):\n \"\"\" Returns ``True`` if on the repository detail page, ``False`` if not.\"\"\"\n return self.is_displayed(self._detail_page_identifying_loc)\n\n @property\n def exists(self):\n sel.force_navigate('infrastructure_repositories')\n try:\n return bool(_repo_row(self.name))\n except: # exception?\n return False\n"
},
{
"alpha_fraction": 0.699999988079071,
"alphanum_fraction": 0.7137255072593689,
"avg_line_length": 38.230770111083984,
"blob_id": "2b982ee00f37df613997764d8e3fd9ecf5eedaeb",
"content_id": "62d68d959d17872f18f18eb90503bd9583a79330",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 510,
"license_type": "no_license",
"max_line_length": 85,
"num_lines": 13,
"path": "/cfme/tests/configure/test_tag_category.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "from cfme.configure.configuration import Category\nfrom utils.update import update\nfrom utils.randomness import generate_lowercase_random_string, generate_random_string\n\n\ndef test_category_crud():\n cg = Category(name=generate_lowercase_random_string(size=8),\n description=generate_random_string(size=32),\n display_name=generate_random_string(size=32))\n cg.create()\n with update(cg):\n cg.description = generate_random_string(size=32)\n cg.delete(cancel=False)\n"
},
{
"alpha_fraction": 0.7194127440452576,
"alphanum_fraction": 0.7194127440452576,
"avg_line_length": 24.54166603088379,
"blob_id": "7ae9a4a6c0e2bc2ed1a16fd4a04caf5310933ffd",
"content_id": "35f65cb66b6bda7e0f3429013acd11164a56b1ee",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 613,
"license_type": "no_license",
"max_line_length": 63,
"num_lines": 24,
"path": "/cfme/tests/infrastructure/test_pxe.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "import pytest\n\nfrom cfme.infrastructure import pxe\nfrom utils.update import update\nfrom utils.testgen import generate, pxe_servers\n\n\npytest_generate_tests = generate(pxe_servers)\n\n\[email protected](scope='function')\ndef has_no_pxe_servers():\n pxe.remove_all_pxe_servers()\n\n\[email protected]('has_no_pxe_servers')\ndef test_pxe_server_crud(pxe_name, pxe_server_crud):\n \"\"\"\n Basic Add test for PXE server including refresh.\n \"\"\"\n pxe_server_crud.create()\n with update(pxe_server_crud):\n pxe_server_crud.name = pxe_server_crud.name + \"_update\"\n pxe_server_crud.delete(cancel=False)\n"
},
{
"alpha_fraction": 0.6681684851646423,
"alphanum_fraction": 0.6749624609947205,
"avg_line_length": 39.76676559448242,
"blob_id": "0789ad8ed929bfc20d9d0339b488a555fbe770ce",
"content_id": "bc5dd032fe41cf5ec936f41f1acdc55c77f84ccd",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 13983,
"license_type": "no_license",
"max_line_length": 99,
"num_lines": 343,
"path": "/cfme/tests/cloud/test_instance_power_control.py",
"repo_name": "petrblaho/cfme_tests",
"src_encoding": "UTF-8",
"text": "import cfme.web_ui.flash as flash\nimport pytest\nfrom cfme.cloud.instance import instance_factory, get_all_instances, EC2Instance, OpenStackInstance\nfrom cfme.fixtures import pytest_selenium as sel\nfrom utils import error, testgen\nfrom utils.randomness import generate_random_string\nfrom utils.wait import wait_for, TimedOutError\n\npytestmark = [pytest.mark.usefixtures('test_power_control')]\n\n\ndef pytest_generate_tests(metafunc):\n final_argv, final_argn, final_ids = [], [], []\n\n if 'ec2_only' in metafunc.fixturenames:\n prov_types = ['ec2']\n elif 'openstack_only' in metafunc.fixturenames:\n prov_types = ['openstack']\n else:\n prov_types = ['ec2', 'openstack']\n\n # Get all providers and pick those, that have power control test enabled\n argnames, argvalues, idlist = testgen.provider_by_type(\n metafunc, prov_types, 'test_power_control')\n if not idlist:\n return\n for argn, argv, single_id in zip(argnames, argvalues, idlist):\n test_pwr_ctl_i = argnames.index('test_power_control')\n provider_key_i = argnames.index('provider_key')\n final_argn = argnames\n if argv[test_pwr_ctl_i] is True:\n final_argv.append(argv)\n final_ids.append(argv[provider_key_i])\n\n # Then append '{provider_type}_only' fixture, if necessary\n if len(prov_types) == 1:\n final_argn.append('{}_only'.format(prov_types[0]))\n for argval in final_argv:\n argval.append('')\n\n testgen.parametrize(metafunc, final_argn, final_argv, ids=final_ids, scope=\"function\")\n\n\n# This fixture must be named 'vm_name' because its tied to fixtures/virtual_machine\[email protected](scope=\"module\")\ndef vm_name():\n return \"test_instance_pwrctl_{}\".format(generate_random_string())\n\n\[email protected](scope=\"function\")\ndef test_instance(request, delete_instances_fin, setup_provider,\n provider_crud, provider_mgmt, vm_name):\n \"\"\" Fixture to provision instance on the provider\n \"\"\"\n instance = instance_factory(vm_name, provider_crud)\n if not provider_mgmt.does_vm_exist(vm_name):\n delete_instances_fin[provider_crud.key] = instance\n instance.create_on_provider()\n elif isinstance(instance, EC2Instance) and \\\n provider_mgmt.is_vm_state(vm_name, provider_mgmt.states['deleted']):\n provider_mgmt.set_name(vm_name, 'test_terminated_{}'.format(generate_random_string()))\n delete_instances_fin[provider_crud.key] = instance\n instance.create_on_provider()\n return instance\n\n\[email protected](scope=\"module\")\ndef delete_instances_fin(request):\n \"\"\" Fixture to add a finalizer to delete provisioned instances at the end of tests\n\n This is a \"trashbin\" fixture - it returns a mutable that you put stuff into.\n \"\"\"\n provisioned_instances = {}\n\n def delete_instances(instances_dict):\n for instance in instances_dict.itervalues():\n instance.delete_from_provider()\n request.addfinalizer(lambda: delete_instances(provisioned_instances))\n return provisioned_instances\n\n\ndef wait_for_state_change_time_refresh(instance, state_change_time, timeout=300):\n \"\"\" Waits for 'State Changed On' refresh\n \"\"\"\n def _wait_for_state_refresh():\n instance.load_details()\n return state_change_time != instance.get_detail(\n properties=(\"Power Management\", \"State Changed On\"))\n\n try:\n wait_for(_wait_for_state_refresh, num_sec=timeout, delay=30)\n except TimedOutError:\n return False\n\n\ndef check_power_options(soft_assert, instance, power_state):\n \"\"\" Checks if power options match given power state ('on', 'off')\n \"\"\"\n must_be_available = {\n EC2Instance: {\n 'on': [EC2Instance.STOP, 
EC2Instance.SOFT_REBOOT, EC2Instance.TERMINATE],\n            'off': [EC2Instance.START, EC2Instance.TERMINATE]\n        },\n        OpenStackInstance: {\n            'on': [\n                OpenStackInstance.SUSPEND,\n                OpenStackInstance.SOFT_REBOOT,\n                OpenStackInstance.HARD_REBOOT,\n                OpenStackInstance.TERMINATE\n            ],\n            'off': [OpenStackInstance.START, OpenStackInstance.TERMINATE]\n        }\n    }\n    mustnt_be_available = {\n        EC2Instance: {\n            'on': [EC2Instance.START],\n            'off': [EC2Instance.STOP, EC2Instance.SOFT_REBOOT]\n        },\n        OpenStackInstance: {\n            'on': [OpenStackInstance.START],\n            'off': [\n                OpenStackInstance.SUSPEND,\n                OpenStackInstance.SOFT_REBOOT,\n                OpenStackInstance.HARD_REBOOT\n            ]\n        }\n    }\n\n    for pwr_option in must_be_available[instance.__class__][power_state]:\n        soft_assert(\n            instance.is_pwr_option_available_in_cfme(option=pwr_option, from_details=True),\n            \"{} must be available in current power state\".format(pwr_option))\n    for pwr_option in mustnt_be_available[instance.__class__][power_state]:\n        soft_assert(\n            not instance.is_pwr_option_available_in_cfme(option=pwr_option, from_details=True),\n            \"{} must not be available in current power state\".format(pwr_option))\n\n\n@pytest.mark.long_running\ndef test_quadicon_terminate_cancel(setup_provider_funcscope, provider_type, provider_mgmt,\n                                   test_instance, verify_vm_running, soft_assert):\n    \"\"\" Tests terminate cancel\n\n    Metadata:\n        test_flag: power_control, provision\n    \"\"\"\n    test_instance.wait_for_vm_state_change(\n        desired_state=test_instance.STATE_ON, timeout=720)\n    test_instance.power_control_from_cfme(option=test_instance.TERMINATE, cancel=True)\n    with error.expected('instance still exists'):\n        # try to find VM, if found, try again - times out with expected message\n        wait_for(\n            lambda: provider_mgmt.does_vm_exist(test_instance.name),\n            fail_condition=True,\n            num_sec=60,\n            delay=15,\n            message=\"instance still exists\")\n    soft_assert(test_instance.find_quadicon().state == 'currentstate-on')\n\n\n@pytest.mark.long_running\ndef test_quadicon_terminate(setup_provider_funcscope, provider_type, provider_mgmt,\n                            test_instance, verify_vm_running, soft_assert):\n    \"\"\" Tests terminate instance\n\n    Metadata:\n        test_flag: power_control, provision\n    \"\"\"\n    test_instance.wait_for_vm_state_change(\n        desired_state=test_instance.STATE_ON, timeout=720)\n    test_instance.power_control_from_cfme(option=test_instance.TERMINATE, cancel=False)\n    wait_for(test_instance.does_vm_exist_in_cfme, fail_condition=True, num_sec=300, delay=30,\n             fail_func=test_instance.provider_crud.refresh_provider_relationships,\n             message=\"instance still exists in cfme UI\")\n    if provider_type == 'openstack':\n        soft_assert(not provider_mgmt.does_vm_exist(test_instance.name), \"instance still exists\")\n    else:\n        soft_assert(\n            provider_mgmt.is_vm_state(test_instance.name, provider_mgmt.states['deleted']),\n            \"instance still exists\")\n    sel.force_navigate(\"clouds_instances_archived_branch\")\n    soft_assert(\n        test_instance.name in get_all_instances(do_not_navigate=True),\n        \"instance is not among archived instances\")\n\n\n@pytest.mark.long_running\ndef test_stop(ec2_only, setup_provider_funcscope, provider_type, provider_mgmt,\n              test_instance, soft_assert, verify_vm_running):\n    \"\"\" Tests instance stop\n\n    Metadata:\n        test_flag: power_control, provision\n    \"\"\"\n    test_instance.wait_for_vm_state_change(\n        desired_state=test_instance.STATE_ON, timeout=720, from_details=True)\n    check_power_options(soft_assert, test_instance, 'on')\n    test_instance.power_control_from_cfme(\n        option=test_instance.STOP, cancel=False, 
from_details=True)\n    flash.assert_message_contain(\"Stop initiated\")\n    test_instance.wait_for_vm_state_change(\n        desired_state=test_instance.STATE_OFF, timeout=720, from_details=True)\n    wait_for(\n        lambda: provider_mgmt.is_vm_stopped(test_instance.name),\n        num_sec=180,\n        delay=20,\n        message=\"mgmt system check - instance stopped\")\n\n\n@pytest.mark.long_running\ndef test_start(ec2_only, setup_provider_funcscope, provider_type, provider_mgmt,\n               test_instance, soft_assert, verify_vm_stopped):\n    \"\"\" Tests instance start\n\n    Metadata:\n        test_flag: power_control, provision\n    \"\"\"\n    test_instance.wait_for_vm_state_change(\n        desired_state=test_instance.STATE_OFF, timeout=720, from_details=True)\n    check_power_options(soft_assert, test_instance, 'off')\n    test_instance.power_control_from_cfme(\n        option=test_instance.START, cancel=False, from_details=True)\n    flash.assert_message_contain(\"Start initiated\")\n    test_instance.wait_for_vm_state_change(\n        desired_state=test_instance.STATE_ON, timeout=720, from_details=True)\n    soft_assert(\n        provider_mgmt.is_vm_running(test_instance.name),\n        \"instance is not running\")\n\n\n@pytest.mark.long_running\ndef test_soft_reboot(setup_provider_funcscope, provider_type, provider_mgmt,\n                     test_instance, soft_assert, verify_vm_running):\n    \"\"\" Tests instance soft reboot\n\n    Metadata:\n        test_flag: power_control, provision\n    \"\"\"\n    test_instance.wait_for_vm_state_change(\n        desired_state=test_instance.STATE_ON, timeout=720, from_details=True)\n    state_change_time = test_instance.get_detail(('Power Management', 'State Changed On'))\n    test_instance.power_control_from_cfme(\n        option=test_instance.SOFT_REBOOT, cancel=False, from_details=True)\n    flash.assert_message_contain(\"Restart initiated\")\n    wait_for_state_change_time_refresh(test_instance, state_change_time, timeout=720)\n    test_instance.wait_for_vm_state_change(\n        desired_state=test_instance.STATE_ON, from_details=True)\n    soft_assert(\n        provider_mgmt.is_vm_running(test_instance.name),\n        \"instance is not running\")\n\n\n@pytest.mark.long_running\ndef test_hard_reboot(openstack_only, setup_provider_funcscope, provider_type,\n                     provider_mgmt, test_instance, soft_assert, verify_vm_running):\n    \"\"\" Tests instance hard reboot\n\n    Metadata:\n        test_flag: power_control, provision\n    \"\"\"\n    test_instance.wait_for_vm_state_change(\n        desired_state=test_instance.STATE_ON, timeout=720, from_details=True)\n    state_change_time = test_instance.get_detail(('Power Management', 'State Changed On'))\n    test_instance.power_control_from_cfme(\n        option=test_instance.HARD_REBOOT, cancel=False, from_details=True)\n    flash.assert_message_contain(\"Reset initiated\")\n    wait_for_state_change_time_refresh(test_instance, state_change_time, timeout=720)\n    test_instance.wait_for_vm_state_change(\n        desired_state=test_instance.STATE_ON, from_details=True)\n    soft_assert(\n        provider_mgmt.is_vm_running(test_instance.name),\n        \"instance is not running\")\n\n\n@pytest.mark.long_running\ndef test_suspend(openstack_only, setup_provider_funcscope, provider_type, provider_mgmt,\n                 test_instance, soft_assert, verify_vm_running):\n    \"\"\" Tests instance suspend\n\n    Metadata:\n        test_flag: power_control, provision\n    \"\"\"\n    test_instance.wait_for_vm_state_change(\n        desired_state=test_instance.STATE_ON, timeout=720, from_details=True)\n    check_power_options(soft_assert, test_instance, 'on')\n    test_instance.power_control_from_cfme(\n        option=test_instance.SUSPEND, cancel=False, from_details=True)\n    flash.assert_message_contain(\"Suspend initiated\")\n    
test_instance.wait_for_vm_state_change(\n desired_state=test_instance.STATE_OFF, timeout=720, from_details=True)\n soft_assert(\n provider_mgmt.is_vm_suspended(test_instance.name),\n \"instance is still running\")\n\n\[email protected]_running\[email protected](blockers=[1183757])\ndef test_resume(openstack_only, setup_provider_funcscope,\n provider_type, provider_mgmt, test_instance, soft_assert, verify_vm_suspended):\n \"\"\" Tests instance resume\n\n Metadata:\n test_flag: power_control, provision\n \"\"\"\n test_instance.wait_for_vm_state_change(\n desired_state=test_instance.STATE_SUSPENDED, timeout=720, from_details=True)\n check_power_options(soft_assert, test_instance, 'off')\n test_instance.power_control_from_cfme(\n option=test_instance.START, cancel=False, from_details=True)\n flash.assert_message_contain(\"Start initiated\")\n test_instance.wait_for_vm_state_change(\n desired_state=test_instance.STATE_ON, timeout=720, from_details=True)\n soft_assert(\n provider_mgmt.is_vm_running(test_instance.name),\n \"instance is not running\")\n\n\[email protected]_running\ndef test_terminate(setup_provider_funcscope,\n provider_type, provider_mgmt, test_instance, soft_assert, verify_vm_running):\n \"\"\" Tests instance terminate\n\n Metadata:\n test_flag: power_control, provision\n \"\"\"\n test_instance.wait_for_vm_state_change(\n desired_state=test_instance.STATE_ON, timeout=720, from_details=True)\n test_instance.power_control_from_cfme(\n option=test_instance.TERMINATE, cancel=False, from_details=True)\n flash.assert_message_contain(\"Terminate initiated\")\n wait_for(test_instance.does_vm_exist_in_cfme, fail_condition=True, num_sec=300, delay=30,\n fail_func=test_instance.provider_crud.refresh_provider_relationships,\n message=\"VM no longer exists in cfme UI\")\n if provider_type == 'openstack':\n soft_assert(not provider_mgmt.does_vm_exist(test_instance.name), \"instance still exists\")\n else:\n soft_assert(\n provider_mgmt.is_vm_state(test_instance.name, provider_mgmt.states['deleted']),\n \"instance still exists\")\n sel.force_navigate(\"clouds_instances_archived_branch\")\n soft_assert(\n test_instance.name in get_all_instances(do_not_navigate=True),\n \"instance is not among archived instances\")\n"
}
] | 55 |
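The power-control tests in the row above repeatedly call a `wait_for` polling helper with `fail_condition`, `num_sec`, `delay`, and an optional `fail_func` to run between polls. Below is a minimal sketch of that polling pattern in plain Python; the helper name and signature mirror the calls in the tests, but the body and the `TimedOutError` type are illustrative assumptions, not the test framework's actual `utils.wait` implementation.

import time

class TimedOutError(Exception):
    """Raised when the predicate never leaves the fail condition in time."""

def wait_for(predicate, fail_condition=False, num_sec=60, delay=5,
             fail_func=None, message="condition"):
    # Poll predicate() every `delay` seconds until it returns something
    # other than `fail_condition`, or raise once `num_sec` has elapsed.
    deadline = time.time() + num_sec
    while time.time() < deadline:
        if predicate() != fail_condition:
            return
        if fail_func is not None:
            fail_func()  # e.g. refresh provider relationships between polls
        time.sleep(delay)
    raise TimedOutError("timed out waiting for: {}".format(message))

# Usage in the style of the tests above (names assumed):
# wait_for(lambda: provider_mgmt.does_vm_exist(name), fail_condition=True,
#          num_sec=60, delay=15, message="instance still exists")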
CharlesBidaut/eopinion_TAL | https://github.com/CharlesBidaut/eopinion_TAL | 879f4a435323f9030af0f29c96ea1b04e039bc3c | cd5b3a9aa6216061f40a9651f80ece7e4431c7fa | 41c775d20574d794b539f07cbc3885f0068d9f91 | refs/heads/master | 2018-01-13T19:21:11.933070 | 2017-05-06T20:10:40 | 2017-05-06T20:10:40 | 86,586,176 | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.799225926399231,
"alphanum_fraction": 0.8030962944030762,
"avg_line_length": 56.41666793823242,
"blob_id": "bbf407b859f85e27a4b820c7446e4e9c24826884",
"content_id": "e8907c80918808b34da23af8723c47e3ff86c756",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 2107,
"license_type": "no_license",
"max_line_length": 300,
"num_lines": 36,
"path": "/README.md",
"repo_name": "CharlesBidaut/eopinion_TAL",
"src_encoding": "UTF-8",
"text": "-- Récupération d'un fichier de commentaire sur le site AlloCine :\n\nUn fichier a déjà été généré sur le site AlloCiné et est situé dans le dossier ficText/\n\nDans le cas où vous souhaitez récupérer des commentaires d'un autre film, il suffit alors d'aller changer l'URL dans le fichier recupComAlloCine.py ainsi que de modifier le nombre dans la boucle qui se doit de correspondre au nombre de page de commentaires présent pour ledit film qui vous intéresse.\nIl faudra pour cela avoir importer la bibliothèque python-bs4\n\nEnsuite il vous suffit d'éxcuter recupComAlloCine.py avec la commande : \n\t# python recupComAlloCine.py\n\nCe script permet donc de créer un fichier avec le bon nom dans le dossier courant, il faudra terminer en plaçant ce fichier dans le dossier ficText et en supprimer l'ancien fichier contenant l'ensemble des commentaires.\n\n-- Execution du programme : \n\nPour des raisons d'encodage de caractères spécifique dans les tableaux et leur passage par argument, nous avons utilisé python3.\nAfin d'exécuter notre programme, il convient d'importer les bibliothèques \"re\" et \"argparse\" que nous avons utilisés.\n\nEnsuite dans le terminal, tapez la commande :\n\t# python3 EOpinion.py ficText/\n\t\nAu préalable, on se doit de trouver dans le dossier ficText, un fichier contenant l'ensemble des commentaires sous le nom :\n\tcommentaireAlloCineTOUS.txt\n\n-- Description du programme :\n\nLe programme vous propose de gérer les dictionnaires de mots positifs et négatifs en vous les affichant. Le programme permet également l'ajout de nouveaux mots (sous forme de regex).\n\nLe programme utilise 4 fonctions d'analyse de commentaires :\n\t1. Par dictionnaire de mots\n\t2. Par analyse des superlatifs\n\t3. Par analyse de négations\n\t4. Par analyse des émoticones\n\nChaque méthode implémente la première et lui ajoute une nouvelle fonctionnalitée.\n\nLe programme va par la suite générer un fichier par commentaire avec toute l'analyse du commentaire. Il suffit donc pour finir de consulter le commentaire choisi dans le dossier ficText pour connaître le résultat de l'analyse.\n"
},
{
"alpha_fraction": 0.6251060366630554,
"alphanum_fraction": 0.6368806958198547,
"avg_line_length": 36.81132125854492,
"blob_id": "d626b0fdea0f61435e644c443eaa503e9eb8bd02",
"content_id": "e92f78e86b96a0cc3da81f242ca22c4ba6befd2b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 20156,
"license_type": "no_license",
"max_line_length": 514,
"num_lines": 530,
"path": "/EOpinion.py",
"repo_name": "CharlesBidaut/eopinion_TAL",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nimport argparse\nimport re\n\n# Lit un fichier et ignore les paragraphes blanc\ndef lectureFichier(nomFichier):\n\tparagraphes = []\n\twith open(nomFichier, \"r\") as ptrFichier:\n\t\tfor ligne in ptrFichier.readlines():\n\t\t\tif ligne.strip()==\"\": continue # ignole les paragraphe blancs\n\t\t\tparagraphes.append(ligne.strip())\n\n\treturn paragraphes\n\n# Ecrit dans un fichier \ndef ecritureFichierCommentaire(paragraphes, nomFichier):\n\twith open(nomFichier, \"w\") as pointeurFichier:\n\t\tfor paragraphe in paragraphes:\n\t\t\tpointeurFichier.write(paragraphe+\"\\n\")\n\n#Fonction qui récupère la note du bloc\n#Renvoie la note et le commentaire sans la note\ndef recupNote(commentaire):\n\tnote = commentaire.split('::')\n\tcommentaire = re.sub(note[0]+'::', '', commentaire)\n\treturn note[0], commentaire\n\n# Découpe le paragraphe en ligne\ndef segment_into_sents(paragraph):\n\temotNegatifs = lectureFichier(dossierDico+\"/emotNegatifs.txt\")\n\temotPositifs = lectureFichier(dossierDico+\"/emotPositifs.txt\")\t\t\n\n\t# Gestion des émoticones \n\tfor emotPos in emotPositifs :\n\t\tparagraph = re.sub(emotPos, \"emoticonePos\", paragraph)\n\tfor emotNeg in emotNegatifs :\n\t\tparagraph = re.sub(emotNeg, \"emoticoneNeg\", paragraph)\n\n\tcannot_precede = [\"M\", \"Prof\", \"Sgt\", \"Lt\", \"Ltd\", \"co\", \"etc\", \"[A-Z]\", \"[Ii].e\", \"[eE].g\"] # non-exhaustive list\t\n\tregex_cannot_precede = \"(?:(?<!\"+\")(?<!\".join(cannot_precede)+\"))\"\n \n\tif \"\\n\" in paragraph: exit(\"Error in paragraph: paragraph contains \\n.\") \n\tnewline_separated = re.sub(regex_cannot_precede+\"([\\.\\!\\?]+([\\'\\’\\\"\\)]*( |$)| [\\'\\’\\\"\\) ]*))\", r\"\\1\\n\", paragraph)\n\tsents = newline_separated.strip().split(\"\\n\")\n\tfor s, sent in enumerate(sents):\n\t\tsents[s] = sent.strip()\n\treturn sents\n\n# Normalise quelque caractère spéciaux\n# Remplace les abréviations par leurs mot \ndef normaliser(envoi):\n\tenvoi = re.sub('\\'\\'', '\"', envoi) # deux simple quotes = double quotes\n\tenvoi = re.sub(\"[`‘’]+\", r\"'\", envoi) # normalise les apostrophes et simple quote\n\tenvoi = re.sub(\"[≪≫“â€]\", '\"', envoi) # normalise les double quotes\n\n\tremplacements = [(\"keske\", \"qu' est -ce que\"), (\"estke\", \"est -ce que\"), (\"bcp\", \"beaucoup\"),(\"qqln\",\"quelqu\\'un\"),(\"qqn\",\"quelqu\\'un\"),(\"qqch\",\"quelque chose\"),(\"tjs\",\"toujours\"),(\"tkt\",\"t'inquiete\"),(\"tqt\",\"t'inquiete\"),(\"expr\",\"expression\"),(\"irrég\",\"irrégulier\"),(\"comm\",\"commentaire\"),(\"auj\",\"aujourd'hui\"),(\"aujd\",\"aujourd'hui\"),(\"sf\",\"science fiction\"),(\"svp\",\"s'il vous plait\"),(\"apr\",\"après\"),(\"équiv\",\"équivalent\"),(\"admin\",\"administrateur\"),(\"d'hab\",\"d'habitude\"),(\"bon ap\",\"bon appétit\")] # à rajouter\n\tfor (original, remplacement) in remplacements:\n\t\tenvoi = re.sub(\"(^| )\"+original+\"( |$)\", r\"\\1\"+remplacement+r\"\\2\", envoi)\n\t\n\treturn envoi\n\n# Permet de découper en token un paragraphe\ndef tokeniser(envoi):\n\tenvoi = re.sub(\"([^ ])\\'\", r\"\\1 '\", envoi) # separate apostrophe from preceding word by a space if no space to left\n\tenvoi = re.sub(\" \\'\", r\" ' \", envoi) # separate apostrophe from following word if a space if left\n\tcannot_precede = [\"M\", \"Prof\", \"Sgt\", \"Lt\", \"Ltd\", \"co\", \"etc\", \"[A-Z]\", \"[Ii].e\", \"[eE].g\"] # non-exhaustive list\n\tregex_cannot_precede = \"(?:(?<!\"+\")(?<!\".join(cannot_precede)+\"))\"\n\tenvoi = 
re.sub(regex_cannot_precede+\"([\\.\\,\\;\\:\\)\\(\\\"\\?\\!]( |$))\", r\" \\1\", envoi)\n\tenvoi = re.sub(\"((^| )[\\.\\?\\!]) ([\\.\\?\\!]( |$))\", r\"\\1\\2\", envoi) # then restick several fullstops ... or several ?? or !!\n\tenvoi = envoi.split() # split on whitespace\n\n\treturn envoi\n\n# Methode d'analyse par dictionnaire de mot \ndef methodeDicoMot(listeCommentaire):\n\tnbCom = 0\n\t# Récupère les mots positifs et négatids\n\tmotsNegatifs = lectureFichier(dossierDico+\"/motsNegatifs.txt\")\n\tmotsPositifs = lectureFichier(dossierDico+\"/motsPositifs.txt\")\n\n\tfor p, commentaire in enumerate(listeCommentaire):\n\t\tnbCom = nbCom + 1\n\t\t#Rècupère la note du commentaire et sont commentaire sans la note\n\t\tnoteInitial, commentaireInitial = recupNote(commentaire)\n\t\tcommentaire = commentaireInitial\n\t\n\t\tnote, pos, neg, mots = 0, 0, 0, 0\n\t\tphrase = segment_into_sents(commentaire)\n\t\tfor ligne in phrase:\n\t\t\tligne = normaliser(ligne)\n\t\t\tligne = tokeniser(ligne)\n\t\t\t\n\t\t\tfor mot in ligne :\n\t\t\t\tmots = mots + 1\n\t\t\t\tfor positif in motsPositifs :\n\t\t\t\t\tif re.match(positif, mot) is not None : \n\t\t\t\t\t\tpos = pos + 1\n\t\t\t\tfor negatif in motsNegatifs :\n\t\t\t\t\tif re.match(negatif, mot) is not None : \n\t\t\t\t\t\tneg = neg + 1\n\t\t\t\t\t\t\n\n\t\t# Calcul la note en fonction des mots positifs et negatif\n\t\t# Divise par nombre de mots / 13 car on considere qu'il y a un mot par phrase (phrase = 13 mots)\n\t\tnote = 2.5 + ((pos - neg)/(mots/13)) * 2.5\n\t\tif note > 5 : note = 5\n\t\tif note < 0\t: note = 0 \n\t\n\t\tnomFichier = dossier + \"/commentaireAlloCine_Nb\" + str(nbCom) + \".txt\"\n\t\twith open(nomFichier, \"w\") as ptrFichier:\n\t\t\tptrFichier.write(\"La note initiale du commentaire était : \" + str(noteInitial) +\"\\n\\n\")\n\t\t\tptrFichier.write(\"Le commentaire était : \\n\")\n\t\t\tptrFichier.write(commentaireInitial+\"\\n\")\n\t\t\n\t\t\tptrFichier.write(\"\\nMETHODE --> Dictionnaire de mots.\\n\")\n\t\t\tptrFichier.write(\"\\n----- STATISTIQUE -----\\n\")\n\t\t\tptrFichier.write(\"Le commentaire contient \" + str(mots) + \" mots. 
Parmi ces mots : \\n\")\n\t\t\tptrFichier.write(\"\\t\" + str(pos) + \" sont POSITIFS.\\n\")\n\t\t\tptrFichier.write(\"\\t\" + str(neg) + \" sont NEGATIFS.\\n\")\n\t\t\tptrFichier.write(\"\\nLa note de ce commentaire est définie à \" + str(note) + \" par la fonction.\\n\")\n\n\t\t\tptrFichier.write(\"\\n----- CONCLUSION -----\\n\")\n\t\t\tif float(noteInitial.replace(',','.')) + 1 < note and float(noteInitial.replace(',','.')) - 1 > note :\n\t\t\t\tptrFichier.write(\"La fonction trouve une note dans la logique de la note initiale.\\n\")\n\t\t\telse :\n\t\t\t\tptrFichier.write(\"La fonction trouve une note différente du ressenti de la note initiale.\\n\")\n\t\t\t\n\t\t\tif note > 3 :\n\t\t\t\tptrFichier.write(\"Le message est POSITIF selon la méthode.\\n\")\n\t\t\telif note < 2 :\n\t\t\t\tptrFichier.write(\"Le message est NEGATIF selon la méthode.\\n\")\n\t\t\telse : \n\t\t\t\tptrFichier.write(\"Le message est NEUTRE selon la méthode.\\n\")\n\n# Fonction qui analyse si devant un mot il y a un superlatif comme par exemple vraiment\ndef methodeAnalyseComplexe(listeCommentaire):\n\tnbCom = 0\n\tsuperlatif = lectureFichier(dossierDico+\"/motsCombi.txt\")\n\tmotsNegatifs = lectureFichier(dossierDico+\"/motsNegatifs.txt\")\n\tmotsPositifs = lectureFichier(dossierDico+\"/motsPositifs.txt\")\n\n\tfor p, commentaire in enumerate(listeCommentaire):\n\t\tnbCom = nbCom + 1\n\t\t#Rècupère la note du commentaire et sont commentaire sans la note\n\t\tnoteInitial, commentaireInitial = recupNote(commentaire)\n\t\tcommentaire = commentaireInitial\n\t\n\t\tnote, pos, neg, nbPos, nbNeg, nbSpl, mots, coef = 0, 0, 0, 0, 0, 0, 0, 1\n\t\tphrase = segment_into_sents(commentaire)\n\t\tfor ligne in phrase:\n\t\t\tligne = normaliser(ligne)\n\t\t\tligne = tokeniser(ligne)\n\t\t\t#Gestion des superlatifs\n\t\t\tfor mot in ligne :\n\t\t\t\tmots = mots + 1\n\t\t\t\tfor sprlt in superlatif :\n\t\t\t\t\tsprlt = sprlt.split(\"::\")\n\t\t\t\t\tif re.match(sprlt[0],mot) is not None :\n\t\t\t\t\t\tcoef = coef*int(sprlt[1])\n\t\t\t\t\n\t\t\t\tfor positif in motsPositifs :\n\t\t\t\t\tif re.match(positif, mot) is not None :\n\t\t\t\t\t\tif coef != 1 :\n\t\t\t\t\t\t\tnbSpl = nbSpl + 1\n\t\t\t\t\t\t\tpos = coef\n\t\t\t\t\t\t\tcoef = 1\n\t\t\t\t\t\t\tnbPos = nbPos + 1\n\t\t\t\t\t\telse :\n\t\t\t\t\t\t\tpos = pos + 1\n\t\t\t\t\t\t\tnbPos = nbPos + 1\n\t\t\t\tfor negatif in motsNegatifs :\n\t\t\t\t\tif re.match(negatif, mot) is not None :\n\t\t\t\t\t\tif coef != 1 : \n\t\t\t\t\t\t\tnbSpl = nbSpl + 1\n\t\t\t\t\t\t\tneg = coef\n\t\t\t\t\t\t\tcoef = 1\n\t\t\t\t\t\t\tnbNeg = nbNeg + 1\n\t\t\t\t\t\telse : \n\t\t\t\t\t\t\tneg = neg + 1\t\n\t\t\t\t\t\t\tnbNeg = nbNeg + 1\n\t\t\t\t\t\t\t\n\t\t\n\t\t# Calcul la note en fonction des mots positifs et negatif\n\t\t# Divise par nombre de mots / 13 car on considere qu'il y a un mot par phrase (phrase = 13 mots)\n\t\tnote = 2.5 + (pos- neg)/(mots/13) * 2.5\n\t\tif note > 5 : note = 5\n\t\tif note < 0\t: note = 0 \n\t\n\t\tnomFichier = dossier + \"/commentaireAlloCine_Nb\" + str(nbCom) + \".txt\"\n\t\twith open(nomFichier, \"w\") as ptrFichier:\n\t\t\tptrFichier.write(\"La note initiale du commentaire était : \" + str(noteInitial) +\"\\n\\n\")\n\t\t\tptrFichier.write(\"Le commentaire était : \\n\")\n\t\t\tptrFichier.write(commentaireInitial+\"\\n\")\n\t\t\n\t\t\tptrFichier.write(\"\\nMETHODE --> AnalyseComplexe superlatif.\\n\")\n\t\t\tptrFichier.write(\"\\n----- STATISTIQUE -----\\n\")\n\t\t\tptrFichier.write(\"Le commentaire contient \" + str(mots) + \" mots. 
Parmi ces mots : \\n\")\n\t\t\tptrFichier.write(\"\\t\" + str(nbPos) + \" sont POSITIFS.\\n\")\n\t\t\tptrFichier.write(\"\\t\" + str(nbNeg) + \" sont NEGATIFS.\\n\")\n\t\t\tptrFichier.write(\"\\t\" + str(nbSpl) + \" sont des superlatifs.\\n\")\n\t\t\tptrFichier.write(\"\\nLa note de ce commentaire est définie à \" + str(note) + \" par la fonction.\\n\")\n\n\t\t\tptrFichier.write(\"\\n----- CONCLUSION -----\\n\")\n\t\t\tif float(noteInitial.replace(',','.')) + 1 < note and float(noteInitial.replace(',','.')) - 1 > note :\n\t\t\t\tptrFichier.write(\"La fonction trouve une note dans la logique de la note initiale.\\n\")\n\t\t\telse :\n\t\t\t\tptrFichier.write(\"La fonction trouve une note différente du ressenti de la note initiale.\\n\")\n\t\t\t\n\t\t\tif note > 3 :\n\t\t\t\tptrFichier.write(\"Le message est POSITIF selon la méthode.\\n\")\n\t\t\telif note < 2 :\n\t\t\t\tptrFichier.write(\"Le message est NEGATIF selon la méthode.\\n\")\n\t\t\telse : \n\t\t\t\tptrFichier.write(\"Le message est NEUTRE selon la méthode.\\n\")\n\n# Fonction qui analyse les négations \ndef methodeAnalyseNegation(listeCommentaire):\n\tnbCom = 0\n\tsuperlatif = lectureFichier(dossierDico+\"/motsCombi.txt\")\n\tmotsNegatifs = lectureFichier(dossierDico+\"/motsNegatifs.txt\")\n\tmotsPositifs = lectureFichier(dossierDico+\"/motsPositifs.txt\")\n\n\tfor p, commentaire in enumerate(listeCommentaire):\n\t\tnbCom = nbCom + 1\n\t\t#Rècupère la note du commentaire et sont commentaire sans la note\n\t\tnoteInitial, commentaireInitial = recupNote(commentaire)\n\t\tcommentaire = commentaireInitial\n\t\n\t\tnote, pos, neg, nbNegation, coefNeg, nbPos, nbNeg, nbSpl, mots, coef = 0, 0, 0, 0, 0, 0, 0, 0, 0, 1\n\t\tphrase = segment_into_sents(commentaire)\n\t\tfor ligne in phrase:\n\t\t\tligne = normaliser(ligne)\n\t\t\tligne = tokeniser(ligne)\n\t\t\tfor mot in ligne :\n\t\t\t\tmots = mots + 1\n\t\t\t\t#Gestion des négations\n\t\t\t\tif re.match('^pas$', mot) is not None :\n\t\t\t\t\tcoefNeg = -1\n\t\t\t\t\n\t\t\t\tfor sprlt in superlatif :\n\t\t\t\t\tsprlt = sprlt.split(\"::\")\n\t\t\t\t\tif re.match(sprlt[0],mot) is not None :\n\t\t\t\t\t\tcoef = coef*int(sprlt[1])\n\t\t\t\t\n\t\t\t\tfor positif in motsPositifs :\n\t\t\t\t\tif re.match(positif, mot) is not None :\n\t\t\t\t\t\tif coef != 1 and coefNeg != 0 :\n\t\t\t\t\t\t\tnbNegation = nbNegation + 1\n\t\t\t\t\t\t\tnbSpl = nbSpl + 1\n\t\t\t\t\t\t\tpos = pos + (coefNeg * coef)\n\t\t\t\t\t\t\tcoef = 1\n\t\t\t\t\t\t\tcoefNeg = 0\n\t\t\t\t\t\t\tnbNeg = nbNeg + 1\n\t\t\t\t\t\t\tcontinue\n\t\t\t\t\t\telif coef != 1 :\n\t\t\t\t\t\t\tnbSpl = nbSpl + 1\n\t\t\t\t\t\t\tpos = pos + coef\n\t\t\t\t\t\t\tcoef = 1\n\t\t\t\t\t\t\tnbPos = nbPos + 1\n\t\t\t\t\t\telif coefNeg != 0 :\n\t\t\t\t\t\t\tnbNegation = nbNegation + 1\n\t\t\t\t\t\t\tneg = neg +coef\n\t\t\t\t\t\t\tcoefNeg = 1\n\t\t\t\t\t\t\tnbNeg = nbNeg + 1\n\t\t\t\t\t\telse :\n\t\t\t\t\t\t\tpos = pos + 1\n\t\t\t\t\t\t\tnbPos = nbPos + 1\n\t\t\t\tfor negatif in motsNegatifs :\n\t\t\t\t\tif re.match(negatif, mot) is not None :\n\t\t\t\t\t\tif coef != 1 and coefNeg != 0 :\n\t\t\t\t\t\t\tnbNegation = nbNegation + 1\n\t\t\t\t\t\t\tnbSpl = nbSpl + 1\n\t\t\t\t\t\t\tneg = neg + (coefNeg * coef)\n\t\t\t\t\t\t\tcoef = 1\n\t\t\t\t\t\t\tcoefNeg = 0\n\t\t\t\t\t\t\tnbPos = nbPos + 1\n\t\t\t\t\t\t\tcontinue\n\t\t\t\t\t\telif coef != 1 :\n\t\t\t\t\t\t\tnbSpl = nbSpl + 1\n\t\t\t\t\t\t\tneg = neg + coef\n\t\t\t\t\t\t\tcoef = 1\n\t\t\t\t\t\t\tnbNeg = nbNeg + 1\n\t\t\t\t\t\telif coefNeg != 0 :\n\t\t\t\t\t\t\tnbNegation = nbNegation + 1\n\t\t\t\t\t\t\tpos = 
coef\n\t\t\t\t\t\t\tcoefNeg = 1\n\t\t\t\t\t\t\tnbPos = nbPos + 1\n\t\t\t\t\t\telse :\n\t\t\t\t\t\t\tneg = neg + 1\n\t\t\t\t\t\t\tnbNeg = nbNeg + 1\n\t\t\n\t\t# Calcul la note en fonction des mots positifs et negatif\n\t\tnote = 2.5 + (pos- neg)/(mots/13) * 2.5\n\t\tif note > 5 : note = 5\n\t\tif note < 0\t: note = 0 \n\t\n\t\tnomFichier = dossier + \"/commentaireAlloCine_Nb\" + str(nbCom) + \".txt\"\n\t\twith open(nomFichier, \"w\") as ptrFichier:\n\t\t\tptrFichier.write(\"La note initiale du commentaire était : \" + str(noteInitial) +\"\\n\\n\")\n\t\t\tptrFichier.write(\"Le commentaire était : \\n\")\n\t\t\tptrFichier.write(commentaireInitial+\"\\n\")\n\t\t\n\t\t\tptrFichier.write(\"\\nMETHODE --> AnalyseComplexe négation.\\n\")\n\t\t\tptrFichier.write(\"\\n----- STATISTIQUE -----\\n\")\n\t\t\tptrFichier.write(\"Le commentaire contient \" + str(mots) + \" mots. Parmi ces mots : \\n\")\n\t\t\tptrFichier.write(\"\\t\" + str(nbPos) + \" sont POSITIFS.\\n\")\n\t\t\tptrFichier.write(\"\\t\" + str(nbNeg) + \" sont NEGATIFS.\\n\")\n\t\t\tptrFichier.write(\"\\t\" + str(nbSpl) + \" sont des superlatifs.\\n\")\n\t\t\tptrFichier.write(\"\\nLe commentaire contient \" + str(nbNegation) + \" négations.\\n\")\n\t\t\tptrFichier.write(\"\\nLa note de ce commentaire est définie à \" + str(note) + \" par la fonction.\\n\")\n\t\t\n\t\t\tptrFichier.write(\"\\n----- CONCLUSION -----\\n\")\n\t\t\tif float(noteInitial.replace(',','.')) + 1 < note and float(noteInitial.replace(',','.')) - 1 > note :\n\t\t\t\tptrFichier.write(\"La fonction trouve une note dans la logique de la note initiale.\\n\")\n\t\t\telse :\n\t\t\t\tptrFichier.write(\"La fonction trouve une note différente du ressenti de la note initiale.\\n\")\n\t\t\t\n\t\t\tif note > 3 :\n\t\t\t\tptrFichier.write(\"Le message est POSITIF selon la méthode.\\n\")\n\t\t\telif note < 2 :\n\t\t\t\tptrFichier.write(\"Le message est NEGATIF selon la méthode.\\n\")\n\t\t\telse : \n\t\t\t\tptrFichier.write(\"Le message est NEUTRE selon la méthode.\\n\")\n\n# Fonction qui analyse les négations \ndef methodeAnalyseEmot(listeCommentaire):\n\tnbCom = 0\n\tsuperlatif = lectureFichier(dossierDico+\"/motsCombi.txt\")\n\tmotsNegatifs = lectureFichier(dossierDico+\"/motsNegatifs.txt\")\n\tmotsPositifs = lectureFichier(dossierDico+\"/motsPositifs.txt\")\n\n\tfor p, commentaire in enumerate(listeCommentaire):\n\t\tnbCom = nbCom + 1\n\t\t#Rècupère la note du commentaire et sont commentaire sans la note\n\t\tnoteInitial, commentaireInitial = recupNote(commentaire)\n\t\tcommentaire = commentaireInitial\t\n\n\t\tnote, pos, neg, nbEmotPos, nbEmotNeg, nbNegation, coefNeg, nbPos, nbNeg, nbSpl, mots, coef = 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1\n\t\tphrase = segment_into_sents(commentaire)\n\t\tfor ligne in phrase:\n\t\t\tligne = normaliser(ligne)\n\t\t\tligne = tokeniser(ligne)\n\n\t\t\t#Gestion des émoticones\n\t\t\tfor mot in ligne :\n\t\t\t\t#Gestion des émoticones\n\t\t\t\tif re.match('emoticonePos', mot) is not None :\n\t\t\t\t\tnbEmotPos = nbEmotPos + 1\t\n\t\t\t\t\tcontinue\n\t\t\t\tif re.match('emoticoneNeg', mot) is not None :\n\t\t\t\t\tnbEmotNeg = nbEmotNeg + 1\n\t\t\t\t\tcontinue\n\n\t\t\t\tmots = mots + 1\n\t\t\t\tif re.match('^pas$', mot) is not None :\n\t\t\t\t\tcoefNeg = -1\n\t\t\t\t\n\t\t\t\tfor sprlt in superlatif :\n\t\t\t\t\tsprlt = sprlt.split(\"::\")\n\t\t\t\t\tif re.match(sprlt[0],mot) is not None :\n\t\t\t\t\t\tcoef = coef*int(sprlt[1])\n\t\t\t\t\n\t\t\t\tfor positif in motsPositifs :\n\t\t\t\t\tif re.match(positif, mot) is not None :\n\t\t\t\t\t\tif coef != 1 and 
coefNeg != 0 :\n\t\t\t\t\t\t\tnbNegation = nbNegation + 1\n\t\t\t\t\t\t\tnbSpl = nbSpl + 1\n\t\t\t\t\t\t\tpos = pos + (coefNeg * coef)\n\t\t\t\t\t\t\tcoef = 1\n\t\t\t\t\t\t\tcoefNeg = 0\n\t\t\t\t\t\t\tnbNeg = nbNeg + 1\n\t\t\t\t\t\t\tcontinue\n\t\t\t\t\t\telif coef != 1 :\n\t\t\t\t\t\t\tnbSpl = nbSpl + 1\n\t\t\t\t\t\t\tpos = pos + coef\n\t\t\t\t\t\t\tcoef = 1\n\t\t\t\t\t\t\tnbPos = nbPos + 1\n\t\t\t\t\t\telif coefNeg != 0 :\n\t\t\t\t\t\t\tnbNegation = nbNegation + 1\n\t\t\t\t\t\t\tneg = neg +coef\n\t\t\t\t\t\t\tcoefNeg = 1\n\t\t\t\t\t\t\tnbNeg = nbNeg + 1\n\t\t\t\t\t\telse :\n\t\t\t\t\t\t\tpos = pos + 1\n\t\t\t\t\t\t\tnbPos = nbPos + 1\n\t\t\t\tfor negatif in motsNegatifs :\n\t\t\t\t\tif re.match(negatif, mot) is not None :\n\t\t\t\t\t\tif coef != 1 and coefNeg != 0 :\n\t\t\t\t\t\t\tnbNegation = nbNegation + 1\n\t\t\t\t\t\t\tnbSpl = nbSpl + 1\n\t\t\t\t\t\t\tneg = neg + (coefNeg * coef)\n\t\t\t\t\t\t\tcoef = 1\n\t\t\t\t\t\t\tcoefNeg = 0\n\t\t\t\t\t\t\tnbPos = nbPos + 1\n\t\t\t\t\t\t\tcontinue\n\t\t\t\t\t\telif coef != 1 :\n\t\t\t\t\t\t\tnbSpl = nbSpl + 1\n\t\t\t\t\t\t\tneg = neg + coef\n\t\t\t\t\t\t\tcoef = 1\n\t\t\t\t\t\t\tnbNeg = nbNeg + 1\n\t\t\t\t\t\telif coefNeg != 0 :\n\t\t\t\t\t\t\tnbNegation = nbNegation + 1\n\t\t\t\t\t\t\tpos = coef\n\t\t\t\t\t\t\tcoefNeg = 1\n\t\t\t\t\t\t\tnbPos = nbPos + 1\n\t\t\t\t\t\telse :\n\t\t\t\t\t\t\tneg = neg + 1\n\t\t\t\t\t\t\tnbNeg = nbNeg + 1\n\n\t\t\n\t\t# Calcul la note en fonction des mots positifs et negatif\n\t\tnote = 2.5 + ((pos + nbEmotPos) - (neg + nbEmotNeg))/(mots/13) * 2.5\n\t\tif note > 5 : note = 5\n\t\tif note < 0\t: note = 0 \n\t\n\t\tnomFichier = dossier + \"/commentaireAlloCine_Nb\" + str(nbCom) + \".txt\"\n\t\twith open(nomFichier, \"w\") as ptrFichier:\n\t\t\tptrFichier.write(\"La note initiale du commentaire était : \" + str(noteInitial) +\"\\n\\n\")\n\t\t\tptrFichier.write(\"Le commentaire était : \\n\")\n\t\t\tptrFichier.write(commentaireInitial+\"\\n\")\n\t\t\n\t\t\tptrFichier.write(\"\\nMETHODE --> AnalyseComplexe émoticone.\\n\")\n\t\t\tptrFichier.write(\"\\n----- STATISTIQUE -----\\n\")\n\t\t\tptrFichier.write(\"Le commentaire contient \" + str(mots) + \" mots. 
Parmi ces mots : \\n\")\n\t\t\tptrFichier.write(\"\\t\" + str(nbPos) + \" sont POSITIFS.\\n\")\n\t\t\tptrFichier.write(\"\\t\" + str(nbNeg) + \" sont NEGATIFS.\\n\")\n\t\t\tptrFichier.write(\"\\t\" + str(nbSpl) + \" sont des superlatifs.\\n\")\n\t\t\tptrFichier.write(\"\\nLe commentaire contient \" + str(nbNegation) + \" négations.\\n\")\n\n\t\t\tptrFichier.write(\"\\nLe commentaire contient \" + str(nbEmotPos + nbEmotNeg) + \" émoticones : \\n\")\n\t\t\tptrFichier.write(\"\\t\" + str(nbEmotPos) + \" sont POSITIFS.\\n\")\n\t\t\tptrFichier.write(\"\\t\" + str(nbEmotNeg) + \" sont NEGATIFS.\\n\")\n\t\t\tptrFichier.write(\"\\nLa note de ce commentaire est défini à \" + str(note) + \" par la fonction.\\n\")\n\n\t\t\tptrFichier.write(\"\\n----- CONCLUSION -----\\n\")\n\t\t\tif float(noteInitial.replace(',','.')) + 1 < note and float(noteInitial.replace(',','.')) - 1 > note :\n\t\t\t\tptrFichier.write(\"La fonction trouve une note dans la logique de la note initiale.\\n\")\n\t\t\telse :\n\t\t\t\tptrFichier.write(\"La fonction trouve une note différente du ressenti de la note initiale.\\n\")\n\t\t\t\n\t\t\tif note > 3 :\n\t\t\t\tptrFichier.write(\"Le message est POSITIF selon la méthode.\\n\")\n\t\t\telif note < 2 :\n\t\t\t\tptrFichier.write(\"Le message est NEGATIF selon la méthode.\\n\")\n\t\t\telse : \n\t\t\t\tptrFichier.write(\"Le message est NEUTRE selon la méthode.\\n\")\n\n\n\n# --- MAIN --- \nif __name__ ==\"__main__\":\n\tprint('------ Bienvenue dans le logiciel EOpinion ------\\n')\n\tprint('Ce logiciel a pour but définir si le commentaire d\\'un film est positif ou négatif\\n')\n\n\t#Parse les arguments donnés en paramètres\n\targumentParser = argparse.ArgumentParser()\n\targumentParser.add_argument('textfilefolder')\n\targuments = argumentParser.parse_args()\n\n\t#Localisation des fichiers\n\tdossier = arguments.textfilefolder\n\tdossierDico = \"dico\"\n\tfichierCommentaire = dossier+\"/commentaireAlloCineTOUS.txt\"\n\tfichierCommentaireCopy = dossier+\"/commentaireAlloCineCopy.txt\"\n\n\t#Lit et réecrit le fichier de commentaire en paragraphe\n\tlisteCommentaire = lectureFichier(fichierCommentaire)\n\t#ecritureFichierCommentaire(listeCommentaire, fichierCommentaireCopy)\n\n\n\taction = (-1)\n\twhile action != '0' : \n\t\tprint('\\nQue voulez vous faire : ')\n\t\tprint('\\t - Modifier le dictionnaire de mot : 1')\n\t\tprint('\\t - Analyser par dictionnaire de mot : 2')\n\t\tprint('\\t - Analyser par facteur de mot : 3')\n\t\tprint('\\t - Analyser par négation : 4')\n\t\tprint('\\t - Analyser par émoticones : 5')\n\t\tprint('\\t - Sortir du logiciel : 0')\n\t\n\t\taction = input(\"\\t # Selection = \")\n\n\t\tif action == '1' :\n\t\t\tprint('\\t\\t - Gerer le dictionnaire de mot negatif : 1')\n\t\t\tprint('\\t\\t - Gerer -- ------------ -- --- positif : 2')\n\t\t\tprint('\\t\\t - Quitter gestion dictionnaire : 0')\n\t\t\tactionGestionDico = input(\"\\t\\t # Selection = \")\n\t\t\t\t\n\t\t\tif actionGestionDico == '1' :\n\t\t\t\tmotsNegatifs = lectureFichier(dossierDico+\"/motsNegatifs.txt\")\n\t\t\t\tfor mots in motsNegatifs :\n\t\t\t\t\tprint(mots)\n\t\t\t\tprint('\\n\\t - Ajouter un mot NEGATIF dans le dico : 1')\n\t\t\t\tprint('\\t - Quitter gestion dictionnaire : 0')\n\t\t\t\t\n\t\t\t\tactionNegatif = input(\"\\t # Selection = \")\n\n\t\t\t\tif actionNegatif == '1' :\n\t\t\t\t\twith open(dossierDico+\"/motsNegatifs.txt\", \"a\") as ptrFichier:\n\t\t\t\t\t\tajoutMot = input (\"\\n\\t # Quels est le mots à ajouter ? 
:\")\n\t\t\t\t\t\tptrFichier.write(ajoutMot)\n\t\t\t\t\t\n\n\t\t\tif actionGestionDico == '2' :\n\t\t\t\tmotsPositifs = lectureFichier(dossierDico+\"/motsPositifs.txt\")\n\t\t\t\tfor mots in motsPositifs :\n\t\t\t\t\tprint (mots)\n\t\t\t\tprint('\\t - Ajouter un mot POSITIF dans le dico : 1')\n\t\t\t\tprint('\\t - Quitter gestion dictionnaire : 0')\n\t\t\t\t\n\t\t\t\tactionPositif = input(\"\\t # Selection = \")\n\t\t\t\t\n\t\t\t\tif actionPositif == '1' :\n\t\t\t\t\twith open(dossierDico+\"/motsPositifs.txt\", \"a\") as ptrFichier:\n\t\t\t\t\t\tajoutMot = input (\"\\t # Quels est le mots à ajouter ? :\")\n\t\t\t\t\t\tptrFichier.write(ajoutMot)\n\n\t\tif action == '2' :\n\t\t\tmethodeDicoMot(listeCommentaire)\n\n\t\tif action == '3' :\n\t\t\tmethodeAnalyseComplexe(listeCommentaire)\n\t\t\n\t\tif action == '4' :\n\t\t\tmethodeAnalyseNegation(listeCommentaire)\n\n\t\tif action == '5' : \n\t\t\tmethodeAnalyseEmot(listeCommentaire)\n\t\t\n"
},
{
"alpha_fraction": 0.6765100955963135,
"alphanum_fraction": 0.7006711363792419,
"avg_line_length": 36.25,
"blob_id": "04b491048a39ffaceb9712e4102146d1782cff46",
"content_id": "99c2f27cc6f98e741dde6550669b4f71f142d648",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 745,
"license_type": "no_license",
"max_line_length": 106,
"num_lines": 20,
"path": "/recupComAlloCine.py",
"repo_name": "CharlesBidaut/eopinion_TAL",
"src_encoding": "UTF-8",
"text": "from urllib2 import urlopen\nimport bs4 as BeautifulSoup\n\nfichier = open(\"commentaireAlloCineTOUS.txt\", \"w\")\nfor i in range(1,21) :\n\thtml = urlopen('http://www.allocine.fr/film/fichefilm-221541/critiques/spectateurs/?page='+str(i)).read()\n\tsoup = BeautifulSoup.BeautifulSoup(html,\"lxml\")\n\tresults = soup.findAll(\"p\", {\"itemprop\" : \"description\"})\n\tresults2 = soup.findAll(\"span\",{\"class\" : \"stareval-note\"}, {\"itemprop\" : \"ratingValue\"})\n\t\n\n\t#assert(len(results)==len(results2))\n\tfor child,child2 in zip(results,results2) :\n\t\tif not child.string : continue\n\t\tfichier.write(child2.string.encode('utf8').strip())\n\t\tfichier.write(\"::\");\n\t\tfichier.write(child.string.encode('utf8').strip().replace('\\n',''))\n\t\tfichier.write(\"\\n\\n\")\n\t\nfichier.close()\n"
}
] | 3 |
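EOpinion.py in the row above scores every review with the same formula in all four analysis methods: note = 2.5 + (pos - neg) / (mots / 13) * 2.5, clamped to [0, 5], where pos and neg count regex matches against word dictionaries. Here is a condensed, self-contained sketch of that baseline dictionary method; the word lists and the sample review are made-up stand-ins for the repository's dico/*.txt files and AlloCiné input, not its actual data.

import re

# Hypothetical stand-ins for dico/motsPositifs.txt and dico/motsNegatifs.txt
POSITIFS = [r"excellent", r"magnifique", r"superbe", r"genial"]
NEGATIFS = [r"mauvais", r"nul", r"ennuyeux", r"decevant"]

def score(commentaire):
    mots = commentaire.split()
    pos = sum(1 for mot in mots for p in POSITIFS if re.match(p, mot))
    neg = sum(1 for mot in mots for n in NEGATIFS if re.match(n, mot))
    # Same heuristic as methodeDicoMot: roughly one opinion word per
    # 13-word sentence, mapped onto a 0-5 star scale centred on 2.5.
    note = 2.5 + (pos - neg) / (len(mots) / 13.0) * 2.5
    return max(0.0, min(5.0, note))

print(score("un film excellent avec une photographie magnifique mais un rythme nul"))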
Ginsusamurai/advent_2015 | https://github.com/Ginsusamurai/advent_2015 | 647d0900a2c9c37f2a9c902a54a3da7c77b1e6b7 | 2ff9fd220f6cb76cd2be292df9c4347406e22252 | e9b84db49fde12b412b8e960680cd19cc03addc4 | refs/heads/main | 2023-01-03T13:29:29.901519 | 2020-10-27T18:43:13 | 2020-10-27T18:43:13 | 301,026,922 | 0 | 0 | MIT | 2020-10-04T02:48:04 | 2020-10-27T18:42:52 | 2020-10-27T18:43:14 | Python | [
{
"alpha_fraction": 0.5212163925170898,
"alphanum_fraction": 0.5419024229049683,
"avg_line_length": 32.23030471801758,
"blob_id": "32e4383069e655e098e81830462da97e8468b54a",
"content_id": "c5a94ce62ca32f1f6cdbd4eb71d38c872f4e38d8",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5656,
"license_type": "permissive",
"max_line_length": 109,
"num_lines": 165,
"path": "/advent2015/day07.py",
"repo_name": "Ginsusamurai/advent_2015",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\r\n\r\nimport re\r\nfrom ctypes import *\r\n\r\n\r\n\r\n\r\ngates = dict()\r\n\r\nfinishedGates = []\r\n\r\nwire_re = re.compile(r\"^[a-z]+$\")\r\n\r\ndef is_wire(name):\r\n return (wire_re.match(name) is not None)\r\n\r\ndef parse_line(input):\r\n arr = input.split(' ')\r\n\r\n # return each line as output, operation, val1, val2\r\n if len(arr) == 3:\r\n return arr[2], 'SET', arr[0], None\r\n elif len(arr) == 4:\r\n return arr[3], 'NOT', arr[1], None\r\n else:\r\n return arr[4], arr[1], arr[0], arr[2]\r\n\r\n\r\nclass Gate(object):\r\n def __init__(self, name, operation, input1 = None, input2 = None):\r\n self.name = name\r\n self.operation = operation\r\n self.input1 = input1\r\n self.input2 = input2\r\n self.value1 = None\r\n self.result = None\r\n self.bits = None\r\n\r\n # 'set' requires only an input and a target, so input2 should be None\r\n # if input1 is not a wirename, cast it to an int\r\n if self.operation == \"SET\":\r\n assert input1 is not None\r\n assert input2 is None\r\n if not is_wire(input1):\r\n self.value1 = c_ushort(int(input1))\r\n finishedGates.append(self)\r\n\r\n # for a SHIFT, input1 should be a wire name. Assigns that value to the 'bits' property\r\n elif self.operation == \"LSHIFT\" or self.operation == \"RSHIFT\":\r\n assert is_wire(input1)\r\n self.bits = c_ushort(int(input2))\r\n\r\n # not operator only takes input 2 and give back the complement. confirms vals are in place\r\n elif self.operation == \"NOT\":\r\n assert input1 is not None\r\n assert input2 is None\r\n assert is_wire(input1)\r\n\r\n # and operator requires both inputs. assert input2 is wire and if input1 is not a wire, cast to int\r\n elif self.operation == \"AND\":\r\n assert input1 is not None\r\n assert input2 is not None\r\n assert is_wire(input2)\r\n if not is_wire(input1):\r\n self.value1 = c_ushort(input1)\r\n\r\n # or operator requires both inputs. assert input2 is wire and if input1 is not wire, cast to int\r\n elif self.operation == \"OR\":\r\n assert input1 is not None\r\n assert input2 is not None\r\n assert is_wire(input2)\r\n if not is_wire(input1):\r\n self.value1 = c_ushort(input1)\r\n\r\n def value(self):\r\n\r\n\r\n # on invoke, see if the result has been calculated yet\r\n if self.result is None:\r\n # depth += 1\r\n # if set op, check if value1 (only dependency) is populated. 
If so, set to result (no recursion)\r\n # else, run the value method on the matching gate object from the dict (1 recursion)\r\n if self.operation == \"SET\":\r\n if self.value1 is not None:\r\n self.result = c_ushort(int(self.value1))\r\n else:\r\n self.result = c_ushort(gates[self.input1].value())\r\n\r\n # if not, run value() on input2, set complement (1 recursion)\r\n elif self.operation == \"NOT\":\r\n self.result = ~(c_ushort(gates[self.input1].value()))\r\n\r\n # if RSHIFT, run value() on input1 class, apply bit shift from input2 (1 recursion)\r\n elif self.operation == \"RSHIFT\":\r\n self.result = (gates[self.input1].value() >> self.bits)\r\n\r\n # if LSHIFT, run value() on input1 class, apply bit shift from input2 (1 recursion)\r\n elif self.operation == \"LSHIFT\":\r\n self.result = (gates[self.input1].value() << self.bits)\r\n\r\n # if value1 is populated then & with value() of result of input2 gate object (1 recursion)\r\n # else, run value on input1 gate object and & with input2 gate object value (2 recursions)\r\n elif self.operation == \"AND\":\r\n if self.value1 is not None:\r\n self.result = (self.value1 & gates[self.input2].value())\r\n else:\r\n self.result = (gates[self.input1].value() & gates[self.input2].value())\r\n\r\n # if value1 is populated, value1 | on value of input2 gate object (1 recursion)\r\n # else run value on input1 and input2 gate objects (2 recursion)\r\n elif self.operation == \"OR\":\r\n if self.value1 is not None:\r\n self.result = (self.value1 | gates[self.input2].value())\r\n else:\r\n self.result = (gates[self.input1].value() | gates[self.input2].value())\r\n\r\n # if depth > max_depth:\r\n # max_depth = depth\r\n # depth -= 1\r\n # print(depth)\r\n\r\n return self.result\r\n\r\n\r\n\r\nif __name__ == '__main__':\r\n\r\n global depth\r\n global max_depth\r\n\r\n with open(\"inputs/day07input.txt\", 'r') as f:\r\n booklet = f.read()\r\n\r\n # print(booklet)\r\n\r\n q = c_ushort(-3)\r\n\r\n print('123', int('123'), q.value)\r\n print('not y', ~456)\r\n print(int(\"FF\", 16))\r\n\r\n for line in booklet.splitlines():\r\n print(parse_line(line))\r\n name, op, input1, input2 = parse_line(line)\r\n print(name, op, input1, input2)\r\n if op is not None:\r\n gates[name] = Gate(name, op, input1, input2)\r\n\r\n\r\n\r\n for gate in gates.keys():\r\n print(gates[gate].value())\r\n\r\n for gate in gates.keys():\r\n print(gates[gate].name, gates[gate].result)\r\n\r\n # answer_a = gates[\"a\"].value()\r\n #\r\n # print(\"Day07.1 -> Final bit of circuit 'a' is {}\".format(answer_a))\r\n\r\n #lengths\r\n # 3: just set\r\n # 5: and/or/lshift/rshift\r\n # 4: not\r\n\r\n\r\n\r\n\r\n"
},
{
"alpha_fraction": 0.7470238208770752,
"alphanum_fraction": 0.7857142686843872,
"avg_line_length": 47.14285659790039,
"blob_id": "ea47f59e00ee7fce0ae069242cfc074a94cd7602",
"content_id": "b3509840fcbece7d214e006ecaafd42c76639a30",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 336,
"license_type": "permissive",
"max_line_length": 102,
"num_lines": 7,
"path": "/README.md",
"repo_name": "Ginsusamurai/advent_2015",
"src_encoding": "UTF-8",
"text": "# Advent of Code 2015 challenges\n\nThis is a series of python solutions to the [advent of code for 2015.](https://adventofcode.com/2015/)\n\n- Each day has 2 parts in one matching file with distinct print outs of their answers \n- Inputs are typically imported from a CSV in a matching directory\n- Optimization is only marginally considered"
},
{
"alpha_fraction": 0.5624071359634399,
"alphanum_fraction": 0.5817236304283142,
"avg_line_length": 21.821428298950195,
"blob_id": "eb460929c37d372510f8cfc4158683fc19c00804",
"content_id": "33d2e8b319aea050ca683498390232795a0e8123",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1346,
"license_type": "permissive",
"max_line_length": 98,
"num_lines": 56,
"path": "/advent2015/day01.py",
"repo_name": "Ginsusamurai/advent_2015",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\r\n\r\n# This is a sample Python script.\r\nimport itertools\r\nimport csv\r\n# Press Shift+F10 to execute it or replace it with your code.\r\n# Press Double Shift to search everywhere for classes, files, tool windows, actions, and settings.\r\n\r\ndef splitter(str):\r\n return [char for char in str]\r\n\r\ndef find_floor(str):\r\n print(\"Day01.1 -> Santa goes to floor {}\".format(str.count('(') - str.count(')')))\r\n\r\n\r\n\r\ndef find_basement(steps):\r\n current = 0\r\n\r\n for ind, step in enumerate(steps, 0):\r\n # print(step, ind)\r\n if current == -1:\r\n print(\"Day01.2 -> Santa enters the basement on instruction {}\".format(ind))\r\n return\r\n if step == '(':\r\n current = current + 1\r\n elif step == ')':\r\n current = current - 1\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef enter_basement(str):\r\n # print(str)\r\n floor_instructions = splitter(str)\r\n # print(floor_instructions)\r\n find_basement(floor_instructions)\r\n\r\n\r\n\r\n# Press the green button in the gutter to run the script.\r\nif __name__ == '__main__':\r\n\r\n\r\n with open(\"inputs/day01input.csv\", 'r') as f:\r\n reader = csv.reader(f, delimiter=\" \")\r\n data = list(reader)\r\n data = data[0][0]\r\n\r\n\r\n # Day01 challenges\r\n day01input = data\r\n find_floor(day01input)\r\n enter_basement(day01input)\r\n\r\n\r\n\r\n\r\n\r\n\r\n"
},
{
"alpha_fraction": 0.5841882824897766,
"alphanum_fraction": 0.6022933125495911,
"avg_line_length": 44.02777862548828,
"blob_id": "6a5b707a2acb28603854341e1b12d592fa7f1e60",
"content_id": "d9e7eaecc429106aa13d2a4f3a35434e44b254dd",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1657,
"license_type": "permissive",
"max_line_length": 118,
"num_lines": 36,
"path": "/advent2015/day05.py",
"repo_name": "Ginsusamurai/advent_2015",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\r\nimport csv\r\nimport re\r\n\r\nif __name__ == \"__main__\":\r\n with open(\"inputs/day05input.csv\", newline='') as f:\r\n reader = csv.reader(f)\r\n stringList = list(reader)\r\n\r\n niceStrings = set()\r\n advancedNiceStrings = set()\r\n\r\n for string in stringList:\r\n\r\n # rule1 -> at least 3 vowels -> letter in a range, then one or more other characters, then letter in range,\r\n # and repeat\r\n # rule2 -> at least a pair of repeating letters (xx) -> capture of any character and then a loop back to that\r\n # same capture group\r\n # rule3 -> does not contain a subset of letter pairs -> NOT in array of forbidden\r\n if re.search(r\"[aeiou].*[aeiou].*[aeiou]\", string[0]) \\\r\n and re.search(r\"(.)\\1\", string[0]) \\\r\n and not re.search(\"(ab|cd|pq|xy)\", string[0]):\r\n niceStrings.add(string[0])\r\n\r\n\r\n # rule1 -> pair of repeating letters with at least 1 other letter between (xyabvxy)\r\n # capture must be 2 word characters, any number of other characters,loop back on capture\r\n # rule2 -> 2 identical letters with 1 other letter between (hvh)\r\n # capture of a single character in range, single in range, loop back on capture (this implies anything between\r\n # is valid\r\n if re.search(r\"(\\w{2}).*(\\1)\", string[0])\\\r\n and re.search(r\"([a-z])[a-z]\\1\", string[0]):\r\n advancedNiceStrings.add(string[0])\r\n\r\n print(\"Day05.1 -> There are {} nice strings.\".format(len(niceStrings)))\r\n print(\"Day05.2 -> There are {} nice strings in the advanced filter.\".format(len(advancedNiceStrings)))\r\n"
},
{
"alpha_fraction": 0.5,
"alphanum_fraction": 0.5290322303771973,
"avg_line_length": 23.189189910888672,
"blob_id": "2885537a7870bcc4fb655c6cc978e46e3dafbec1",
"content_id": "acdfcee30d4e0bc77d78df3ffbee12e7f89f4738",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 930,
"license_type": "permissive",
"max_line_length": 78,
"num_lines": 37,
"path": "/advent2015/day02.py",
"repo_name": "Ginsusamurai/advent_2015",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\r\nimport csv\r\n\r\n\r\n\r\n\r\ndef returnNum(character):\r\n return int(character)\r\n\r\n\r\n\r\nif __name__ == \"__main__\":\r\n\r\n with open(\"inputs/day02input.csv\", newline='') as f:\r\n reader = csv.reader(f)\r\n packages = list(reader)\r\n\r\n #Day02.1\r\n totalFeet = 0\r\n ribbon = 0\r\n for dimensions in packages:\r\n areas = []\r\n raw = dimensions[0].split('x')\r\n lwh = list(map(lambda x: int(x), raw))\r\n lwh.sort()\r\n\r\n ribbon = ribbon + (2 * lwh[0] + 2 * lwh[1]) + lwh[0] * lwh[1] * lwh[2]\r\n\r\n areas.append(2 * lwh[0] * lwh[1])\r\n areas.append(2 * lwh[1] * lwh[2])\r\n areas.append(2 * lwh[0] * lwh[2])\r\n areas.sort()\r\n print(areas)\r\n totalFeet = totalFeet + sum(areas) + int(areas[0]/2)\r\n\r\n print(\"The Elves need a total of {} feet of paper\".format(totalFeet))\r\n print(\"The Elves also need {} feet of ribbon\".format(ribbon))"
},
{
"alpha_fraction": 0.46338582038879395,
"alphanum_fraction": 0.4834645688533783,
"avg_line_length": 25.586956024169922,
"blob_id": "005950fee6fff5ca2a10680c3ab6af17f3e3f64f",
"content_id": "ce4520c7c8067bf73a626fb0157d5dd5b03a7cac",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2540,
"license_type": "permissive",
"max_line_length": 83,
"num_lines": 92,
"path": "/advent2015/day06.py",
"repo_name": "Ginsusamurai/advent_2015",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\r\nimport csv\r\nimport json\r\n\r\ndef buildGrid():\r\n\r\n lightMap = dict()\r\n for x in range(0, 1000):\r\n for y in range(0, 1000):\r\n key = str(x) + 'x' + str(y)\r\n lightMap[key] = False\r\n\r\n return lightMap\r\n\r\ndef flipper(command, start,end, dict):\r\n\r\n for x in range(start[0], end[0]+1):\r\n for y in range(start[1], end[1]+1):\r\n key = str(x) + 'x' + str(y)\r\n # print('blah', key)\r\n # helper.append(key)\r\n if command == 'toggle':\r\n dict[key] = not dict[key]\r\n elif command == 'on':\r\n dict[key] = True\r\n else:\r\n dict[key] = False\r\n\r\n # print('helper', helper)\r\n\r\ndef dimmer(command, start,end, dict):\r\n\r\n for x in range(start[0], end[0]+1):\r\n for y in range(start[1], end[1]+1):\r\n\r\n key = str(x) + 'x' + str(y)\r\n # print('blah', key)\r\n # helper.append(key)\r\n\r\n if dict[key] == False:\r\n dict[key] == 0\r\n\r\n if command == 'toggle':\r\n dict[key] += 2\r\n elif command == 'on':\r\n dict[key] += 1\r\n else:\r\n dict[key] -= 1\r\n\r\n if dict[key] < 0:\r\n dict[key] = 0\r\n\r\n # print('helper', helper)\r\n\r\nif __name__ == \"__main__\":\r\n f = open(\"inputs/day06input.txt\", 'r')\r\n lightInstructions = list(f)\r\n\r\n lightGrid = buildGrid()\r\n brightGrid = lightGrid.copy()\r\n\r\n cleanInstructions = [i.replace(\"\\n\", \"\") for i in lightInstructions]\r\n splitInstructions = [i.split(\" \") for i in cleanInstructions]\r\n # finalInstructions = []\r\n\r\n for i, val in enumerate(splitInstructions):\r\n if val[0] == 'toggle':\r\n first = val[1].split(',')\r\n last = val[3].split(',')\r\n command = val[0]\r\n else:\r\n first = val[2].split(',')\r\n last = val[4].split(',')\r\n command = val[1]\r\n\r\n start = [int(first[0]), int(first[1])]\r\n end = [int(last[0]), int(last[1])]\r\n\r\n flipper(command, start, end, lightGrid)\r\n dimmer(command, start, end, brightGrid)\r\n # finalInstructions.append([command, start, end])\r\n\r\n lightCount = 0\r\n brightnessTotal = 0\r\n\r\n for key in lightGrid:\r\n if lightGrid[key] == True:\r\n lightCount += 1\r\n brightnessTotal += brightGrid[key]\r\n\r\n print(\"Day06.1 -> There are {} total lights on.\".format(lightCount))\r\n print(\"Day06.2 -> The total combined brightness is {}\".format(brightnessTotal))\r\n\r\n"
},
{
"alpha_fraction": 0.5717905163764954,
"alphanum_fraction": 0.6097972989082336,
"avg_line_length": 26.926828384399414,
"blob_id": "aca364d33b8265b34639ae33a885ad050c10a97d",
"content_id": "fb39589125e21f80e2bc08aa7355e3f11b9add4b",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1184,
"license_type": "permissive",
"max_line_length": 105,
"num_lines": 41,
"path": "/advent2015/day04.py",
"repo_name": "Ginsusamurai/advent_2015",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\r\nimport csv\r\nimport re\r\nimport json\r\nimport hashlib\r\n\r\n\r\n\r\n\r\n\r\nif __name__ == \"__main__\":\r\n with open(\"inputs/day04input.csv\", newline='') as f:\r\n reader = csv.reader(f)\r\n key = list(reader)[0][0]\r\n\r\n print(\"Brute Forcing...please wait\")\r\n #input the key and a number in decimal\r\n #add key to numbers and cycle through until hash begins with 5 zeroes\r\n #answer is what number was input\r\n\r\n lowestInt = 0\r\n hash = key + json.dumps(lowestInt)\r\n\r\n while not re.search(\"^00000\", hashlib.md5(hash.encode()).hexdigest()):\r\n lowestInt += 1\r\n hash = key + json.dumps(lowestInt)\r\n\r\n print(\"Day04.1 -> The Number {} matches the hash requirement of 5 leading zeroes.\".format(lowestInt))\r\n print(\"Next step may take longer...\")\r\n\r\n\r\n lowestInt = 0\r\n for i in range(9900000, 10000000): #narrowed the range for faster run time\r\n print(i)\r\n hash = key + json.dumps(i)\r\n if re.search(\"^000000\", hashlib.md5(hash.encode()).hexdigest()):\r\n lowestInt = i\r\n break\r\n\r\n\r\n print(\"Day04.2 -> The number {} matches the hash requirement of 6 leading zeroes.\".format(lowestInt))"
},
{
"alpha_fraction": 0.5419074892997742,
"alphanum_fraction": 0.5664739608764648,
"avg_line_length": 21.931034088134766,
"blob_id": "820e60dd0a748aa8a045b1ab378575d173308752",
"content_id": "5370ef2da98b33e699657511eafa446607d3748a",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 692,
"license_type": "permissive",
"max_line_length": 74,
"num_lines": 29,
"path": "/advent2015/test/test_one.py",
"repo_name": "Ginsusamurai/advent_2015",
"src_encoding": "UTF-8",
"text": "import pytest\r\nimport logging\r\n\r\nLOGGER = logging.getLogger(__name__)\r\n\r\nerrors = []\r\n\r\ndef test_uppercase():\r\n LOGGER.info(\"test uppercase\")\r\n try:\r\n assert \"loud noises\".upper() == \"LOUD NOISES1\"\r\n except AssertionError as error:\r\n LOGGER.info(error)\r\n\r\ndef test_reversed():\r\n try:\r\n assert list(reversed([1, 2, 3, 4])) == [4, 3, 2, 1, None]\r\n except AssertionError as error:\r\n print(error)\r\n errors.append(\"This is some {} stuff\".format(error))\r\n\r\ndef test_some_primes():\r\n assert 37 in {\r\n num\r\n for num in range(1, 50)\r\n if num != 1 and not any([num % div == 0 for div in range(2, num)])\r\n }\r\n\r\n# print(errors)"
},
{
"alpha_fraction": 0.4679088890552521,
"alphanum_fraction": 0.4955141544342041,
"avg_line_length": 22.559322357177734,
"blob_id": "d87dae7a099acfde2931b4eda96ebebfff8c36d8",
"content_id": "cdd38c45e24a3d923b0fb03a84dfa59fbcbb1817",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1449,
"license_type": "permissive",
"max_line_length": 104,
"num_lines": 59,
"path": "/advent2015/day03.py",
"repo_name": "Ginsusamurai/advent_2015",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\r\nimport csv\r\nfrom day01 import splitter\r\nimport json\r\n\r\n\r\n\r\n\r\nif __name__ == \"__main__\":\r\n with open(\"inputs/day03input.csv\", newline='') as f:\r\n reader = csv.reader(f)\r\n packages = splitter(list(reader)[0][0])\r\n\r\n # print(len(packages))\r\n\r\n gifted = set()\r\n teamGifted = set()\r\n santa = [0,0]\r\n realSanta = [0,0]\r\n roboSanta = [0,0]\r\n gifted.add(json.dumps(santa))\r\n teamGifted.update({json.dumps(realSanta), json.dumps(roboSanta)})\r\n\r\n for step in packages:\r\n if step == \"<\":\r\n santa[0] -= 1\r\n elif step == \">\":\r\n santa[0] += 1\r\n elif step == \"^\":\r\n santa[1] += 1\r\n elif step == \"v\":\r\n santa[1] -= 1\r\n\r\n gifted.add(json.dumps(santa))\r\n\r\n for ind, step in enumerate(packages, 0):\r\n\r\n if ind % 2 == 0:\r\n listener = roboSanta\r\n else:\r\n listener = realSanta\r\n\r\n if step == \"<\":\r\n listener[0] -= 1\r\n elif step == \">\":\r\n listener[0] += 1\r\n elif step == \"^\":\r\n listener[1] += 1\r\n elif step == \"v\":\r\n listener[1] -= 1\r\n\r\n\r\n\r\n teamGifted.update({json.dumps(realSanta), json.dumps(roboSanta)})\r\n\r\n\r\n\r\n print(\"Day03.1 -> Santa gave {} households at least 1 toy.\".format(len(gifted)))\r\n print(\"Day03.2 -> Santa and Robot gifted {} houses at least 1 toy together\".format(len(teamGifted)))\r\n"
}
] | 9 |
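day07.py in the row above is still a work in progress: Gate.value() mixes c_ushort objects with plain ints, so int(), unary ~, and the shift operators raise TypeErrors on them, and the final answer lines are commented out. One working approach to the same wire-circuit puzzle is memoized recursion over plain ints masked to 16 bits, sketched below under the assumption of the usual "123 -> x" / "NOT x -> h" / "x AND y -> z" instruction format; this is a sketch, not the repository's code.

def solve(lines, wire="a"):
    # Map each target wire to the tokenized left-hand side of its rule.
    rules = {target: lhs.split() for lhs, target in
             (line.split(" -> ") for line in lines)}
    cache = {}

    ops = {
        "AND": lambda a, b: a & b,
        "OR": lambda a, b: a | b,
        "LSHIFT": lambda a, b: (a << b) & 0xFFFF,
        "RSHIFT": lambda a, b: a >> b,
    }

    def value(token):
        if token.isdigit():          # literal signal
            return int(token)
        if token not in cache:
            rule = rules[token]
            if len(rule) == 1:       # plain assignment: "123 -> x"
                res = value(rule[0])
            elif rule[0] == "NOT":   # unary complement, kept in 16 bits
                res = ~value(rule[1]) & 0xFFFF
            else:                    # binary gate: "x AND y -> z"
                res = ops[rule[1]](value(rule[0]), value(rule[2]))
            cache[token] = res
        return cache[token]

    return value(wire)

# e.g. solve(open("inputs/day07input.txt").read().splitlines())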
rezafarhang/Kattis_Solution_2018 | https://github.com/rezafarhang/Kattis_Solution_2018 | a5046005bb5f5ba7af8d2f830371c536797f5728 | b63d484842ff1557f8a7dc1b2ed8ccf422cf698d | 1170fa44670f7fff8ef7e42ce877b1324cd64337 | refs/heads/master | 2022-01-19T01:17:04.726173 | 2019-04-06T09:16:05 | 2019-04-06T09:16:05 | null | 0 | 0 | null | null | null | null | null | [
{
"alpha_fraction": 0.3677685856819153,
"alphanum_fraction": 0.40909090638160706,
"avg_line_length": 13.1875,
"blob_id": "211e52b49ba5eb773b607843ecbfccb0af6db785",
"content_id": "7c43be390197a3eecf1ecee5a753b0d2074a718c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 484,
"license_type": "no_license",
"max_line_length": 45,
"num_lines": 32,
"path": "/Orderly Class.cpp",
"repo_name": "rezafarhang/Kattis_Solution_2018",
"src_encoding": "UTF-8",
"text": "#include <iostream>\r\n#include <string>\r\nusing namespace std;\r\n\r\nint main()\r\n{\r\n\tlong i, j, k, s = 0, ni, nj;\r\n\tstring s1, s2;\r\n\tcin >> s1;\r\n\tcin >> s2;\r\n\ti = 0;\r\n\twhile (s1[i] == s2[i])\r\n\t\ti++;\r\n\tj = s1.size() - 1;\r\n\twhile (s1[j] == s2[j])\r\n\t\tj--;\r\n\tni = i;\r\n\tnj = j;\r\n\tfor (i; i <= nj; i++, j--)\r\n\t\tif (s1[i] != s2[j])\r\n\t\t{\r\n\t\t\tcout << 0 << endl;\r\n\t\t\treturn 0;\r\n\t\t}\r\n\ti = ni;\r\n\tj = nj;\r\n\ts++;\r\n\twhile (i - s >= 0 && s1[i - s] == s1[j + s])\r\n\t\ts++;\r\n\tcout << s << endl;\r\n\treturn 0;\r\n}"
},
{
"alpha_fraction": 0.3976764976978302,
"alphanum_fraction": 0.41018766164779663,
"avg_line_length": 14.984848022460938,
"blob_id": "0ff9bc250a9b6a2122f8a5644c48cbfd925b2bea",
"content_id": "f146a8c0aa18062db5d83587e7d9609e49e46761",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1119,
"license_type": "no_license",
"max_line_length": 105,
"num_lines": 66,
"path": "/wolf.cpp",
"repo_name": "rezafarhang/Kattis_Solution_2018",
"src_encoding": "UTF-8",
"text": "#include <iostream>\r\n#include <map>\r\n#include <algorithm>\r\n\r\nusing namespace std;\r\n\r\nint main() {\r\n\tint n, sas, mamad;\r\n\tchar asghar;\r\n\tbool flag = false;\r\n\tcin >> n;\r\n\tsas = 52 - n;\r\n\tif (n > sas)\r\n\t{\r\n\t\tfor (int i = 0; i < n; i++)\r\n\t\t{\r\n\t\t\tcin >> mamad >> asghar;\r\n\t\t}\r\n\t\tcout << \"possible\" << endl;\r\n\t}\r\n\telse if (n == sas)\r\n\t{\r\n\t\tmap<char, int> maxs;\r\n\t\tmaxs['H'] = 0;\r\n\t\tmaxs['D'] = 0;\r\n\t\tmaxs['C'] = 0;\r\n\t\tmaxs['S'] = 0;\r\n\r\n\t\tmap<char, int> help;\r\n\t\thelp['H'] = 0;\r\n\t\thelp['D'] = 0;\r\n\t\thelp['C'] = 0;\r\n\t\thelp['S'] = 0;\r\n\r\n\r\n\t\tfor (int i = 0; i < n; i++)\r\n\t\t{\r\n\t\t\tcin >> mamad >> asghar;\r\n\t\t\thelp[asghar]++;\r\n\t\t\tmaxs[asghar] = max(maxs[asghar], mamad);\r\n\t\t}\r\n\r\n\t\tif (maxs['H'] > help['H'] || maxs['D'] > help['D'] || maxs['C'] > help['C'] || maxs['S'] > help['S']) {\r\n\t\t\tflag = true;\r\n\t\t}\r\n\t\t//-------------------------------\r\n\t\tif (flag == false)\r\n\t\t{\r\n\t\t\tcout << \"impossible\" << endl;\r\n\t\t}\r\n\t\telse\r\n\t\t{\r\n\t\t\tcout << \"possible\" << endl;\r\n\t\t}\r\n\t\t//-------------------------------\r\n\t}\r\n\telse\r\n\t{\r\n\t\tfor (int i = 0; i < n; i++)\r\n\t\t{\r\n\t\t\tcin >> mamad >> asghar;\r\n\t\t}\r\n\t\tcout << \"impossible\" << endl;\r\n\t}\r\n\treturn 0;\r\n}"
},
{
"alpha_fraction": 0.30112722516059875,
"alphanum_fraction": 0.3107890486717224,
"avg_line_length": 12.488371849060059,
"blob_id": "41117bff6ee98b24257a27786572c49eb27f4d50",
"content_id": "24beb203ed4df46cac4acada49964b4d9d8b45d9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 621,
"license_type": "no_license",
"max_line_length": 56,
"num_lines": 43,
"path": "/stararrangements.cpp",
"repo_name": "rezafarhang/Kattis_Solution_2018",
"src_encoding": "UTF-8",
"text": "#include <iostream>\r\nusing namespace std;\r\nint main()\r\n {\r\n\tint a = 2, b = a-1, s = 0, sas;\r\n\tcin >> sas;\r\n\tcout << sas << \":\" << endl;\r\n\tfor (; b < sas;)\r\n\t{\r\n\t\tfor (; s < sas;)\r\n\t\t{\r\n\t\t\ts += a;\r\n\t\t\tif (s == sas)\r\n\t\t\t\tbreak;\r\n\r\n\t\t\ts += b;\r\n\t\t}\r\n\t\tif (s == sas && a + b <= sas && b <= a)\r\n\t\t{\r\n\t\t\tcout << a << \",\" << b << endl;\r\n\t\t}\r\n\t\telse {\r\n\t\t\ts = 0;\r\n\t\t\tfor (; s < sas;)\r\n\t\t\t{\r\n\t\t\t\ts += b;\r\n\t\t\t\tif (s == sas)\r\n\t\t\t\t\tbreak;\r\n\r\n\t\t\t\ts += a;\r\n\t\t\t}\r\n\t\t\tif ((s == sas && a + b <= sas) && a != b && b >= a) {\r\n\t\t\t\tcout << a << \",\" << b << endl;\r\n\t\t\t}\r\n\t\t}\r\n\t\ts = 0;\r\n\t\tif (a > b)\r\n\t\t\tb++;\r\n\t\telse\r\n\t\t\ta++;\r\n\t}\r\n\treturn 0;\r\n\t}"
},
{
"alpha_fraction": 0.4377777874469757,
"alphanum_fraction": 0.46444445848464966,
"avg_line_length": 12.580645561218262,
"blob_id": "caf27aa2a310f957da2d7441509ddb7d00e44b51",
"content_id": "e05756e3bf740d1b33da9decf75b5450ee3ea56b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 450,
"license_type": "no_license",
"max_line_length": 34,
"num_lines": 31,
"path": "/yinyangstones.cpp",
"repo_name": "rezafarhang/Kattis_Solution_2018",
"src_encoding": "UTF-8",
"text": "#include <iostream>\r\n#include <map>\r\nusing namespace std;\r\n\r\nint main()\r\n{\r\n\tios_base::sync_with_stdio(false);\r\n\tcin.tie(0); cout.tie(0);\r\n\tchar sas[100013];\r\n\tcin >> sas;\r\n\tmap<char, int> seyed;\r\n\tfor(int i = 0; sas[i]; i++)\r\n\t{\r\n\t\tif (sas[i] == 'B')\r\n\t\t{\r\n\t\t\tseyed['B']++;\r\n\t\t}\r\n\t\telse if (sas[i] == 'W')\r\n\t\t{\r\n\t\t\tseyed['W']++;\r\n\t\t}\r\n\t}\r\n\tif (seyed['W'] == seyed['B'])\r\n\t{\r\n\t\tcout << 1 << endl;\r\n\t}\r\n\telse {\r\n\t\tcout << 0 << endl;\r\n\t}\r\n\treturn 0;\r\n}"
},
{
"alpha_fraction": 0.4204081594944,
"alphanum_fraction": 0.4326530694961548,
"avg_line_length": 13.4375,
"blob_id": "6686c716f2f4f2b5b0f24ed79b1376dafc1f3d03",
"content_id": "5f848fe311ebc19d8dd1c6ae20551866a9281cdf",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 245,
"license_type": "no_license",
"max_line_length": 31,
"num_lines": 16,
"path": "/applesack.cpp",
"repo_name": "rezafarhang/Kattis_Solution_2018",
"src_encoding": "UTF-8",
"text": "#include <iostream>\r\n#include <cmath>\r\n\r\nusing namespace std;\r\n\r\nint main() {\r\n int n, k, sas =0;\r\n cin >> n >> k;\r\n for(sas=0;n > k;sas++)\r\n {\r\n n -= ceil(double(n)/k);\r\n }\r\n sas += n + 1;\r\n\r\n cout << sas << endl;\r\n}"
},
{
"alpha_fraction": 0.43995633721351624,
"alphanum_fraction": 0.45414847135543823,
"avg_line_length": 16.360000610351562,
"blob_id": "76a4d588d5d78f9c7855741eebb5618c2476efbc",
"content_id": "33c832f309ca55b54bbc424280b4456b610ed6f7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 916,
"license_type": "no_license",
"max_line_length": 95,
"num_lines": 50,
"path": "/workout.cpp",
"repo_name": "rezafarhang/Kattis_Solution_2018",
"src_encoding": "UTF-8",
"text": "#include <vector>\r\n#include <iostream>\r\n#include <cstdio>\r\n#include <algorithm>\r\nusing namespace std;\r\n\r\nvoid workout(long long u, long long r, long long& t, long long vf, long long vs, long long &c) \r\n{\r\n\tif (t < c)\r\n\t\t t += ((c - t) / (u + r)) * (u + r);\r\n\twhile (true)\r\n\t{\r\n\t\tif (t <= c)\r\n\t\t{\r\n\t\t\tc = max(c, u + t);\r\n\t\t\tt += u + r;\r\n\t\t}\r\n\t\telse\r\n\t\t{\r\n\t\t\tt = max(t, c + vf);\r\n\t\t\tc += vf + vs;\r\n\t\t\tbreak;\r\n\t\t}\r\n\t}\r\n}\r\n\r\nint main()\r\n{\r\n\tlong long x, y, c = 0, i, j;\r\n\tvector<pair<long long, long long> >v;\r\n\tvector<long long> u, r, t;\r\n\tfor (i = 0; i < 10; i++)\r\n\t{\r\n\t\tcin >> x >> y;\r\n\t\tv.push_back(make_pair(x, y));\r\n\t}\r\n\tr = t = u;\r\n\tfor (i = 0; i < 10; i++)\r\n\t{\r\n\t\tcin >> x >> y >> j;\r\n\t\tu.push_back(x);\r\n\t\tr.push_back(y);\r\n\t\tt.push_back(j);\r\n\t}\r\n\tfor (j = 0; j < 3; j++)\r\n\t\tfor (i = 0; i < 10; i++) \r\n\t\t\tworkout(u[i], r[i], t[i], v[i].first, v[i].second, c);\r\n\tcout << c - v.back().second << endl;\r\n\treturn 0;\r\n}"
},
{
"alpha_fraction": 0.4585365951061249,
"alphanum_fraction": 0.48130080103874207,
"avg_line_length": 15.628571510314941,
"blob_id": "fb329e0ec436c3459dca803e2c1b629e9f92fca5",
"content_id": "6f4ecc0be2327863c3e6facfc0e70af0482b9a5d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 615,
"license_type": "no_license",
"max_line_length": 57,
"num_lines": 35,
"path": "/nodub.cpp",
"repo_name": "rezafarhang/Kattis_Solution_2018",
"src_encoding": "UTF-8",
"text": "#include <iostream>\r\n#include <map>\r\n#include <cstring>\r\nusing namespace std;\r\nint main()\r\n{\r\n\tchar mamad[113];\r\n\tstring mohseni;\r\n\tshort sas = 0, j, f = 0;\r\n\tmap<string, int> asghar;\r\n\tcin.get(mamad, '/0');\r\n\tfor (j = 0; mamad[j] != '\\0'; j++);\r\n\tmamad[j] = ' ';\r\n\tmamad[j + 1] = '\\0';\r\n\tfor (int i = 0; mamad[i]; i++)\r\n\t{\r\n\t\tif (mamad[i] != ' ')\r\n\t\t\tmohseni += mamad[i];\r\n\t\telse\r\n\t\t{\r\n\t\t\tasghar[mohseni]++;\r\n\t\t\tmohseni = \"\";\r\n\t\t}\r\n\t}\r\n\tfor (auto it = asghar.begin(); it != asghar.end(); it++)\r\n\t{\r\n\t\tif (it->second > 1)\r\n\t\t{\r\n\t\t\tcout << \"no\" << endl;\r\n\t\t\treturn 0;\r\n\t\t}\r\n\t}\r\n\tcout << \"yes\" << endl;\r\n\treturn 0;\r\n}"
},
{
"alpha_fraction": 0.4748603403568268,
"alphanum_fraction": 0.4972067177295685,
"avg_line_length": 9.3125,
"blob_id": "ae4efdfc0a905bb1df2f42952eaf97bf5c56959b",
"content_id": "44023ab81e3b1dac98a89ff04de70b5d99ffbbd3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 179,
"license_type": "no_license",
"max_line_length": 30,
"num_lines": 16,
"path": "/bossbattle.cpp",
"repo_name": "rezafarhang/Kattis_Solution_2018",
"src_encoding": "UTF-8",
"text": "#include <iostream>\r\n\r\nusing namespace std;\r\n\r\nint main()\r\n{\r\n\tint sas;\r\n\tcin >> sas;\r\n\tif (sas < 3)\r\n\t{\r\n\t\tcout << 1 << endl;\r\n\t}\r\n\telse cout << sas - 2 << endl;\r\n\r\n\treturn 0;\r\n}"
},
{
"alpha_fraction": 0.5454545617103577,
"alphanum_fraction": 0.5826446413993835,
"avg_line_length": 14.266666412353516,
"blob_id": "f47e87db7db58621d74df8cb8f87de16d050b01f",
"content_id": "72003355d05b95bc4fcba5f0adc8ef8dbc802cd6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 242,
"license_type": "no_license",
"max_line_length": 35,
"num_lines": 15,
"path": "/3dprinter.cpp",
"repo_name": "rezafarhang/Kattis_Solution_2018",
"src_encoding": "UTF-8",
"text": "#include <iostream>\r\n#include <cstdio>\r\n#include <cassert>\r\n#include <cmath>\r\n#include <algorithm>\r\nusing namespace std;\r\n\r\nint main()\r\n{\r\n\tint n;\r\n\tcin >> n;\r\n\tassert(n >= 1 && n <= 10000);\r\n\tcout << ceil(log2(n)) + 1 << endl;\r\n\treturn 0;\r\n}"
},
{
"alpha_fraction": 0.5069767236709595,
"alphanum_fraction": 0.5209302306175232,
"avg_line_length": 13.5,
"blob_id": "8edb3316aebabb97977771c18aa86a350cb530a8",
"content_id": "a10648fc2bde76f3e8428ffe8998f605e06a9eb5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 215,
"license_type": "no_license",
"max_line_length": 25,
"num_lines": 14,
"path": "/character.cpp",
"repo_name": "rezafarhang/Kattis_Solution_2018",
"src_encoding": "UTF-8",
"text": "#include <iostream>\r\n#include <cmath>\r\n#include <cstdio>\r\nusing namespace std;\r\n\r\nint main()\r\n{\r\n long n;\r\n scanf(\"%ld\",&n);\r\n long long d=pow(2,n);\r\n d-=(n+1);\r\n printf(\"%lld\\n\",d);\r\n return 0;\r\n}"
},
{
"alpha_fraction": 0.4791666567325592,
"alphanum_fraction": 0.5520833134651184,
"avg_line_length": 19.377777099609375,
"blob_id": "04bdb1547c993ac0c39f1684cda7891362434659",
"content_id": "0ca7d1140715a7dfa31cbef1e16d42b9436bf18c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 960,
"license_type": "no_license",
"max_line_length": 118,
"num_lines": 45,
"path": "/politics.cpp",
"repo_name": "rezafarhang/Kattis_Solution_2018",
"src_encoding": "UTF-8",
"text": "#include <iostream>\r\n#include <iomanip>\r\n#include <algorithm>\r\n\r\nusing namespace std;\r\n\r\nstruct despicable {\r\n\tint s, a, b;\r\n};\r\n\r\nint main()\r\n{\r\n\tint n, help, s, a, b;\r\n\tdouble asghar[1000012], mohseni[1000012], shit = 0, Oops = 0;\r\n\tcin >> n;\r\n\tdespicable ArreyOne[10000];\r\n\r\n\tfor (int j = 0; j < n; j++)\r\n\t{\r\n\t\tcin >> ArreyOne[j].s >> ArreyOne[j].a >> ArreyOne[j].b;\r\n\t}\r\n\r\n\thelp = n - 1, mohseni[1000011] = 0, asghar[1000011] = 0;\r\n\r\n\tfor (int i = 1000000; i > 0; i--)\r\n\t{\r\n\t\tasghar[i] = asghar[i + 1];\r\n\t\tfor (; help >= 0 && ArreyOne[help].s == i;)\r\n\t\t{\r\n\t\t\tint mashdosein(ArreyOne[help].a), gholom(ArreyOne[help].b);\r\n\r\n\t\t\tshit = (mohseni[min(i + mashdosein, 1000011)] - mohseni[min(i + gholom + 1, 1000011)]) / (gholom - mashdosein + 1);\r\n\t\t\tOops = shit + 1;\r\n\t\t\tasghar[i] = max(asghar[i], Oops);\r\n\t\t\thelp--;\r\n\t\t}\r\n\t\tmohseni[i] = asghar[i] + mohseni[i + 1];\r\n\t}\r\n\r\n\tcout << fixed;\r\n\tcout << setprecision(8);\r\n\tcout << asghar[1] << endl;\r\n\r\n\treturn 0;\r\n}"
},
{
"alpha_fraction": 0.301857590675354,
"alphanum_fraction": 0.3359133005142212,
"avg_line_length": 13.428571701049805,
"blob_id": "a4d4d9ccd1d3167f48c3ae3b0e6cff43706a4577",
"content_id": "51b09e294a7b1da8c9069b767e3a38c361534014",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 646,
"license_type": "no_license",
"max_line_length": 38,
"num_lines": 42,
"path": "/imageprocessing.cpp",
"repo_name": "rezafarhang/Kattis_Solution_2018",
"src_encoding": "UTF-8",
"text": "#include <iostream>\r\nusing namespace std;\r\n\r\nint main() {\r\n\tint h, w, n, m, a[25][25], b[25][25];\r\n\tlong long sas;\r\n\tcin >> h >> w >> n >> m;\r\n\r\n\tfor (int i = 0; i < h; i++)\r\n\t{\r\n\t\tfor (int j = 0; j < w; j++)\r\n\t\t{\r\n\t\t\tcin >> a[i][j];\r\n\t\t}\r\n\t}\r\n\r\n\tfor (int i = n - 1; i >= 0; i--)\r\n\t{\r\n\t\tfor (int j = m - 1; j >= 0; j--)\r\n\t\t{\r\n\t\t\tcin >> b[i][j];\r\n\t\t}\r\n\t}\r\n\r\n\tfor (int i = 0; i + n < h+1; i++)\r\n\t{\r\n\t\tfor (int j = 0; j + m < w+1; j++)\r\n\t\t{\r\n\t\t\tsas = 0;\r\n\t\t\tfor (int f = 0; f < n; f++)\r\n\t\t\t{\r\n\t\t\t\tfor (int s = 0; s < m; s++)\r\n\t\t\t\t{\r\n\t\t\t\t\tsas += b[f][s] * a[i + f][j + s];\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t\tcout << sas << \" \";\r\n\t\t}\r\n\t\tcout << endl;\r\n\t}\r\n\treturn 0;\r\n}"
},
{
"alpha_fraction": 0.3773416578769684,
"alphanum_fraction": 0.46654772758483887,
"avg_line_length": 21.35416603088379,
"blob_id": "de2cf4b8cedaf284d6313138201950e29bf7fef0",
"content_id": "a71151ba2fa51f335f72e9b234ebaaf3c7d46d01",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1121,
"license_type": "no_license",
"max_line_length": 96,
"num_lines": 48,
"path": "/racingalphabet.cpp",
"repo_name": "rezafarhang/Kattis_Solution_2018",
"src_encoding": "UTF-8",
"text": "#include <iostream>\r\n#include <string>\r\n#include <algorithm>\r\n#include <cstdio>\r\n#include <iomanip>\r\nusing namespace std;\r\n\r\nlong double Pis = 3.141592653589793238462643383279502884197169399375105820974944592307816406286;\r\nstring amir028 = \"ABCDEFGHIJKLMNOPQRSTUVWXYZ '\";\r\nint mashdosein(char a)\r\n{\r\n for (int i = 0; i < 28; i++)\r\n {\r\n if (a == amir028[i])\r\n {\r\n return i;\r\n }\r\n }\r\n return 0;\r\n}\r\nint main()\r\n{\r\n int n, x = 0, mohsen = 0 , sas = 0, help;\r\n\r\n cin >> n;\r\n while (n--)\r\n {\r\n string abas = \"\";\r\n getline(cin, abas);\r\n if (abas == \"\")\r\n {\r\n getline(cin, abas);\r\n }\r\n for (mohsen = 0; abas[mohsen]; mohsen++);\r\n // mohsen += 1;\r\n sas = mashdosein(abas[0]);\r\n for (int i = 0; i < mohsen; i++)\r\n {\r\n help = mashdosein(abas[i]);\r\n x += min(abs(sas - help), 28 - abs(sas - help));\r\n sas = help;\r\n }\r\n cout << fixed << setprecision(10);\r\n cout << ((x * Pis) / 7) + mohsen << endl;\r\n x = 0;\r\n }\r\n return 0;\r\n}\r\n"
},
{
"alpha_fraction": 0.5198135375976562,
"alphanum_fraction": 0.5431235432624817,
"avg_line_length": 31.153846740722656,
"blob_id": "d6ab997c837788bd336aede3e3a45ab1d819a2af",
"content_id": "efd3986866cd6fc11a9bbd37feb7f6d1b7d6eb86",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 429,
"license_type": "no_license",
"max_line_length": 62,
"num_lines": 13,
"path": "/gcpc.py",
"repo_name": "rezafarhang/Kattis_Solution_2018",
"src_encoding": "UTF-8",
"text": "(n, e) = map(int, input().split()) #base namosan\r\n\r\nsas = [(0, 0)] * (n+1)\r\nmohseni = set()\r\n\r\nfor i in range(e):\r\n (asghar, mamad) = [int(x) for x in input().split()]\r\n sas[asghar] = (sas[asghar][0] + 1, sas[asghar][1] - mamad)\r\n if sas[asghar] > sas[1]:\r\n mohseni.add(asghar)\r\n if asghar == 1:#wrong bekhater to shit\r\n mohseni = set([i for i in mohseni if sas[i] > sas[1]])\r\n print(len(mohseni)+1)"
},
{
"alpha_fraction": 0.41645243763923645,
"alphanum_fraction": 0.4524421691894531,
"avg_line_length": 19.72222137451172,
"blob_id": "85764f787fedf1b3bbf716e6fa52eb76eda5597a",
"content_id": "936b93f1859eafe1baf8f5f98a9fb4d467fe0196",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 389,
"license_type": "no_license",
"max_line_length": 63,
"num_lines": 18,
"path": "/heartrate.cpp",
"repo_name": "rezafarhang/Kattis_Solution_2018",
"src_encoding": "UTF-8",
"text": "#include <iostream>\r\n#include <iomanip>\r\nusing namespace std;\r\n\r\nint main() {\r\n int sas;\r\n double mohsen ,asghar ,h1 ,h2;\r\n cin >> sas;\r\n\r\n while(sas--)\r\n {\r\n cin >> mohsen >> asghar;\r\n h1 = 60 * (mohsen / asghar);\r\n h2 = 60 / asghar;\r\n cout << fixed << setprecision(4);\r\n cout << h1 - h2 << \" \" << h1 << \" \" << h1 + h2 << endl;\r\n }\r\n}"
},
{
"alpha_fraction": 0.7195767164230347,
"alphanum_fraction": 0.7830687761306763,
"avg_line_length": 30.5,
"blob_id": "63639d4f9347883e408bd65b77de1619717c5fe9",
"content_id": "86f411b6344664b12a5a9677053868a8467b7fb3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 189,
"license_type": "no_license",
"max_line_length": 99,
"num_lines": 6,
"path": "/README.md",
"repo_name": "rezafarhang/Kattis_Solution_2018",
"src_encoding": "UTF-8",
"text": "# Kattis_Solution_2018\nkattis solution 2017-2018\n\nThe name of the variables is very bad\n\nThese questions have been solved by the \"Wrong An$wer\" team at New Year's Eve and have been funable\n"
},
{
"alpha_fraction": 0.41801387071609497,
"alphanum_fraction": 0.4399538040161133,
"avg_line_length": 12.711864471435547,
"blob_id": "52eead7601ea50c2d59866af181a6831fa4d718c",
"content_id": "18842afe2dc65d5fd4cd1377e030f869a3411981",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 866,
"license_type": "no_license",
"max_line_length": 50,
"num_lines": 59,
"path": "/canonical.cpp",
"repo_name": "rezafarhang/Kattis_Solution_2018",
"src_encoding": "UTF-8",
"text": "#include <iostream>\r\n#include <algorithm>\r\n#include <vector>\r\n\r\nusing namespace std;\r\n\r\nint main() {\r\n\tint n;\r\n\tbool sas = true;\r\n\tcin >> n;\r\n\r\n\tvector<int> a(n);\r\n\r\n\tfor (auto &i : a)\r\n\t{\r\n\t\tcin >> i;\r\n\t}\r\n\r\n\tint mohseni = a[n - 1] + a[n - 2];\r\n\r\n\tvector<int> nakon(mohseni + 1);\r\n\tvector<int> shalvar(mohseni + 1);\r\n\r\n\tfor (int i = 1; i <= mohseni; i++)\r\n\t{\r\n\t\tnakon[i] = 1000013;\r\n\t\tfor (int j = 0; j < n; j++)\r\n\t\t{\r\n\t\t\tif (a[j] <= i)\r\n\t\t\t{\r\n\t\t\t\tnakon[i] = min(nakon[i], nakon[i - a[j]] + 1);\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tfor (int j = n - 1; j >= 0; j--)\r\n\t\t{\r\n\t\t\tif (a[j] <= i)\r\n\t\t\t{\r\n\t\t\t\tshalvar[i] = shalvar[i - a[j]] + 1;\r\n\t\t\t\tbreak;\r\n\t\t\t}\r\n\t\t}\r\n\t}\r\n\tfor (int i = 0; sas && i <= mohseni; i++) {\r\n\t\tif (nakon[i] < shalvar[i])\r\n\t\t{\r\n\t\t\tsas = false;\r\n\t\t}\r\n\t}\r\n\tif (sas)\r\n\t{\r\n\t\tcout << \"canonical\" << endl;\r\n\t}\r\n\telse\r\n\t{\r\n\t\tcout << \"non-canonical\" << endl;\r\n\t}\r\n\treturn 0;\r\n}"
}
] | 17 |