text
stringlengths 1
2.05k
|
---|
import numpy as np\n",
" |
import os\n",
" |
import matplotlib.pyplot as plt\n",
"\n",
"
"SEED = 42\n",
"np.random.seed(SEED)\n",
"\n",
"\n",
"X = np.linspace(-0.5, 0.5, 150).astype('float64')\n",
"noise = np.random.normal(0, 0.1, len(X)).astype('float64')\n",
"y = 2 * X + 5 + noise
"\n",
"print('X values = ',X)\n",
"print('y values = ', y)\n",
" \n"
]
},
{
"cell_type": "code",
"execution_count": 59,
"id": "fa6fc5c0",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"Text(0, 0.5, 'y values')"
]
},
"execution_count": 59,
"metadata": {},
"output_type": "execute_result"
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAjcAAAGwCAYAAABVdURTAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAABKrklEQVR4nO3deXxU9b3/8feEhARiEjZlAkWgEJHIJnoFBKXSUFG0YHtrjcGtiK1Vi1ot0lt/gLQCt1qkdddbaatAXR6AK20QlwJxxSiIC0QQhARlSyBogOT8/qAnnSRzZs6ZOTNnZvJ6Ph55PMzknDPfTL3mc7/fz+IzDMMQAABAikjzegEAAABuIrgBAAApheAGAACkFIIbAACQUghuAABASiG4AQAAKYXgBgAApJR0rxcQbw0NDdq5c6dycnLk8/m8Xg4AALDBMAwdOHBA3bp1U1pa6L2ZVhfc7Ny5Uz169PB6GQAAIALbt2/Xt771rZDXtLrgJicnR9KxDyc3N9fj1QAAADtqamrUo0ePxr/jobS64MY8isrNzSW4AQAgydhJKSGhGAAApBSCGwAAkFIIbgAAQEohuAEAACmF4AYAAKQUghsAAJBSCG4AAEBKIbgBAAApheAGAACklFbXoRgAALinvsHQW1v26ssD3+iEnCyd0buT2qR5O5ia4AYAAERkxYZKzXpuoyqrv2l8rVN2hi4a0l1FhX7PAh2fYRhG3N/VQzU1NcrLy1N1dTWzpQAArYrTXZZQ16/YUKlrH1+nUEFEfl6WZlxYqHED8qNeu5O/3+zcAADQCgTbZQkVfIS6fmyhX7Oe2xgysJGkqupvdO3j6/TApKGuBDh2kVAMAECKM3dZAgMV6T/Bx4oNlY6uv3fVphY/C8YMfmY9t1H1DfE7KCK4AQAghdU3GJa7LMGCDzvXP7Zmq+33NyRVVn+jt7bstb/oKBHcAACQwt7asjfkLkvz4MPO9fu/PuJ4HV8eCL/T4xaCGwAAUpjdoMK8zu71HdplyEkd1Ak5WQ6ujo7nwc2OHTs0adIkde7cWe3atdPAgQP1zjvvhLzn1Vdf1dChQ5WZmam+fftq4cKF8VksAABJxm5QYV5n9/qrRva2dZ1PxxKRz+jdydb1bvA0uNm3b59GjhypjIwMvfTSS9q4caPuvvtudezY0fKeLVu2aPz48TrnnHNUXl6uG2+8UVdffbX+8Y9/xHHlAAAkhzN6d1J+XpblLkvz4MPu9deP6asHJg1Vfp51MGQ+Y8aFhXHtd+Npn5vbbrtNa9as0b/+9S/b90ybNk0vvPCCNmzY0PjaJZdcov3792vFihUtrq+rq1NdXV3j9z |
U1NerRowd9bgAArYZZ/SSpSaKwGW40L9V2cr3ZC6d0Y5WWle/U3trDjdd71efG0+CmsLBQ5557rr744gu99tpr6t69u37+859rypQplvecffbZGjp0qO65557G1x577DHdeOONqq6ubnH9zJkzNWvWrBavE9wAAFoTN/vcWAUrsRzFkDTBTVbWsa2sm2++WT/60Y/09ttva+rUqXrwwQd1xRVXBL3npJNO0lVXXaXp06c3vvbiiy9q/PjxOnTokNq1a9fkenZuAADJzM2Awc0OxfGWNB2KGxoadPrpp+vOO++UJJ166qnasGFDyODGqczMTGVmZrryLAAA4imS3ZNQ2qT5NKJP55DXJFJAEylPg5v8/HwVFhY2ea1
"text/plain": [
"<Figure size 640x480 with 1 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"plt.scatter(X,y)\n",
"plt.xlabel('X values')\n",
"plt.ylabel('y values')"
]
},
{
"cell_type": "markdown",
"id": "68a91d93",
"metadata": {},
"source": [
"
"\n",
"<br>\n",
"\n",
"<img src=\"line_equation.gif\" width=\"200px\" height=\"200px\" align=\"left\"> <br><br>\n",
"\n",
"**y:** y values \n",
"**x:** x values \n",
"**b:** gradient \n",
"**a:** y intercept \n"
]
},
{
"cell_type": "markdown",
"id": "4999fbae",
"metadata": {},
"source": [
"
"<br>\n",
"\n",
"<img src=\"gradient.png\" width=\"230px\" height=\"230px\" align=\"left\">"
]
},
{
"cell_type": "code",
"execution_count": 60,
"id": "7fe2a45b",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"The slope of regression line: 2.0133337976122685\n"
]
}
],
"source": [
"numerator = sum((X - X.mean()) * (y - y.mean()))\n",
"denominator = sum((X - X.mean())**2)\n",
"\n",
"beta = numerator / denominator\n",
"print('The slope of regression line:', beta)"
]
},
{
"cell_type": "markdown",
"id": "5d126c20",
"metadata": {},
"source": [
"
"\n",
"<br>\n",
"\n",
"<img src=\"intercept.png\" width=\"150px\" height=\"150px\" align=\"left\">"
]
},
{
"cell_type": "code",
"execution_count": 61,
"id": "4f3979d7",
"metadata": {},
"outputs": [
{
"name": "std |
out",
"output_type": "stream",
"text": [
"The y intercept of our regression line: 4.991767313284746\n"
]
}
],
"source": [
"intercept = y.mean() - beta * X.mean()\n",
"print('The y intercept of our regression line:', intercept)"
]
},
{
"cell_type": "code",
"execution_count": 62,
"id": "88c5f347",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Given x=0.17 , our model predicts the corresponing y value shoud be 5.3340340588788315\n"
]
}
],
"source": [
"
"predicted_y_value = beta * 0.17 + intercept\n",
"print(f'Given x=0.17, our model predicts the corresponding y value should be {predicted_y_value}')"
]
},
{
"cell_type": "code",
"execution_count": 63,
"id": "d73113cf",
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAjcAAAHHCAYAAABDUnkqAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAACK70lEQVR4nO3dd3hT5RfA8W+66aJltkBZZZY9RIYIMguI8FNRhjJERGTLEhdT2TIFFGQ4wQGIyCpLlC1QtghYdssqdNKV3N8fMbFpMzuSjvN5Hh7JzXvvfXMJ9PiOc1SKoigIIYQQQhQQTo7ugBBCCCFETpLgRgghhBAFigQ3QgghhChQJLgRQgghRIEiwY0QQgghChQJboQQQghRoEhwI4QQQogCRYIbIYQQQhQoEtwIIYQQokCR4EaIAubq1auoVCrWrFnj6K6IbOjfvz8VK1Z0dDeEyJckuBEiH1mzZg0qlYo
"text/plain": [
"<Figure size 640x480 with 1 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Calculated beta: 2.0133337976122685\n",
"Calculated intercept: 4.991767313284746\n"
]
}
],
"source": [
"plt.scatter(X, y, label='Data Points')\n",
"plt.plot(X, beta * X + intercept, color='red', label='Regression Line')\n",
"plt.scatter(0.17,predicted_y_value, color='green', label='pred for x = 0.17 ')\n",
"plt.xlabel('X')\n",
"plt.ylabel('y')\n",
"plt.title('Linear Regression')\n", |
"plt.legend()\n",
"plt.grid(True)\n",
"plt.show()\n",
"\n",
"\n",
"print(f\"Calculated beta: {beta}\")\n",
"print(f\"Calculated intercept: {intercept}\")\n"
]
},
{
"cell_type": "markdown",
"id": "07ce1abf",
"metadata": {},
"source": [
"
"\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": 64,
"id": "e4a526a3",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Mean Squared Error (MSE): 0.008805873341370826\n",
"R-squared (R^2): 0.974921526753728\n"
]
}
],
"source": [
"y_pred = beta * X + intercept\n",
"\n",
"mse = np.mean((y - y_pred)**2)\n",
"y_mean = np.mean(y)\n",
"r_squared = 1 - np.sum((y - y_pred)**2) / np.sum((y - y_mean)**2)\n",
"\n",
"\n",
"print(\"Mean Squared Error (MSE):\", mse)\n",
"print(\"R-squared (R^2):\", r_squared)\n",
"\n"
]
},
{
"cell_type": "markdown",
"id": "f0e27741",
"metadata": {},
"source": [
"
"\n"
]
},
{
"cell_type": "markdown",
"id": "da10af23",
"metadata": {},
"source": [
"
"Scarb is the Cairo package manager specifically created to streamline our Cairo and Starknet development process. You can find all information about Scarb and Cairo installation <a href='https://docs.swmansion.com/scarb/download.html'>here</a>.\n"
]
},
{
"cell_type": "code",
"execution_count": 65,
"id": "a07f433b",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"\u001b[31merror\u001b[0m: destination `verifiable_linear_regression` already exists\n",
"help: use `scarb init` to initialize the directory\n"
]
}
],
"source": [
"! scarb new verifiable_linear_regression"
]
},
{
"cell_type": "markdown",
"id": "b27b16d7",
"metadata": {},
"source": [
"A new project folder will be created for you and make sure to replace the content in Scarb.toml file with the following code:"
]
},
{
"c |
ell_type": "markdown",
"id": "06745de8",
"metadata": {},
"source": [
"```toml\n",
"[package]\n",
"name = \"verifiable_linear_regression\"\n",
"version = \"0.1.0\"\n",
"\n",
"[dependencies]\n",
"orion = { git = \"https://github.com/gizatechxyz/orion.git\" }\n",
"\n",
"[scripts]\n",
"test = \"scarb cairo-test -f linear_regression_test\"\n",
"\n",
"```\n"
]
},
{
"cell_type": "markdown",
"id": "1d2536ce",
"metadata": {},
"source": [
"
"\n"
]
},
{
"cell_type": "code",
"execution_count": 68,
"id": "c1b1057f",
"metadata": {},
"outputs": [],
"source": [
"tensor_name =['X_values', 'Y_values']\n",
"\n",
"base_path = os.path.expanduser(\"~/verifiable_linear_regression/src\")\n",
"\n",
"def generate_cairo_files(data, name):\n",
"    \"\"\"Write `data` into src/generated/{name}.cairo as an Orion FP16x16 tensor constructor.\n",
"\n",
"    Each value is stored as FixedTrait::new(mag, sign): mag = |value| * 2**16,\n",
"    sign = 'true' for negative values.\n",
"    \"\"\"\n",
"    generated_path = os.path.join(base_path, 'generated')\n",
"    os.makedirs(generated_path, exist_ok=True)\n",
"\n",
"    with open(os.path.join(generated_path, f\"{name}.cairo\"), \"w\") as f:\n",
"        # f-strings keep brace escaping consistent; the previous mix of '+' and\n",
"        # implicit concatenation applied .format() to only part of the text,\n",
"        # leaving literal '{{' in the generated file and a wrong 'FixedType' type.\n",
"        f.write(\n",
"            \"use array::ArrayTrait;\\n\"\n",
"            \"use orion::operators::tensor::{FP16x16Tensor, TensorTrait, Tensor};\\n\"\n",
"            \"use orion::numbers::{FixedTrait, FP16x16, FP16x16Impl};\\n\"\n",
"            f\"\\nfn {name}() -> Tensor<FP16x16> {{\\n\"\n",
"            \"    let mut shape = ArrayTrait::new();\\n\"\n",
"        )\n",
"        for dim in data.shape:\n",
"            f.write(f\"    shape.append({dim});\\n\")\n",
"        f.write(\"    let mut data = ArrayTrait::new();\\n\")\n",
"        for val in np.nditer(data.flatten()):\n",
"            f.write(f\"    data.append(FixedTrait::new({abs(int(val * 2**16))}, {str(val < 0).lower()}));\\n\")\n",
"        f.write(\n",
"            \"    let tensor = TensorTrait::<FP16x16>::new(shape.span(), data.span());\\n\\n\"\n",
"            \"    return tensor;\\n\"\n",
"            \"}\\n\"\n",
"        )\n",
"    with open(os.path.join(base_path, 'generated.cairo'), 'w') as f:\n",
"        for param_name in tensor_name:\n",
"            f.write(f\"mod {param_name};\\n\")"
]
},
{
"cell_type": "code",
"execution_count": 69,
"id": "e1f168e7",
"metadata": {},
"outputs": [],
"source": [
"generate_cairo_files(X, 'X_values')\n",
"generate_cairo_files(y, 'Y_values')"
]
},
{
"cell_type": "markdown",
"id": "31671139",
"metadata": {},
"source": [
"
]
},
{
"cell_type": "code",
"execution_count": 75,
"id": "c7204a1b",
"metadata": {},
"outputs": [],
"source": [
"! touch verifiable_linear_regression/src/lin_reg_func.cairo"
]
},
{
"cell_type": "code",
"execution_count": 76,
"id": "4c0b8e16",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Overwriting verifiable_linear_regression/src/lin_reg_func.cairo\n"
]
}
],
"source": [
"%%writefile verifiable_linear_regression/src/lin_reg_func.cairo\n",
"\n",
"use orion::operators::tensor::{Tensor, TensorTrait, FP16x16Tensor};\n",
"use orion::numbers::{FP16x16, FixedTrait};\n",
"\n",
"
"fn calculate_mean(tensor_data: Tensor<FP16x16>) -> FP16x16 {\n",
" let tensor_size = FixedTrait::<FP16x16>::new_unscaled(tensor_data.data.len(), false);\n",
"\n",
" let cumulated_sum = tensor_data.cumsum(0, Option::None(()), Option::None(()));\n",
" let sum_result = cumulated_sum.data[tensor_data.data.len() - 1];\n",
" let mean = *sum_result / tensor_size;\n",
"\n",
" return mean;\n",
"}\n",
"\n",
"
"fn deviation_from_mean(tensor_data: Tensor<FP16x16>) -> Tensor<FP16x16> {\n",
" let mean_value = calculate_mean(tensor_data);\n",
"\n",
" let mut tensor_shape = array::ArrayTrait::new();\ |
n",
" tensor_shape.append(tensor_data.data.len());\n",
"\n",
" let mut deviation_values = array::ArrayTrait::new();\n",
"\n",
" let mut i: u32 = 0;\n",
" loop {\n",
" if i >= tensor_data.data.len() {\n",
" break ();\n",
" }\n",
" let distance_from_mean = *tensor_data.data.at(i) - mean_value;\n",
" deviation_values.append(distance_from_mean);\n",
" i += 1;\n",
" };\n",
"\n",
" let distance_from_mean_tensor = TensorTrait::<FP16x16>::new(\n",
" tensor_shape.span(), deviation_values.span()\n",
" );\n",
"\n",
" return distance_from_mean_tensor;\n",
"}\n",
"\n",
"\n",
"
"fn compute_beta(x_values: Tensor<FP16x16>, y_values: Tensor<FP16x16>) -> FP16x16 {\n",
" let x_deviation = deviation_from_mean(x_values);\n",
" let y_deviation = deviation_from_mean(y_values);\n",
"\n",
" let x_y_covariance = x_deviation.matmul(@y_deviation);\n",
" let x_variance = x_deviation.matmul(@x_deviation);\n",
"\n",
" let beta_value = *x_y_covariance.data.at(0) / *x_variance.data.at(0);\n",
"\n",
" return beta_value;\n",
"}\n",
"\n",
"
"fn compute_intercept(\n",
" beta_value: FP16x16, x_values: Tensor<FP16x16>, y_values: Tensor<FP16x16>\n",
") -> FP16x16 {\n",
" let x_mean = calculate_mean(x_values);\n",
" let y_mean = calculate_mean(y_values);\n",
"\n",
" let mx = beta_value * x_mean;\n",
" let intercept = y_mean - mx;\n",
"\n",
" return intercept;\n",
"}\n",
"\n",
"
"fn predict_y_values(\n",
" beta_value: FP16x16, x_values: Tensor<FP16x16>, y_values: Tensor<FP16x16>\n",
") -> Tensor<FP16x16> {\n",
" let beta = compute_beta(x_values, y_values);\n",
" let intercept = compute_intercept(beta_value, x_values, y_values);\n",
"\n",
"
" let mut y_pred_shape = array::ArrayTrait::new();\n",
" y |
_pred_shape.append(y_values.data.len());\n",
"\n",
" let mut y_pred_vals = array::ArrayTrait::new();\n",
"\n",
" let mut i: u32 = 0;\n",
" loop {\n",
" if i >= y_values.data.len() {\n",
" break ();\n",
" }\n",
"
" let predicted_value = beta * *x_values.data.at(i) + intercept;\n",
" y_pred_vals.append(predicted_value);\n",
" i += 1;\n",
" };\n",
"\n",
" let y_pred_tensor = TensorTrait::<FP16x16>::new(y_pred_shape.span(), y_pred_vals.span());\n",
"\n",
" return y_pred_tensor;\n",
"}\n",
"\n",
"\n",
"
"fn compute_mse(y_values: Tensor<FP16x16>, y_pred_values: Tensor<FP16x16>) -> FP16x16 {\n",
" let mut squared_diff_shape = array::ArrayTrait::new();\n",
" squared_diff_shape.append(y_values.data.len());\n",
"\n",
" let mut squared_diff_vals = array::ArrayTrait::new();\n",
"\n",
" let mut i: u32 = 0;\n",
" loop {\n",
" if i >= y_values.data.len() {\n",
" break ();\n",
" }\n",
" let diff = *y_values.data.at(i) - *y_pred_values.data.at(i);\n",
" let squared_diff = diff * diff;\n",
" squared_diff_vals.append(squared_diff);\n",
" i += 1;\n",
" };\n",
"\n",
" let squared_diff_tensor = TensorTrait::<FP16x16>::new(\n",
" squared_diff_shape.span(), squared_diff_vals.span()\n",
" );\n",
"\n",
" let mse = calculate_mean(squared_diff_tensor);\n",
"\n",
" return mse;\n",
"}\n",
"\n",
"
"fn calculate_r_score(y_values: Tensor<FP16x16>, y_pred_values: Tensor<FP16x16>) -> FP16x16 {\n",
" let mean_y_value = calculate_mean(y_values);\n",
"\n",
"
" let mut squared_diff_shape = array::ArrayTrait::new();\n",
" squared_diff_shape.append(y_values.data.len());\n",
" let mut squared_diff_vals = array::ArrayTrait::new();\n",
"\n",
" let mut squ |
ared_mean_diff_shape = array::ArrayTrait::new();\n",
" squared_mean_diff_shape.append(y_values.data.len());\n",
" let mut squared_mean_diff_vals = array::ArrayTrait::new();\n",
"\n",
" let mut i: u32 = 0;\n",
" loop {\n",
" if i >= y_values.data.len() {\n",
" break ();\n",
" }\n",
" let diff_pred = *y_values.data.at(i) - *y_pred_values.data.at(i);\n",
" let squared_diff = diff_pred * diff_pred;\n",
" squared_diff_vals.append(squared_diff);\n",
"\n",
" let diff_mean = *y_values.data.at(i) - mean_y_value;\n",
" let squared_mean_diff = diff_mean * diff_mean;\n",
" squared_mean_diff_vals.append(squared_mean_diff);\n",
" i += 1;\n",
" };\n",
"\n",
" let squared_diff_tensor = TensorTrait::<FP16x16>::new(\n",
" squared_diff_shape.span(), squared_diff_vals.span()\n",
" );\n",
" let squared_mean_diff_tensor = TensorTrait::<FP16x16>::new(\n",
" squared_mean_diff_shape.span(), squared_mean_diff_vals.span()\n",
" );\n",
"\n",
" let sum_squared_diff = squared_diff_tensor.cumsum(0, Option::None(()), Option::None(()));\n",
" let sum_squared_mean_diff = squared_mean_diff_tensor\n",
" .cumsum(0, Option::None(()), Option::None(()));\n",
"\n",
" let r_score = FixedTrait::new_unscaled(1, false)\n",
" - *sum_squared_diff.data.at(y_values.data.len() - 1)\n",
" / *sum_squared_mean_diff.data.at(y_values.data.len() - 1);\n",
"\n",
" return r_score;\n",
"}\n"
]
},
{
"cell_type": "markdown",
"id": "cdc968d1",
"metadata": {},
"source": [
"
]
},
{
"cell_type": "code",
"execution_count": 77,
"id": "cd53bcfb",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Writing verifiable_linear_regression/src/test.cairo\n"
]
}
],
"source": [
"%% |
writefile verifiable_linear_regression/src/test.cairo\n",
"use debug::PrintTrait;\n",
"\n",
"use verifiable_linear_regression::generated::X_values::X_values;\n",
"use verifiable_linear_regression::generated::Y_values::Y_values;\n",
"use verifiable_linear_regression::lin_reg_func::{\n",
" calculate_mean, deviation_from_mean, compute_beta, compute_intercept, predict_y_values,\n",
" compute_mse, calculate_r_score\n",
"};\n",
"\n",
"\n",
"
"
" |
fn linear_regression_test() {\n",
"
" let y_values = Y_values();\n",
" let x_values = X_values();\n",
"\n",
"
"\n",
" let beta_value = compute_beta(x_values, y_values);\n",
"
"\n",
" let intercept_value = compute_intercept(beta_value, x_values, y_values);\n",
"
"\n",
" let y_pred = predict_y_values(beta_value, x_values, y_values);\n",
"\n",
" let mse = compute_mse(y_values, y_pred);\n",
"
"\n",
" let r_score = calculate_r_score(y_values, y_pred);\n",
" r_score.print();
"\n",
" assert(beta_value.mag > 0, 'x & y not positively correlated');\n",
" assert(r_score.mag > 0, 'R-Squared needs to be above 0');\n",
" assert(\n",
" r_score.mag < 65536, 'R-Squared has to be below 65536'\n",
" );
" assert(r_score.mag > 32768, 'Accuracy below 50% ');\n",
"}"
]
},
{
"cell_type": "code",
"execution_count": 78,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Overwriting verifiable_linear_regression/src/lib.cairo\n"
]
}
],
"source": [
"%%writefile verifiable_linear_regression/src/lib.cairo\n",
"\n",
"mod generated;\n",
"mod lin_reg_func;\n",
"mod test;"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"
]
},
{
"cell_type": "code",
"execution_count": 81,
"id": "ae8a18aa",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"\u001b[2K\u001b[32m\u001b[1m Compiling\u001b[0m test(verifiable_linear_regression_unittest) verifiable_linear_regression v0.1.0 (/Users/raphaeldoukhan/Desktop/Orion-Giza/Academy/Tutorials/orion_tutorials/basic/verifiable_linear_regression_model/Scarb.toml)e --recurse-submodules /Users/ 0s\n",
"\u001b[32m\u001b[1m Finished\u001b[0m release target(s) in 7 seconds\n",
"testing verifiable_linear_regressio |
n ...\n",
"running 1 tests\n",
"[DEBUG]\tfalse \t(raw: 0x66616c7365\n",
"\n",
"[DEBUG]\t \t(raw: 0xf996 \n",
"\n",
"test verifiable_linear_regression::test::linear_regression_test ... \u001b[92mok\u001b[0m (gas usage est.: 273795540)\n",
"test result: \u001b[92mok\u001b[0m. 1 passed; 0 failed; 0 ignored; 0 filtered out;\n",
"\n"
]
}
],
"source": [
"# Each `!` line runs in its own subshell, so a standalone `cd` does not\n",
"# affect the next command; chain them so the test runs inside the project.\n",
"! cd verifiable_linear_regression && scarb cairo-test -f linear_regression_test"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "cfbcc71c",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.11"
}
},
"nbformat": 4,
"nbformat_minor": 5
} |
// generated.cairo — module index for the machine-generated tensor data files
// (written by the notebook's generate_cairo_files helper).
mod X_values;
mod Y_values;
|
// Machine-generated (generate_cairo_files): the 150 X samples of the notebook's
// np.linspace(-0.5, 0.5, 150) as an Orion FP16x16 tensor.
// FixedTrait::new(mag, sign): mag is |value| * 2^16 (32768 == 0.5),
// sign == true means the value is negative.
use array::ArrayTrait;
use orion::operators::tensor::{FP16x16Tensor, TensorTrait, Tensor};
use orion::numbers::{FixedTrait, FP16x16, FP16x16Impl};
fn X_values() -> Tensor<FP16x16> {
    let mut shape = ArrayTrait::new();
    shape.append(150);
    let mut data = ArrayTrait::new();
    data.append(FixedTrait::new(32768, true));
    data.append(FixedTrait::new(32328, true));
    data.append(FixedTrait::new(31888, true));
    data.append(FixedTrait::new(31448, true));
    data.append(FixedTrait::new(31008, true));
    data.append(FixedTrait::new(30568, true));
    data.append(FixedTrait::new(30128, true));
    data.append(FixedTrait::new(29689, true));
    data.append(FixedTrait::new(29249, true));
    data.append(FixedTrait::new(28809, true));
    data.append(FixedTrait::new(28369, true));
    data.append(FixedTrait::new(27929, true));
    data.append(FixedTrait::new(27489, true));
    data.append(FixedTrait::new(27050, true));
    data.append(FixedTrait::new(26610, true));
    data.append(FixedTrait::new(26170, true));
    data.append(FixedTrait::new(25730, true));
    data.append(FixedTrait::new(25290, true));
    data.append(FixedTrait::new(24850, true));
    data.append(FixedTrait::new(24411, true));
    data.append(FixedTrait::new(23971, true));
    data.append(FixedTrait::new(23531, true));
    data.append(FixedTrait::new(23091, true));
    data.append(FixedTrait::new(22651, true));
    data.append(FixedTrait::new(22211, true));
    data.append(FixedTrait::new(21772, true));
    data.append(FixedTrait::new(21332, true));
    data.append(FixedTrait::new(20892, true));
    data.append(FixedTrait::new(20452, true));
    data.append(FixedTrait::new(20012, true));
    data.append(FixedTrait::new(19572, true));
    data.append(FixedTrait::new(19132, true));
    data.append(FixedTrait::new(18693, true));
    data.append(FixedTrait::new(18253, true));
    data.append(FixedTrait::new(17813, true));
    data.append(FixedTrait::new(17373, true));
    data.append(FixedTrait::new(16933, true));
    data.append(FixedTrait::new(16493, true));
    data.append(FixedTrait::new(16054, true));
    data.append(FixedTrait::new(15614, true));
    data.append(FixedTrait::new(15174, true));
    data.append(FixedTrait::new(14734, true));
    data.append(FixedTrait::new(14294, true));
    data.append(FixedTrait::new(13854, true));
    data.append(FixedTrait::new(13415, true));
    data.append(FixedTrait::new(12975, true));
    data.append(FixedTrait::new(12535, true));
    data.append(FixedTrait::new(12095, true));
    data.append(FixedTrait::new(11655, true));
    data.append(FixedTrait::new(11215, true));
    data.append(FixedTrait::new(10776, true));
    data.append(FixedTrait::new(10336, true));
    data.append(FixedTrait::new(9896, true));
    data.append(FixedTrait::new(9456, true));
    data.append(FixedTrait::new(9016, true));
    data.append(FixedTrait::new(8576, true));
    data.append(FixedTrait::new(8137, true));
    data.append(FixedTrait::new(7697, true));
    data.append(FixedTrait::new(7257, true));
    data.append(FixedTrait::new(6817, true));
    data.append(FixedTrait::new(6377, true));
    data.append(FixedTrait::new(5937, true));
    data.append(FixedTrait::new(5497, true));
    data.append(FixedTrait::new(5058, true));
    data.append(FixedTrait::new(4618, true));
    data.append(FixedTrait::new(4178, true));
    data.append(FixedTrait::new(3738, true));
    data.append(FixedTrait::new(3298, true));
    data.append(FixedTrait::new(2858, true));
    data.append(FixedTrait::new(2419, true));
    data.append(FixedTrait::new(1979, true));
    data.append(FixedTrait::new(1539, true));
    data.append(FixedTrait::new(1099, true));
    data.append(FixedTrait::new(659, true));
    data.append(FixedTrait::new(219, true));
    data.append(FixedTrait::new(219, false));
    data.append(FixedTrait::new(659, false));
    data.append(FixedTrait::new(1099, false));
    data.append(FixedTrait::new(1539, false));
    data.append(FixedTrait::new(1979, false));
    data.append(FixedTrait::new(2419, false));
    data.append(FixedTrait::new(2858, false));
    data.append(FixedTrait::new(3298, false));
    data.append(FixedTrait::new(3738, false));
    data.append(FixedTrait::new(4178, false));
    data.append(FixedTrait::new(4618, false));
    data.append(FixedTrait::new(5058, false));
    data.append(FixedTrait::new(5497, false));
    data.append(FixedTrait::new(5937, false));
    data.append(FixedTrait::new(6377, false));
    data.append(FixedTrait::new(6817, false));
    data.append(FixedTrait::new(7257, false));
    data.append(FixedTrait::new(7697, false));
    data.append(FixedTrait::new(8137, false));
    data.append(FixedTrait::new(8576, false));
    data.append(FixedTrait::new(9016, false));
    data.append(FixedTrait::new(9456, false));
    data.append(FixedTrait::new(9896, false));
    data.append(FixedTrait::new(10336, false));
    data.append(FixedTrait::new(10776, false));
    data.append(FixedTrait::new(11215, false));
    data.append(FixedTrait::new(11655, false));
    data.append(FixedTrait::new(12095, false));
    data.append(FixedTrait::new(12535, false));
    data.append(FixedTrait::new(12975, false));
    data.append(FixedTrait::new(13415, false));
    data.append(FixedTrait::new(13854, false));
    data.append(FixedTrait::new(14294, false));
    data.append(FixedTrait::new(14734, false));
    data.append(FixedTrait::new(15174, false));
    data.append(FixedTrait::new(15614, false));
    data.append(FixedTrait::new(16054, false));
    data.append(FixedTrait::new(16493, false));
    data.append(FixedTrait::new(16933, false));
    data.append(FixedTrait::new(17373, false));
    data.append(FixedTrait::new(17813, false));
    data.append(FixedTrait::new(18253, false));
    data.append(FixedTrait::new(18693, false));
    data.append(FixedTrait::new(19132, false));
    data.append(FixedTrait::new(19572, false));
    data.append(FixedTrait::new(20012, false));
    data.append(FixedTrait::new(20452, false));
    data.append(FixedTrait::new(20892, false));
    data.append(FixedTrait::new(21332, false));
    data.append(FixedTrait::new(21772, false));
    data.append(FixedTrait::new(22211, false));
    data.append(FixedTrait::new(22651, false));
    data.append(FixedTrait::new(23091, false));
    data.append(FixedTrait::new(23531, false));
    data.append(FixedTrait::new(23971, false));
    data.append(FixedTrait::new(24411, false));
    data.append(FixedTrait::new(24850, false));
    data.append(FixedTrait::new(25290, false));
    data.append(FixedTrait::new(25730, false));
    data.append(FixedTrait::new(26170, false));
    data.append(FixedTrait::new(26610, false));
    data.append(FixedTrait::new(27050, false));
    data.append(FixedTrait::new(27489, false));
    data.append(FixedTrait::new(27929, false));
    data.append(FixedTrait::new(28369, false));
    data.append(FixedTrait::new(28809, false));
    data.append(FixedTrait::new(29249, false));
    data.append(FixedTrait::new(29689, false));
    data.append(FixedTrait::new(30128, false));
    data.append(FixedTrait::new(30568, false));
    data.append(FixedTrait::new(31008, false));
    data.append(FixedTrait::new(31448, false));
    data.append(FixedTrait::new(31888, false));
    data.append(FixedTrait::new(32328, false));
    data.append(FixedTrait::new(32768, false));
    let tensor = TensorTrait::<FP16x16>::new(shape.span(), data.span());
    return tensor;
}
// Machine-generated (generate_cairo_files): the 150 noisy y samples
// (y = 2x + 5 + noise) as an Orion FP16x16 tensor.
// FixedTrait::new(mag, sign): mag is value * 2^16; all y values are positive
// here, so sign is always false.
use array::ArrayTrait;
use orion::operators::tensor::{FP16x16Tensor, TensorTrait, Tensor};
use orion::numbers::{FixedTrait, FP16x16, FP16x16Impl};
fn Y_values() -> Tensor<FP16x16> {
    let mut shape = ArrayTrait::new();
    shape.append(150);
    let mut data = ArrayTrait::new();
    data.append(FixedTrait::new(265399, false));
    data.append(FixedTrait::new(262117, false));
    data.append(FixedTrait::new(268148, false));
    data.append(FixedTrait::new(274764, false));
    data.append(FixedTrait::new(264128, false));
    data.append(FixedTrait::new(265007, false));
    data.append(FixedTrait::new(277771, false));
    data.append(FixedTrait::new(273331, false));
    data.append(FixedTrait::new(266104, false));
    data.append(FixedTrait::new(273616, false));
    data.append(FixedTrait::new(267903, false));
    data.append(FixedTrait::new(268768, false));
    data.append(FixedTrait::new(274285, false));
    data.append(FixedTrait::new(261040, false));
    data.append(FixedTrait::new(263155, false));
    data.append(FixedTrait::new(271654, false));
    data.append(FixedTrait::new(269581, false));
    data.append(FixedTrait::new(279157, false));
    data.append(FixedTrait::new(272027, false));
    data.append(FixedTrait::new(269602, false));
    data.append(FixedTrait::new(289342, false));
    data.append(FixedTrait::new(279137, false));
    data.append(FixedTrait::new(281939, false));
    data.append(FixedTrait::new(273039, false));
    data.append(FixedTrait::new(279688, false));
    data.append(FixedTrait::new(284862, false));
    data.append(FixedTrait::new(277472, false));
    data.append(FixedTrait::new(288357, false));
    data.append(FixedTrait::new(282838, false));
    data.append(FixedTrait::new(285743, false));
    data.append(FixedTrait::new(284590, false));
    data.append(FixedTrait::new(301553, false));
    data.append(FixedTrait::new(290205, false));
    data.append(FixedTrait::new(284241, false));
    data.append(FixedTrait::new(297443, false));
    data.append(FixedTrait::new(284931, false));
    data.append(FixedTrait::new(295181, false));
    data.append(FixedTrait::new(281849, false));
    data.append(FixedTrait::new(286867, false));
    data.append(FixedTrait::new(297741, false));
    data.append(FixedTrait::new(302170, false));
    data.append(FixedTrait::new(299333, false));
    data.append(FixedTrait::new(298332, false));
    data.append(FixedTrait::new(297996, false));
    data.append(FixedTrait::new(291160, false));
    data.append(FixedTrait::new(297011, false));
    data.append(FixedTrait::new(299590, false));
    data.append(FixedTrait::new(310416, false));
    data.append(FixedTrait::new(306620, false));
    data.append(FixedTrait::new(293693, false));
    data.append(FixedTrait::new(308251, false));
    data.append(FixedTrait::new(304483, false));
    data.append(FixedTrait::new(303450, false));
    data.append(FixedTrait::new(312775, false));
    data.append(FixedTrait::new(316403, false));
    data.append(FixedTrait::new(316629, false));
    data.append(FixedTrait::new(305906, false));
    data.append(FixedTrait::new(310259, false));
    data.append(FixedTrait::new(315336, false));
    data.append(FixedTrait::new(320438, false));
    data.append(FixedTrait::new(311784, false));
    data.append(FixedTrait::new(314587, false));
    data.append(FixedTrait::new(309433, false));
    data.append(FixedTrait::new(309724, false));
    data.append(FixedTrait::new(323768, false));
    data.append(FixedTrait::new(328211, false));
    data.append(FixedTrait::new(319730, false));
    data.append(FixedTrait::new(327659, false));
    data.append(FixedTrait::new(324332, false));
    data.append(FixedTrait::new(318613, false));
    data.append(FixedTrait::new(326089, false));
    data.append(FixedTrait::new(334680, false));
    data.append(FixedTrait::new(325246, false));
    data.append(FixedTrait::new(336614, false));
    data.append(FixedTrait::new(310071, false));
    data.append(FixedTrait::new(333506, false));
    data.append(FixedTrait::new(329569, false));
    data.append(FixedTrait::new(327919, false));
    data.append(FixedTrait::new(331360, false));
    data.append(FixedTrait::new(318612, false));
    data.append(FixedTrait::new(331078, false));
    data.append(FixedTrait::new(335738, false));
    data.append(FixedTrait::new(343963, false));
    data.append(FixedTrait::new(331760, false));
    data.append(FixedTrait::new(330738, false));
    data.append(FixedTrait::new(333628, false));
    data.append(FixedTrait::new(343795, false));
    data.append(FixedTrait::new(340830, false));
    data.append(FixedTrait::new(336083, false));
    data.append(FixedTrait::new(343799, false));
    data.append(FixedTrait::new(341951, false));
    data.append(FixedTrait::new(348542, false));
    data.append(FixedTrait::new(338473, false));
    data.append(FixedTrait::new(341806, false));
    data.append(FixedTrait::new(342263, false));
    data.append(FixedTrait::new(336122, false));
    data.append(FixedTrait::new(348533, false));
    data.append(FixedTrait::new(349183, false));
    data.append(FixedTrait::new(348385, false));
    data.append(FixedTrait::new(347694, false));
    data.append(FixedTrait::new(340836, false));
    data.append(FixedTrait::new(348234, false));
    data.append(FixedTrait::new(349625, false));
    data.append(FixedTrait::new(347493, false));
    data.append(FixedTrait::new(352573, false));
    data.append(FixedTrait::new(357158, false));
    data.append(FixedTrait::new(367751, false));
    data.append(FixedTrait::new(357413, false));
    data.append(FixedTrait::new(358837, false));
    data.append(FixedTrait::new(357540, false));
    data.append(FixedTrait::new(346333, false));
    data.append(FixedTrait::new(359614, false));
    data.append(FixedTrait::new(361062, false));
    data.append(FixedTrait::new(377690, false));
    data.append(FixedTrait::new(361166, false));
    data.append(FixedTrait::new(365283, false));
    data.append(FixedTrait::new(363959, false));
    data.append(FixedTrait::new(357407, false));
    data.append(FixedTrait::new(373435, false));
    data.append(FixedTrait::new(371753, false));
    data.append(FixedTrait::new(372889, false));
    data.append(FixedTrait::new(362625, false));
    data.append(FixedTrait::new(378658, false));
    data.append(FixedTrait::new(361157, false));
    data.append(FixedTrait::new(375070, false));
    data.append(FixedTrait::new(386459, false));
    data.append(FixedTrait::new(366491, false));
    data.append(FixedTrait::new(370151, false));
    data.append(FixedTrait::new(375395, false));
    data.append(FixedTrait::new(372322, false));
    data.append(FixedTrait::new(366339, false));
    data.append(FixedTrait::new(377831, false));
    data.append(FixedTrait::new(371299, false));
    data.append(FixedTrait::new(382244, false));
    data.append(FixedTrait::new(373995, false));
    data.append(FixedTrait::new(391058, false));
    data.append(FixedTrait::new(376647, false));
    data.append(FixedTrait::new(380549, false));
    data.append(FixedTrait::new(388871, false));
    data.append(FixedTrait::new(376352, false));
    data.append(FixedTrait::new(386789, false));
    data.append(FixedTrait::new(394745, false));
    data.append(FixedTrait::new(376523, false));
    data.append(FixedTrait::new(389147, false));
    data.append(FixedTrait::new(390520, false));
    data.append(FixedTrait::new(394821, false));
    data.append(FixedTrait::new(382470, false));
    data.append(FixedTrait::new(382802, false));
    data.append(FixedTrait::new(395756, false));
    data.append(FixedTrait::new(395162, false));
    let tensor = TensorTrait::<FP16x16>::new(shape.span(), data.span());
    return tensor;
}
mod generated;
mod test;
mod lin_reg_func; |
use orion::operators::tensor::{Tensor, TensorTrait, FP16x16Tensor};
use orion::numbers::{FP16x16, FixedTrait};
/// Computes the arithmetic mean of all elements of a 1-D FP16x16 tensor.
///
/// The mean is obtained by taking the last entry of the cumulative sum
/// (i.e. the total sum) and dividing it by the element count.
fn calculate_mean(tensor_data: Tensor<FP16x16>) -> FP16x16 {
    let n_elements = tensor_data.data.len();
    // Cumulative sum along axis 0; its final entry equals the total sum.
    let running_sum = tensor_data.cumsum(0, Option::None(()), Option::None(()));
    let total = *running_sum.data[n_elements - 1];
    let count = FixedTrait::<FP16x16>::new_unscaled(n_elements, false);
    total / count
}
/// Returns a tensor whose i-th entry is the signed deviation of the i-th
/// input element from the input's mean: dᵢ = xᵢ − x̄.
fn deviation_from_mean(tensor_data: Tensor<FP16x16>) -> Tensor<FP16x16> {
    let mean_value = calculate_mean(tensor_data);
    let n_elements = tensor_data.data.len();

    // Output is 1-D with the same length as the input.
    let mut shape = array::ArrayTrait::new();
    shape.append(n_elements);

    let mut deviations = array::ArrayTrait::new();
    let mut idx: u32 = 0;
    loop {
        if idx >= n_elements {
            break ();
        }
        deviations.append(*tensor_data.data.at(idx) - mean_value);
        idx += 1;
    };

    TensorTrait::<FP16x16>::new(shape.span(), deviations.span())
}
/// Ordinary-least-squares slope estimate:
/// β = Σ(xᵢ − x̄)(yᵢ − ȳ) / Σ(xᵢ − x̄)².
fn compute_beta(x_values: Tensor<FP16x16>, y_values: Tensor<FP16x16>) -> FP16x16 {
    let x_centered = deviation_from_mean(x_values);
    let y_centered = deviation_from_mean(y_values);
    // matmul of two 1-D tensors acts as a dot product, producing a
    // single-element tensor.
    let covariance_xy = x_centered.matmul(@y_centered);
    let variance_x = x_centered.matmul(@x_centered);
    *covariance_xy.data.at(0) / *variance_x.data.at(0)
}
/// Ordinary-least-squares intercept: α = ȳ − β·x̄.
fn compute_intercept(
    beta_value: FP16x16, x_values: Tensor<FP16x16>, y_values: Tensor<FP16x16>
) -> FP16x16 {
    calculate_mean(y_values) - beta_value * calculate_mean(x_values)
}
/// Predicts fitted values ŷᵢ = β·xᵢ + α for every x in `x_values`.
///
/// Fix: the original body recomputed the slope via
/// `compute_beta(x_values, y_values)` and used that local `beta` for the
/// predictions, while the `beta_value` parameter was used only to compute
/// the intercept. The passed-in `beta_value` is now used consistently for
/// both, which also removes a redundant full pass over the data. Callers
/// that pass `beta_value = compute_beta(x_values, y_values)` (as the test
/// does) get numerically identical results.
fn predict_y_values(
    beta_value: FP16x16, x_values: Tensor<FP16x16>, y_values: Tensor<FP16x16>
) -> Tensor<FP16x16> {
    // α = ȳ − β·x̄, derived from the same beta used for the slope below.
    let intercept = compute_intercept(beta_value, x_values, y_values);
    // Output is 1-D with one prediction per observation.
    let mut y_pred_shape = array::ArrayTrait::new();
    y_pred_shape.append(y_values.data.len());
    let mut y_pred_vals = array::ArrayTrait::new();
    let mut i: u32 = 0;
    loop {
        if i >= y_values.data.len() {
            break ();
        }
        // ŷ = β·x + α
        let predicted_value = beta_value * *x_values.data.at(i) + intercept;
        y_pred_vals.append(predicted_value);
        i += 1;
    };
    let y_pred_tensor = TensorTrait::<FP16x16>::new(y_pred_shape.span(), y_pred_vals.span());
    return y_pred_tensor;
}
/// Mean squared error between observed and predicted values:
/// MSE = mean((yᵢ − ŷᵢ)²).
fn compute_mse(y_values: Tensor<FP16x16>, y_pred_values: Tensor<FP16x16>) -> FP16x16 {
    let n_points = y_values.data.len();

    let mut shape = array::ArrayTrait::new();
    shape.append(n_points);

    // Accumulate the per-observation squared residuals.
    let mut squared_errors = array::ArrayTrait::new();
    let mut idx: u32 = 0;
    loop {
        if idx >= n_points {
            break ();
        }
        let residual = *y_values.data.at(idx) - *y_pred_values.data.at(idx);
        squared_errors.append(residual * residual);
        idx += 1;
    };

    let squared_error_tensor = TensorTrait::<FP16x16>::new(shape.span(), squared_errors.span());
    // The MSE is simply the mean of the squared residuals.
    calculate_mean(squared_error_tensor)
}
/// Coefficient of determination (R²) of the predictions:
/// R² = 1 − SS_res / SS_tot,
/// where SS_res = Σ(yᵢ − ŷᵢ)² and SS_tot = Σ(yᵢ − ȳ)².
fn calculate_r_score(y_values: Tensor<FP16x16>, y_pred_values: Tensor<FP16x16>) -> FP16x16 {
let mean_y_value = calculate_mean(y_values);
// Residual squared differences (yᵢ − ŷᵢ)², one per observation.
let mut squared_diff_shape = array::ArrayTrait::new();
squared_diff_shape.append(y_values.data.len());
let mut squared_diff_vals = array::ArrayTrait::new();
// Squared deviations from the mean (yᵢ − ȳ)², one per observation.
let mut squared_mean_diff_shape = array::ArrayTrait::new();
squared_mean_diff_shape.append(y_values.data.len());
let mut squared_mean_diff_vals = array::ArrayTrait::new();
let mut i: u32 = 0;
loop {
if i >= y_values.data.len() {
break ();
}
// Residual term for SS_res.
let diff_pred = *y_values.data.at(i) - *y_pred_values.data.at(i);
let squared_diff = diff_pred * diff_pred;
squared_diff_vals.append(squared_diff);
// Deviation term for SS_tot.
let diff_mean = *y_values.data.at(i) - mean_y_value;
let squared_mean_diff = diff_mean * diff_mean;
squared_mean_diff_vals.append(squared_mean_diff);
i += 1;
};
let squared_diff_tensor = TensorTrait::<FP16x16>::new(
squared_diff_shape.span(), squared_diff_vals.span()
);
let squared_mean_diff_tensor = TensorTrait::<FP16x16>::new(
squared_mean_diff_shape.span(), squared_mean_diff_vals.span()
);
// cumsum's last entry is the total sum, i.e. SS_res and SS_tot.
let sum_squared_diff = squared_diff_tensor.cumsum(0, Option::None(()), Option::None(()))
let sum_squared_mean_diff = squared_mean_diff_tensor
.cumsum(0, Option::None(()), Option::None(()));
// R² = 1 − SS_res / SS_tot (1 is represented unscaled in FP16x16).
let r_score = FixedTrait::new_unscaled(1, false)
- *sum_squared_diff.data.at(y_values.data.len() - 1)
/ *sum_squared_mean_diff.data.at(y_values.data.len() - 1);
return r_score;
}
use debug::PrintTrait;
use verifiable_linear_regression::generated::X_values::X_values;
use verifiable_linear_regression::generated::Y_values::Y_values;
use verifiable_linear_regression::lin_reg_func::{
calculate_mean, deviation_from_mean, compute_beta, compute_intercept, predict_y_values,
compute_mse, calculate_r_score
};
// End-to-end check of the verifiable linear regression pipeline:
// fit beta/intercept on the generated X/Y data, predict, then validate
// correlation sign and R² bounds in FP16x16 representation.
#[test]
#[available_gas(99999999999999999)]
fn linear_regression_test() {
// Fetching the x and y values
let y_values = Y_values();
let x_values = X_values();
// (*x_values.data.at(18)).print();
let beta_value = compute_beta(x_values, y_values);
// beta_value.print(); // calculated gradient (slope) value
let intercept_value = compute_intercept(beta_value, x_values, y_values);
// intercept_value.print(); // calculated intercept value
let y_pred = predict_y_values(beta_value, x_values, y_values);
let mse = compute_mse(y_values, y_pred);
// mse.print(); // mean squared error output
let r_score = calculate_r_score(y_values, y_pred);
r_score.print(); // accuracy of model around 0.97494506835
// Slope magnitude > 0 means x and y move together.
assert(beta_value.mag > 0, 'x & y not positively correlated');
assert(r_score.mag > 0, 'R-Squared needs to be above 0');
assert(
r_score.mag < 65536, 'R-Squared has to be below 65536'
); // 65536 represents ONE in fp16x16.
// 32768 is 0.5 in FP16x16, so this requires R² > 50%.
assert(r_score.mag > 32768, 'Accuracy below 50% ');
}
|
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"\n",
"The Principal Component Analysis (PCA) method is an unsupervised learning algorithm that aims to reduce the dimensionality of a dataset consisting of a large number of interrelated variables, while at the same time preserving as much of the variation present in the original dataset as possible. This is achieved by transforming to a new set of variables, the principal components (PC), which are uncorrelated and are ordered in such a way that the first ones retain most of the variation present in all the original variables. More formally, with PCA, given \n",
"$n$ observations of $p$ variables, it seeks the possibility of adequately representing this information with a smaller number of variables, constructed as linear combinations of the original variables."
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Below, we provide a brief review of the implementation of a Principal Component Analysis (PCA) in Python, which we will then convert to Cairo to transform it into a verifiable ZKML (Principal Component Analysis), using the Orion library. This provides an opportunity to become familiar with the main functions and operators that the framework offers for the implementation of PCA."
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"For the purposes of this tutorial, we will use the iris dataset from sklearn.datasets."
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
" |
import numpy as np\n",
" |
import math\n",
" |
import matplotlib.pyplot as plt\n",
" |
import pandas as pd\n",
"from sklearn.datasets |
import load_iris"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"
"data = load_iris()\n",
"X = data['data']\n",
"\n",
"y = data['target']"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"For the purposes of this tutorial, we will not take into account the total number of records in the original dataset. In this sense, we will only focus on the first 105 individuals and the first 3 variables, in order to have comparable results between the python and cairo implementations, taking into consideration the same number of iterations in both programs to achieve orthogonality between the components at the computational level. Therefore, we will have the total number of individuals of the species versicolor and virginica, partially the individuals of the species setosa and with the exclusion of the variable petal width."
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"X, y = X[:105,0:3], y[:105]"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Before applying PCA, it is important to standardize the data. This ensures that each feature has an equal weight in the calculation of principal components."
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"mu = np.mean(X, axis=0)\n",
"sigma = np.std(X, axis=0)\n",
"X_std = (X - mu)/sigma"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"We print the first 20 rows"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array([[-0.62902748, 0.86993347, -1.04142584],\n",
" [-0.93199944, -0.19965686, -1.04142584],\n",
" [-1.23497139, 0.22817927, -1.10670925],\n",
" [-1.38645737, 0.0142612 , -0.97614243],\n",
" |
[-0.78051346, 1.08385154, -1.04142584],\n",
" [-0.17456955, 1.72560573, -0.84557561],\n",
" [-1.38645737, 0.6560154 , -1.04142584],\n",
" [-0.78051346, 0.6560154 , -0.97614243],\n",
" [-1.68942932, -0.41357493, -1.04142584],\n",
" [-0.93199944, 0.0142612 , -0.97614243],\n",
" [-0.17456955, 1.2977696 , -0.97614243],\n",
" [-1.08348542, 0.6560154 , -0.91085902],\n",
" [-1.08348542, -0.19965686, -1.04142584],\n",
" [-1.8409153 , -0.19965686, -1.23727607],\n",
" [ 0.43137435, 1.9395238 , -1.17199266],\n",
" [ 0.27988838, 2.79519606, -0.97614243],\n",
" [-0.17456955, 1.72560573, -1.10670925],\n",
" [-0.62902748, 0.86993347, -1.04142584],\n",
" [ 0.27988838, 1.51168767, -0.84557561],\n",
" [-0.62902748, 1.51168767, -0.97614243]])"
]
},
"execution_count": 5,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"X_std[0:20,:]"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"The Jacobi algorithm is an iterative method for finding the eigenvalues and eigenvectors of a symmetric matrix, which in our case is the correlation matrix over $\\mathbf{X_{std}}$. With this method, the aim is to identify pairs of elements off the main diagonal of the matrix and \"rotate\" them to zero using orthogonal transformations. The idea is that, after enough rotations, the matrix will converge to a diagonal matrix whose diagonal elements will be the eigenvalues of the original matrix. The eigenvectors, on the other hand, are constructed from the rotation matrices applied during the process."
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [],
"source": [
"def extract_diagonal(matrix):\n",
" return [matrix[i][i] |
for i in range(len(matrix))]"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [],
"source": [
"def find_max_off_diag(A):\n",
" n = A.shape[0]\n",
" p, q = 0, 1\n",
" max_val = abs(A[p, q])\n",
" for i in range(n-1):\n",
" for j in range(i+1, n):\n",
" if abs(A[i, j]) > max_val:\n",
" max_val = abs(A[i, j])\n",
" p, q = i, j\n",
" return p, q\n",
"\n",
"def jacobi_eigensystem(A, tol=1e-2, max_iter=500): \n",
" if len(A.shape) != 2 or A.shape[0] != A.shape[1]:\n",
" raise ValueError(\"A must be a square matrix\")\n",
" \n",
" n = A.shape[0]\n",
" V = np.eye(n)\n",
"\n",
" for _ in range(max_iter):\n",
" p, q = find_max_off_diag(A)\n",
" \n",
" if abs(A[p, q]) < tol:\n",
" break\n",
" \n",
" if A[p, p] == A[q, q]:\n",
" theta = math.pi/4\n",
" else:\n",
" theta = 0.5 * math.atan(2 * A[p, q] / (A[p, p] - A[q, q]))\n",
" \n",
" J = np.eye(n)\n",
" J[p, p], J[q, q] = math.cos(theta), math.cos(theta)\n",
" J[p, q], J[q, p] = math.sin(theta), -math.sin(theta)\n",
" \n",
" A = np.matmul(np.matmul(J.T,A),J)\n",
" V = np.matmul(V,J)\n",
"\n",
" return extract_diagonal(A), V"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"At this point, we determine the correlation matrix, based on the calculations of the variance and covariance matrix for **$\\mathbf{X_{std}}$** :\n",
"\n",
"We compute the covariance matrix\n",
"\n",
" $\\mathbf{S}$ = $\\mathbf{X}^{T}$ $\\mathbf{X}$ $/\\mathbf{n-1}$ \n",
"\n",
"Then, we determine the correlation matrix :\n",
"\n",
"$\\mathbf{r}$ = $\\mathb |
f{Cov(X,Y)}$ $/\\mathbf{S_{X} S_{Y}}$,\n",
"\n",
"where $\\mathbf{S_{X} S_{Y}}$ are the standard deviations of X and Y respectively. "
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [],
"source": [
"n = (X_std.shape[0]-1) \n",
"cov_matrix = np.dot(X_std.T,X_std)/n \n",
"stddevs = np.sqrt(extract_diagonal(cov_matrix))"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array([[ 1.00961538, -0.20656485, 0.83469099],\n",
" [-0.20656485, 1.00961538, -0.57369635],\n",
" [ 0.83469099, -0.57369635, 1.00961538]])"
]
},
"execution_count": 9,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"cov_matrix"
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [],
"source": [
"corr_matrix = cov_matrix / np.matmul(stddevs.reshape(-1, 1),stddevs.reshape(1, -1))"
]
},
{
"cell_type": "code",
"execution_count": 11,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array([[ 1. , -0.20459756, 0.82674155],\n",
" [-0.20459756, 1. , -0.56823258],\n",
" [ 0.82674155, -0.56823258, 1. ]])"
]
},
"execution_count": 11,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"corr_matrix"
]
},
{
"cell_type": "code",
"execution_count": 12,
"metadata": {},
"outputs": [],
"source": [
"evalu, evec = jacobi_eigensystem(corr_matrix)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"At this point, we have the eigenvalues and eigenvectors associated with the correlation matrix. Now, we sort the eigenvalues in decreasing order, as the largest of these will be associated with the component that explains the most variability in the data. Consequently, |
the principal components will be sorted in the same order as the eigenvalues and eigenvectors."
]
},
{
"cell_type": "code",
"execution_count": 13,
"metadata": {},
"outputs": [],
"source": [
"idx = np.argsort(evalu)[::-1]\n",
"evec = evec[:,idx]\n",
"evalu = np.sort(evalu)[::-1]"
]
},
{
"cell_type": "code",
"execution_count": 14,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"(array([2.10545934, 0.81005256, 0.0844881 ]),\n",
" array([[-0.58699831, 0.55819468, -0.58638867],\n",
" [ 0.45714577, 0.82631669, 0.32896575],\n",
" [-0.66816968, 0.07496276, 0.74022284]]))"
]
},
"execution_count": 14,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"evalu, evec"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Once the aforementioned order is established, we find the loadings which are represented by the discovered eigenvectors (evec). These loadings represent the coefficients of each variable in each of the principal components."
]
},
{
"cell_type": "code",
"execution_count": 15,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>PC1</th>\n",
" <th>PC2</th>\n",
" <th>PC3</th>\n",
" </tr>\n", |
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>sepal length (cm)</th>\n",
" <td>-0.586998</td>\n",
" <td>0.558195</td>\n",
" <td>-0.586389</td>\n",
" </tr>\n",
" <tr>\n",
" <th>sepal width (cm)</th>\n",
" <td>0.457146</td>\n",
" <td>0.826317</td>\n",
" <td>0.328966</td>\n",
" </tr>\n",
" <tr>\n",
" <th>petal length (cm)</th>\n",
" <td>-0.668170</td>\n",
" <td>0.074963</td>\n",
" <td>0.740223</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
" PC1 PC2 PC3\n",
"sepal length (cm) -0.586998 0.558195 -0.586389\n",
"sepal width (cm) 0.457146 0.826317 0.328966\n",
"petal length (cm) -0.668170 0.074963 0.740223"
]
},
"execution_count": 15,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"loadings = pd.DataFrame(evec,columns=['PC1','PC2','PC3'], index = data['feature_names'][:3])\n",
"loadings"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Next, we identify the new axes or principal components, which are obtained as a linear combination of the standardized original variables."
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"$\\mathbf{PC_{i}}$ = $\\mathbf{a_{i1}X_1} + \\mathbf{a_{i2}X_2} + \\mathbf{...} + \\mathbf{a_{in}X_n}$,\n",
"\n",
"Where,\n",
"\n",
"$\\mathbf{X_1}, \\mathbf{X_2}, \\mathbf{...} + \\mathbf{X_n}$ are the standardized original variables.\n",
"\n",
"$\\mathbf{a_{i1}}, \\mathbf{a_{i2}}, \\mathbf{...} + \\mathbf{a_{in}}$ are the coefficients or loadings of the $\\mathbf{i-th}$ eigenvector."
]
},
{
"cell_type": "code", |
"execution_count": 16,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>principal component 1</th>\n",
" <th>principal component 2</th>\n",
" <th>principal component 3</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>1.462774</td>\n",
" <td>0.289653</td>\n",
" <td>-0.115854</td>\n",
" </tr>\n",
" <tr>\n",
" <th>1</th>\n",
" <td>1.151659</td>\n",
" <td>-0.763285</td>\n",
" <td>-0.290054</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2</th>\n",
" <td>1.568707</td>\n",
" <td>-0.583768</td>\n",
" <td>-0.019975</td>\n",
" </tr>\n",
" <tr>\n",
" <th>3</th>\n",
" <td>1.472596</td>\n",
" <td>-0.835303</td>\n",
" <td>0.095131</td>\n",
" </tr>\n",
" <tr>\n",
" <th>4</th>\n",
" <td>1.649487</td>\n",
" <td>0.381858</td>\n",
" <td>0.043347</td>\n",
" </tr>\n",
" <tr>\n",
" <th>...</th>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" </tr>\n",
" <tr>\n",
" <th>100</th>\n",
" <td>-1.806412</td>\n",
" |
<td>1.175944</td>\n",
" <td>0.900363</td>\n",
" </tr>\n",
" <tr>\n",
" <th>101</th>\n",
" <td>-1.555969</td>\n",
" <td>-0.351478</td>\n",
" <td>0.487362</td>\n",
" </tr>\n",
" <tr>\n",
" <th>102</th>\n",
" <td>-2.767543</td>\n",
" <td>1.317228</td>\n",
" <td>-0.069714</td>\n",
" </tr>\n",
" <tr>\n",
" <th>103</th>\n",
" <td>-2.023098</td>\n",
" <td>0.449313</td>\n",
" <td>0.425579</td>\n",
" </tr>\n",
" <tr>\n",
" <th>104</th>\n",
" <td>-2.190391</td>\n",
" <td>0.804982</td>\n",
" <td>0.414940</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"<p>105 rows × 3 columns</p>\n",
"</div>"
],
"text/plain": [
" principal component 1 principal component 2 principal component 3\n",
"0 1.462774 0.289653 -0.115854\n",
"1 1.151659 -0.763285 -0.290054\n",
"2 1.568707 -0.583768 -0.019975\n",
"3 1.472596 -0.835303 0.095131\n",
"4 1.649487 0.381858 0.043347\n",
".. ... ... ...\n",
"100 -1.806412 1.175944 0.900363\n",
"101 -1.555969 -0.351478 0.487362\n",
"102 -2.767543 1.317228 -0.069714\n",
"103 -2.023098 0.449313 0.425579\n",
"104 -2.190391 0.804982 0.414940\n",
"\n",
"[105 rows x 3 columns]"
]
},
"execution_count |
": 16,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"principalDf = pd.DataFrame(np.dot(X_std,loadings))\n",
"principalDf.columns = [\"principal component {}\".format(i+1) for i in range(principalDf.shape[1])]\n",
"principalDf"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Next, we validate the orthogonality between the principal components, as we observe the lack of correlation between these new variables (Principal Components)."
]
},
{
"cell_type": "code",
"execution_count": 17,
"metadata": {},
"outputs": [],
"source": [
"n = principalDf.shape[0] - 1\n",
"cov_new = np.dot(np.array(principalDf).T,np.array(principalDf))/n \n",
"stddevs = np.sqrt(extract_diagonal(cov_new))\n",
"corr_new = cov_new / np.matmul(stddevs.reshape(-1, 1),stddevs.reshape(1, -1))"
]
},
{
"cell_type": "code",
"execution_count": 18,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array([[ 1. , -0.002632 , -0.01975951],\n",
" [-0.002632 , 1. , 0.0200618 ],\n",
" [-0.01975951, 0.0200618 , 1. ]])"
]
},
"execution_count": 18,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"corr_new"
]
},
{
"cell_type": "code",
"execution_count": 19,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n" |
,
" <th>0</th>\n",
" <th>1</th>\n",
" <th>2</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>1.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>1</th>\n",
" <td>0.0</td>\n",
" <td>1.0</td>\n",
" <td>0.0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2</th>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>1.0</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
" 0 1 2\n",
"0 1.0 0.0 0.0\n",
"1 0.0 1.0 0.0\n",
"2 0.0 0.0 1.0"
]
},
"execution_count": 19,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"new_corr = round(abs(pd.DataFrame(corr_new)))\n",
"new_corr"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"The implementation of Jacobi in Python is carried out considering 500 iterations, in order to optimize its implementation at the Cairo level. That is why rounding is applied when checking for orthogonality."
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Each principal component accounts for a proportion of the total variance, and such proportion can be determined by the ratio of each eigenvalue to the total sum of all eigenvalues. Thus, the percentage of variance explained by the i-th component is given by:\n",
"\n",
"$\\frac{\\lambda_i}{\\sum_{j=1}^{p} \\lambda_j}$"
]
},
{
"cell_type": "code",
"execution_count": 20,
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAkAAAAGwCAYAAABB4NqyAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG |
90bGliIHZlcnNpb24zLjguMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy81sbWrAAAACXBIWXMAAA9hAAAPYQGoP6dpAABgM0lEQVR4nO3deVhUZf8G8HsYmIFhGUB2RXDFlUUN0iw1SVwybTHFUvNVK99Mjcqkn3slZmmuZYumLW6lLW8aariUS2oCbrmL4sKuMCyyzTy/P8ypCUQGB88s9+e65oI585wz9zMDzJdznvMcmRBCgIiIiMiG2EkdgIiIiOheYwFERERENocFEBEREdkcFkBERERkc1gAERERkc1hAUREREQ2hwUQERER2Rx7qQOYI51Oh6tXr8LV1RUymUzqOERERFQLQggUFhYiICAAdnY17+NhAVSNq1evIjAwUOoYREREVAeXLl1Co0aNamzDAqgarq6uAG6+gG5ubhKnISIiotrQaDQIDAzUf47XhAVQNW4d9nJzc2MBREREZGFqM3yFg6CJiIjI5rAAIiIiIpvDAoiIiIhsDgsgIiIisjksgIiIiMjmsAAiIiIim8MCiIiIiGwOCyAiIiKyOSyAiIiIyOawACIiIiKbI2kB9Ouvv6J
"text/plain": [
"<Figure size 640x480 with 1 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"plt.plot(np.cumsum(evalu)/np.sum(evalu))\n",
"plt.xlabel('number of components')\n",
"plt.ylabel('cumulative explained variance');"
]
},
{
"cell_type": "code",
"execution_count": 21,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"97.0"
]
},
"execution_count": 21,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"select_pc = round(((evalu)/np.sum(evalu))[:2].sum(),2)*100\n",
"select_pc"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"As observed in the previous graph, we decided to keep the **first 2 components**, which explain **97%** of the total variability of the data."
]
},
{
"cell_type": "code",
"execution_count": 22,
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAArcAAALLCAYAAADjfarNAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy81sbWrAAAACXBIWXMAAA9hAAAPYQGoP6dpAACpVklEQVR4nOzdeViU9fo/8PcsLKM4Km6Iu2loRWqRaSNoddyPIRyz0k6lHn8HwSywo/mttDqV1VFoBSpTKzUtCxHXzPTEkJZrRzO13HBBMjVHdIRh5vn98TQjMCvDrM+8X9fFBcx8ZuaGh+Weez7PfcsEQRBARERERCQBcn8HQERERETkKUxuiYiIiEgymNwSERERkWQwuSUiIiIiyWByS0RERESSweSWiIiIiCSDyS0RERERSQaTWyIiIiKS |
DCa3RERERCQZTG4pZC1evBgymQwymQzHjx/3dzhERF5z/vx5PPXUU+jZsydUKpXlb98bb7zh79CIPI7JLQW9rVu3Wv5Qu/K2ePFif4dMXnLlyhV8+OGHGDt2LLp3745mzZohPDwcrVu3Rv/+/ZGVlYXt27f7O0win7p06RL69++P+fPn4+DBg7h27Zpb9/P888/b/Jsql8uhVqvRs2dPTJgwAd9++63L9/nzzz/jhRdeQFJSEjp06ACVSoWoqCh06tQJo0aNQnZ2Ns6ePevy/X300UeWuBQKBU6dOuXOl0rBTiAKclu2bBEAuPy2aNEiQRAEYdGiRZbLjh075rf4H330UQGA0KlTJ4/cX6dOnQQAwqOPPuqR+wsW+fn5QuvWrV36GbjjjjuEkpISf4dMASpQ/jZ4yssvv2z5embMmCEUFxcL+/btE/bt2yf8/vvvLt/PnDlzXP47+9hjjwnV1dV27+v8+fPC3
"text/plain": [
"<Figure size 800x800 with 1 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"fig = plt.figure(figsize = (8,8))\n",
"ax = fig.add_subplot(1,1,1) \n",
"ax.set_xlabel('Principal Component 1', fontsize = 15)\n",
"ax.set_ylabel('Principal Component 2', fontsize = 15)\n",
"ax.set_title('First two Components of PCA', fontsize = 20)\n",
"\n",
"targets = [0, 1, 2]\n",
"names = ['setosa', 'versicolor','virginica']\n",
"colors = ['r', 'g', 'b'] \n",
"for target, color, name in zip(targets, colors, names):\n",
" indicesToKeep = y == target\n",
" ax.scatter(principalDf.loc[indicesToKeep, 'principal component 1']\n",
" , principalDf.loc[indicesToKeep, 'principal component 2']\n",
" , c = color\n",
" , s = 50)\n",
"ax.legend(names)\n",
"ax.grid()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Based on what is observed in the graph of the first 2 principal components, we notice how the setosa species differentiates from the versicolor and virginica species in principal component 1, which is attributed to the variables petal length (cm), and sepal length (cm)."
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Among its other applications, here we were able to use PCA to describe a dataset in a dimension smaller than that of the original dataset. As previously discussed, we noticed how we can |
describe interesting aspects of the original data without the need to address separately all the dimensions of such data."
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"
"\n",
"Now let's generate Cairo files for each tensor in the object."
]
},
{
"cell_type": "code",
"execution_count": 23,
"metadata": {},
"outputs": [],
"source": [
"def decimal_to_fp16x16(num):\n",
"\n",
" whole_num = int(num)\n",
" fractional_part = int((num - whole_num) * 65536)\n",
" fp_number = (whole_num << 16) + fractional_part\n",
" return fp_number"
]
},
{
"cell_type": "code",
"execution_count": 24,
"metadata": {},
"outputs": [],
"source": [
" |
import os"
]
},
{
"cell_type": "code",
"execution_count": 25,
"metadata": {},
"outputs": [],
"source": [
"current_directory = os.getcwd()\n",
"parent_directory = os.path.dirname(current_directory)\n",
"new_directory_path = os.path.join(parent_directory, \"src/generated\")"
]
},
{
"cell_type": "code",
"execution_count": 26,
"metadata": {},
"outputs": [],
"source": [
"os.makedirs('src/generated', exist_ok=True) "
]
},
{
"cell_type": "code",
"execution_count": 27,
"metadata": {},
"outputs": [],
"source": [
"tensor_name = [\"X\",\"X_std\",\"y\"]\n",
"\n",
"def generate_cairo_files(data, name):\n",
"\n",
" with open(os.path.join('src', 'generated', f\"{name}.cairo\"), \"w\") as f:\n",
" f.write(\n",
" \"use array::{ArrayTrait, SpanTrait};\\n\" +\n",
" \"use orion::operators::tensor::{core::{Tensor, TensorTrait}};\\n\" +\n",
" \"use orion::operators::tensor::FP16x16Tensor;\\n\" +\n",
" \"use orion::numbers::fixed_point::implementations::fp16x16::core::{FP16x16, FixedTrait};\\n\" +\n",
" \"\\n\" + f\"fn {name}() -> Tensor<FP16x16>\" + \"{\\n\\n\" + \n",
" \"let mut shape = ArrayTrait::new();\\n\"\n",
" )\n",
" for dim in data.shape:\n",
" f.write(f\"shape.append({dim});\\n\")\n",
" \n",
" f.write(\"let mut data = ArrayTrait::new();\\n\")\n",
" for val in np.nditer(data.flatten()):\n",
" f.write(f\"data.append(FixedTrait::new({abs(int(decimal_to_fp16x16(val)))}, {str(val < 0).lower()}));\\n\")\n",
" f.write(\n",
" \"let tensor = TensorTrait::<FP16x16>::new(shape.span(), data.span());\\n\" +\n",
" \"return tensor;\\n}\"\n",
" )\n",
"\n",
"with open(f\"src/generated.cairo\", \"w\") as f:\n",
" for n in tensor_name:\n",
" f.write(f\"mod {n};\\n\")\n",
"\n", |
"generate_cairo_files(X, \"X\")\n",
"generate_cairo_files(X_std, \"X_std\")\n",
"generate_cairo_files(y, \"y\")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"<span style='color:Black'> **Convert some hyperparameters to FP16x16** </span>"
]
},
{
"cell_type": "code",
"execution_count": 28,
"metadata": {},
"outputs": [],
"source": [
"tol=1e-2\n",
"max_iter=500"
]
},
{
"cell_type": "code",
"execution_count": 29,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"655\n",
"32768000\n"
]
}
],
"source": [
"print(decimal_to_fp16x16(tol))\n",
"print(decimal_to_fp16x16(max_iter))"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"<span style='color:Black'> **Get an estimate for the first two eigenvalues and the first eigenvector in FP16x16** </span>"
]
},
{
"cell_type": "code",
"execution_count": 30,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"First Eigenvalue: 137983\n",
"Second Eigenvalue: 53087\n",
"First Eigenvector: [-38469 36581 -38429]\n"
]
}
],
"source": [
"evec_1 = np.array([decimal_to_fp16x16(evec[0][0]),\n",
" decimal_to_fp16x16(evec[0][1]),\n",
" decimal_to_fp16x16(evec[0][2])])\n",
"\n",
"print(\"First Eigenvalue: {}\".format(decimal_to_fp16x16(evalu[0])))\n",
"print(\"Second Eigenvalue: {}\".format(decimal_to_fp16x16(evalu[1])))\n",
"\n",
"print(\"First Eigenvector: {}\".format(evec_1))"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"For the implementation of PCA in Cairo with Orion, please visit the Convert your model section within the **Verifiable Principal Components Analysis tutorial**"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python", |
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.12"
}
},
"nbformat": 4,
"nbformat_minor": 2
} |
mod X_std;
mod X;
mod y;
mod evalu_sort;
mod evec_sort;
|
use array::{ArrayTrait, SpanTrait};
use orion::operators::tensor::{core::{Tensor, TensorTrait}};
use orion::operators::tensor::FP16x16Tensor;
use orion::numbers::fixed_point::implementations::fp16x16::core::{FP16x16, FixedTrait};
// Generated data: raw feature matrix for the PCA tutorial.
// Builds a 105x3 Tensor<FP16x16> holding the un-standardized input features,
// exported from the companion Python notebook via decimal_to_fp16x16.
// FP16x16 stores value * 2^16 as the magnitude (e.g. 65536 == 1.0); the second
// argument of FixedTrait::new is the sign flag — presumably true == negative,
// as suggested by the evec_sort comments elsewhere in this file (confirm).
// NOTE(review): do not hand-edit the appended values; regenerate from the notebook.
fn X() -> Tensor<FP16x16> {
// Tensor shape: 105 rows (samples) x 3 columns (features).
let mut shape = ArrayTrait::new();
shape.append(105);
shape.append(3);
// Flattened row-major data: 105 * 3 = 315 values appended in row order.
let mut data = ArrayTrait::new();
data.append(FixedTrait::new(334233, false));
data.append(FixedTrait::new(229376, false));
data.append(FixedTrait::new(91750, false));
data.append(FixedTrait::new(321126, false));
data.append(FixedTrait::new(196608, false));
data.append(FixedTrait::new(91750, false));
data.append(FixedTrait::new(308019, false));
data.append(FixedTrait::new(209715, false));
data.append(FixedTrait::new(85196, false));
data.append(FixedTrait::new(301465, false));
data.append(FixedTrait::new(203161, false));
data.append(FixedTrait::new(98304, false));
data.append(FixedTrait::new(327680, false));
data.append(FixedTrait::new(235929, false));
data.append(FixedTrait::new(91750, false));
data.append(FixedTrait::new(353894, false));
data.append(FixedTrait::new(255590, false));
data.append(FixedTrait::new(111411, false));
data.append(FixedTrait::new(301465, false));
data.append(FixedTrait::new(222822, false));
data.append(FixedTrait::new(91750, false));
data.append(FixedTrait::new(327680, false));
data.append(FixedTrait::new(222822, false));
data.append(FixedTrait::new(98304, false));
data.append(FixedTrait::new(288358, false));
data.append(FixedTrait::new(190054, false));
data.append(FixedTrait::new(91750, false));
data.append(FixedTrait::new(321126, false));
data.append(FixedTrait::new(203161, false));
data.append(FixedTrait::new(98304, false));
data.append(FixedTrait::new(353894, false));
data.append(FixedTrait::new(242483, false));
data.append(FixedTrait::new(98304, false));
data.append(FixedTrait::new(314572, false));
data.app
end(FixedTrait::new(222822, false));
data.append(FixedTrait::new(104857, false));
data.append(FixedTrait::new(314572, false));
data.append(FixedTrait::new(196608, false));
data.append(FixedTrait::new(91750, false));
data.append(FixedTrait::new(281804, false));
data.append(FixedTrait::new(196608, false));
data.append(FixedTrait::new(72089, false));
data.append(FixedTrait::new(380108, false));
data.append(FixedTrait::new(262144, false));
data.append(FixedTrait::new(78643, false));
data.append(FixedTrait::new(373555, false));
data.append(FixedTrait::new(288358, false));
data.append(FixedTrait::new(98304, false));
data.append(FixedTrait::new(353894, false));
data.append(FixedTrait::new(255590, false));
data.append(FixedTrait::new(85196, false));
data.append(FixedTrait::new(334233, false));
data.append(FixedTrait::new(229376, false));
data.append(FixedTrait::new(91750, false));
data.append(FixedTrait::new(373555, false));
data.append(FixedTrait::new(249036, false));
data.append(FixedTrait::new(111411, false));
data.append(FixedTrait::new(334233, false));
data.append(FixedTrait::new(249036, false));
data.append(FixedTrait::new(98304, false));
data.append(FixedTrait::new(353894, false));
data.append(FixedTrait::new(222822, false));
data.append(FixedTrait::new(111411, false));
data.append(FixedTrait::new(334233, false));
data.append(FixedTrait::new(242483, false));
data.append(FixedTrait::new(98304, false));
data.append(FixedTrait::new(301465, false));
data.append(FixedTrait::new(235929, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(334233, false));
data.append(FixedTrait::new(216268, false));
data.append(FixedTrait::new(111411, false));
data.append(FixedTrait::new(314572, false));
data.append(FixedTrait::new(222822, false));
data.append(FixedTrait::new(124518, false));
data.append(FixedTrait::new(327680, false));
data.ap
pend(FixedTrait::new(196608, false));
data.append(FixedTrait::new(104857, false));
data.append(FixedTrait::new(327680, false));
data.append(FixedTrait::new(222822, false));
data.append(FixedTrait::new(104857, false));
data.append(FixedTrait::new(340787, false));
data.append(FixedTrait::new(229376, false));
data.append(FixedTrait::new(98304, false));
data.append(FixedTrait::new(340787, false));
data.append(FixedTrait::new(222822, false));
data.append(FixedTrait::new(91750, false));
data.append(FixedTrait::new(308019, false));
data.append(FixedTrait::new(209715, false));
data.append(FixedTrait::new(104857, false));
data.append(FixedTrait::new(314572, false));
data.append(FixedTrait::new(203161, false));
data.append(FixedTrait::new(104857, false));
data.append(FixedTrait::new(353894, false));
data.append(FixedTrait::new(222822, false));
data.append(FixedTrait::new(98304, false));
data.append(FixedTrait::new(340787, false));
data.append(FixedTrait::new(268697, false));
data.append(FixedTrait::new(98304, false));
data.append(FixedTrait::new(360448, false));
data.append(FixedTrait::new(275251, false));
data.append(FixedTrait::new(91750, false));
data.append(FixedTrait::new(321126, false));
data.append(FixedTrait::new(203161, false));
data.append(FixedTrait::new(98304, false));
data.append(FixedTrait::new(327680, false));
data.append(FixedTrait::new(209715, false));
data.append(FixedTrait::new(78643, false));
data.append(FixedTrait::new(360448, false));
data.append(FixedTrait::new(229376, false));
data.append(FixedTrait::new(85196, false));
data.append(FixedTrait::new(321126, false));
data.append(FixedTrait::new(235929, false));
data.append(FixedTrait::new(91750, false));
data.append(FixedTrait::new(288358, false));
data.append(FixedTrait::new(196608, false));
data.append(FixedTrait::new(85196, false));
data.append(FixedTrait::new(334233, false));
data.ap
pend(FixedTrait::new(222822, false));
data.append(FixedTrait::new(98304, false));
data.append(FixedTrait::new(327680, false));
data.append(FixedTrait::new(229376, false));
data.append(FixedTrait::new(85196, false));
data.append(FixedTrait::new(294912, false));
data.append(FixedTrait::new(150732, false));
data.append(FixedTrait::new(85196, false));
data.append(FixedTrait::new(288358, false));
data.append(FixedTrait::new(209715, false));
data.append(FixedTrait::new(85196, false));
data.append(FixedTrait::new(327680, false));
data.append(FixedTrait::new(229376, false));
data.append(FixedTrait::new(104857, false));
data.append(FixedTrait::new(334233, false));
data.append(FixedTrait::new(249036, false));
data.append(FixedTrait::new(124518, false));
data.append(FixedTrait::new(314572, false));
data.append(FixedTrait::new(196608, false));
data.append(FixedTrait::new(91750, false));
data.append(FixedTrait::new(334233, false));
data.append(FixedTrait::new(249036, false));
data.append(FixedTrait::new(104857, false));
data.append(FixedTrait::new(301465, false));
data.append(FixedTrait::new(209715, false));
data.append(FixedTrait::new(91750, false));
data.append(FixedTrait::new(347340, false));
data.append(FixedTrait::new(242483, false));
data.append(FixedTrait::new(98304, false));
data.append(FixedTrait::new(327680, false));
data.append(FixedTrait::new(216268, false));
data.append(FixedTrait::new(91750, false));
data.append(FixedTrait::new(458752, false));
data.append(FixedTrait::new(209715, false));
data.append(FixedTrait::new(308019, false));
data.append(FixedTrait::new(419430, false));
data.append(FixedTrait::new(209715, false));
data.append(FixedTrait::new(294912, false));
data.append(FixedTrait::new(452198, false));
data.append(FixedTrait::new(203161, false));
data.append(FixedTrait::new(321126, false));
data.append(FixedTrait::new(360448, false));
data.
append(FixedTrait::new(150732, false));
data.append(FixedTrait::new(262144, false));
data.append(FixedTrait::new(425984, false));
data.append(FixedTrait::new(183500, false));
data.append(FixedTrait::new(301465, false));
data.append(FixedTrait::new(373555, false));
data.append(FixedTrait::new(183500, false));
data.append(FixedTrait::new(294912, false));
data.append(FixedTrait::new(412876, false));
data.append(FixedTrait::new(216268, false));
data.append(FixedTrait::new(308019, false));
data.append(FixedTrait::new(321126, false));
data.append(FixedTrait::new(157286, false));
data.append(FixedTrait::new(216268, false));
data.append(FixedTrait::new(432537, false));
data.append(FixedTrait::new(190054, false));
data.append(FixedTrait::new(301465, false));
data.append(FixedTrait::new(340787, false));
data.append(FixedTrait::new(176947, false));
data.append(FixedTrait::new(255590, false));
data.append(FixedTrait::new(327680, false));
data.append(FixedTrait::new(131072, false));
data.append(FixedTrait::new(229376, false));
data.append(FixedTrait::new(386662, false));
data.append(FixedTrait::new(196608, false));
data.append(FixedTrait::new(275251, false));
data.append(FixedTrait::new(393216, false));
data.append(FixedTrait::new(144179, false));
data.append(FixedTrait::new(262144, false));
data.append(FixedTrait::new(399769, false));
data.append(FixedTrait::new(190054, false));
data.append(FixedTrait::new(308019, false));
data.append(FixedTrait::new(367001, false));
data.append(FixedTrait::new(190054, false));
data.append(FixedTrait::new(235929, false));
data.append(FixedTrait::new(439091, false));
data.append(FixedTrait::new(203161, false));
data.append(FixedTrait::new(288358, false));
data.append(FixedTrait::new(367001, false));
data.append(FixedTrait::new(196608, false));
data.append(FixedTrait::new(294912, false));
data.append(FixedTrait::new(380108, false));
data.append(FixedTrait::new(176947, false));
data.append(FixedTrait::new(268697, false));
data.append(FixedTrait::new(406323, false));
data.append(FixedTrait::new(144179, false));
data.append(FixedTrait::new(294912, false));
data.append(FixedTrait::new(367001, false));
data.append(FixedTrait::new(163840, false));
data.append(FixedTrait::new(255590, false));
data.append(FixedTrait::new(386662, false));
data.append(FixedTrait::new(209715, false));
data.append(FixedTrait::new(314572, false));
data.append(FixedTrait::new(399769, false));
data.append(FixedTrait::new(183500, false));
data.append(FixedTrait::new(262144, false));
data.append(FixedTrait::new(412876, false));
data.append(FixedTrait::new(163840, false));
data.append(FixedTrait::new(321126, false));
data.append(FixedTrait::new(399769, false));
data.append(FixedTrait::new(183500, false));
data.append(FixedTrait::new(308019, false));
data.append(FixedTrait::new(419430, false));
data.append(FixedTrait::new(190054, false));
data.append(FixedTrait::new(281804, false));
data.append(FixedTrait::new(432537, false));
data.append(FixedTrait::new(196608, false));
data.append(FixedTrait::new(288358, false));
data.append(FixedTrait::new(445644, false));
data.append(FixedTrait::new(183500, false));
data.append(FixedTrait::new(314572, false));
data.append(FixedTrait::new(439091, false));
data.append(FixedTrait::new(196608, false));
data.append(FixedTrait::new(327680, false));
data.append(FixedTrait::new(393216, false));
data.append(FixedTrait::new(190054, false));
data.append(FixedTrait::new(294912, false));
data.append(FixedTrait::new(373555, false));
data.append(FixedTrait::new(170393, false));
data.append(FixedTrait::new(229376, false));
data.append(FixedTrait::new(360448, false));
data.append(FixedTrait::new(157286, false));
data.append(FixedTrait::new(249036, false));
data.append(FixedTrait::new(360448
, false));
data.append(FixedTrait::new(157286, false));
data.append(FixedTrait::new(242483, false));
data.append(FixedTrait::new(380108, false));
data.append(FixedTrait::new(176947, false));
data.append(FixedTrait::new(255590, false));
data.append(FixedTrait::new(393216, false));
data.append(FixedTrait::new(176947, false));
data.append(FixedTrait::new(334233, false));
data.append(FixedTrait::new(353894, false));
data.append(FixedTrait::new(196608, false));
data.append(FixedTrait::new(294912, false));
data.append(FixedTrait::new(393216, false));
data.append(FixedTrait::new(222822, false));
data.append(FixedTrait::new(294912, false));
data.append(FixedTrait::new(439091, false));
data.append(FixedTrait::new(203161, false));
data.append(FixedTrait::new(308019, false));
data.append(FixedTrait::new(412876, false));
data.append(FixedTrait::new(150732, false));
data.append(FixedTrait::new(288358, false));
data.append(FixedTrait::new(367001, false));
data.append(FixedTrait::new(196608, false));
data.append(FixedTrait::new(268697, false));
data.append(FixedTrait::new(360448, false));
data.append(FixedTrait::new(163840, false));
data.append(FixedTrait::new(262144, false));
data.append(FixedTrait::new(360448, false));
data.append(FixedTrait::new(170393, false));
data.append(FixedTrait::new(288358, false));
data.append(FixedTrait::new(399769, false));
data.append(FixedTrait::new(196608, false));
data.append(FixedTrait::new(301465, false));
data.append(FixedTrait::new(380108, false));
data.append(FixedTrait::new(170393, false));
data.append(FixedTrait::new(262144, false));
data.append(FixedTrait::new(327680, false));
data.append(FixedTrait::new(150732, false));
data.append(FixedTrait::new(216268, false));
data.append(FixedTrait::new(367001, false));
data.append(FixedTrait::new(176947, false));
data.append(FixedTrait::new(275251, false));
data.append(FixedTrait::
new(373555, false));
data.append(FixedTrait::new(196608, false));
data.append(FixedTrait::new(275251, false));
data.append(FixedTrait::new(373555, false));
data.append(FixedTrait::new(190054, false));
data.append(FixedTrait::new(275251, false));
data.append(FixedTrait::new(406323, false));
data.append(FixedTrait::new(190054, false));
data.append(FixedTrait::new(281804, false));
data.append(FixedTrait::new(334233, false));
data.append(FixedTrait::new(163840, false));
data.append(FixedTrait::new(196608, false));
data.append(FixedTrait::new(373555, false));
data.append(FixedTrait::new(183500, false));
data.append(FixedTrait::new(268697, false));
data.append(FixedTrait::new(412876, false));
data.append(FixedTrait::new(216268, false));
data.append(FixedTrait::new(393216, false));
data.append(FixedTrait::new(380108, false));
data.append(FixedTrait::new(176947, false));
data.append(FixedTrait::new(334233, false));
data.append(FixedTrait::new(465305, false));
data.append(FixedTrait::new(196608, false));
data.append(FixedTrait::new(386662, false));
data.append(FixedTrait::new(412876, false));
data.append(FixedTrait::new(190054, false));
data.append(FixedTrait::new(367001, false));
data.append(FixedTrait::new(425984, false));
data.append(FixedTrait::new(196608, false));
data.append(FixedTrait::new(380108, false));
// Wrap the shape and data spans into an Orion FP16x16 tensor.
let tensor = TensorTrait::<FP16x16>::new(shape.span(), data.span());
return tensor;
}
use array::{ArrayTrait, SpanTrait};
use orion::operators::tensor::{core::{Tensor, TensorTrait}};
use orion::operators::tensor::FP16x16Tensor;
use orion::numbers::fixed_point::implementations::fp16x16::core::{FP16x16, FixedTrait};
// Generated data: standardized feature matrix for the PCA tutorial.
// Builds a 105x3 Tensor<FP16x16>. Given the name, this is presumably the
// standardized (e.g. zero-mean/unit-variance) version of X() computed in the
// companion Python notebook — TODO confirm against the notebook's preprocessing.
// FP16x16 stores value * 2^16 as the magnitude; the second FixedTrait::new
// argument is the sign flag — presumably true == negative (confirm), which is
// consistent with the mix of signs expected from centered data.
// NOTE(review): do not hand-edit the appended values; regenerate from the notebook.
fn X_std() -> Tensor<FP16x16> {
// Tensor shape: 105 rows (samples) x 3 columns (features).
let mut shape = ArrayTrait::new();
shape.append(105);
shape.append(3);
// Flattened row-major data: 105 * 3 = 315 values appended in row order.
let mut data = ArrayTrait::new();
data.append(FixedTrait::new(41223, true));
data.append(FixedTrait::new(57011, false));
data.append(FixedTrait::new(68250, true));
data.append(FixedTrait::new(61079, true));
data.append(FixedTrait::new(13084, true));
data.append(FixedTrait::new(68250, true));
data.append(FixedTrait::new(80935, true));
data.append(FixedTrait::new(14953, false));
data.append(FixedTrait::new(72529, true));
data.append(FixedTrait::new(90862, true));
data.append(FixedTrait::new(934, false));
data.append(FixedTrait::new(63972, true));
data.append(FixedTrait::new(51151, true));
data.append(FixedTrait::new(71031, false));
data.append(FixedTrait::new(68250, true));
data.append(FixedTrait::new(11440, true));
data.append(FixedTrait::new(113089, false));
data.append(FixedTrait::new(55415, true));
data.append(FixedTrait::new(90862, true));
data.append(FixedTrait::new(42992, false));
data.append(FixedTrait::new(68250, true));
data.append(FixedTrait::new(51151, true));
data.append(FixedTrait::new(42992, false));
data.append(FixedTrait::new(63972, true));
data.append(FixedTrait::new(110718, true));
data.append(FixedTrait::new(27104, true));
data.append(FixedTrait::new(68250, true));
data.append(FixedTrait::new(61079, true));
data.append(FixedTrait::new(934, false));
data.append(FixedTrait::new(63972, true));
data.append(FixedTrait::new(11440, true));
data.append(FixedTrait::new(85050, false));
data.append(FixedTrait::new(63972, true));
data.append(FixedTrait::new(71007, true));
data.append(FixedTrait::new(42992, false));
data.ap
pend(FixedTrait::new(59694, true));
data.append(FixedTrait::new(71007, true));
data.append(FixedTrait::new(13084, true));
data.append(FixedTrait::new(68250, true));
data.append(FixedTrait::new(120646, true));
data.append(FixedTrait::new(13084, true));
data.append(FixedTrait::new(81086, true));
data.append(FixedTrait::new(28270, false));
data.append(FixedTrait::new(127108, false));
data.append(FixedTrait::new(76807, true));
data.append(FixedTrait::new(18342, false));
data.append(FixedTrait::new(183185, false));
data.append(FixedTrait::new(63972, true));
data.append(FixedTrait::new(11440, true));
data.append(FixedTrait::new(113089, false));
data.append(FixedTrait::new(72529, true));
data.append(FixedTrait::new(41223, true));
data.append(FixedTrait::new(57011, false));
data.append(FixedTrait::new(68250, true));
data.append(FixedTrait::new(18342, false));
data.append(FixedTrait::new(99069, false));
data.append(FixedTrait::new(55415, true));
data.append(FixedTrait::new(41223, true));
data.append(FixedTrait::new(99069, false));
data.append(FixedTrait::new(63972, true));
data.append(FixedTrait::new(11440, true));
data.append(FixedTrait::new(42992, false));
data.append(FixedTrait::new(55415, true));
data.append(FixedTrait::new(41223, true));
data.append(FixedTrait::new(85050, false));
data.append(FixedTrait::new(63972, true));
data.append(FixedTrait::new(90862, true));
data.append(FixedTrait::new(71031, false));
data.append(FixedTrait::new(85364, true));
data.append(FixedTrait::new(41223, true));
data.append(FixedTrait::new(28973, false));
data.append(FixedTrait::new(55415, true));
data.append(FixedTrait::new(71007, true));
data.append(FixedTrait::new(42992, false));
data.append(FixedTrait::new(46858, true));
data.append(FixedTrait::new(51151, true));
data.append(FixedTrait::new(13084, true));
data.append(FixedTrait::new(59694, true));
data.append(Fixe
dTrait::new(51151, true));
data.append(FixedTrait::new(42992, false));
data.append(FixedTrait::new(59694, true));
data.append(FixedTrait::new(31296, true));
data.append(FixedTrait::new(57011, false));
data.append(FixedTrait::new(63972, true));
data.append(FixedTrait::new(31296, true));
data.append(FixedTrait::new(42992, false));
data.append(FixedTrait::new(68250, true));
data.append(FixedTrait::new(80935, true));
data.append(FixedTrait::new(14953, false));
data.append(FixedTrait::new(59694, true));
data.append(FixedTrait::new(71007, true));
data.append(FixedTrait::new(934, false));
data.append(FixedTrait::new(59694, true));
data.append(FixedTrait::new(11440, true));
data.append(FixedTrait::new(42992, false));
data.append(FixedTrait::new(63972, true));
data.append(FixedTrait::new(31296, true));
data.append(FixedTrait::new(141127, false));
data.append(FixedTrait::new(63972, true));
data.append(FixedTrait::new(1512, true));
data.append(FixedTrait::new(155147, false));
data.append(FixedTrait::new(68250, true));
data.append(FixedTrait::new(61079, true));
data.append(FixedTrait::new(934, false));
data.append(FixedTrait::new(63972, true));
data.append(FixedTrait::new(51151, true));
data.append(FixedTrait::new(14953, false));
data.append(FixedTrait::new(76807, true));
data.append(FixedTrait::new(1512, true));
data.append(FixedTrait::new(57011, false));
data.append(FixedTrait::new(72529, true));
data.append(FixedTrait::new(61079, true));
data.append(FixedTrait::new(71031, false));
data.append(FixedTrait::new(68250, true));
data.append(FixedTrait::new(110718, true));
data.append(FixedTrait::new(13084, true));
data.append(FixedTrait::new(72529, true));
data.append(FixedTrait::new(41223, true));
data.append(FixedTrait::new(42992, false));
data.append(FixedTrait::new(63972, true));
data.append(FixedTrait::new(51151, true));
data.append(FixedTrait::new(57011
, false));
data.append(FixedTrait::new(72529, true));
data.append(FixedTrait::new(100790, true));
data.append(FixedTrait::new(111220, true));
data.append(FixedTrait::new(72529, true));
data.append(FixedTrait::new(110718, true));
data.append(FixedTrait::new(14953, false));
data.append(FixedTrait::new(72529, true));
data.append(FixedTrait::new(51151, true));
data.append(FixedTrait::new(57011, false));
data.append(FixedTrait::new(59694, true));
data.append(FixedTrait::new(41223, true));
data.append(FixedTrait::new(99069, false));
data.append(FixedTrait::new(46858, true));
data.append(FixedTrait::new(71007, true));
data.append(FixedTrait::new(13084, true));
data.append(FixedTrait::new(68250, true));
data.append(FixedTrait::new(41223, true));
data.append(FixedTrait::new(99069, false));
data.append(FixedTrait::new(59694, true));
data.append(FixedTrait::new(90862, true));
data.append(FixedTrait::new(14953, false));
data.append(FixedTrait::new(68250, true));
data.append(FixedTrait::new(21368, true));
data.append(FixedTrait::new(85050, false));
data.append(FixedTrait::new(63972, true));
data.append(FixedTrait::new(51151, true));
data.append(FixedTrait::new(28973, false));
data.append(FixedTrait::new(68250, true));
data.append(FixedTrait::new(147403, false));
data.append(FixedTrait::new(14953, false));
data.append(FixedTrait::new(72936, false));
data.append(FixedTrait::new(87837, false));
data.append(FixedTrait::new(14953, false));
data.append(FixedTrait::new(64379, false));
data.append(FixedTrait::new(137476, false));
data.append(FixedTrait::new(934, false));
data.append(FixedTrait::new(81493, false));
data.append(FixedTrait::new(1512, true));
data.append(FixedTrait::new(111220, true));
data.append(FixedTrait::new(42987, false));
data.append(FixedTrait::new(97765, false));
data.append(FixedTrait::new(41123, true));
data.append(FixedTrait::new(68658, fal
se));
data.append(FixedTrait::new(18342, false));
data.append(FixedTrait::new(41123, true));
data.append(FixedTrait::new(64379, false));
data.append(FixedTrait::new(77909, false));
data.append(FixedTrait::new(28973, false));
data.append(FixedTrait::new(72936, false));
data.append(FixedTrait::new(61079, true));
data.append(FixedTrait::new(97200, true));
data.append(FixedTrait::new(13038, false));
data.append(FixedTrait::new(107692, false));
data.append(FixedTrait::new(27104, true));
data.append(FixedTrait::new(68658, false));
data.append(FixedTrait::new(31296, true));
data.append(FixedTrait::new(55142, true));
data.append(FixedTrait::new(38709, false));
data.append(FixedTrait::new(51151, true));
data.append(FixedTrait::new(153278, true));
data.append(FixedTrait::new(21595, false));
data.append(FixedTrait::new(38198, false));
data.append(FixedTrait::new(13084, true));
data.append(FixedTrait::new(51544, false));
data.append(FixedTrait::new(48126, false));
data.append(FixedTrait::new(125239, true));
data.append(FixedTrait::new(42987, false));
data.append(FixedTrait::new(58053, false));
data.append(FixedTrait::new(27104, true));
data.append(FixedTrait::new(72936, false));
data.append(FixedTrait::new(8414, false));
data.append(FixedTrait::new(27104, true));
data.append(FixedTrait::new(25874, false));
data.append(FixedTrait::new(117620, false));
data.append(FixedTrait::new(934, false));
data.append(FixedTrait::new(60101, false));
data.append(FixedTrait::new(8414, false));
data.append(FixedTrait::new(13084, true));
data.append(FixedTrait::new(64379, false));
data.append(FixedTrait::new(28270, false));
data.append(FixedTrait::new(55142, true));
data.append(FixedTrait::new(47266, false));
data.append(FixedTrait::new(67981, false));
data.append(FixedTrait::new(125239, true));
data.append(FixedTrait::new(64379, false));
data.append(FixedTrait::new(8414, fa
lse));
data.append(FixedTrait::new(83181, true));
data.append(FixedTrait::new(38709, false));
data.append(FixedTrait::new(38198, false));
data.append(FixedTrait::new(14953, false));
data.append(FixedTrait::new(77215, false));
data.append(FixedTrait::new(58053, false));
data.append(FixedTrait::new(41123, true));
data.append(FixedTrait::new(42987, false));
data.append(FixedTrait::new(77909, false));
data.append(FixedTrait::new(83181, true));
data.append(FixedTrait::new(81493, false));
data.append(FixedTrait::new(58053, false));
data.append(FixedTrait::new(41123, true));
data.append(FixedTrait::new(72936, false));
data.append(FixedTrait::new(87837, false));
data.append(FixedTrait::new(27104, true));
data.append(FixedTrait::new(55823, false));
data.append(FixedTrait::new(107692, false));
data.append(FixedTrait::new(13084, true));
data.append(FixedTrait::new(60101, false));
data.append(FixedTrait::new(127548, false));
data.append(FixedTrait::new(41123, true));
data.append(FixedTrait::new(77215, false));
data.append(FixedTrait::new(117620, false));
data.append(FixedTrait::new(13084, true));
data.append(FixedTrait::new(85772, false));
data.append(FixedTrait::new(48126, false));
data.append(FixedTrait::new(27104, true));
data.append(FixedTrait::new(64379, false));
data.append(FixedTrait::new(18342, false));
data.append(FixedTrait::new(69162, true));
data.append(FixedTrait::new(21595, false));
data.append(FixedTrait::new(1512, true));
data.append(FixedTrait::new(97200, true));
data.append(FixedTrait::new(34431, false));
data.append(FixedTrait::new(1512, true));
data.append(FixedTrait::new(97200, true));
data.append(FixedTrait::new(30152, false));
data.append(FixedTrait::new(28270, false));
data.append(FixedTrait::new(55142, true));
data.append(FixedTrait::new(38709, false));
data.append(FixedTrait::new(48126, false));
data.append(FixedTrait::new(55142,
true));
data.append(FixedTrait::new(90050, false));
data.append(FixedTrait::new(11440, true));
data.append(FixedTrait::new(13084, true));
data.append(FixedTrait::new(64379, false));
data.append(FixedTrait::new(48126, false));
data.append(FixedTrait::new(42992, false));
data.append(FixedTrait::new(64379, false));
data.append(FixedTrait::new(117620, false));
data.append(FixedTrait::new(934, false));
data.append(FixedTrait::new(72936, false));
data.append(FixedTrait::new(77909, false));
data.append(FixedTrait::new(111220, true));
data.append(FixedTrait::new(60101, false));
data.append(FixedTrait::new(8414, false));
data.append(FixedTrait::new(13084, true));
data.append(FixedTrait::new(47266, false));
data.append(FixedTrait::new(1512, true));
data.append(FixedTrait::new(83181, true));
data.append(FixedTrait::new(42987, false));
data.append(FixedTrait::new(1512, true));
data.append(FixedTrait::new(69162, true));
data.append(FixedTrait::new(60101, false));
data.append(FixedTrait::new(58053, false));
data.append(FixedTrait::new(13084, true));
data.append(FixedTrait::new(68658, false));
data.append(FixedTrait::new(28270, false));
data.append(FixedTrait::new(69162, true));
data.append(FixedTrait::new(42987, false));
data.append(FixedTrait::new(51151, true));
data.append(FixedTrait::new(111220, true));
data.append(FixedTrait::new(13038, false));
data.append(FixedTrait::new(8414, false));
data.append(FixedTrait::new(55142, true));
data.append(FixedTrait::new(51544, false));
data.append(FixedTrait::new(18342, false));
data.append(FixedTrait::new(13084, true));
data.append(FixedTrait::new(51544, false));
data.append(FixedTrait::new(18342, false));
data.append(FixedTrait::new(27104, true));
data.append(FixedTrait::new(51544, false));
data.append(FixedTrait::new(67981, false));
data.append(FixedTrait::new(27104, true));
data.append(FixedTrait::new(55823, fals
e));
data.append(FixedTrait::new(41223, true));
data.append(FixedTrait::new(83181, true));
data.append(FixedTrait::new(203, false));
data.append(FixedTrait::new(18342, false));
data.append(FixedTrait::new(41123, true));
data.append(FixedTrait::new(47266, false));
data.append(FixedTrait::new(77909, false));
data.append(FixedTrait::new(28973, false));
data.append(FixedTrait::new(128556, false));
data.append(FixedTrait::new(28270, false));
data.append(FixedTrait::new(55142, true));
data.append(FixedTrait::new(90050, false));
data.append(FixedTrait::new(157331, false));
data.append(FixedTrait::new(13084, true));
data.append(FixedTrait::new(124277, false));
data.append(FixedTrait::new(77909, false));
data.append(FixedTrait::new(27104, true));
data.append(FixedTrait::new(111442, false));
data.append(FixedTrait::new(97765, false));
data.append(FixedTrait::new(13084, true));
data.append(FixedTrait::new(119999, false));
// Wrap the shape and data spans into an Orion FP16x16 tensor.
let tensor = TensorTrait::<FP16x16>::new(shape.span(), data.span());
return tensor;
}
use array::{ArrayTrait, SpanTrait};
use orion::operators::tensor::{core::{Tensor, TensorTrait}};
use orion::operators::tensor::FP16x16Tensor;
use orion::numbers::fixed_point::implementations::fp16x16::core::{FP16x16, FixedTrait};
// Eigenvalues of the PCA covariance matrix, pre-sorted in descending order.
// Raw FP16x16 magnitudes (value * 2^16), all non-negative:
//   original order: [ 52513 137534 5393]
//   sorted order:   [137534  52513 5393]
// Returns a length-3 Tensor<FP16x16>.
fn evalu_sort() -> Tensor<FP16x16> {
    let mut dims = ArrayTrait::new();
    dims.append(3);

    let mut vals = ArrayTrait::new();
    vals.append(FixedTrait::new(137534, false));
    vals.append(FixedTrait::new(52513, false));
    vals.append(FixedTrait::new(5393, false));

    // Tail expression: the freshly built tensor is the return value.
    TensorTrait::<FP16x16>::new(dims.span(), vals.span())
}
|
use array::{ArrayTrait, SpanTrait};
use orion::operators::tensor::{core::{Tensor, TensorTrait}};
use orion::operators::tensor::FP16x16Tensor;
use orion::numbers::fixed_point::implementations::fp16x16::core::{FP16x16, FixedTrait};
// Eigenvectors of the PCA covariance matrix, with columns reordered to match
// the descending-eigenvalue order (columns 1 and 2 of the original swapped).
// Signed FP16x16 values: FixedTrait::new(magnitude, sign) with sign == true
// encoding a negative number (e.g. -38024 -> new(38024, true)).
//   Original                      Sorted
//   [[ 36422 -38024 -38467]       [[-38024  36422 -38467]
//    [ 53777  30012  21440]        [ 30012  53777  21440]
//    [  5195 -43789  48143]]       [-43789   5195  48143]]
// Returns a 3x3 Tensor<FP16x16>.
fn evec_sort() -> Tensor<FP16x16> {
    let mut dims = ArrayTrait::new();
    dims.append(3);
    dims.append(3);

    // Row-major flattened 3x3 matrix.
    let mut vals = ArrayTrait::new();
    vals.append(FixedTrait::new(38024, true));
    vals.append(FixedTrait::new(36422, false));
    vals.append(FixedTrait::new(38467, true));
    vals.append(FixedTrait::new(30012, false));
    vals.append(FixedTrait::new(53777, false));
    vals.append(FixedTrait::new(21440, false));
    vals.append(FixedTrait::new(43789, true));
    vals.append(FixedTrait::new(5195, false));
    vals.append(FixedTrait::new(48143, false));

    // Tail expression: the freshly built tensor is the return value.
    TensorTrait::<FP16x16>::new(dims.span(), vals.span())
}
|
use array::{ArrayTrait, SpanTrait};
use orion::operators::tensor::{core::{Tensor, TensorTrait}};
use orion::operators::tensor::FP16x16Tensor;
use orion::numbers::fixed_point::implementations::fp16x16::core::{FP16x16, FixedTrait};
// Generated fixture: label vector for the 105-sample dataset. The labels
// form three contiguous constant runs, so they are emitted with loops rather
// than 105 literal appends. FP16x16 raw units: 0 == 0.0, 65536 == 1.0,
// 131072 == 2.0. The appended sequence is identical to the generated original.
fn y() -> Tensor<FP16x16> {
    let mut shape = ArrayTrait::new();
    shape.append(105);
    let mut data = ArrayTrait::new();
    // First 50 samples: class 0.
    let mut count: usize = 0;
    loop {
        if count == 50 {
            break;
        };
        data.append(FixedTrait::new(0, false));
        count += 1;
    };
    // Next 50 samples: class 1.
    count = 0;
    loop {
        if count == 50 {
            break;
        };
        data.append(FixedTrait::new(65536, false));
        count += 1;
    };
    // Final 5 samples: class 2.
    count = 0;
    loop {
        if count == 5 {
            break;
        };
        data.append(FixedTrait::new(131072, false));
        count += 1;
    };
    let tensor = TensorTrait::<FP16x16>::new(shape.span(), data.span());
    return tensor;
}
use traits::TryInto;
use alexandria_data_structures::array_ext::{SpanTraitExt};
use array::{ArrayTrait, SpanTrait};
use orion::operators::tensor::{Tensor, TensorTrait};
use orion::numbers::fixed_point::{core::{FixedTrait}};
use orion::operators::tensor::{FP16x16Tensor, FP16x16TensorDiv};
use orion::numbers::fixed_point::implementations::fp16x16::core::{
FP16x16, FP16x16Impl, FP16x16Add, FP16x16AddEq, FP16x16Sub, FP16x16Mul, FP16x16MulEq,
FP16x16TryIntoU128, FP16x16PartialEq, FP16x16PartialOrd, FP16x16SubEq, FP16x16Neg, FP16x16Div,
FP16x16IntoFelt252, FP16x16Print, HALF
};
use orion::numbers::fixed_point::implementations::fp16x16::math::trig;
// Describes one Jacobi rotation: the pivot position (p, q) and the rotation
// angle that zeroes the (p, q) entry. NOTE(review): the generic parameter is
// named `FP16x16`, shadowing the concrete type of the same name — every use
// in this file instantiates it with the fixed-point type anyway.
struct EigenValues<FP16x16> {
// Row index of the pivot element.
p_index: usize,
// Column index of the pivot element.
q_index: usize,
// Rotation angle for this Jacobi step.
theta: FP16x16,
}
// Divides every element of the tensor by an unsigned integer scalar.
// The magnitude is divided (truncating u32 division) and the sign is
// carried over unchanged; the shape is preserved.
fn div_by_scalar(self: @Tensor<FP16x16>, divisor: u32) -> Tensor<FP16x16> {
    let mut remaining = (*self).data;
    let mut quotients = ArrayTrait::new();
    loop {
        match remaining.pop_front() {
            Option::Some(value) => {
                quotients.append(FixedTrait::new(*value.mag / divisor, *value.sign));
            },
            Option::None(_) => {
                break TensorTrait::<FP16x16>::new((*self).shape, quotients.span());
            }
        };
    }
}
// Divides every element of the tensor by a fixed-point scalar using the
// FP16x16 division operator; the shape is preserved.
fn div_by_fp(self: @Tensor<FP16x16>, divisor: FP16x16) -> Tensor<FP16x16> {
    let mut remaining = (*self).data;
    let mut quotients = ArrayTrait::new();
    loop {
        match remaining.pop_front() {
            Option::Some(value) => {
                quotients.append(FP16x16Div::div(*value, divisor));
            },
            Option::None(_) => {
                break TensorTrait::<FP16x16>::new((*self).shape, quotients.span());
            }
        };
    }
}
// Returns the (row, col) indices of the largest-magnitude element strictly
// above the diagonal of the square matrix `a`. Used to pick the next pivot
// for a Jacobi rotation. Only the upper triangle (j > i) is scanned, which
// is sufficient for the symmetric matrices this module works with.
//
// Fix: removed the unused locals `data` and `shape` that the original
// copied out of the tensor and never read.
fn find_max_off_diag(a: @Tensor<FP16x16>) -> (usize, usize) {
    let n = *(*a).shape.at(0);
    let mut i = 0_usize;
    let mut j = 0_usize;
    // Start from the first off-diagonal entry (0, 1).
    let mut p = 0_usize;
    let mut q = 1_usize;
    let mut max_val = FixedTrait::abs((*a).at(indices: array![p, q].span()));
    loop {
        if i == n {
            break (p, q);
        };
        j = i + 1;
        loop {
            if j == n {
                break;
            };
            if FixedTrait::abs((a).at(indices: array![i, j].span())) > max_val {
                max_val = FixedTrait::abs((a).at(indices: array![i, j].span()));
                p = i;
                q = j;
            };
            j += 1;
        };
        i += 1;
    }
}
// Iteratively diagonalizes the symmetric matrix `a` with Jacobi rotations.
// Returns (eigenvalues as a 1-D tensor, matrix `v` accumulating the
// rotations — its columns are the eigenvectors). Iteration stops when the
// largest off-diagonal magnitude drops below `tol` or after `max_iter`
// rotations, whichever comes first.
//
// Fix: removed the unused local `data` that the original copied out of the
// tensor and never read.
fn jacobi_eigensystem(
    mut a: Tensor<FP16x16>, tol: FP16x16, max_iter: usize
) -> (Tensor<FP16x16>, Tensor<FP16x16>) {
    assert(
        !((a).shape.len() != 2_usize || ((a).shape.at(0_usize) != (a).shape.at(1_usize))),
        'a must be a square matrix'
    );
    // Fixed-point constants for the rotation-angle formula.
    let two = FixedTrait::ONE() + FixedTrait::ONE();
    let four = two * two;
    let half = FixedTrait::<FP16x16>::new(HALF, false);
    let pi = FixedTrait::<FP16x16>::new(trig::PI, false);
    let mut shape = a.shape;
    let numRows = *((shape).at(0));
    // Accumulates the product of all rotation matrices.
    let mut v = eye(numRows: numRows);
    let mut i: usize = 0;
    loop {
        let (p, q) = find_max_off_diag(@a);
        if i == max_iter || FixedTrait::abs((a).at(indices: array![p, q].span())) < tol {
            break (extract_diagonal(@a), v);
        };
        // Angle is pi/4 when the two diagonal entries coincide (the atan
        // formula would divide by zero there); otherwise
        // 0.5 * atan(2*a[p,q] / (a[p,p] - a[q,q])).
        let theta = if (a)
            .at(indices: array![p, p].span()) == (a)
            .at(indices: array![q, q].span()) {
            FP16x16Div::div(pi, four)
        } else {
            half
                * trig::atan(
                    FP16x16Div::div(
                        two * (a).at(indices: array![p, q].span()),
                        (FP16x16Sub::sub(
                            (a).at(indices: array![p, p].span()),
                            (a).at(indices: array![q, q].span())
                        ))
                    )
                )
        };
        let eigensystem = EigenValues { p_index: p, q_index: q, theta: theta };
        // J: identity with the 2x2 rotation written into rows/cols p and q.
        let j_eye = eye(numRows: numRows);
        let j = update_eigen_values(self: @j_eye, eigensystem: eigensystem);
        let transpose_j = j.transpose(axes: array![1, 0].span());
        // Similarity transform a <- J^T * a * J preserves the eigenvalues
        // while shrinking the off-diagonal mass.
        a = transpose_j.matmul(@a).matmul(@j);
        v = v.matmul(@j);
        i += 1;
    }
}
// Builds a numRows x numRows identity matrix in FP16x16
// (ONE on the diagonal, ZERO everywhere else), in row-major order.
fn eye(numRows: usize) -> Tensor<FP16x16> {
    let mut values = ArrayTrait::new();
    let mut row: usize = 0;
    loop {
        if row == numRows {
            break;
        };
        let mut col: usize = 0;
        loop {
            if col == numRows {
                break;
            };
            let cell = if row == col {
                FixedTrait::ONE()
            } else {
                FixedTrait::ZERO()
            };
            values.append(cell);
            col += 1;
        };
        row += 1;
    };
    Tensor::<FP16x16> { shape: array![numRows, numRows].span(), data: values.span() }
}
// Extracts the main diagonal of a square matrix into a 1-D tensor of
// length shape[0]. Walks the data span in row-major order via pop_front,
// keeping only the entries where the row and column indices coincide.
fn extract_diagonal(self: @Tensor<FP16x16>) -> Tensor<FP16x16> {
    let mut remaining = (*self).data;
    let mut diagonal = ArrayTrait::new();
    let dims = (*self).shape.at(0);
    let mut row: usize = 0;
    loop {
        if row == *dims {
            break;
        };
        let mut col: usize = 0;
        loop {
            if col == *dims {
                break;
            };
            match remaining.pop_front() {
                Option::Some(value) => {
                    if row == col {
                        diagonal.append(*value);
                    };
                },
                Option::None(_) => { break; }
            };
            col += 1;
        };
        row += 1;
    };
    Tensor::<FP16x16> { shape: array![*dims].span(), data: diagonal.span() }
}
// Writes the 2x2 Jacobi rotation into a copy of `self` (expected to be an
// identity matrix from eye()): entries (p,p) and (q,q) become cos(theta),
// (p,q) becomes sin(theta), (q,p) becomes -sin(theta); every other entry is
// copied through unchanged.
fn update_eigen_values(
self: @Tensor<FP16x16>, eigensystem: EigenValues<FP16x16>
) -> Tensor<FP16x16> {
let mut data = (*self).data;
let mut data_array = ArrayTrait::new();
// (x, y) tracks the row/column of the element popped this iteration;
// `index` counts total elements so the loop ends after dims * dims items.
let mut x: usize = 0;
let mut y: usize = 0;
let mut index: usize = 0;
let dims = (*self).shape.at(0);
let items = *dims * *dims;
let dims_y = (*self).shape.at(1);
loop {
if index == items {
break;
};
// Row rollover is done lazily at the top of the iteration, after y has
// run past the last column of the previous row.
if y == *dims_y {
x += 1;
y = 0;
};
match data.pop_front() {
Option::Some(elem) => {
let eigen_values = eigensystem;
// Select the rotation entry for this position, or pass the
// original element through when (x, y) is not a pivot position.
let value = if (eigen_values.p_index, eigen_values.p_index) == (x, y) {
trig::cos(eigen_values.theta)
} else if (eigen_values.q_index, eigen_values.q_index) == (x, y) {
trig::cos(eigen_values.theta)
} else if (eigen_values.p_index, eigen_values.q_index) == (x, y) {
trig::sin(eigen_values.theta)
} else if (eigen_values.q_index, eigen_values.p_index) == (x, y) {
trig::sin(-eigen_values.theta)
} else {
*elem
};
data_array.append(value);
y += 1;
index += 1;
},
Option::None(_) => { break; }
};
};
Tensor::<FP16x16> { shape: *self.shape, data: data_array.span() }
}
// Returns true when the matrix is the identity up to FP16x16 exactness:
// ONE on the diagonal and ZERO everywhere else. Scanning stops at the first
// mismatching entry.
fn check_unit_diagonal_tensor(self: @Tensor<FP16x16>) -> bool {
    let rows = (*self).shape.at(0);
    let cols = (*self).shape.at(1);
    let mut is_identity: bool = true;
    let mut row: usize = 0;
    loop {
        if row == *rows || !is_identity {
            break is_identity;
        };
        let mut col: usize = 0;
        loop {
            if col == *cols {
                break;
            };
            let expected = if row == col {
                FixedTrait::ONE()
            } else {
                FixedTrait::ZERO()
            };
            if (self).at(indices: array![row, col].span()) != expected {
                is_identity = false;
                break;
            };
            col += 1;
        };
        row += 1;
    }
}
mod generated;
mod helper;
mod test;
|
// End-to-end PCA check: builds the correlation matrix from the standardized
// data, diagonalizes it, projects onto the eigenvector loadings, and verifies
// (a) the principal components are mutually orthogonal and (b) the variance
// explained by the first two components matches the reference notebook.
mod tests {
use traits::TryInto;
use alexandria_data_structures::array_ext::{SpanTraitExt};
use array::{ArrayTrait, SpanTrait};
use orion::operators::tensor::{Tensor, TensorTrait};
use orion::numbers::fixed_point::{core::{FixedTrait}};
use orion::operators::tensor::{FP16x16Tensor, FP16x16TensorDiv, FP16x16TensorSub};
use orion::numbers::fixed_point::implementations::fp16x16::core::{
FP16x16, FP16x16Impl, FP16x16Add, FP16x16AddEq, FP16x16Sub, FP16x16Mul, FP16x16MulEq,
FP16x16TryIntoU128, FP16x16PartialEq, FP16x16PartialOrd, FP16x16SubEq, FP16x16Neg,
FP16x16Div, FP16x16IntoFelt252, FP16x16Print
};
use pca::{
helper::{
EigenValues, extract_diagonal, eye, find_max_off_diag, jacobi_eigensystem,
update_eigen_values, check_unit_diagonal_tensor, div_by_scalar, div_by_fp
}
};
use pca::{generated::{X_std::X_std, X::X, y::y, evalu_sort::evalu_sort, evec_sort::evec_sort}};
fn pca_test() {
// tol = 655 raw FP16x16 units (~0.01); convergence threshold for Jacobi.
let tol = FixedTrait::<FP16x16>::new(655, false);
let max_iter = 500_usize;
let X_std = X_std();
let X = X();
let y = y();
// n = sample count - 1: Bessel's correction for the sample covariance.
let mut n: usize = *((X_std).shape.at(0)) - 1;
let size = *(X_std.shape.at(1));
let X_std_transpose = X_std.transpose(axes: array![1, 0].span());
let mut cov_matrix = div_by_scalar(@(X_std_transpose.matmul(@X_std)), n);
// Normalize covariance by the outer product of stddevs -> correlation.
let mut stddevs = extract_diagonal(@cov_matrix).sqrt();
let mut stddevs_left = stddevs.reshape(array![size, 1].span());
let mut stddevs_right = stddevs.reshape(array![1, size].span());
let corr_matrix = cov_matrix / stddevs_left.matmul(@stddevs_right);
let (evalu, evec) = jacobi_eigensystem(a: corr_matrix, tol: tol, max_iter: max_iter);
// NOTE(review): the jacobi result above is immediately shadowed by the
// pre-sorted generated fixtures; the call only exercises the algorithm.
let (evalu, evec) = (evalu_sort(), evec_sort());
let loadings = evec;
let principal_component = X_std.matmul(@loadings);
n = *((principal_component).shape.at(0)) - 1;
let principal_component_transpose = principal_component
.transpose(axes: array![1, 0].span());
let cov_new = div_by_scalar(
@(principal_component_transpose.matmul(@principal_component)), n
);
stddevs = extract_diagonal(@cov_new).sqrt();
stddevs_left = stddevs.reshape(array![size, 1].span());
stddevs_right = stddevs.reshape(array![1, size].span());
let corr_new = cov_new / stddevs_left.matmul(@stddevs_right);
// Rounded |correlation| of the components must be the identity matrix.
let new_corr = (@corr_new.abs()).round();
assert(check_unit_diagonal_tensor(@new_corr), 'orthogonality is invalid');
// Variance explained by the first two components, as a percentage.
let evalu_cumsum = evalu.cumsum(0, Option::None(()), Option::None(()));
let sum = evalu_cumsum.data.at(evalu_cumsum.data.len() - 1);
let evalu_div_sum = div_by_fp(@evalu, *sum);
let pc = (*evalu_div_sum.data.at(0) + *evalu_div_sum.data.at(1))
* FixedTrait::<FP16x16>::new_unscaled(100, false);
// 0x610000 == 97 in FP16x16 — the notebook's reference value (97%).
assert(
FixedTrait::round(pc)
.mag == 0x610000, 'no match with notebook version'
);
}
}
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"The Support Vector Machines (SVM) model is a supervised learning technique used for classification and regression. It is employed to solve binary classification problems where it identifies the hyperplane that best divides a data set into classes. This hyperplane results from maximizing the margin between the two classes. By determining this optimal hyperplane, predictions can be made for new data points and understand how the input attributes influence classification.\n",
"\n",
"Below, we provide a brief review of implementing an SVM model using the Gradient Descent method for the linear kernel in Python, which we will later convert to Cairo to transform it into a verifiable ZKML (support vector machine model), using Orion's library. This allows an opportunity to familiarize oneself with the main functions and operators that the framework offers for the implementation of the SVM."
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"For the purposes of this tutorial, we generated linearly separable data using make_blobs from Scikit-learn"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"(100, 3) (100,) (50, 3) (50,)\n"
]
}
],
"source": [
" |
import numpy as np\n",
" |
import matplotlib.pyplot as plt\n",
"from sklearn.datasets |
import make_blobs\n",
"\n",
"X, y = make_blobs(n_samples=150, centers=2,\n",
" random_state=0, cluster_std=0.60)\n",
"y[y == 0] = -1\n",
"\n",
"X = np.hstack((X, np.ones((X.shape[0], 1))))\n",
"\n",
"X_train, y_train = X[:100, :], y[:100]\n",
"X_test, y_test = X[100:, :], y[100:]\n",
"\n",
"print(X_train.shape, y_train.shape, X_test.shape, y_test.shape)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Now we will visualize the training data using a scatter plot, where the points are colored based on their class labels, which in our case will be 1 and -1"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"<matplotlib.collections.PathCollection at 0x145c39360>"
]
},
"execution_count": 3,
"metadata": {},
"output_type": "execute_result"
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAhYAAAGdCAYAAABO2DpVAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy88F64QAAAACXBIWXMAAA9hAAAPYQGoP6dpAAA9iUlEQVR4nO3deXhU9dn/8fdkIWFJAiggS2QRRUVFBBUQVxBFa8HWulQRqtZHi1r11ypqraLWaJ8+Lq0tKqJW24pWRHGjorJZRAVBcEOqIMgOhSQEHJLJ+f1xZ5gMmUAmOTNn5szndV1zkXOYzLk5c5hzz3e5vwHHcRxEREREXJDldQAiIiLiH0osRERExDVKLERERMQ1SixERETENUosRERExDVKLERERMQ1SixERETENUosRERExDU5yT5gdXU1a9eupaCggEAgkOzDi4iISCM4jkN5eTmdOnUiK6v+domkJxZr166luLg42YcVERERF6xevZouXbrU+/dJTywKCgoAC6ywsDDZhxcREZFGKCsro7i4ePd9vD5JTyzC3R+FhYVKLERERNLMvoYxaPCmiIiIuEaJhYiIiLhGiYWIiIi4RomFiIiIuEaJhYiIiLhGiYWIiIi4RomFiIiIuEaJhYiIiLhGiYWIiIi4RomFiIiIuCbpJb0ToqIC3nzT/jzlFOja1euIYtu5E777Djp1gpYtvY5GRETEdenfYrF5M/TtCz/5CYwZA4cdBrNnex1VXS+/DPvvD4ccAvvtB3/7m9cRiYiIuC79E4s774RvvolsB4Nw6aWehRPTypVw/vmwY4dtB4MwejR89pmnYYmkhbVr4fe/h/HjYcECr6MRkX1I/66Qr76CUCiyXV0Nq1bZvuxs7+KqbcECqKyM3lddDfPnQ+/e3sQkkg6+/hqOOw5KSyEQgLvughdegB
"text/plain": [
"<Figure size 640x480 with 1 Axes>"
]
},
"metadata": {},
" |
output_type": "display_data"
}
],
"source": [
"plt.scatter(X_train[:, 0], X_train[:, 1], c=y_train, s=10, cmap='autumn')"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"We will start by generating the key functions for SVM.\n",
"\n",
"Next, we'll define the loss functions and its gradient, with $\\mathbf{L2}$ regularization, both necessary to train our SVM.\n",
"\n",
"In the case of the loss function in SVM, the Hinge Loss ($\\max(0, 1 - y_i \\times (\\mathbf{w} \\cdot \\mathbf{x}_i))$) is used, which measures how far a sample is on the \"wrong side\" of the margin. If the sample is on the correct side of the margin, the loss is 0.\n",
"\n",
"$\\text{Loss Function}$ = $ \\frac{1}{N} \\sum_{i=1}^{N} \\max(0, 1 - y_i \\times (\\mathbf{w} \\cdot \\mathbf{x}_i)) + C \\times \\frac{1}{2} \\times \\mathbf{w} \\cdot \\mathbf{w}$\n",
"\n",
    "$\\text{Gradient}$ = $\\frac{1}{N} \\sum_{i=1}^{N} \\left( -y_i \\times \\mathbf{x}_i \\text{ (if } y_i \\times (\\mathbf{w} \\cdot \\mathbf{x}_i) < 1 \\text{) } \\right) + C \\times \\mathbf{w}$\n",
"\n",
"For the purposes of this tutorial, we initialize $\\mathbf{w}$ as an array of $\\mathbf{0's}$"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"def loss_function(w, X, y, C): \n",
" hinge_loss = np.maximum(0, 1 - y * np.dot(X, w)) \n",
" regularization_term = 0.5 * np.dot(w, w)
" total_loss = np.mean(hinge_loss) + C * regularization_term
" return total_loss\n",
"\n",
"def loss_gradient(w, X, y, C): \n",
" mask = (y * (np.dot(X, w))) < 1
" gradient = (-np.dot(mask * y, X) / len(y)) + C*w\n",
" return gradient\n",
"\n",
"
"losses = []\n",
"w = np.zeros(3)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"
]
},
{
"cell_ |
type": "markdown",
"metadata": {},
"source": [
"Now, we declare the hyperparameters: learning rate (learning_rate), the number of epochs (num_epochs), and the regularization parameter (C). Then, we will use gradient descent to adjust the weights of the SVM model. For the purposes of this tutorial, we stick with the following hyperparameters; however, the hyperplane acquisition could be improved with their adjustment."
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
"learning_rate = 0.01\n",
"num_epochs = 100\n",
"C = 1"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Epoch 0, Loss: 1.0000\n",
"Epoch 25, Loss: 0.5300\n",
"Epoch 50, Loss: 0.4594\n",
"Epoch 75, Loss: 0.4238\n",
"Epoch 99, Loss: 0.4092\n"
]
}
],
"source": [
"for epoch in range(num_epochs):\n",
" loss = loss_function(w,X_train, y_train, C)\n",
" losses.append(loss)\n",
"\n",
" if epoch % 25 == 0 or epoch == 99:\n",
" print(f\"Epoch {epoch}, Loss: {loss:.4f}\")\n",
"\n",
" gradient_w = loss_gradient(w, X_train, y_train,C)\n",
" w -= learning_rate * gradient_w"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"[<matplotlib.lines.Line2D at 0x145d7edd0>]"
]
},
"execution_count": 7,
"metadata": {},
"output_type": "execute_result"
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAiMAAAGdCAYAAADAAnMpAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy88F64QAAAACXBIWXMAAA9hAAAPYQGoP6dpAAA7wklEQVR4nO3deXicdb3
"text/plain": [
"<Figure size 640x480 with 1 Axes>"
]
}, |
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"plt.plot(losses)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"After training the model and observing the decrease of the loss function, we evaluate its performance on both the training and test data. We will calculate the accuracy and display the final loss on the training data. In our case, the weights $\\mathbf{w}$ and the accuracies will be the values against which we compare the SVM implementation in Cairo with Orion."
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Accuracy: 0.99\n",
"Final loss: 0.4089273002134721\n"
]
}
],
"source": [
"def predict(X, w):\n",
" return np.sign(np.dot(X, w))\n",
"\n",
"predictions = predict(X_train, w)\n",
"final_loss = loss_function(w, X_train, y_train,C)\n",
"\n",
"print(\"Accuracy: {}\".format((predictions == y_train).mean()))\n",
"print(\"Final loss: {}\".format(final_loss))"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Accuracy: 0.98\n"
]
}
],
"source": [
"predictions = predict(X_test, w)\n",
"\n",
"print(\"Accuracy: {}\".format((predictions == y_test).mean()))"
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array([ 0.36715632, -0.35873007, 0.12536368])"
]
},
"execution_count": 10,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"w"
]
},
{
"cell_type": "markdown",
"metadata": |
{},
"source": [
"Next, we will visualize the obtained hyperplane, determined by $\\mathbf{w} = (0.367, -0.358, 0.125)$ and the way it separates the classes in the test data."
]
},
{
"cell_type": "code",
"execution_count": 11,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"[<matplotlib.lines.Line2D at 0x145e11cf0>]"
]
},
"execution_count": 11,
"metadata": {},
"output_type": "execute_result"
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAiIAAAGdCAYAAAAvwBgXAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy88F64QAAAACXBIWXMAAA9hAAAPYQGoP6dpAABanklEQVR4nO3dd3hU1dbH8e8kgdBDR5EIKLYkdJByKRYEuV7FdlW8lpvQBaRcREVfC4pRUaT3JICIoCBVegcDSO8ioBRFmmACCIFMzvvHJvQkkzJzZia/z/PMI5k5OWdlhDkra++9tsOyLAsRERERGwTYHYCIiIjkXkpERERExDZKRERERMQ2SkRERETENkpERERExDZKRERERMQ2SkRERETENkpERERExDZBdgeQnpSUFA4dOkThwoVxOBx2hyMiIiIusCyLU6dOUbZsWQIC0q95eHUicujQIUJDQ+0OQ0RERLLg4MGDlCtXLt1jvDoRKVy4MGB+kCJFitgcjYiIiLgiMTGR0NDQS/fx9Hh1IpI6HFOkSBElIiIiIj7GlWkVmqwqIiIitlEiIiIiIrZRIiIiIiK2USIiIiIitlEiIiIiIrZRIiIiIiK2USIiIiIitvHqPiIifuPCBTh6FPLkgVKlQFsWiIgAqoiIuNfvv8P
"text/plain": [
"<Figure size 640x480 with 1 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"plt.scatter(X_test[:, 0], X_test[:, 1], c=y_test, s=50, cmap='autumn')\n",
"\n",
"x_plot = np.linspace(X_test[:, 0].min() - 1, X_test[:, 0].max() + 1, 100)\n",
"y_plot = (-w[0] / w[1]) * x_plot - (w[2]/w[1])
"plt.plot(x_plot, y_plot, 'k-')"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"The equation of the line obtained is $\\mathbf{Y} = 1.023\\mathbf{X} + 0.349$"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"
"\n",
"Now let's generate Cairo files for each tensor in the object."
]
},
{
"cell_type": "c |
ode",
"execution_count": 12,
"metadata": {},
"outputs": [],
"source": [
"def decimal_to_fp16x16(num):\n",
"\n",
" whole_num = int(num)\n",
" fractional_part = int((num - whole_num) * 65536)\n",
" fp_number = (whole_num << 16) + fractional_part\n",
" return fp_number"
]
},
{
"cell_type": "code",
"execution_count": 13,
"metadata": {},
"outputs": [],
"source": [
" |
import os"
]
},
{
"cell_type": "code",
"execution_count": 20,
"metadata": {},
"outputs": [],
"source": [
"os.makedirs(\"src/generated\", exist_ok=True)"
]
},
{
"cell_type": "code",
"execution_count": 21,
"metadata": {},
"outputs": [],
"source": [
"tensor_name = [\"X_train\", \"Y_train\", \"X_test\", \"Y_test\"]\n",
"\n",
"def generate_cairo_files(data, name):\n",
"\n",
" with open(os.path.join('src', 'generated', f\"{name}.cairo\"), \"w\") as f:\n",
" f.write(\n",
" \"use array::ArrayTrait;\\n\" +\n",
" \"use orion::operators::tensor::{Tensor, TensorTrait, FP16x16Tensor};\\n\" +\n",
" \"use orion::numbers::{FixedTrait, FP16x16, FP16x16Impl};\\n\" +\n",
" \"\\n\" + f\"fn {name}() -> Tensor<FP16x16>\" + \"{\\n\\n\" + \n",
" \"let mut shape = ArrayTrait::new();\\n\"\n",
" )\n",
" for dim in data.shape:\n",
" f.write(f\"shape.append({dim});\\n\")\n",
" \n",
" f.write(\"let mut data = ArrayTrait::new();\")\n",
" for val in np.nditer(data.flatten()):\n",
" f.write(f\"data.append(FixedTrait::new({abs(int(decimal_to_fp16x16(val)))}, {str(val < 0).lower()}));\\n\")\n",
" f.write(\n",
" \"let tensor = TensorTrait::<FP16x16>::new(shape.span(), data.span());\\n\" +\n",
" \"return tensor;\\n}\"\n",
" )\n",
"\n",
"with open(f\"src/generated.cairo\", \"w\") as f:\n",
" for n in tensor_name:\n",
" f.write(f\"mod {n};\\n\")\n",
"\n",
"generate_cairo_files(X_train, \"X_train\")\n",
"generate_cairo_files(X_test, \"X_test\")\n",
"generate_cairo_files(y_train, \"Y_train\")\n",
"generate_cairo_files(y_test, \"Y_test\")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
" **Convert hyperparameters to FP16x16**\n"
]
},
{
"cell_type": "code",
"execution_count": 22, |
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"655\n",
"65536\n",
"6553600\n"
]
}
],
"source": [
"print(decimal_to_fp16x16(learning_rate))\n",
"print(decimal_to_fp16x16(C))\n",
"print(decimal_to_fp16x16(num_epochs))"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
" **Get an estimate for the initial and final loss value, and final weights in FP16x16** "
]
},
{
"cell_type": "code",
"execution_count": 23,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Initial loss: 65536\n",
"Final loss: 26799\n",
"Weights: [ 24061 -23509 8215]\n"
]
}
],
"source": [
"w = np.array([decimal_to_fp16x16(w[0]),\n",
"decimal_to_fp16x16(w[1]),\n",
"decimal_to_fp16x16(w[2])])\n",
"\n",
"print(\"Initial loss: {}\".format(decimal_to_fp16x16(losses[0])))\n",
"print(\"Final loss: {}\".format(decimal_to_fp16x16(final_loss)))\n",
"print(\"Weights: {}\".format(w))"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"For the implementation of SVM in Cairo with Orion, please visit the Convert your model section within the [Verifiable Support Vector Machine tutorial][def]\n",
"\n",
"[def]: ../tutorial/VerifiableSupportVectorMachineTutorial.md"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "cairo",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.9"
},
"orig_nbformat": 4
},
"nbformat": 4,
"nbformat_minor": 2
} |
mod X_train;
mod Y_train;
mod X_test;
mod Y_test;
|
use array::ArrayTrait;
use orion::operators::tensor::{Tensor, TensorTrait, FP16x16Tensor};
use orion::numbers::{FixedTrait, FP16x16, FP16x16Impl};
// Generated fixture: 50 test samples x 3 features for the SVM tutorial.
// Each row is (x0, x1, bias) where the third column is the constant bias
// term 1.0 (65536 in raw FP16x16 units); the boolean flag is the sign.
fn X_test() -> Tensor<FP16x16>{
let mut shape = ArrayTrait::new();
shape.append(50);
shape.append(3);
let mut data = ArrayTrait::new();data.append(FixedTrait::new(87946, false));
data.append(FixedTrait::new(38900, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(34695, false));
data.append(FixedTrait::new(249556, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(159279, false));
data.append(FixedTrait::new(4166, true));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(83319, false));
data.append(FixedTrait::new(124029, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(154108, false));
data.append(FixedTrait::new(54263, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(81434, false));
data.append(FixedTrait::new(295173, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(101339, false));
data.append(FixedTrait::new(276101, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(51730, false));
data.append(FixedTrait::new(284261, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(130047, false));
data.append(FixedTrait::new(32083, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(112357, false));
data.append(FixedTrait::new(329332, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(103102, false));
data.append(FixedTrait::new(31715, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(111245, false));
data.append(FixedTrait::new(56762, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(77993, false));
data.append(FixedTrait::new(309836, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(180005, false));
data.append(FixedTrait::new(101281, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(62872, false));
data.append(FixedTrait::new(298895, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(36406, true));
data.append(FixedTrait::new(307754, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(150385, false));
data.append(FixedTrait::new(50193, true));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(106162, false));
data.append(FixedTrait::new(4433, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(79801, false));
data.append(FixedTrait::new(255125, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(95363, false));
data.append(FixedTrait::new(1913, true));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(121, true));
data.append(FixedTrait::new(300250, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(14718, false));
data.append(FixedTrait::new(312625, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(89573, false));
data.append(FixedTrait::new(41613, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(30116, false));
data.append(FixedTrait::new(357159, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(159865, false));
data.append(FixedTrait::new(4752, true));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(61297, false));
data.append(FixedTrait::new(349424, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(116464, false));
data.append(FixedTrait::new(77761, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(70534, false));
data.append(FixedTrait::new(307023, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(155183, false));
data.append(FixedTrait::new(36188, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(133737, false));
data.append(FixedTrait::new(29808, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(145701, false));
data.append(FixedTrait::new(54969, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(105613, false));
data.append(FixedTrait::new(119503, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(138536, false));
data.append(FixedTrait::new(81751, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(89231, false));
data.append(FixedTrait::new(89547, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(37538, false));
data.append(FixedTrait::new(267914, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(211603, false));
data.append(FixedTrait::new(74168, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(157689, false));
data.append(FixedTrait::new(319191, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(145953, false));
data.append(FixedTrait::new(82769, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(69644, false));
data.append(FixedTrait::new(339237, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(108783, false));
data.append(FixedTrait::new(233497, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(120774, false));
data.append(FixedTrait::new(4764, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(38978, false));
data.append(FixedTrait::new(308651, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(122543, false));
data.append(FixedTrait::new(7073, true));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(111206, false));
data.append(FixedTrait::new(49473, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(130347, false));
data.append(FixedTrait::new(98944, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(161886, false));
data.append(FixedTrait::new(86147, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(86653, false));
data.append(FixedTrait::new(273862, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(86507, false));
data.append(FixedTrait::new(92030, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(124252, false));
data.append(FixedTrait::new(339830, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(113347, false));
data.append(FixedTrait::new(75189, false));
data.append(FixedTrait::new(65536, false));
let tensor = TensorTrait::<FP16x16>::new(shape.span(), data.span());
return tensor;
}
use array::ArrayTrait;
use orion::operators::tensor::{Tensor, TensorTrait, FP16x16Tensor};
use orion::numbers::{FixedTrait, FP16x16, FP16x16Impl};
fn X_train() -> Tensor<FP16x16>{
let mut shape = ArrayTrait::new();
shape.append(100);
shape.append(3);
let mut data = ArrayTrait::new();
data.append(FixedTrait::new(165613, false));
data.append(FixedTrait::new(40488, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(101228, false));
data.append(FixedTrait::new(275957, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(65780, false));
data.append(FixedTrait::new(274692, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(82291, false));
data.append(FixedTrait::new(221645, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(122469, false));
data.append(FixedTrait::new(62659, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(83856, false));
data.append(FixedTrait::new(69330, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(70073, false));
data.append(FixedTrait::new(296922, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(70957, false));
data.append(FixedTrait::new(266254, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(193459, false));
data.append(FixedTrait::new(22565, true));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(50300, false));
data.append(FixedTrait::new(288200, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(29071, false));
data.append(FixedTrait::new(204164, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(140848, false));
data.append(FixedTrait::new(67959, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(165042, false));
data.append(FixedTrait::new(91210, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(13314 |
9, false));
data.append(FixedTrait::new(12897, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(172026, false));
data.append(FixedTrait::new(62271, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(519, false));
data.append(FixedTrait::new(273687, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(3112, true));
data.append(FixedTrait::new(358760, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(130822, false));
data.append(FixedTrait::new(32740, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(78697, false));
data.append(FixedTrait::new(39431, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(155270, false));
data.append(FixedTrait::new(52083, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(122730, false));
data.append(FixedTrait::new(273985, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(59922, false));
data.append(FixedTrait::new(298198, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(39038, false));
data.append(FixedTrait::new(267789, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(109040, false));
data.append(FixedTrait::new(43456, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(109628, false));
data.append(FixedTrait::new(43207, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(18165, false));
data.append(FixedTrait::new(317474, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(161818, false));
data.append(FixedTrait::new(110019, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(151455, false));
data.append(FixedTrait::new(85446, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(54844, false));
data.append(FixedTrait::new(140008, false)); |
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(129348, false));
data.append(FixedTrait::new(103533, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(134024, false));
data.append(FixedTrait::new(73738, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(37464, false));
data.append(FixedTrait::new(283304, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(43940, false));
data.append(FixedTrait::new(264827, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(109385, false));
data.append(FixedTrait::new(28598, true));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(219755, false));
data.append(FixedTrait::new(111383, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(120177, false));
data.append(FixedTrait::new(49416, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(66596, false));
data.append(FixedTrait::new(293946, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(91267, false));
data.append(FixedTrait::new(60880, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(72170, false));
data.append(FixedTrait::new(320456, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(223533, false));
data.append(FixedTrait::new(57167, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(138029, false));
data.append(FixedTrait::new(229056, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(102711, false));
data.append(FixedTrait::new(1167, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(58556, false));
data.append(FixedTrait::new(66252, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(94025, false));
data.append(FixedTrait::new(85630, false));
data.append(FixedTrait::new(65536, false));
data.append |
(FixedTrait::new(5342, false));
data.append(FixedTrait::new(299330, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(97971, false));
data.append(FixedTrait::new(252869, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(46869, false));
data.append(FixedTrait::new(354769, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(137416, false));
data.append(FixedTrait::new(243624, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(139593, false));
data.append(FixedTrait::new(340269, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(110332, false));
data.append(FixedTrait::new(274978, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(48127, false));
data.append(FixedTrait::new(330121, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(17851, false));
data.append(FixedTrait::new(358479, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(155294, false));
data.append(FixedTrait::new(62306, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(97104, false));
data.append(FixedTrait::new(45224, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(28769, false));
data.append(FixedTrait::new(297266, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(47716, false));
data.append(FixedTrait::new(252661, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(107481, false));
data.append(FixedTrait::new(119242, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(28304, false));
data.append(FixedTrait::new(284095, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(36669, false));
data.append(FixedTrait::new(276169, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(93592, false));
data.append(FixedT |
rait::new(106453, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(140275, false));
data.append(FixedTrait::new(46272, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(170483, false));
data.append(FixedTrait::new(71302, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(32006, false));
data.append(FixedTrait::new(214172, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(129468, false));
data.append(FixedTrait::new(47119, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(178995, false));
data.append(FixedTrait::new(16364, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(64393, false));
data.append(FixedTrait::new(352276, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(137416, false));
data.append(FixedTrait::new(317680, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(76291, false));
data.append(FixedTrait::new(248468, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(22750, false));
data.append(FixedTrait::new(226215, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(92649, false));
data.append(FixedTrait::new(287124, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(74107, false));
data.append(FixedTrait::new(61316, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(53442, false));
data.append(FixedTrait::new(313606, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(14023, false));
data.append(FixedTrait::new(320171, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(170903, false));
data.append(FixedTrait::new(71362, false));
data.append(FixedTrait::new(65536, false));
data.append(FixedTrait::new(29739, false));
data.append(FixedTrait::new(259291, false));
data.append(FixedTrait::ne |
// NOTE(review): the three lines previously here ("Subsets and Splits", "No saved
// queries yet", "Save your SQL queries to embed, download, and access them later...")
// were dataset-viewer UI text accidentally captured during extraction, not Cairo
// source. They are neutralized here; the preceding `X_train` data block is also
// truncated mid-statement and needs to be restored from the original file.