{
"cells": [
{
"cell_type": "code",
"execution_count": 2,
"id": "806d60dd-cc27-4d23-add1-7cd28bbaa0fa",
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"C:\\Users\\mistr\\anaconda3\\Lib\\site-packages\\gradio\\utils.py:953: UserWarning: Expected 1 arguments for function <function predict_user_profile at 0x000001DFE61E2840>, received 7.\n",
" warnings.warn(\n",
"C:\\Users\\mistr\\anaconda3\\Lib\\site-packages\\gradio\\utils.py:961: UserWarning: Expected maximum 1 arguments for function <function predict_user_profile at 0x000001DFE61E2840>, received 7.\n",
" warnings.warn(\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Running on local URL: http://127.0.0.1:7861\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"Traceback (most recent call last):\n",
" File \"C:\\Users\\mistr\\anaconda3\\Lib\\site-packages\\gradio\\queueing.py\", line 527, in process_events\n",
" response = await route_utils.call_process_api(\n",
" ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
" File \"C:\\Users\\mistr\\anaconda3\\Lib\\site-packages\\gradio\\route_utils.py\", line 270, in call_process_api\n",
" output = await app.get_blocks().process_api(\n",
" ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
" File \"C:\\Users\\mistr\\anaconda3\\Lib\\site-packages\\gradio\\blocks.py\", line 1847, in process_api\n",
" result = await self.call_function(\n",
" ^^^^^^^^^^^^^^^^^^^^^^^^^\n",
" File \"C:\\Users\\mistr\\anaconda3\\Lib\\site-packages\\gradio\\blocks.py\", line 1433, in call_function\n",
" prediction = await anyio.to_thread.run_sync(\n",
" ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
" File \"C:\\Users\\mistr\\anaconda3\\Lib\\site-packages\\anyio\\to_thread.py\", line 56, in run_sync\n",
" return await get_async_backend().run_sync_in_worker_thread(\n",
" ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
" File \"C:\\Users\\mistr\\anaconda3\\Lib\\site-packages\\anyio\\_backends\\_asyncio.py\", line 2134, in run_sync_in_worker_thread\n",
" return await future\n",
" ^^^^^^^^^^^^\n",
" File \"C:\\Users\\mistr\\anaconda3\\Lib\\site-packages\\anyio\\_backends\\_asyncio.py\", line 851, in run\n",
" result = context.run(func, *args)\n",
" ^^^^^^^^^^^^^^^^^^^^^^^^\n",
" File \"C:\\Users\\mistr\\anaconda3\\Lib\\site-packages\\gradio\\utils.py\", line 788, in wrapper\n",
" response = f(*args, **kwargs)\n",
" ^^^^^^^^^^^^^^^^^^\n",
"TypeError: predict_user_profile() takes 1 positional argument but 7 were given\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"\n",
"Could not create share link. Missing file: C:\\Users\\mistr\\anaconda3\\Lib\\site-packages\\gradio\\frpc_windows_amd64_v0.2. \n",
"\n",
"Please check your internet connection. This can happen if your antivirus software blocks the download of this file. You can install manually by following these steps: \n",
"\n",
"1. Download this file: https://cdn-media.huggingface.co/frpc-gradio-0.2/frpc_windows_amd64.exe\n",
"2. Rename the downloaded file to: frpc_windows_amd64_v0.2\n",
"3. Move the file to this location: C:\\Users\\mistr\\anaconda3\\Lib\\site-packages\\gradio\n"
]
},
{
"data": {
"text/html": [
"<div><iframe src=\"http://127.0.0.1:7861/\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
],
"text/plain": [
"<IPython.core.display.HTML object>"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/plain": []
},
"execution_count": 2,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"import gradio as gr\n",
"import pandas as pd\n",
"import pickle\n",
"from sklearn.preprocessing import LabelEncoder\n",
"\n",
"# Load the trained model from data.pkl\n",
"def load_model():\n",
" with open('data.pkl', 'rb') as file:\n",
" model = pickle.load(file)\n",
" return model\n",
"\n",
"# Define the prediction function using the loaded model\n",
"def predict_user_profile(inputs):\n",
" # Preprocess the input data\n",
" lang_encoder = LabelEncoder()\n",
" lang_code = lang_encoder.fit_transform([inputs['Language']])[0]\n",
"\n",
" # Create a DataFrame from the user input dictionary\n",
" df = pd.DataFrame.from_dict([inputs])\n",
"\n",
" # Select the relevant feature columns used during model training\n",
" feature_columns_to_use = ['statuses_count', 'followers_count', 'friends_count',\n",
" 'favourites_count', 'listed_count', 'lang_code']\n",
" df_features = df[feature_columns_to_use]\n",
"\n",
" # Load the pre-trained model\n",
" model = load_model()\n",
"\n",
" # Make predictions using the loaded model\n",
" prediction = model.predict(df_features)\n",
"\n",
" # Return the predicted class label (0 for fake, 1 for genuine)\n",
" return \"Genuine\" if prediction[0] == 1 else \"Fake\"\n",
"\n",
"# Define the Gradio interface\n",
"inputs = [\n",
" gr.Textbox(label=\"statuses_count\"),\n",
" gr.Textbox(label=\"followers_count\"),\n",
" gr.Textbox(label=\"friends_count\"),\n",
" gr.Textbox(label=\"favourites_count\"),\n",
" gr.Textbox(label=\"listed_count\"),\n",
" gr.Textbox(label=\"name\"),\n",
" gr.Textbox(label=\"Language\"),\n",
"]\n",
"\n",
"outputs = gr.Textbox(label=\"Prediction\")\n",
"\n",
"# Create the Gradio interface\n",
"interface = gr.Interface(fn=predict_user_profile, inputs=inputs, outputs=outputs,\n",
" title='User Profile Classifier',\n",
" description='Predict whether a user profile is genuine or fake.')\n",
"\n",
"interface.launch(share=True)\n"
]
},
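{
"cell_type": "markdown",
"id": "b7d1a2c0-1111-4aaa-9bbb-000000000001",
"metadata": {},
"source": [
"The cell above loads a pre-trained model from `data.pkl` but does not show how that file was produced. The sketch below is illustration only: it assumes a scikit-learn classifier trained on the same six numeric features. The `RandomForestClassifier` choice, the `users.csv` file name, and the `lang`/`label` column names are assumptions, not the original training pipeline."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "b7d1a2c0-1111-4aaa-9bbb-000000000002",
"metadata": {},
"outputs": [],
"source": [
"# Sketch only: one way a model compatible with load_model() could be created.\n",
"# File names, column names, and the classifier choice are assumptions.\n",
"import pandas as pd\n",
"import pickle\n",
"from sklearn.ensemble import RandomForestClassifier\n",
"from sklearn.preprocessing import LabelEncoder\n",
"\n",
"# Hypothetical labelled dataset with the six feature columns and a 0/1 label\n",
"users = pd.read_csv('users.csv')\n",
"users['lang_code'] = LabelEncoder().fit_transform(users['lang'])\n",
"\n",
"features = ['statuses_count', 'followers_count', 'friends_count',\n",
"            'favourites_count', 'listed_count', 'lang_code']\n",
"X = users[features]\n",
"y = users['label']  # assumed encoding: 1 = genuine, 0 = fake\n",
"\n",
"model = RandomForestClassifier(n_estimators=100, random_state=42)\n",
"model.fit(X, y)\n",
"\n",
"# Persist the fitted model so load_model() can read it back\n",
"with open('data.pkl', 'wb') as file:\n",
"    pickle.dump(model, file)\n"
]
},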
{
"cell_type": "code",
"execution_count": null,
"id": "02a483bc-0d49-45e5-908e-eab4769ac7af",
"metadata": {},
"outputs": [],
"source": []
},
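{
"cell_type": "markdown",
"id": "b7d1a2c0-1111-4aaa-9bbb-000000000003",
"metadata": {},
"source": [
"The prediction function can also be exercised directly, without launching the Gradio UI. The values below are arbitrary placeholders and assume `data.pkl` is present next to the notebook."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "b7d1a2c0-1111-4aaa-9bbb-000000000004",
"metadata": {},
"outputs": [],
"source": [
"# Direct call with arbitrary placeholder values (requires data.pkl on disk)\n",
"print(predict_user_profile(\n",
"    statuses_count=1200,\n",
"    followers_count=350,\n",
"    friends_count=400,\n",
"    favourites_count=90,\n",
"    listed_count=5,\n",
"    name='example_user',\n",
"    language='en',\n",
"))\n"
]
},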
{
"cell_type": "code",
"execution_count": null,
"id": "c1443145-496e-4cc4-a516-0ea70e4cc1fb",
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"id": "58478763-b66d-4841-a965-089b1681b3c0",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.7"
}
},
"nbformat": 4,
"nbformat_minor": 5
}