hexsha (stringlengths 40–40) | size (int64 6–14.9M) | ext (stringclasses 1 value) | lang (stringclasses 1 value) | max_stars_repo_path (stringlengths 6–260) | max_stars_repo_name (stringlengths 6–119) | max_stars_repo_head_hexsha (stringlengths 40–41) | max_stars_repo_licenses (sequence) | max_stars_count (int64 1–191k ⌀) | max_stars_repo_stars_event_min_datetime (stringlengths 24–24 ⌀) | max_stars_repo_stars_event_max_datetime (stringlengths 24–24 ⌀) | max_issues_repo_path (stringlengths 6–260) | max_issues_repo_name (stringlengths 6–119) | max_issues_repo_head_hexsha (stringlengths 40–41) | max_issues_repo_licenses (sequence) | max_issues_count (int64 1–67k ⌀) | max_issues_repo_issues_event_min_datetime (stringlengths 24–24 ⌀) | max_issues_repo_issues_event_max_datetime (stringlengths 24–24 ⌀) | max_forks_repo_path (stringlengths 6–260) | max_forks_repo_name (stringlengths 6–119) | max_forks_repo_head_hexsha (stringlengths 40–41) | max_forks_repo_licenses (sequence) | max_forks_count (int64 1–105k ⌀) | max_forks_repo_forks_event_min_datetime (stringlengths 24–24 ⌀) | max_forks_repo_forks_event_max_datetime (stringlengths 24–24 ⌀) | avg_line_length (float64 2–1.04M) | max_line_length (int64 2–11.2M) | alphanum_fraction (float64 0–1) | cells (sequence) | cell_types (sequence) | cell_type_groups (sequence) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
d099987250f26c19a7d3890a6c9b9a288fe2a248 | 151,383 | ipynb | Jupyter Notebook | multi-modal_concatenate_fusion.ipynb | depshad/Deep-Learning-Framework-for-Multi-modal-Product-Classification | 78bab91255817a7f816a5ee2669a50f395f10001 | [
"MIT"
] | 22 | 2020-07-30T06:53:16.000Z | 2022-03-25T19:38:03.000Z | multi-modal_concatenate_fusion.ipynb | depshad/Deep-Learning-Framework-for-Multi-modal-Product-Classification | 78bab91255817a7f816a5ee2669a50f395f10001 | [
"MIT"
] | 1 | 2020-08-08T16:46:49.000Z | 2020-08-08T16:51:41.000Z | multi-modal_concatenate_fusion.ipynb | depshad/Deep-Learning-Framework-for-Multi-modal-Product-Classification | 78bab91255817a7f816a5ee2669a50f395f10001 | [
"MIT"
] | 6 | 2020-07-30T06:53:36.000Z | 2022-03-07T05:07:17.000Z | 32.845086 | 144 | 0.554382 | [
[
[
"import os, time, datetime\nimport numpy as np\nimport pandas as pd\nfrom tqdm.notebook import tqdm\nimport random\nimport logging\ntqdm.pandas()\nimport seaborn as sns\nfrom sklearn.model_selection import train_test_split\n\n#NN Packages\nimport torch\nimport torch.nn as nn\nfrom torch.utils.data import TensorDataset, random_split,DataLoader, RandomSampler, SequentialSampler\n\nlogger = logging.getLogger(__name__)\n\n\nif torch.cuda.is_available(): \n\n # Tell PyTorch to use the GPU. \n device = torch.device(\"cuda\")\n\n print('There are %d GPU(s) available.' % torch.cuda.device_count())\n\n print('We will use the GPU:', torch.cuda.get_device_name(0))\n\n# If not...\nelse:\n print('No GPU available, using the CPU instead.')\n device = torch.device(\"cpu\")\n",
"_____no_output_____"
],
[
"def format_time(elapsed):\n '''\n Takes a time in seconds and returns a string hh:mm:ss\n '''\n # Round to the nearest second.\n elapsed_rounded = int(round((elapsed)))\n \n # Format as hh:mm:ss\n return str(datetime.timedelta(seconds=elapsed_rounded))\n\nclass SigirPreprocess():\n \n def __init__(self, text_data_path):\n self.text_data_path = text_data_path\n self.train = None\n self.dict_code_to_id = {}\n self.dict_id_to_code = {}\n self.list_tags = {}\n self.sentences = []\n self.labels = []\n self.text_col = None\n self.X_test = None\n def prepare_data(self ):\n catalog_eng= pd.read_csv(self.text_data_path+\"data/catalog_english_taxonomy.tsv\",sep=\"\\t\")\n X_train= pd.read_csv(self.text_data_path+\"data/X_train.tsv\",sep=\"\\t\")\n Y_train= pd.read_csv(self.text_data_path+\"data/Y_train.tsv\",sep=\"\\t\")\n \n self.list_tags = list(Y_train['Prdtypecode'].unique())\n for i,tag in enumerate(self.list_tags):\n self.dict_code_to_id[tag] = i \n self.dict_id_to_code[i]=tag\n print(self.dict_code_to_id)\n \n Y_train['labels']=Y_train['Prdtypecode'].map(self.dict_code_to_id)\n train=pd.merge(left=X_train,right=Y_train,\n how='left',left_on=['Integer_id','Image_id','Product_id'],\n right_on=['Integer_id','Image_id','Product_id'])\n prod_map=pd.Series(catalog_eng['Top level category'].values,\n index=catalog_eng['Prdtypecode']).to_dict()\n\n train['product'] = train['Prdtypecode'].map(prod_map)\n train['title_len']=train['Title'].progress_apply(lambda x : len(x.split()) if pd.notna(x) else 0)\n train['desc_len']=train['Description'].progress_apply(lambda x : len(x.split()) if pd.notna(x) else 0)\n train['title_desc_len']=train['title_len'] + train['desc_len']\n train.loc[train['Description'].isnull(), 'Description'] = \" \"\n train['title_desc'] = train['Title'] + \" \" + train['Description']\n \n self.train = train\n \n def get_sentences(self, text_col, remove_null_rows=False):\n self.text_col = text_col\n if remove_null_rows==True:\n new_train = self.train[self.train[text_col].notnull()]\n\n else:\n new_train = self.train.copy()\n \n self.sentences = new_train[text_col].values\n self.labels = new_train['labels'].values\n \n def prepare_test(self, text_col):\n X_test=pd.read_csv(self.text_data_path+\"data/x_test_task1_phase1.tsv\",sep=\"\\t\")\n X_test.loc[X_test['Description'].isnull(), 'Description'] = \" \"\n X_test['title_desc'] = X_test['Title'] + \" \" + X_test['Description']\n self.X_test = X_test\n self.test_sentences = X_test[text_col].values\n ",
"_____no_output_____"
],
[
"text_col = 'title_desc'\nmax_len = 256\nval_size = 0.1",
"_____no_output_____"
],
[
"Preprocess = SigirPreprocess(\"/kaggle/input/textphase1/\")\nPreprocess.prepare_data()\nPreprocess.get_sentences(text_col, True)",
"_____no_output_____"
],
[
"sentences = Preprocess.sentences\nlabels = Preprocess.labels\nprint(\"Total number of sentences:{}, labels:{}\".format(len(sentences), len(labels)))",
"_____no_output_____"
],
[
"#function to prepare input for model training\ndef prep_input(sentences,labels, max_len,tokenizer):\n input_ids = []\n attention_masks = []\n\n # For every sentence...\n for sent in tqdm(sentences):\n # `encode_plus` will:\n # (1) Tokenize the sentence.\n # (2) Prepend the `[CLS]` token to the start.\n # (3) Append the `[SEP]` token to the end.\n # (4) Map tokens to their IDs.\n # (5) Pad or truncate the sentence to `max_length`\n # (6) Create attention masks for [PAD] tokens.\n encoded_dict = tokenizer.encode_plus(\n sent, # Sentence to encode.\n add_special_tokens = True, # Add '[CLS]' and '[SEP]'\n max_length = max_len, # Pad & truncate all sentences.\n pad_to_max_length = True,\n return_attention_mask = True, # Construct attn. masks.\n return_tensors = 'pt', # Return pytorch tensors.\n )\n\n # Add the encoded sentence to the list. \n input_ids.append(encoded_dict['input_ids'])\n\n # And its attention mask (simply differentiates padding from non-padding).\n attention_masks.append(encoded_dict['attention_mask'])\n\n # Convert the lists into tensors.\n input_ids = torch.cat(input_ids, dim=0)\n attention_masks = torch.cat(attention_masks, dim=0)\n if labels is not None:\n labels = torch.tensor(labels)\n return input_ids,attention_masks,labels\n else:\n return input_ids,attention_masks\n ",
"_____no_output_____"
],
[
"text_input='../input/multi-modal-input-text/'\ntr_inputs_cam=torch.load(text_input+\"tr_inputs_cam.pt\")\nval_inputs_cam=torch.load(text_input+\"val_inputs_cam.pt\")\ntr_masks_cam=torch.load( text_input+\"tr_masks_cam.pt\")\nval_masks_cam=torch.load( text_input+\"val_masks_cam.pt\")\n\ntr_inputs_flau=torch.load(text_input+\"tr_inputs_flau.pt\")\nval_inputs_flau=torch.load(text_input+\"val_inputs_flau.pt\")\ntr_masks_flau=torch.load(text_input+\"tr_masks_flau.pt\")\nval_masks_flau=torch.load(text_input+\"val_masks_flau.pt\")",
"_____no_output_____"
],
[
"!pip install pretrainedmodels",
"_____no_output_____"
],
[
"from transformers import CamembertConfig, CamembertTokenizer, CamembertModel, CamembertForSequenceClassification, AdamW\nfrom transformers import FlaubertModel, FlaubertTokenizer,FlaubertForSequenceClassification,AdamW, FlaubertConfig \nfrom transformers.modeling_roberta import RobertaClassificationHead\nfrom transformers.modeling_utils import SequenceSummary",
"_____no_output_____"
],
[
"from torch.nn import functional as F\nimport torch.nn as nn\nimport pretrainedmodels\nclass SEResnext50_32x4d(nn.Module):\n def __init__(self, pretrained='imagenet'):\n super(SEResnext50_32x4d, self).__init__()\n \n self.base_model = pretrainedmodels.__dict__[\"se_resnext50_32x4d\"](pretrained=None)\n if pretrained is not None:\n self.base_model.load_state_dict(\n torch.load(\"../input/pretrained-model-weights-pytorch/se_resnext50_32x4d-a260b3a4.pth\"\n )\n )\n self.l0 = nn.Linear(2048, 27)\n \n def forward(self, image):\n batch_size, _, _, _ = image.shape\n \n x = self.base_model.features(image)\n x = F.adaptive_avg_pool2d(x, 1).reshape(batch_size, -1)\n \n out = self.l0(x)\n\n return out",
"_____no_output_____"
],
[
"class Identity(nn.Module):\n def __init__(self):\n super(Identity, self).__init__()\n \n def forward(self, x):\n return x",
"_____no_output_____"
],
[
"class vec_output_CamembertForSequenceClassification(CamembertModel):\n config_class = CamembertConfig\n\n def __init__(self, config):\n super().__init__(config)\n self.num_labels = config.num_labels\n\n self.roberta = CamembertModel(config)\n self.dense = nn.Linear(256*config.hidden_size, config.hidden_size)\n self.dropout = nn.Dropout(0.1)\n self.out_proj = nn.Linear(config.hidden_size, config.num_labels)\n self.init_weights()\n\n\n def forward(\n self,\n input_ids=None,\n attention_mask=None,\n token_type_ids=None,\n position_ids=None,\n head_mask=None,\n inputs_embeds=None,\n labels=None,\n output_attentions=None,\n output_hidden_states=None,\n ):\n outputs = self.roberta(\n input_ids,\n attention_mask=attention_mask,\n token_type_ids=token_type_ids,\n position_ids=position_ids,\n head_mask=head_mask,\n inputs_embeds=inputs_embeds,\n# output_attentions=output_attentions,\n# output_hidden_states=output_hidden_states,\n )\n sequence_output = outputs[0] #(B,256,768)\n x = sequence_output.view(sequence_output.shape[0], 256*768)\n# x = sequence_output[:, 0, :] # take <s> token (equiv. to [CLS])-> #(B,768) Image -> (B,2048)\n x = self.dense(x) # 768 -> 768\n feat= torch.tanh(x) \n logits = self.out_proj(feat) # 768 -> 27\n outputs = (logits,) + outputs[2:]\n\n return outputs,feat # (loss), logits, (hidden_states), (attentions)",
"_____no_output_____"
],
[
"num_classes = 27\n\nclass vec_output_FlaubertForSequenceClassification(FlaubertModel):\n \n config_class = FlaubertConfig\n \n\n def __init__(self, config):\n super().__init__(config)\n self.transformer = FlaubertModel(config)\n self.sequence_summary = SequenceSummary(config)\n self.init_weights()\n self.dropout = torch.nn.Dropout(0.1)\n self.classifier = torch.nn.Linear(config.hidden_size, num_classes)\n\n\n def forward(\n self,\n input_ids=None,\n attention_mask=None,\n langs=None,\n token_type_ids=None,\n position_ids=None,\n lengths=None,\n cache=None,\n head_mask=None,\n inputs_embeds=None,\n labels=None,\n ):\n \n \n transformer_outputs = self.transformer(\n input_ids,\n attention_mask=attention_mask,\n langs=langs,\n token_type_ids=token_type_ids,\n position_ids=position_ids,\n lengths=lengths,\n cache=cache,\n head_mask=head_mask,\n inputs_embeds=inputs_embeds,\n )\n\n #output = self.dropout(output)\n output = transformer_outputs[0]\n vec = output[:,0]\n \n \n #logits\n dense = self.dropout(vec)\n \n #classifier\n logits = self.classifier(dense)\n \n outputs = (logits,) + transformer_outputs[1:] # Keep new_mems and attention/hidden states if they are here\n \n \n return outputs,dense\n",
"_____no_output_____"
]
],
[
[
"### Image data prep",
"_____no_output_____"
]
],
[
[
"catalog_eng= pd.read_csv(\"/kaggle/input/textphase1/data/catalog_english_taxonomy.tsv\",sep=\"\\t\")\nX_train= pd.read_csv(\"/kaggle/input/textphase1/data/X_train.tsv\",sep=\"\\t\")\nY_train= pd.read_csv(\"/kaggle/input/textphase1/data/Y_train.tsv\",sep=\"\\t\")\nX_test=pd.read_csv(\"/kaggle/input/textphase1/data/x_test_task1_phase1.tsv\",sep=\"\\t\")\ndict_code_to_id = {}\ndict_id_to_code={}\nlist_tags = list(Y_train['Prdtypecode'].unique())\n\nfor i,tag in enumerate(list_tags):\n dict_code_to_id[tag] = i \n dict_id_to_code[i]=tag\nY_train['labels']=Y_train['Prdtypecode'].map(dict_code_to_id)\ntrain=pd.merge(left=X_train,right=Y_train,\n how='left',left_on=['Integer_id','Image_id','Product_id'],\n right_on=['Integer_id','Image_id','Product_id'])\nprod_map=pd.Series(catalog_eng['Top level category'].values,index=catalog_eng['Prdtypecode']).to_dict()\ntrain['product']=train['Prdtypecode'].map(prod_map)\n\ndef get_img_path(img_id,prd_id,path):\n \n pattern = 'image'+'_'+str(img_id)+'_'+'product'+'_'+str(prd_id)+'.jpg'\n return path + pattern\ntrain_img = train[['Image_id','Product_id','labels','product']]\n\ntrain_img['image_path']=train_img.progress_apply(lambda x: get_img_path(x['Image_id'],x['Product_id'],\n path = '/kaggle/input/imagetrain/image_training/'),axis=1)\nX_test['image_path']=X_test.progress_apply(lambda x: get_img_path(x['Image_id'],x['Product_id'],\n path='/kaggle/input/imagetest/image_test/image_test_task1_phase1/'),axis=1)\ntrain_df, val_df, _, _ = train_test_split(train_img, train_img['labels'],random_state=2020, test_size = 0.1, stratify=train_img['labels'])",
"_____no_output_____"
],
[
"input_size = 224 # for Resnt\n# Applying Transforms to the Data\nfrom torchvision import datasets, models, transforms\n\nimage_transforms = { \n 'train': transforms.Compose([\n transforms.RandomResizedCrop(size=256, scale=(0.8, 1.0)),\n transforms.RandomRotation(degrees=15),\n transforms.RandomHorizontalFlip(),\n transforms.Resize(size=256),\n transforms.CenterCrop(size=input_size),\n transforms.ToTensor(),\n transforms.Normalize([0.485, 0.456, 0.406],\n [0.229, 0.224, 0.225])\n ]),\n 'valid': transforms.Compose([\n transforms.Resize(size=256),\n transforms.CenterCrop(size=input_size),\n transforms.ToTensor(),\n transforms.Normalize([0.485, 0.456, 0.406],\n [0.229, 0.224, 0.225])\n ]),\n 'test': transforms.Compose([\n transforms.Resize(size=256),\n transforms.CenterCrop(size=input_size),\n transforms.ToTensor(),\n transforms.Normalize([0.485, 0.456, 0.406],\n [0.229, 0.224, 0.225])\n ])\n}",
"_____no_output_____"
],
[
"from torch.utils.data import Dataset, DataLoader, Subset\nimport cv2\nfrom PIL import Image\n\nclass FusionDataset(Dataset):\n \n def __init__(self,df,inputs_cam,masks_cam,inputs_flau,masks_flau,transform=None,mode='train'):\n self.df = df\n self.transform=transform\n self.mode=mode\n self.inputs_cam=inputs_cam\n self.masks_cam=masks_cam\n self.inputs_flau=inputs_flau\n self.masks_flau=masks_flau\n \n def __len__(self):\n return len(self.df)\n \n def __getitem__(self,idx):\n \n im_path = self.df.iloc[idx]['image_path']\n img = cv2.imread(im_path)\n img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)\n img=Image.fromarray(img)\n if self.transform is not None:\n img = self.transform(img)\n img=img.cuda()\n input_id_cam=self.inputs_cam[idx].cuda()\n input_mask_cam=self.masks_cam[idx].cuda()\n input_id_flau=self.inputs_flau[idx].cuda()\n input_mask_flau=self.masks_flau[idx].cuda()\n \n if self.mode=='test':\n return img,input_id_cam,input_mask_cam,input_id_flau,input_mask_flau\n else:\n# labels = torch.tensor(self.df.iloc[idx]['labels'])\n labels = torch.tensor(self.df.iloc[idx]['labels']).cuda() \n\n return img,input_id_cam,input_mask_cam,input_id_flau,input_mask_flau,labels",
"_____no_output_____"
],
[
"a1 = torch.randn(3,10,10)\n",
"_____no_output_____"
],
[
"reduce_dim=nn.Conv1d(in_channels = 10 , out_channels = 1 , kernel_size= 1)",
"_____no_output_____"
],
[
"reduce_dim(a1).view(3,10).shape",
"_____no_output_____"
],
[
"class vector_fusion(nn.Module):\n \n def __init__(self):\n super(vector_fusion, self).__init__()\n self.img_model = SEResnext50_32x4d(pretrained=None)\n self.img_model.load_state_dict(torch.load('../input/seresnext2048/best_model.pt'))\n self.img_model.l0=Identity()\n for params in self.img_model.parameters():\n params.requires_grad=False\n\n self.cam_model= vec_output_CamembertForSequenceClassification.from_pretrained(\n 'camembert-base', # Use the 12-layer BERT model, with an uncased vocab.\n num_labels = len(Preprocess.dict_code_to_id), # The number of output labels--2 for binary classification.\n # You can increase this for multi-class tasks. \n output_attentions = False, # Whether the model returns attentions weights.\n output_hidden_states = False,) # Whether the model returns all hidden-states.\n \n \n cam_model_path = '../input/camembert-vec-256m768-10ep/best_model.pt'\n checkpoint = torch.load(cam_model_path)\n # model = checkpoint['model']\n self.cam_model.load_state_dict(checkpoint)\n for param in self.cam_model.parameters():\n param.requires_grad=False\n self.cam_model.out_proj=Identity()\n \n self.flau_model=vec_output_FlaubertForSequenceClassification.from_pretrained(\n 'flaubert/flaubert_base_cased', \n num_labels = len(Preprocess.dict_code_to_id), \n output_attentions = False,\n output_hidden_states = False,)\n flau_model_path='../input/flaubert-8933/best_model.pt'\n checkpoint = torch.load(flau_model_path)\n self.flau_model.load_state_dict(checkpoint)\n for param in self.flau_model.parameters():\n param.requires_grad=False\n self.flau_model.classifier=Identity()\n \n\n self.reduce_dim=nn.Conv1d(in_channels = 2048 , out_channels = 768 , kernel_size= 1)\n self.reduce_dim2=nn.Conv1d(in_channels = 768 , out_channels = 1 , kernel_size= 1)\n self.out=nn.Linear(768*3, 27)\n \n #gamma\n# self.w1 = nn.Parameter(torch.zeros(1))\n# self.w2 = nn.Parameter(torch.zeros(1))\n# self.w3 = nn.Parameter(torch.zeros(1))\n \n def forward(self,img,input_id_cam,input_mask_cam,input_id_flau,input_mask_flau):\n \n cam_emb,vec1 =self.cam_model(input_id_cam, \n token_type_ids=None, \n attention_mask=input_mask_cam)\n flau_emb,vec2 =self.flau_model(input_id_flau, \n token_type_ids=None, \n attention_mask=input_mask_flau)\n \n #Projecting the image embedding to lower dimension\n img_emb=self.img_model(img)\n img_emb=img_emb.view(img_emb.shape[0],img_emb.shape[1],1)\n img_emb=self.reduce_dim(img_emb)\n img_emb=img_emb.view(img_emb.shape[0],img_emb.shape[1]) ###### bs * 768 \n \n #summing up the vectors\n #text_emb = cam_emb[0] + flau_emb[0]\n \n #Bilinear\n #text_emb = text_emb.view(text_emb.shape[0],1,text_emb.shape[1]) ##### bs * 1 * 768\n \n #Bilinear Pooling\n #pool_emb = torch.bmm(img_emb,text_emb) ### bs * 768 * 768\n #pool_emb = self.reduce_dim2(pool_emb).view(text_emb.shape[0],768) #### bs * 1 * 768\n fuse= torch.cat([img_emb,cam_emb[0],flau_emb[0]],axis=1)\n \n logits=self.out(fuse)\n return logits\n \n \n ",
"_____no_output_____"
],
[
"model=vector_fusion()",
"_____no_output_____"
],
[
"model.cuda()",
"_____no_output_____"
],
[
"train_dataset=FusionDataset(train_df,tr_inputs_cam,tr_masks_cam,tr_inputs_flau,tr_masks_flau,transform=image_transforms['test'])\nval_dataset=FusionDataset(val_df,val_inputs_cam,val_masks_cam,val_inputs_flau,val_masks_flau,transform=image_transforms['test'])\n# test_dataset=FusionDataset(X_test,test_inputs,test_makss,transform=image_transforms['test'],mode='test')",
"_____no_output_____"
],
[
"batch_size=64\ntrain_dataloader=DataLoader(train_dataset,batch_size=batch_size,shuffle=True)\nvalidation_dataloader=DataLoader(val_dataset,batch_size=batch_size,shuffle=False)\n# test_data=DataLoader(test_dataset,batch_size=batch_size,shuffle=False)",
"_____no_output_____"
],
[
"optimizer = AdamW(model.parameters(),\n lr = 2e-5, # args.learning_rate - default is 5e-5, our notebook had 2e-5\n eps = 1e-8 # args.adam_epsilon - default is 1e-8.\n )",
"_____no_output_____"
],
[
"def count_parameters(model):\n return sum(p.numel() for p in model.parameters() if p.requires_grad)\ncount_parameters(model)",
"_____no_output_____"
],
[
"from transformers import get_linear_schedule_with_warmup\n\n# Number of training epochs. The BERT authors recommend between 2 and 4. \n# We chose to run for 4, but we'll see later that this may be over-fitting the\n# training data.\nepochs = 3\n\n# Total number of training steps is [number of batches] x [number of epochs]. \n# (Note that this is not the same as the number of training samples).\ntotal_steps = len(train_dataloader) * epochs\n\n# Create the learning rate scheduler.\nscheduler = get_linear_schedule_with_warmup(optimizer, \n num_warmup_steps = 0, # Default value in run_glue.py\n num_training_steps = total_steps)",
"_____no_output_____"
],
[
"import torch.nn as nn\nloss_criterion = nn.CrossEntropyLoss()",
"_____no_output_____"
],
[
"def flat_accuracy(preds, labels):\n pred_flat = np.argmax(preds, axis=1).flatten()\n labels_flat = labels.flatten()\n return np.sum(pred_flat == labels_flat) / len(labels_flat)",
"_____no_output_____"
],
[
"from sklearn.metrics import f1_score\n\nseed_val = 42\n\nrandom.seed(seed_val)\nnp.random.seed(seed_val)\ntorch.manual_seed(seed_val)\ntorch.cuda.manual_seed_all(seed_val)\n\n# We'll store a number of quantities such as training and validation loss, \n# validation accuracy, and timings.\ntraining_stats = []\n\n# Measure the total training time for the whole run.\ntotal_t0 = time.time()\n\n\n# For each epoch...\nfor epoch_i in range(0, epochs):\n \n # ========================================\n # Training\n # ========================================\n \n # Perform one full pass over the training set.\n\n print(\"\")\n print('======== Epoch {:} / {:} ========'.format(epoch_i + 1, epochs))\n print('Training...')\n \n #tr and val\n# vec_output_tr = []\n# vec_output_val =[]\n\n # Measure how long the training epoch takes.\n t0 = time.time()\n\n # Reset the total loss for this epoch.\n total_train_loss = 0\n\n # Put the model into training mode. Don't be mislead--the call to \n # `train` just changes the *mode*, it doesn't *perform* the training.\n # `dropout` and `batchnorm` layers behave differently during training\n # vs. test (source: https://stackoverflow.com/questions/51433378/what-does-model-train-do-in-pytorch)\n best_f1 = 0\n model.train()\n\n # For each batch of training data...\n for step, batch in tqdm(enumerate(train_dataloader)):\n \n # Unpack this training batch from our dataloader. \n #\n \n # As we unpack the batch, we'll also copy each tensor to the GPU using the \n # `to` method.\n #\n # `batch` contains three pytorch tensors:\n # [0]: input ids \n # [1]: attention masks\n # [2]: labels \n# return img,input_id_cam,input_mask_cam,input_id_flau,input_mask_flau\n\n b_img=batch[0].to(device)\n\n b_input_id_cam = batch[1].to(device)\n b_input_mask_cam = batch[2].to(device)\n b_input_id_flau = batch[3].to(device)\n b_input_mask_flau = batch[4].to(device)\n\n b_labels = batch[5].to(device)\n \n \n model.zero_grad() \n\n \n logits = model(b_img,b_input_id_cam ,b_input_mask_cam,b_input_id_flau,b_input_mask_flau)\n \n #Defining the loss\n loss = loss_criterion(logits, b_labels)\n \n #saving the features_tr\n# vec = vec.detach().cpu().numpy()\n# vec_output_tr.extend(vec)\n \n # Accumulate the training loss over all of the batches so that we can\n # calculate the average loss at the end. 
`loss` is a Tensor containing a\n # single value; the `.item()` function just returns the Python value \n # from the tensor.\n total_train_loss += loss.item()\n\n # Perform a backward pass to calculate the gradients.\n loss.backward()\n\n # Clip the norm of the gradients to 1.0.\n # This is to help prevent the \"exploding gradients\" problem.\n torch.nn.utils.clip_grad_norm_(model.parameters(), 1.0)\n\n # Update parameters and take a step using the computed gradient.\n # The optimizer dictates the \"update rule\"--how the parameters are\n # modified based on their gradients, the learning rate, etc.\n optimizer.step()\n\n # Update the learning rate.\n scheduler.step()\n \n \n \n\n # Calculate the average loss over all of the batches.\n avg_train_loss = total_train_loss / len(train_dataloader) \n \n # Measure how long this epoch took.\n training_time = format_time(time.time() - t0)\n\n print(\"\")\n print(\" Average training loss: {0:.2f} \".format(avg_train_loss))\n print(\" Training epcoh took: {:} \".format(training_time))\n \n # ========================================\n # Validation\n # ========================================\n # After the completion of each training epoch, measure our performance on\n # our validation set.\n\n print(\"\")\n print(\"Running Validation...\")\n\n t0 = time.time()\n\n # Put the model in evaluation mode--the dropout layers behave differently\n # during evaluation.\n model.eval()\n\n # Tracking variables \n total_eval_accuracy = 0\n total_eval_loss = 0\n nb_eval_steps = 0\n predictions=[]\n true_labels=[]\n \n\n # Evaluate data for one epoch\n for batch in tqdm(validation_dataloader):\n \n # Unpack this training batch from our dataloader. \n #\n # As we unpack the batch, we'll also copy each tensor to the GPU using \n # the `to` method.\n #\n # `batch` contains three pytorch tensors:\n # [0]: input ids \n # [1]: attention masks\n # [2]: labels \n b_img=batch[0].to(device)\n\n b_input_id_cam = batch[1].to(device)\n b_input_mask_cam = batch[2].to(device)\n b_input_id_flau = batch[3].to(device)\n b_input_mask_flau = batch[4].to(device)\n\n b_labels = batch[5].to(device)\n \n \n # Tell pytorch not to bother with constructing the compute graph during\n # the forward pass, since this is only needed for backprop (training).\n with torch.no_grad(): \n \n\n # Forward pass, calculate logit predictions.\n # token_type_ids is the same as the \"segment ids\", which \n # differentiates sentence 1 and 2 in 2-sentence tasks.\n # The documentation for this `model` function is here: \n # https://huggingface.co/transformers/v2.2.0/model_doc/bert.html#transformers.BertForSequenceClassification\n # Get the \"logits\" output by the model. 
The \"logits\" are the output\n # values prior to applying an activation function like the softmax.\n logits = model(b_img,b_input_id_cam ,b_input_mask_cam,b_input_id_flau,b_input_mask_flau)\n \n #new\n \n #defining the val loss\n loss = loss_criterion(logits, b_labels)\n \n \n # Accumulate the validation loss.\n total_eval_loss += loss.item()\n\n # Move logits and labels to CPU\n logits = logits.detach().cpu().numpy()\n\n # Move logits and labels to CPU\n predicted_labels=np.argmax(logits,axis=1)\n predictions.extend(predicted_labels)\n label_ids = b_labels.to('cpu').numpy()\n true_labels.extend(label_ids)\n \n #saving the features_tr\n# vec = vec.detach().cpu().numpy()\n# vec_output_val.extend(vec)\n \n\n # Calculate the accuracy for this batch of test sentences, and\n # accumulate it over all batches.\n total_eval_accuracy += flat_accuracy(logits, label_ids)\n \n\n # Report the final accuracy for this validation run.\n avg_val_accuracy = total_eval_accuracy / len(validation_dataloader)\n print(\" Accuracy: {0:.2f}\".format(avg_val_accuracy))\n\n # Calculate the average loss over all of the batches.\n avg_val_loss = total_eval_loss / len(validation_dataloader)\n \n # Measure how long the validation run took.\n validation_time = format_time(time.time() - t0)\n \n print(\" Validation Loss: {0:.2f}\".format(avg_val_loss))\n print(\" Validation took: {:}\".format(validation_time))\n print(\"Validation F1-Score: {}\".format(f1_score(true_labels,predictions,average='macro')))\n curr_f1=f1_score(true_labels,predictions,average='macro')\n if curr_f1 > best_f1:\n best_f1=curr_f1\n torch.save(model.state_dict(), 'best_model.pt')\n# np.save('best_vec_train_model_train.npy',vec_output_tr)\n# np.save('best_vec_val.npy',vec_output_val)\n \n # Record all statistics from this epoch.\n# training_stats.append(\n# {\n# 'epoch': epoch_i + 1,\n# 'Training Loss': avg_train_loss,\n# 'Valid. Loss': avg_val_loss,\n# 'Valid. Accur.': avg_val_accuracy,\n# 'Training Time': training_time,\n# 'Validation Time': validation_time\n# }\n# )\n\nprint(\"\")\nprint(\"Training complete!\")\n\nprint(\"Total training took {:} (h:mm:ss)\".format(format_time(time.time()-total_t0)))",
"_____no_output_____"
],
[
"from sklearn.metrics import f1_score\nprint(\"Validation F1-Score: {}\".format(f1_score(true_labels,predictions,average='macro')))",
"Validation F1-Score: 0.9093939540881769\n"
]
]
] | [
"code",
"markdown",
"code"
] | [
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
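The multi-modal notebook above fuses a SE-ResNeXt image embedding (2048-d, projected down to 768 with a 1x1 Conv1d) with frozen CamemBERT and FlauBERT features by plain concatenation ahead of a single linear classifier. Below is a minimal sketch of just that concatenation-fusion head; the class name and the random stand-in tensors are illustrative assumptions, not the notebook's pretrained encoders, while the dimensions (2048-d image features, two 768-d text vectors, 27 classes) follow the notebook.

```python
# Minimal sketch of a concatenation-fusion head, assuming pre-computed embeddings.
# Random tensors stand in for real encoder outputs; only the projection and the
# classifier are trainable, mirroring the frozen-encoder setup in the notebook.
import torch
import torch.nn as nn

class ConcatFusionHead(nn.Module):
    def __init__(self, img_dim=2048, txt_dim=768, n_classes=27):
        super().__init__()
        # 1x1 convolution used as a learned projection from 2048 -> 768,
        # analogous to the notebook's reduce_dim layer.
        self.reduce_dim = nn.Conv1d(img_dim, txt_dim, kernel_size=1)
        self.out = nn.Linear(txt_dim * 3, n_classes)

    def forward(self, img_emb, cam_emb, flau_emb):
        # img_emb: (B, 2048); cam_emb, flau_emb: (B, 768) each.
        img_emb = self.reduce_dim(img_emb.unsqueeze(-1)).squeeze(-1)  # (B, 768)
        fused = torch.cat([img_emb, cam_emb, flau_emb], dim=1)        # (B, 2304)
        return self.out(fused)                                        # (B, 27)

if __name__ == "__main__":
    head = ConcatFusionHead()
    logits = head(torch.randn(4, 2048), torch.randn(4, 768), torch.randn(4, 768))
    print(logits.shape)  # torch.Size([4, 27])
```

Keeping the heavy encoders frozen outside this head means only the projection and classifier weights receive gradients, which is what makes the fusion model cheap to fine-tune.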
d099a382f3a1036ba077ed301be79a8a3108f0f6 | 292,446 | ipynb | Jupyter Notebook | lectures/l15-optimization-part1.ipynb | davidd-55/cs152fa21 | de0876195d8da74909416aef3dece1848179b777 | [
"CC0-1.0"
] | 1 | 2021-12-28T04:11:41.000Z | 2021-12-28T04:11:41.000Z | lectures/l15-optimization-part1.ipynb | davidd-55/cs152fa21 | de0876195d8da74909416aef3dece1848179b777 | [
"CC0-1.0"
] | null | null | null | lectures/l15-optimization-part1.ipynb | davidd-55/cs152fa21 | de0876195d8da74909416aef3dece1848179b777 | [
"CC0-1.0"
] | 6 | 2021-09-16T17:12:35.000Z | 2021-09-28T22:00:52.000Z | 570.070175 | 53,996 | 0.940416 | [
[
[
"# Optimization\n\nThings to try:\n\n- change the number of samples\n- without and without bias\n- with and without regularization\n- changing the number of layers\n- changing the amount of noise\n- change number of degrees\n- look at parameter values (high) in OLS\n- tarin network for many epochs",
"_____no_output_____"
]
],
[
[
"from fastprogress.fastprogress import progress_bar\n\nimport torch\n\nimport matplotlib.pyplot as plt\nfrom jupyterthemes import jtplot\n\njtplot.style(context=\"talk\")",
"_____no_output_____"
],
[
"def plot_regression_data(model=None, MSE=None, poly_deg=0):\n\n # Plot the noisy scatter points and the \"true\" function\n plt.scatter(x_train, y_train, label=\"Noisy Samples\")\n plt.plot(x_true, y_true, \"--\", label=\"True Function\")\n\n # Plot the model's learned regression function\n if model:\n x = x_true.unsqueeze(-1)\n x = x.pow(torch.arange(poly_deg + 1)) if poly_deg else x\n\n with torch.no_grad():\n yhat = model(x)\n\n plt.plot(x_true, yhat, label=\"Learned Function\")\n\n plt.xlim([min_x, max_x])\n plt.ylim([-5, 5])\n plt.legend()\n if MSE:\n plt.title(f\"MSE = ${MSE}$\")",
"_____no_output_____"
]
],
[
[
"# Create Fake Training Data",
"_____no_output_____"
]
],
[
[
"def fake_y(x, add_noise=False):\n y = 10 * x ** 3 - 5 * x\n return y + torch.randn_like(y) * 0.5 if add_noise else y\n\n\nN = 20\nmin_x, max_x = -1, 1\n\nx_true = torch.linspace(min_x, max_x, 100)\ny_true = fake_y(x_true)\n\nx_train = torch.rand(N) * (max_x - min_x) + min_x\ny_train = fake_y(x_train, add_noise=True)\n\nplot_regression_data()",
"_____no_output_____"
]
],
[
[
"# Train A Simple Linear Model Using Batch GD",
"_____no_output_____"
]
],
[
[
"# Hyperparameters\nlearning_rate = 0.1\nnum_epochs = 100\n\n# Model parameters\nm = torch.randn(1, requires_grad=True)\nb = torch.zeros(1, requires_grad=True)\n\nparams = (b, m)\n\n# Torch utils\ncriterion = torch.nn.MSELoss()\noptimizer = torch.optim.SGD(params, lr=learning_rate)\n\n# Regression\nfor epoch in range(num_epochs):\n # Model\n yhat = m * x_train + b\n\n # Update parameters\n optimizer.zero_grad()\n loss = criterion(yhat, y_train)\n loss.backward()\n optimizer.step()\n\nplot_regression_data(lambda x: m * x + b, MSE=loss.item())",
"_____no_output_____"
]
],
[
[
"# Train Linear Regression Model Using Batch GD",
"_____no_output_____"
]
],
[
[
"# Hyperparameters\nlearning_rate = 0.1\nnum_epochs = 1000\n\n# Model parameters\nw2 = torch.randn(1, requires_grad=True)\nw1 = torch.randn(1, requires_grad=True)\nb = torch.zeros(1, requires_grad=True)\n\nparams = (b, w1, w2)\n\n# Torch utils\ncriterion = torch.nn.MSELoss()\noptimizer = torch.optim.SGD(params, lr=learning_rate)\n\n# Regression\nfor epoch in range(num_epochs):\n # Model\n yhat = b + w1 * x_train + w2 * x_train ** 2\n\n # Update parameters\n optimizer.zero_grad()\n loss = criterion(yhat, y_train)\n loss.backward()\n optimizer.step()\n\nplot_regression_data(lambda x: b + w1 * x + w2 * x ** 2, MSE=loss.item())",
"_____no_output_____"
]
],
[
[
"# Train Complex Linear Regression Model Using Batch GD",
"_____no_output_____"
]
],
[
[
"# Hyperparameters\nlearning_rate = 0.1\nnum_epochs = 1000\n\n# Model parameters\ndegrees = 50 # 3, 4, 16, 32, 64, 128\npowers = torch.arange(degrees + 1)\nx_poly = x_train.unsqueeze(-1).pow(powers)\nparams = torch.randn(degrees + 1, requires_grad=True)\n\n# Torch utils\ncriterion = torch.nn.MSELoss()\noptimizer = torch.optim.SGD([params], lr=learning_rate)\n\n# Regression\nfor epoch in range(num_epochs):\n # Model\n yhat = x_poly @ params\n\n # Update parameters\n optimizer.zero_grad()\n loss = criterion(yhat, y_train)\n loss.backward()\n optimizer.step()\n\nplot_regression_data(lambda x: x @ params, poly_deg=degrees, MSE=loss.item())",
"_____no_output_____"
],
[
"params",
"_____no_output_____"
]
],
[
[
"# Compute Linear Regression Model Using Ordinary Least Squares",
"_____no_output_____"
]
],
[
[
"params = ((x_poly.T @ x_poly).inverse() @ x_poly.T) @ y_train\nmse = torch.nn.functional.mse_loss(x_poly @ params, y_train)\nplot_regression_data(lambda x: x @ params, poly_deg=degrees, MSE=mse)\n# params",
"_____no_output_____"
],
[
"params",
"_____no_output_____"
]
],
[
[
"# Train Neural Network Model Using Batch GD",
"_____no_output_____"
]
],
[
[
"# Hyperparameters\nlearning_rate = 0.01\nnum_epochs = 100000\nregularization = 1e-2\n\n# Model parameters\nmodel = torch.nn.Sequential(\n torch.nn.Linear(1, 100),\n torch.nn.ReLU(),\n torch.nn.Linear(100, 100),\n torch.nn.ReLU(),\n torch.nn.Linear(100, 100),\n torch.nn.ReLU(),\n torch.nn.Linear(100, 1),\n)\n\n# Torch utils\ncriterion = torch.nn.MSELoss()\noptimizer = torch.optim.SGD(\n model.parameters(), lr=learning_rate, weight_decay=regularization\n)\n\n# Training\nfor epoch in progress_bar(range(num_epochs)):\n # Model\n yhat = model(x_train.unsqueeze(-1))\n\n # Update parameters\n optimizer.zero_grad()\n loss = criterion(yhat.squeeze(), y_train)\n loss.backward()\n optimizer.step()\n\nplot_regression_data(model, loss.item())",
"_____no_output_____"
],
[
"for param in model.parameters():\n print(param.mean())",
"tensor(-0.0212, grad_fn=<MeanBackward0>)\ntensor(0.0280, grad_fn=<MeanBackward0>)\ntensor(-0.0008, grad_fn=<MeanBackward0>)\ntensor(-0.0142, grad_fn=<MeanBackward0>)\ntensor(0.0008, grad_fn=<MeanBackward0>)\ntensor(-0.0032, grad_fn=<MeanBackward0>)\ntensor(0.0359, grad_fn=<MeanBackward0>)\ntensor(-0.1043, grad_fn=<MeanBackward0>)\n"
]
]
] | [
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] | [
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
]
] |
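The optimization notebook above fits a degree-50 polynomial both by batch gradient descent and by the ordinary-least-squares normal equations, and suggests looking at how large the OLS coefficients become. One standard way to keep those coefficients bounded is ridge regression, which only adds a λI term to the closed form. The sketch below illustrates the effect under its own assumptions (NumPy rather than PyTorch, synthetic data in the spirit of the notebook's fake_y(), and an arbitrarily chosen λ); it is not part of the original notebook.

```python
# Ridge-regularized closed-form fit: params = (X^T X + lam*I)^{-1} X^T y.
# Synthetic noisy cubic data, roughly mirroring fake_y() in the notebook.
import numpy as np

rng = np.random.default_rng(0)
x = rng.uniform(-1, 1, size=40)
y = 10 * x**3 - 5 * x + rng.normal(scale=0.5, size=x.shape)

degree, lam = 50, 1e-3
X = x[:, None] ** np.arange(degree + 1)          # polynomial design matrix (40, 51)

# Plain OLS via the pseudo-inverse (X^T X is rank-deficient at this degree),
# versus ridge, which makes the system well conditioned.
ols_params   = np.linalg.pinv(X.T @ X) @ X.T @ y
ridge_params = np.linalg.solve(X.T @ X + lam * np.eye(degree + 1), X.T @ y)

print("max |coef|, OLS:  ", np.abs(ols_params).max())
print("max |coef|, ridge:", np.abs(ridge_params).max())
```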
d099b0c1c3a26033d67e28109b39c8ae915fcf69 | 24,547 | ipynb | Jupyter Notebook | Loo Boys/PythonNotebooks/betting.ipynb | ushnishray/Hackathon2021 | 2f9f74033ca6b0b54b44f393c3ef3de9910a5cd9 | [
"MIT"
] | 18 | 2021-07-26T13:45:16.000Z | 2022-01-26T09:15:23.000Z | Loo Boys/PythonNotebooks/betting.ipynb | maniashish3/Hackathon2021 | 96980da7d4aa04826874d961d81d17da6f47b0f5 | [
"MIT"
] | 1 | 2021-07-26T19:33:30.000Z | 2021-07-28T08:32:20.000Z | Loo Boys/PythonNotebooks/betting.ipynb | maniashish3/Hackathon2021 | 96980da7d4aa04826874d961d81d17da6f47b0f5 | [
"MIT"
] | 35 | 2021-07-26T13:10:40.000Z | 2022-03-31T05:23:48.000Z | 32.003911 | 212 | 0.437976 | [
[
[
"## Hybrid Neural Net to solve Regression Problem",
"_____no_output_____"
],
[
"We use a neural net with a quantum layer to predict the second half betting lines given the result of the first half and the opening line. The quantum layer is an 8 qubit layer and the model is from Keras. ",
"_____no_output_____"
]
],
[
[
"import pandas as pd\nimport numpy as np\nimport tensorflow as tf\nfrom sklearn.preprocessing import MinMaxScaler\nfrom sklearn.model_selection import train_test_split\nimport pennylane as qml\nimport warnings\nwarnings.filterwarnings('ignore')\ntf.keras.backend.set_floatx('float64')\n\nimport warnings\nwarnings.filterwarnings('ignore')",
"_____no_output_____"
],
[
"###predict 2nd half line using 1st half total and open ##\n\ndf1 = pd.read_csv(\"nfl_odds.csv\")\ndf1['1H'] = df1['1st'] + df1['2nd']\n\ndf2 = pd.read_csv('bet.csv')\ndf = df1.merge(df2, left_on = 'Team', right_on = 'Tm')\n\ndf = df[['1H','Open', 'TO%','PF','Yds','ML', '2H']]\ndf.head()",
"_____no_output_____"
],
[
"n_qubits = 8\ndev = qml.device(\"default.qubit\", wires=n_qubits)\n\[email protected](dev)\ndef qnode(inputs, weights):\n qml.templates.AngleEmbedding(inputs, wires=range(n_qubits))\n qml.templates.BasicEntanglerLayers(weights, wires=range(n_qubits))\n return [qml.expval(qml.PauliZ(wires=i)) for i in range(n_qubits)]\n\nn_layers = 4\nweight_shapes = {\"weights\": (n_layers, n_qubits)}\nqlayer = qml.qnn.KerasLayer(qnode, weight_shapes, output_dim=n_qubits)",
"_____no_output_____"
],
[
"clayer_1 = tf.keras.layers.Dense(8, activation=\"relu\")\nclayer_2 = tf.keras.layers.Dense(2, activation=\"relu\")\nmodel = tf.keras.models.Sequential([clayer_1, qlayer, clayer_2])",
"_____no_output_____"
],
[
"opt = tf.keras.optimizers.SGD(learning_rate=0.2)\nmodel.compile(opt, loss=\"mae\", metrics=[\"mean_absolute_error\"])",
"_____no_output_____"
],
[
"df = df[df.Open != 'pk']\ndf = df[df['2H'] != 'pk']\ndf['Open'] = df['Open'].astype(float)\ndf['2H'] = df['2H'].astype(float)\nX = df[['1H','Open','TO%','PF','Yds','ML']]\ny = df['2H']\n\nX = np.asarray(X).astype(np.float32)\n\nX_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.15, random_state=0)\n\nscaler = MinMaxScaler(feature_range = (0,1))\nscaler.fit(X_train)\nX_train = scaler.transform(X_train)",
"_____no_output_____"
],
[
"fitting = model.fit(X_train, y_train, epochs=10, batch_size=5, validation_split=0.15, verbose=2)",
"2021-07-27 19:36:13.392982: I tensorflow/compiler/mlir/mlir_graph_optimization_pass.cc:116] None of the MLIR optimization passes are enabled (registered 2)\n"
],
[
"X_test = scaler.transform(X_test)\npreds = model.predict(X_test)",
"_____no_output_____"
],
[
"pred = pd.DataFrame(preds, columns =[ 'prediction1', 'prediction2'])\npred = pred[(pred.prediction1 > 0) & (pred.prediction1 < 30)]\ny_test = y_test.reset_index()",
"_____no_output_____"
],
[
"y_test = y_test[y_test['2H'] > 6]\n\ncompare = pd.concat([pred, y_test], axis=1)\ncompare = compare.drop('index', axis=1)",
"_____no_output_____"
],
[
"compare.dropna()",
"_____no_output_____"
]
],
[
[
"## Classical NN (Benchmarking)",
"_____no_output_____"
],
[
"The MAE is twice as large for the purely classical NN. The quantum layer is helping the solution converge more quickly! (As an aside, the quantum NN takes alot longer to run)",
"_____no_output_____"
]
],
[
[
"clayer_1 = tf.keras.layers.Dense(8, activation=\"relu\")\nclayer_2 = tf.keras.layers.Dense(2, activation=\"relu\")\nmodel = tf.keras.models.Sequential([clayer_1, clayer_2])",
"_____no_output_____"
],
[
"opt = tf.keras.optimizers.SGD(learning_rate=0.2)\nmodel.compile(opt, loss=\"mae\", metrics=[\"mean_absolute_error\"])",
"_____no_output_____"
],
[
"df = df[df.Open != 'pk']\ndf = df[df['2H'] != 'pk']\ndf['Open'] = df['Open'].astype(float)\ndf['2H'] = df['2H'].astype(float)\nX = df[['1H','Open','TO%','PF','Yds','ML']]\ny = df['2H']\n\nX = np.asarray(X).astype(np.float32)\n\nX_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.15, random_state=0)\n\nscaler = MinMaxScaler(feature_range = (0,1))\nscaler.fit(X_train)\nX_train = scaler.transform(X_train)",
"_____no_output_____"
],
[
"fitting = model.fit(X_train, y_train, epochs=15, batch_size=10, validation_split=0.15, verbose=2)",
"Epoch 1/15\n37/37 - 1s - loss: 10.3219 - mean_absolute_error: 10.3219 - val_loss: 9.8135 - val_mean_absolute_error: 9.8135\nEpoch 2/15\n37/37 - 0s - loss: 10.2575 - mean_absolute_error: 10.2575 - val_loss: 9.8118 - val_mean_absolute_error: 9.8118\nEpoch 3/15\n37/37 - 0s - loss: 10.2742 - mean_absolute_error: 10.2742 - val_loss: 9.8250 - val_mean_absolute_error: 9.8250\nEpoch 4/15\n37/37 - 0s - loss: 10.2460 - mean_absolute_error: 10.2460 - val_loss: 10.3656 - val_mean_absolute_error: 10.3656\nEpoch 5/15\n37/37 - 0s - loss: 10.1914 - mean_absolute_error: 10.1914 - val_loss: 9.8487 - val_mean_absolute_error: 9.8487\nEpoch 6/15\n37/37 - 0s - loss: 10.2714 - mean_absolute_error: 10.2714 - val_loss: 10.2363 - val_mean_absolute_error: 10.2363\nEpoch 7/15\n37/37 - 0s - loss: 10.3317 - mean_absolute_error: 10.3317 - val_loss: 10.0592 - val_mean_absolute_error: 10.0592\nEpoch 8/15\n37/37 - 0s - loss: 10.2152 - mean_absolute_error: 10.2152 - val_loss: 9.8159 - val_mean_absolute_error: 9.8159\nEpoch 9/15\n37/37 - 0s - loss: 10.2130 - mean_absolute_error: 10.2130 - val_loss: 9.8297 - val_mean_absolute_error: 9.8297\nEpoch 10/15\n37/37 - 0s - loss: 10.2410 - mean_absolute_error: 10.2410 - val_loss: 9.8285 - val_mean_absolute_error: 9.8285\nEpoch 11/15\n37/37 - 0s - loss: 10.2607 - mean_absolute_error: 10.2607 - val_loss: 9.8165 - val_mean_absolute_error: 9.8165\nEpoch 12/15\n37/37 - 0s - loss: 10.2595 - mean_absolute_error: 10.2595 - val_loss: 10.1155 - val_mean_absolute_error: 10.1155\nEpoch 13/15\n37/37 - 0s - loss: 10.2000 - mean_absolute_error: 10.2000 - val_loss: 9.9506 - val_mean_absolute_error: 9.9506\nEpoch 14/15\n37/37 - 0s - loss: 10.2299 - mean_absolute_error: 10.2299 - val_loss: 10.1072 - val_mean_absolute_error: 10.1072\nEpoch 15/15\n37/37 - 0s - loss: 10.1927 - mean_absolute_error: 10.1927 - val_loss: 9.8151 - val_mean_absolute_error: 9.8151\n"
]
]
] | [
"markdown",
"code",
"markdown",
"code"
] | [
[
"markdown",
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code",
"code",
"code"
]
] |
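The betting notebook above compares a hybrid quantum-classical network against a purely classical baseline, but only training and validation losses are printed. Below is a hedged sketch of how the two fitted Keras models might be scored on the held-out split with a common metric; the names `model_hybrid` and `model_classical` are hypothetical (the notebook reuses the single name `model` for both), and since each network ends in a two-unit Dense layer, only the first output column is scored here.

```python
# Sketch: evaluate two already-fitted Keras models on the held-out test split
# with mean absolute error. Assumes the fitted `scaler`, `X_test`, `y_test`,
# and the two (hypothetically named) models from the notebook are in scope.
import numpy as np
from sklearn.metrics import mean_absolute_error

def test_mae(model, X_test, y_test, scaler):
    X_scaled = scaler.transform(X_test)
    preds = model.predict(X_scaled)
    # Each model outputs two values per row; score the first column only.
    return mean_absolute_error(y_test, preds[:, 0])

# Example usage (hypothetical variable names):
# print("hybrid MAE:   ", test_mae(model_hybrid, X_test, y_test, scaler))
# print("classical MAE:", test_mae(model_classical, X_test, y_test, scaler))
```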
d099b4871dc1923675e358f34a437fa9c61f32c4 | 9,987 | ipynb | Jupyter Notebook | Python Absolute Beginner/Module_3_5_Absolute_Beginner.ipynb | sdavi187/pythonteachingcode | 98cdf3fddaf152854d91187408c7d32a6d36db0e | [
"MIT"
] | null | null | null | Python Absolute Beginner/Module_3_5_Absolute_Beginner.ipynb | sdavi187/pythonteachingcode | 98cdf3fddaf152854d91187408c7d32a6d36db0e | [
"MIT"
] | null | null | null | Python Absolute Beginner/Module_3_5_Absolute_Beginner.ipynb | sdavi187/pythonteachingcode | 98cdf3fddaf152854d91187408c7d32a6d36db0e | [
"MIT"
] | null | null | null | 25.607692 | 635 | 0.502754 | [
[
[
"# 1-5.2 Python Intro \n## conditionals, type, and mathematics extended\n- conditionals: `elif` \n- casting \n- **basic math operators** \n\n-----\n\n><font size=\"5\" color=\"#00A0B2\" face=\"verdana\"> <B>Student will be able to</B></font>\n- code more than two choices using `elif` \n- gather numeric input using type casting \n- **perform subtraction, multiplication and division operations in code** \n",
"_____no_output_____"
],
[
"# \n<font size=\"6\" color=\"#00A0B2\" face=\"verdana\"> <B>Concepts</B></font>\n## Math basic operators\n### `+` addition\n### `-` subtraction\n### `*` multiplication\n### `/` division \n[]( http://edxinteractivepage.blob.core.windows.net/edxpages/f7cff1a7-5601-48a1-95a6-fd1fdfabd20e.html?details=[{\"src\":\"http://jupyternootbookwams.streaming.mediaservices.windows.net/5bc97f7e-3015-4178-ac20-371a5302def1/Unit1_Section5.2-Math-operators.ism/manifest\",\"type\":\"application/vnd.ms-sstr+xml\"}],[{\"src\":\"http://jupyternootbookwams.streaming.mediaservices.windows.net/5bc97f7e-3015-4178-ac20-371a5302def1/Unit1_Section5.2-Math-operators.vtt\",\"srclang\":\"en\",\"kind\":\"subtitles\",\"label\":\"english\"}])",
"_____no_output_____"
],
[
"# \n<font size=\"6\" color=\"#00A0B2\" face=\"verdana\"> <B>Examples</B></font>",
"_____no_output_____"
]
],
[
[
"# [ ] review and run example\nprint(\"3 + 5 =\",3 + 5)\nprint(\"3 + 5 - 9 =\", 3 + 5 - 9)\nprint(\"48/9 =\", 48/9)\nprint(\"5*5 =\", 5*5)\nprint(\"(14 - 8)*(19/4) =\", (14 - 8)*(19/4))",
"3 + 5 = 8\n3 + 5 - 9 = -1\n48/9 = 5.333333333333333\n5*5 = 25\n(14 - 8)*(19/4) = 28.5\n"
],
[
"# [ ] review and run example - 'million_maker'\ndef million_maker():\n make_big = input(\"enter a non-decimal number you wish were bigger: \")\n return int(make_big)*1000000\n\nprint(\"Now you have\", million_maker())\n",
"enter a non-decimal number you wish were bigger: 5\nNow you have 5000000\n"
]
],
[
[
"# \n<font size=\"6\" color=\"#B24C00\" face=\"verdana\"> <B>Task 1</B></font>\n## use math operators to solve the set of tasks below",
"_____no_output_____"
]
],
[
[
"# [ ] print the result of subtracting 15 from 43\n\nprint (43 - 15)",
"28\n"
],
[
"# [ ] print the result of multiplying 15 and 43\n\nprint (15*43)",
"645\n"
],
[
"# [ ] print the result of dividing 156 by 12\nprint (156/12)\n",
"13.0\n"
],
[
"# [ ] print the result of dividing 21 by 0.5\n\nprint (21/0.5)",
"42.0\n"
],
[
"# [ ] print the result of adding 111 plus 84 and then subtracting 45\n\nprint (111+84-45)\n",
"150\n"
],
[
"# [ ] print the result of adding 21 and 4 and then multiplying that sum by 4\n\nprint ((21+4)*4)",
"100\n"
]
],
[
[
"# \n<font size=\"6\" color=\"#B24C00\" face=\"verdana\"> <B>Task 2</B></font>\n## Program: Multiplying Calculator Function\n- define function **`multiply()`**, and within the function:\n - gets user input() of 2 *strings* made of whole numbers\n - cast the input to **`int()`**\n - multiply the integers and **return** the equation with result as a **`str()`**\n - **return** example \n ```python\n 9 * 13 = 117\n ```",
"_____no_output_____"
]
],
[
[
"# [ ] create and test multiply() function\ndef multiply():\n num_1 = input (\"Enter a whole number:\")\n num_2 = input (\"Enter a second whole number:\")\n return (str(int(num_1)*int(num_2)))\n \n\nprint(multiply() + \" is a string\")",
"Enter a whole number:23\nEnter a second whole number:34\n782 is a string\n"
]
],
[
[
"# \n<font size=\"6\" color=\"#B24C00\" face=\"verdana\"> <B>Task 3</B></font>\n## Project: Improved Multiplying Calculator Function\n### putting together conditionals, input casting and math\n- #### update the multiply() function to multiply or divide \n - single parameter is **`operator`** with arguments of **`*`** or **`/`** operator\n - default operator is \"*\" (multiply)\n - **return** the result of multiplication or division\n - if operator other than **`\"*\"`** or **`\"/\"`** then **` return \"Invalid Operator\"`**",
"_____no_output_____"
]
],
[
[
"# [ ] create improved multiply() function and test with /, no argument, and an invalid operator ($)\ndef multiply(operator = \"*\"):\n num_1 = input (\"Enter a whole number:\")\n num_2 = input (\"Enter a second whole number:\")\n \n if operator == \"*\":\n return (str(int(num_1)*int(num_2)))\n elif operator ==\"/\":\n return (str(int(num_1)/int(num_2)))\n else:\n print (\"Corruption occurred\")\n \n\nops = input(\"Would you like to multiply (m) or divide (d)?\" )\n\nif ops == \"m\":\n print (multiply (\"*\"))\nelif ops == \"d\":\n print (multiply (\"/\"))\nelse:\n print (\"Invalid operator\")\n\n\n",
"Would you like to multiply (m) or divide (d)?d\nEnter a whole number:3\nEnter a second whole number:1\n3.0\n"
]
],
[
[
"# \n<font size=\"6\" color=\"#B24C00\" face=\"verdana\"> <B>Task 4</B></font>\n## Fix the Errors",
"_____no_output_____"
]
],
[
[
"# Review, run, fix \nstudent_name = input(\"enter name: \").capitalize()\nif student_name.startswith(\"F\"):\n print(student_name,\"Congratulations, names starting with 'F' get to go first today!\")\nelif student_name.startswith(\"G\"):\n print(student_name,\"Congratulations, names starting with 'G' get to go second today!\")\nelse:\n print(student_name, \"please wait for students with names staring with 'F' and 'G' to go first today.\")\n\n",
"enter name: frank\nFrank Congratulations, names starting with 'F' get to go first today!\n"
]
],
[
[
"[Terms of use](http://go.microsoft.com/fwlink/?LinkID=206977) [Privacy & cookies](https://go.microsoft.com/fwlink/?LinkId=521839) © 2017 Microsoft",
"_____no_output_____"
]
]
] | [
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown"
] | [
[
"markdown",
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
]
] |
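Task 3 in the beginner notebook above asks for a multiply() function that multiplies or divides depending on an operator parameter and returns "Invalid Operator" for anything else. One possible solution sketch consistent with that task text (and with Task 2's request to return the whole equation as a string) is shown below; it is an illustration, not an official answer key.

```python
# One possible solution sketch for Task 3: multiply or divide based on an
# operator argument, returning "Invalid Operator" for anything else.
def multiply(operator="*"):
    num_1 = int(input("Enter a whole number: "))
    num_2 = int(input("Enter a second whole number: "))
    if operator == "*":
        return str(num_1) + " * " + str(num_2) + " = " + str(num_1 * num_2)
    elif operator == "/":
        return str(num_1) + " / " + str(num_2) + " = " + str(num_1 / num_2)
    else:
        return "Invalid Operator"

print(multiply("/"))   # division
print(multiply())      # defaults to multiplication
print(multiply("$"))   # Invalid Operator
```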
d099d13e5f0aa2777a6dd5affe9457ddcb43fbfa | 10,856 | ipynb | Jupyter Notebook | 0-newbooks/faceswap-GAN/FaceSwap_GAN_v2_test_img.ipynb | gopala-kr/ds-notebooks | bc35430ecdd851f2ceab8f2437eec4d77cb59423 | [
"MIT"
] | 1 | 2019-05-10T09:16:23.000Z | 2019-05-10T09:16:23.000Z | 0-newbooks/faceswap-GAN/FaceSwap_GAN_v2_test_img.ipynb | gopala-kr/ds-notebooks | bc35430ecdd851f2ceab8f2437eec4d77cb59423 | [
"MIT"
] | null | null | null | 0-newbooks/faceswap-GAN/FaceSwap_GAN_v2_test_img.ipynb | gopala-kr/ds-notebooks | bc35430ecdd851f2ceab8f2437eec4d77cb59423 | [
"MIT"
] | 1 | 2019-05-10T09:17:28.000Z | 2019-05-10T09:17:28.000Z | 27.004975 | 121 | 0.554256 | [
[
[
"<a id='1'></a>\n# 1. Import packages",
"_____no_output_____"
]
],
[
[
"from keras.models import Sequential, Model\nfrom keras.layers import *\nfrom keras.layers.advanced_activations import LeakyReLU\nfrom keras.activations import relu\nfrom keras.initializers import RandomNormal\nfrom keras.applications import *\nimport keras.backend as K\nfrom tensorflow.contrib.distributions import Beta\nimport tensorflow as tf\nfrom keras.optimizers import Adam",
"Using TensorFlow backend.\n"
],
[
"from image_augmentation import random_transform\nfrom image_augmentation import random_warp\nfrom utils import get_image_paths, load_images, stack_images\nfrom pixel_shuffler import PixelShuffler",
"_____no_output_____"
],
[
"import time\nimport numpy as np\nfrom PIL import Image\nimport cv2\nimport glob\nfrom random import randint, shuffle\nfrom IPython.display import clear_output\nfrom IPython.display import display\nimport matplotlib.pyplot as plt\n%matplotlib inline",
"_____no_output_____"
]
],
[
[
"<a id='4'></a>\n# 4. Config\n\nmixup paper: https://arxiv.org/abs/1710.09412\n\nDefault training data directories: `./faceA/` and `./faceB/`",
"_____no_output_____"
]
],
[
[
"K.set_learning_phase(1)",
"_____no_output_____"
],
[
"channel_axis=-1\nchannel_first = False",
"_____no_output_____"
],
[
"IMAGE_SHAPE = (64, 64, 3)\nnc_in = 3 # number of input channels of generators\nnc_D_inp = 6 # number of input channels of discriminators\n\nuse_perceptual_loss = False\nuse_lsgan = True\nuse_instancenorm = False\nuse_mixup = True\nmixup_alpha = 0.2 # 0.2\n\nbatchSize = 32\nlrD = 1e-4 # Discriminator learning rate\nlrG = 1e-4 # Generator learning rate\n\n# Path of training images\nimg_dirA = './faceA/*.*'\nimg_dirB = './faceB/*.*'",
"_____no_output_____"
]
],
[
[
"<a id='5'></a>\n# 5. Define models",
"_____no_output_____"
]
],
[
[
"from model_GAN_v2 import *",
"_____no_output_____"
],
[
"encoder = Encoder()\ndecoder_A = Decoder_ps()\ndecoder_B = Decoder_ps()\n\nx = Input(shape=IMAGE_SHAPE)\n\nnetGA = Model(x, decoder_A(encoder(x)))\nnetGB = Model(x, decoder_B(encoder(x)))",
"_____no_output_____"
],
[
"netDA = Discriminator(nc_D_inp)\nnetDB = Discriminator(nc_D_inp)",
"_____no_output_____"
]
],
[
[
"<a id='6'></a>\n# 6. Load Models",
"_____no_output_____"
]
],
[
[
"try:\n encoder.load_weights(\"models/encoder.h5\")\n decoder_A.load_weights(\"models/decoder_A.h5\")\n decoder_B.load_weights(\"models/decoder_B.h5\")\n #netDA.load_weights(\"models/netDA.h5\") \n #netDB.load_weights(\"models/netDB.h5\") \n print (\"model loaded.\")\nexcept:\n print (\"Weights file not found.\")\n pass",
"model loaded.\n"
]
],
[
[
"<a id='7'></a>\n# 7. Define Inputs/Outputs Variables\n\n distorted_A: A (batch_size, 64, 64, 3) tensor, input of generator_A (netGA).\n distorted_B: A (batch_size, 64, 64, 3) tensor, input of generator_B (netGB).\n fake_A: (batch_size, 64, 64, 3) tensor, output of generator_A (netGA).\n fake_B: (batch_size, 64, 64, 3) tensor, output of generator_B (netGB).\n mask_A: (batch_size, 64, 64, 1) tensor, mask output of generator_A (netGA).\n mask_B: (batch_size, 64, 64, 1) tensor, mask output of generator_B (netGB).\n path_A: A function that takes distorted_A as input and outputs fake_A.\n path_B: A function that takes distorted_B as input and outputs fake_B.\n path_mask_A: A function that takes distorted_A as input and outputs mask_A.\n path_mask_B: A function that takes distorted_B as input and outputs mask_B.\n path_abgr_A: A function that takes distorted_A as input and outputs concat([mask_A, fake_A]).\n path_abgr_B: A function that takes distorted_B as input and outputs concat([mask_B, fake_B]).\n real_A: A (batch_size, 64, 64, 3) tensor, target images for generator_A given input distorted_A.\n real_B: A (batch_size, 64, 64, 3) tensor, target images for generator_B given input distorted_B.",
"_____no_output_____"
]
],
[
[
"def cycle_variables(netG):\n distorted_input = netG.inputs[0]\n fake_output = netG.outputs[0]\n alpha = Lambda(lambda x: x[:,:,:, :1])(fake_output)\n rgb = Lambda(lambda x: x[:,:,:, 1:])(fake_output)\n \n masked_fake_output = alpha * rgb + (1-alpha) * distorted_input \n\n fn_generate = K.function([distorted_input], [masked_fake_output])\n fn_mask = K.function([distorted_input], [concatenate([alpha, alpha, alpha])])\n fn_abgr = K.function([distorted_input], [concatenate([alpha, rgb])])\n return distorted_input, fake_output, alpha, fn_generate, fn_mask, fn_abgr",
"_____no_output_____"
],
[
"distorted_A, fake_A, mask_A, path_A, path_mask_A, path_abgr_A = cycle_variables(netGA)\ndistorted_B, fake_B, mask_B, path_B, path_mask_B, path_abgr_B = cycle_variables(netGB)\nreal_A = Input(shape=IMAGE_SHAPE)\nreal_B = Input(shape=IMAGE_SHAPE)",
"_____no_output_____"
]
],
[
[
"<a id='11'></a>\n# 11. Helper Function: face_swap()\nThis function is provided for those who don't have enough VRAM to run dlib's CNN and GAN model at the same time.\n\n INPUTS:\n img: A RGB face image of any size.\n path_func: a function that is either path_abgr_A or path_abgr_B.\n OUPUTS:\n result_img: A RGB swapped face image after masking.\n result_mask: A single channel uint8 mask image.",
"_____no_output_____"
]
],
[
[
"def swap_face(img, path_func):\n input_size = img.shape\n img = cv2.cvtColor(img, cv2.COLOR_RGB2BGR) # generator expects BGR input \n ae_input = cv2.resize(img, (64,64))/255. * 2 - 1 \n \n result = np.squeeze(np.array([path_func([[ae_input]])]))\n result_a = result[:,:,0] * 255\n result_bgr = np.clip( (result[:,:,1:] + 1) * 255 / 2, 0, 255 )\n result_a = np.expand_dims(result_a, axis=2)\n result = (result_a/255 * result_bgr + (1 - result_a/255) * ((ae_input + 1) * 255 / 2)).astype('uint8')\n \n #result = np.clip( (result + 1) * 255 / 2, 0, 255 ).astype('uint8') \n result = cv2.cvtColor(result, cv2.COLOR_BGR2RGB) \n result = cv2.resize(result, (input_size[1],input_size[0]))\n result_a = np.expand_dims(cv2.resize(result_a, (input_size[1],input_size[0])), axis=2)\n return result, result_a",
"_____no_output_____"
],
[
"whom2whom = \"BtoA\" # default trainsforming faceB to faceA\n\nif whom2whom is \"AtoB\":\n path_func = path_abgr_B\nelif whom2whom is \"BtoA\":\n path_func = path_abgr_A\nelse:\n print (\"whom2whom should be either AtoB or BtoA\")",
"_____no_output_____"
],
[
"input_img = plt.imread(\"./IMAGE_FILENAME.jpg\")",
"_____no_output_____"
],
[
"plt.imshow(input_img)",
"_____no_output_____"
],
[
"result_img, result_mask = swap_face(input_img, path_func)",
"_____no_output_____"
],
[
"plt.imshow(result_img)",
"_____no_output_____"
],
[
"plt.imshow(result_mask[:, :, 0]) # cmap='gray'",
"_____no_output_____"
]
]
] | [
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] | [
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
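The swap_face() helper in the notebook above composites the generator's RGB output onto the input face using the predicted alpha mask, i.e. result = alpha * rgb + (1 - alpha) * input. The sketch below isolates just that per-pixel blending step with NumPy; the random arrays merely stand in for real generator outputs and an aligned input face, so it is only a shape-and-arithmetic illustration.

```python
# Minimal sketch of the alpha-mask compositing used in swap_face(), with random
# arrays standing in for the generator's outputs and the aligned input face.
import numpy as np

h = w = 64
input_face = np.random.randint(0, 256, (h, w, 3)).astype(np.float32)  # original pixels
fake_rgb   = np.random.randint(0, 256, (h, w, 3)).astype(np.float32)  # generator RGB
alpha      = np.random.rand(h, w, 1).astype(np.float32)               # mask in [0, 1]

# Per-pixel blend: keep generated pixels where the mask is high and original
# pixels where it is low -- the same formula the notebook applies.
blended = (alpha * fake_rgb + (1.0 - alpha) * input_face).astype(np.uint8)
print(blended.shape, blended.dtype)
```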
d099ec5708875e294d3b64b8bbb26f320e6b358b | 21,629 | ipynb | Jupyter Notebook | TSO-imaging-sims/datalabs-sim/MIRI_im_tso_datalabs.ipynb | STScI-MIRI/TSO-MIRI-simulations | 5f02243ad2669535423121ee8ddbf7452cb42e1c | [
"BSD-3-Clause"
] | null | null | null | TSO-imaging-sims/datalabs-sim/MIRI_im_tso_datalabs.ipynb | STScI-MIRI/TSO-MIRI-simulations | 5f02243ad2669535423121ee8ddbf7452cb42e1c | [
"BSD-3-Clause"
] | 1 | 2021-11-08T22:09:50.000Z | 2021-11-09T12:24:50.000Z | TSO-imaging-sims/datalabs-sim/MIRI_im_tso_datalabs.ipynb | STScI-MIRI/TSO-MIRI-simulations | 5f02243ad2669535423121ee8ddbf7452cb42e1c | [
"BSD-3-Clause"
] | 1 | 2021-11-09T02:42:55.000Z | 2021-11-09T02:42:55.000Z | 69.323718 | 2,069 | 0.662999 | [
[
[
"%load_ext autoreload\n%autoreload",
"_____no_output_____"
],
[
"import numpy as np\nimport matplotlib.pyplot as plt\nimport os\nimport glob\nfrom mirisim.config_parser import SimulatorConfig\nfrom mirisim import MiriSimulation\nimport tso_img_datalabs_sim\nfrom tso_img_datalabs_sim import wasp103_scene, wasp103_sim_config\n\nfrom importlib import reload",
"_____no_output_____"
]
],
[
[
"In this notebook I'm going to generate simulated MIRI time series imaging data, to provide as test set for ESA Datalabs. To install Mirisim, see the [the public release webpage](http://miri.ster.kuleuven.be/bin/view/Public/MIRISim_Public). The target for the mock observations is WASP-103, an exoplanet host star with the following properties from [the exoplanet encyclopaedia](http://exoplanet.eu/catalog/wasp-103_b/):\n\n* spectral type F8V\n* T_bb = 6110 K\n* V = 12.0, K = 10.7\n\nK magnitude of 10.7 corresponds to a flux of 32.5 mJy or 32.5e3 microJy.\n\nUsing the ETC, I calculated the following number of groups for a high-SNR but unsaturated image:\n* FULL array: NGROUPS = 5\n* SUB64 subarray: NGROUPS = 60\n\nWe want to simulate a medium length exposure in both FULL and SUB64 subarras. In total that's 2 simulations.\n\n\n| Sim no | Array | NGroups | NInt | NExp | Exp time |\n| -------|---------| ---------|--------|--------|----------|\n|1 |FULL | 5 | 200 | 1 | 0.77 hr |\n|2 |SUB64 | 60 | 600 | 1 | 0.85 hr |\n\n### Steps in setting up the simulation\n\nThis notebook will go through the following steps:\n\n* Create the scene\n* Set up the simulation\n* Run the simulation\n\nEach step has its own function. Steps 1 and 2 will each write out a .ini file, which will be used as input for the final step.",
"_____no_output_____"
]
],
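As a quick cross-check of the exposure times quoted in the table above, the total time per simulation is approximately NGroups × NInt × frame time. The frame times used below (about 2.775 s for the FULL array and 0.085 s for the SUB64 subarray in FAST readout) are assumed values taken from the MIRI documentation rather than from this notebook, so treat this only as an order-of-magnitude sanity check.

```python
# Rough cross-check of the exposure times in the table above.
# Frame times are assumed values (MIRI imager, FAST readout): FULL ~2.775 s, SUB64 ~0.085 s.
frame_time = {"FULL": 2.775, "SUB64": 0.085}     # seconds per frame (assumed)
setups = [("FULL", 5, 200), ("SUB64", 60, 600)]  # (array, NGroups, NInt) from the table

for array, ngroups, nints in setups:
    exp_time = ngroups * nints * frame_time[array]   # total exposure time in seconds
    print(f"{array}: {exp_time:.0f} s ~ {exp_time / 3600.0:.2f} hr")
# Expected output: FULL ~0.77 hr, SUB64 ~0.85 hr, matching the table.
```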
[
[
"arr = ['FULL', 'SUB64']\nngrp = [5, 60]\n#nints = [200, 600]\nnints = [1, 1]",
"_____no_output_____"
]
],
[
[
"## Step 1: Creating the input scene (WASP-103)\n\nHere we'll create the input scene for the simulations using the function wasp103_scene(). Arguments:\n\n* scene_file: the filename for the .ini file\n* write_cube: write the scene image out to a FITS file (optional; default=False)\n\nThe function returns a mirisim.skysim.scenes.CompositeSkyScene object.\n",
"_____no_output_____"
]
],
[
[
"scene_ini = wasp103_scene(scene_file='wasp103_scene.ini', write_cube=False)",
"2021-02-24 14:04:56,692 - INFO - Initializing Point\n2021-02-24 14:04:56,693 - INFO - Initializing Background\n"
],
[
"print(scene_ini)",
"wasp103_scene.ini\n"
]
],
[
[
"## Step 2: Configuring the simulation\n\nNow I'll set up the simulations and prepare to run them. I'll set it up to loop through the 2 simulations. For this I wrote the function wasp103_sim_config. Check the docstring for descriptions and default values of the arguments. \n\nThe function will write out another .ini file containing the simulation configuration, and it returns the output filename for further use.",
"_____no_output_____"
]
],
[
[
"#reload(tso_img_sims_setup)\n#from tso_img_sims_setup import wasp103_sim_config\n\nfor (a, g, i) in zip(arr, ngrp, nints):\n sim_ini = wasp103_sim_config(mode='imaging', arr=a, ngrp=g, nint=i, nexp=1, filt='F770W', \n scene_file=scene_ini, out=True)\n print(sim_ini)",
"Found scene file wasp103_scene.ini\nwasp103_FULL_5G1I1E_simconfig.ini exists, overwrite (y/[n])?y\nwasp103_FULL_5G1I1E_simconfig.ini\nFound scene file wasp103_scene.ini\nwasp103_SUB64_60G1I1E_simconfig.ini exists, overwrite (y/[n])?y\nwasp103_SUB64_60G1I1E_simconfig.ini\n"
]
],
[
[
"### Step 3: Run the simulation\n\nIn the following step we'll run the simulations for the 6 different cases. For each run, we need 3 input files: the scene, the simulation configuration, and the simulator setup file. The first and last of these remain the same for each run, and we loop through the list of 6 simulation config files.\n\nAfter the simulation has run, the code renames the output directory to include the simulation settings to the directory.\n",
"_____no_output_____"
]
],
[
[
"cfg_files = glob.glob('*_simconfig.ini')\nprint(cfg_files)\n\n",
"['wasp103_FULL_5G1I1E_simconfig.ini', 'wasp103_SUB64_60G1I1E_simconfig.ini']\n"
],
[
"# configure the simulator engine - this requires no editing from the default\nsimulator_config = SimulatorConfig.from_default()\n\nfor f in cfg_files[:1]:\n tmp = f.split('.')\n fcomps = tmp[0].split('_')\n sim = MiriSimulation.from_configfiles(f)\n sim.run()\n outdir = sorted(glob.glob('*_*_mirisim'), key=os.path.getmtime )[-1]\n new_outdir = 'wasp103_imtso_{0}_{1}_{2}'.format(fcomps[1], fcomps[2], outdir)\n os.rename(outdir, new_outdir)\n print(outdir, new_outdir)\n",
"2021-02-24 14:10:04,456 - INFO - Using simulation configuration: wasp103_FULL_5G1I1E_simconfig.ini\n2021-02-24 14:10:04,458 - INFO - Using scene configuration: wasp103_scene.ini\n2021-02-24 14:10:04,460 - INFO - MIRISim version: 2.3.0\n2021-02-24 14:10:04,461 - INFO - MIRI Simulation started.\n2021-02-24 14:10:04,463 - INFO - Output will be saved to: 20210224_141004_mirisim\n2021-02-24 14:10:04,464 - INFO - Storing configs in output directory.\n2021-02-24 14:10:04,467 - INFO - Storing dither pattern in output directory.\n2021-02-24 14:10:04,468 - INFO - Using $CDP_DIR for location of CDP files: /Users/kendrew//CDP_2.3\n2021-02-24 14:10:04,469 - INFO - Setting up simulated Observation, with following settings:\n2021-02-24 14:10:04,470 - INFO - Configuration Path: IMA_FULL\n2021-02-24 14:10:04,471 - INFO - Primary optical path: IMA\n2021-02-24 14:10:04,472 - INFO - IMA Filter: F770W\n2021-02-24 14:10:04,473 - INFO - IMA Subarray: FULL\n2021-02-24 14:10:04,474 - INFO - IMA detector readout mode: FAST\n2021-02-24 14:10:04,475 - INFO - IMA detector # exposures: 1\n2021-02-24 14:10:04,476 - INFO - IMA detector # integrations: 1\n2021-02-24 14:10:04,477 - INFO - IMA detector # frames: 5\n2021-02-24 14:10:04,478 - INFO - Parsing: Background\n2021-02-24 14:10:04,479 - INFO - Initializing Background\n2021-02-24 14:10:04,480 - INFO - Parsing: point_1\n2021-02-24 14:10:04,481 - INFO - Initializing Point\n2021-02-24 14:10:04,481 - INFO - Simulating a single pointing.\n2021-02-24 14:10:04,482 - WARNING - Matching against local CDP cache only.\n2021-02-24 14:10:04,483 - ERROR - The criteria given (DISTORTION, detector=MIRIMAGE) did not match any CDP files.\n2021-02-24 14:10:04,484 - ERROR - No data model could be retrieved.\n"
]
],
[
[
"### Step 3: Minor housekeeping to make the sim pipeline-ready\n\nTo make the MIRISim data ready for the TSO-specific pipeline, we have to make a couple of small changes to the data:\n\n* add the TSOVISIT = TRUE to the primary header\n* make sure the ",
"_____no_output_____"
]
]
] | [
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown"
] | [
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
]
] |
d099ee47908fe7a94dd1100caaa854284af39f44 | 5,168 | ipynb | Jupyter Notebook | examples/nb_py_melodia_pir_clustering.ipynb | rwmontalvao/Melodia | ff0a21637b976fd89853504a59c86db6e127878f | [
"Apache-2.0"
] | 2 | 2022-03-04T17:03:30.000Z | 2022-03-09T07:21:09.000Z | examples/nb_py_melodia_pir_clustering.ipynb | rwmontalvao/Melodia | ff0a21637b976fd89853504a59c86db6e127878f | [
"Apache-2.0"
] | null | null | null | examples/nb_py_melodia_pir_clustering.ipynb | rwmontalvao/Melodia | ff0a21637b976fd89853504a59c86db6e127878f | [
"Apache-2.0"
] | null | null | null | 28.711111 | 844 | 0.571594 | [
[
[
"# Melodia: A Python Library for Protein Structure and Dynamics Analysis\n\n## Structure Similarity Analysis",
"_____no_output_____"
]
],
[
[
"import dill\nimport warnings\n\nimport melodia as mel\nimport seaborn as sns\n\nfrom os import path\n\nfrom Bio.PDB.PDBExceptions import PDBConstructionWarning",
"_____no_output_____"
],
[
"warnings.filterwarnings(\"ignore\", category=PDBConstructionWarning)",
"_____no_output_____"
]
],
[
[
"### Parser an alignment in the PIR file format",
"_____no_output_____"
]
],
[
[
"# Dill can be used for storage\nif path.exists('model.dill'):\n with open('model.dill', 'rb') as file:\n align = dill.load(file)\nelse:\n align = mel.parser_pir_file('model.ali')\n with open('model.dill', 'wb') as file:\n dill.dump(align, file)",
"_____no_output_____"
],
[
"palette='Dark2'\ncolors=7\nsns.color_palette(palette, colors)",
"_____no_output_____"
],
[
"mel.cluster_alignment(align=align, threshold=1.1, long=True)",
"_____no_output_____"
],
[
"mel.save_align_to_ps(align=align, ps_file='model', palette=palette, colors=colors)",
"_____no_output_____"
],
[
"mel.save_pymol_script(align=align, pml_file='clusters_model', palette=palette, colors=colors)",
"_____no_output_____"
]
]
] | [
"markdown",
"code",
"markdown",
"code"
] | [
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code"
]
] |
d099f5cb0adabc499328e0402e0c488a4e8ac187 | 109,233 | ipynb | Jupyter Notebook | Cluster_Feature_Importance.ipynb | HartmutD/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers | 0fd32b9a9e0ff99e5d90f581cf223e7cd0b8d5d9 | [
"MIT"
] | null | null | null | Cluster_Feature_Importance.ipynb | HartmutD/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers | 0fd32b9a9e0ff99e5d90f581cf223e7cd0b8d5d9 | [
"MIT"
] | null | null | null | Cluster_Feature_Importance.ipynb | HartmutD/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers | 0fd32b9a9e0ff99e5d90f581cf223e7cd0b8d5d9 | [
"MIT"
] | null | null | null | 105.743466 | 30,458 | 0.801617 | [
[
[
"<a href=\"https://colab.research.google.com/github/HartmutD/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers/blob/master/Cluster_Feature_Importance.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>",
"_____no_output_____"
],
[
"# Clustered Feature Importance",
"_____no_output_____"
],
[
"The goal of these notebook is demostrate the Clustered Feature Imporatance, a feature importance method suggested by **Dr. Marcos Lopez de Prado** in the [paper](https://papers.ssrn.com/sol3/papers.cfm?abstract_id=3517595) and the book Machine Learning for Asset Managers. The aim of CFI is to cluster similar features and apply the feature importance analysis at the cluster level. This way clusters are mutually dissimilar and the method is tends tame the substitution effect and by using information theory along we can also reduce the multicollinearity of the dataset.",
"_____no_output_____"
]
],
[
[
"# General Imports \nimport warnings\nwarnings.filterwarnings('ignore')\n\nimport numpy as np\nimport pandas as pd\nimport seaborn as sns\nimport matplotlib.pyplot as plt\nfrom sklearn.metrics import accuracy_score, log_loss\nfrom sklearn.tree import DecisionTreeClassifier\nfrom sklearn.ensemble import BaggingClassifier\nfrom sklearn.model_selection._split import KFold\n\n# Import MlFinLab tools\nimport mlfinlab as ml\nfrom mlfinlab.util.generate_dataset import get_classification_data \nfrom mlfinlab.clustering.feature_clusters import get_feature_clusters\nfrom mlfinlab.cross_validation import ml_cross_val_score\nfrom mlfinlab.feature_importance import (mean_decrease_impurity, mean_decrease_accuracy,\n plot_feature_importance)\nfrom mlfinlab.clustering.onc import get_onc_clusters",
"_____no_output_____"
]
],
[
[
"**Clustered Feature Importance or CFI algorithm can be implemented in a two step process as mentioned in the book.**\n",
"_____no_output_____"
],
[
"## Step - 1 : Features Clustering",
"_____no_output_____"
],
[
"As first step we need to generate the clusters or subsets of features we want to analyse with feature importance methods. This can be done using feature cluster module of mlfinlab. It uses various parameters to generating feature clusters as in the book.",
"_____no_output_____"
],
[
"* The algorithm projects the observed features into a metric space by applying a dependence matric function either correlation based or information theory based. Information-theoretic metrics have the advantage of recognizing redundant features that are the result of nonlinear combinations of informative features (i.e. multicollinearity). \n* Next, we need to determine the optimal number of clusters. The user can either specify the number cluster to use, this will apply a hierarchical clustering on the defined distance matrix of dependence matrix for a given linkage method for clustering, or the user can use the ONC algorithm which uses K-Means clustering, to automate the task of either getting the optimal number of clusters or get both optimal number of clusters and cluster compositions. \n\nBut the *caveat* of these process is that some silhouette scores may be low due one feature being a combination of multiple features across clusters. This is a problem, because ONC cannot assign one feature to multiple clusters. Hence, the following transformation may help reduce the multicollinearity of the system:",
"_____no_output_____"
]
],
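For intuition, the hierarchical-clustering variant described above can be sketched in a few lines: turn the correlation matrix into a distance matrix, apply a linkage method, and cut the tree into a chosen number of clusters. This is only a minimal, assumed illustration of the idea; the notebook itself relies on mlfinlab's get_feature_clusters, which additionally supports information-theoretic metrics, the ONC algorithm, and the residual transformation.

```python
# Minimal sketch of correlation-based hierarchical feature clustering
# (illustrative only; the notebook uses mlfinlab's get_feature_clusters below).
import numpy as np
from scipy.cluster.hierarchy import linkage, fcluster
from scipy.spatial.distance import squareform

def sketch_feature_clusters(frame, n_clusters=6, method='single'):
    corr = frame.corr()
    dist = np.sqrt(0.5 * (1.0 - corr)).values        # map correlation to a distance
    np.fill_diagonal(dist, 0.0)
    link = linkage(squareform(dist, checks=False), method=method)
    labels = fcluster(link, t=n_clusters, criterion='maxclust')
    # return a list of feature-name lists, one per cluster
    return [corr.columns[labels == k].tolist() for k in np.unique(labels)]
```

On the synthetic dataset generated below, such a sketch would tend to group each informative feature with its redundant copies, which is exactly the substitution effect that CFI is designed to neutralise.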
[
[
"# Generating a synthetic dataset for testing \n\n# We generate 40 features, 5 informative ('I_') , 30 redudent ('R_') and rest (5) noisy ('N_') features\n# with 10000 rows of samples \n# Redundent features are those which share large amount of information among each other and also with informative features \n# That is the redudent features are those with substitution effect\nX, y = get_classification_data(n_features=40, n_informative=5, n_redundant=30, n_samples=10000, sigma=0.1)",
"_____no_output_____"
],
[
"X.head(3)",
"_____no_output_____"
],
[
"# Now we get the feature clusters\ndep_matrix = 'linear' # Linear correlation base dependence matric\n\n# The n_cluster is set to None for getting the Optimal Number of Clusters using ONC Algorithm\nclusters = get_feature_clusters(X, dependence_metric=dep_matrix, distance_metric=None, linkage_method=None, n_clusters=None)",
"No feature/s found with low silhouette score. All features belongs to its respective clusters\n"
],
[
"clusters",
"_____no_output_____"
]
],
[
[
"As we can see that algorithm have not detected any features with low silhoutte score. So, there is no need replace the features with their residuals*. Now, that we have identified the number clusters (six in this case) and composition of features with in each cluster, we can move to the next step.\n<br> ( *This will be discussed in the later part of this notebook)",
"_____no_output_____"
],
[
"## Step - 2 : Clustered Importance",
"_____no_output_____"
],
[
"Clustered Feature Importance can be implemented by simply passing the feature clusters obtained in Step-1 to the **clustered_subsets** argument of the MDI or MDA feature importance algorithm. We can apply MDI and MDA on groups of similar features, rather than on individual features and obtain the importance of the cluster as whole instead of individual features. This way we can anlayse how mutually dissimilar clusters interact with model and possibly isolate the noisy/non-infomative clusters.",
"_____no_output_____"
]
],
[
[
"# Setup for feature importance algorithm \n# We define a classifier \nclf_base = DecisionTreeClassifier(criterion='entropy', max_features=1, class_weight='balanced', min_weight_fraction_leaf=0)\nclf = BaggingClassifier(base_estimator=clf_base, n_estimators=1000, max_features=1., max_samples=1.,\n oob_score=True, n_jobs=-1)\n\n# Fit the classifier\nfit = clf.fit(X,y)\n\n# Setting up cross-validation generator\n# Use Purged K-Fold generator while using it on real financial dataset to avoid leakage\ncvGen = KFold(n_splits=10)\noos_score = ml_cross_val_score(clf, X, y, cv_gen=cvGen, sample_weight_train=None, scoring=log_loss).mean()",
"_____no_output_____"
]
],
[
[
"### Clustered MDI",
"_____no_output_____"
],
[
"We compute the clustered MDI as the sum of the MDI values of the features that constitute that cluster. If there is one feature per cluster, then MDI and clustered MDI are the same.",
"_____no_output_____"
]
],
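To make the aggregation explicit, here is a rough, assumed sketch of what summing MDI at the cluster level looks like. It is not mlfinlab's actual implementation (which also tracks the variation across estimators), and it assumes `clusters` is a list of feature-name lists as produced in Step 1.

```python
# Illustrative sketch only: clustered MDI as the sum of per-feature MDI within each cluster.
import numpy as np
import pandas as pd

def sketch_clustered_mdi(bagging_clf, feature_names, feature_clusters):
    # per-feature MDI, averaged over the trees of the fitted bagging ensemble
    per_tree = np.array([est.feature_importances_ for est in bagging_clf.estimators_])
    mdi = pd.Series(per_tree.mean(axis=0), index=feature_names)
    # cluster-level importance = sum of the members' importances, then normalise
    clustered = pd.Series({f"cluster_{i}": mdi[list(members)].sum()
                           for i, members in enumerate(feature_clusters)})
    return clustered / clustered.sum()

# e.g. sketch_clustered_mdi(clf, X.columns, clusters)
```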
[
[
"clustered_mdi_imp = mean_decrease_impurity(clf,X.columns,clustered_subsets=clusters)",
"_____no_output_____"
],
[
"plot_feature_importance(clustered_mdi_imp,oob_score=clf.oob_score_, oos_score=oos_score,\n save_fig=True, output_path='images/clustered_mdi.png')",
"_____no_output_____"
]
],
[
[
"As expected the clusters of non-informative features are given the least importnance and the clusters with redundent and informative features are placed above the noise cluster. This is very usefull for detecting features that are non-informative without the presence of some other features within the same cluster. ",
"_____no_output_____"
],
[
"### Clustered MDA",
"_____no_output_____"
],
[
"As an extension to normal MDA to tackle multi-collinearity and (linear or non-linear) substitution effect. Its implementation was also discussed by Dr. Marcos Lopez de Prado in the Clustered Feature Importance [Presentaion Slides](https://papers.ssrn.com/sol3/papers.cfm?abstract_id=3517595)",
"_____no_output_____"
]
],
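For intuition, a simplified, assumed sketch of the cluster-level permutation idea: instead of shuffling one column at a time, all columns of a cluster are shuffled together with the same row order, so substitute features inside the cluster cannot cover for each other. The notebook's actual call uses mlfinlab's cross-validated implementation with log-loss; this sketch uses a single held-out set and accuracy purely for illustration.

```python
# Simplified sketch of clustered MDA: permute a whole cluster at once and record
# the drop in out-of-sample accuracy. Illustrative only (no CV, no sample weights).
import numpy as np
from sklearn.metrics import accuracy_score

def sketch_clustered_mda(fitted_clf, X_test, y_test, feature_clusters, seed=0):
    rng = np.random.default_rng(seed)
    base = accuracy_score(y_test, fitted_clf.predict(X_test))
    importance = {}
    for i, members in enumerate(feature_clusters):
        X_perm = X_test.copy()
        order = rng.permutation(len(X_perm))
        # shuffle every column of the cluster with the SAME row order
        X_perm[list(members)] = X_perm[list(members)].values[order]
        importance[f"cluster_{i}"] = base - accuracy_score(y_test, fitted_clf.predict(X_perm))
    return importance
```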
[
[
"clustered_mda_imp = mean_decrease_accuracy(clf, X, y, cv_gen=cvGen, clustered_subsets=clusters,\n scoring=log_loss)",
"_____no_output_____"
],
[
"plot_feature_importance(clustered_mda_imp,oob_score=clf.oob_score_, oos_score=oos_score,\n save_fig=True, output_path='images/clustered_mda.png')",
"_____no_output_____"
]
],
[
[
"The clustered MDA has also correctly identified the noisy cluster and placed it below. ",
"_____no_output_____"
],
[
"## The Caveat\nNow that we saw how to implement the CFI with MDI and MDA, we have to discuss the *caveat* of normal ONC algorithm that was mentioned in the Step -1 of this notebook.",
"_____no_output_____"
],
[
"To understand the caveat of the normal ONC algorithm, we need a understanding of how it works. ONC finds the optimal number of clusters as well as the composition of those clusters, where each feature belongs to one and only one cluster. Features that\nbelong to the same cluster share a large amount of information, and features that belong to different clusters share only a relatively small amount of information. \n<br>The consistency composition of the clusters are determined by the [silhouette score](https://en.wikipedia.org/wiki/Silhouette_(clustering)) of the features. The silhouette ranges from −1 to +1, where a high value indicates that the object is well matched to its own cluster and poorly matched to neighboring clusters. So, there may be some features with low silhouette score and this is a problem, because ONC cannot assign one feature to multiple clusters. \n<br>In this case, the following transformation may help reduce the multicollinearity of the system :",
"_____no_output_____"
],
[
"For each cluster $k = 1 . . . K$, replace the features included in that cluster with residual features, so that it do not contain any information outside cluster $k$. That is let $D_{k}$ be the subset of index features $D = {1,...,F}$ included in cluster $k$, where $D_{k}\\subset{D}\\ , ||D_{k}|| > 0 \\ , \\forall{k}\\ ; \\ D_{k} \\bigcap D_{l} = \\Phi\\ , \\forall k \\ne l\\ ; \\bigcup \\limits _{k=1} ^{k} D_{k} = D$ . Then, for a given feature $X_{i}$ where $i \\in D_{k}$, we compute the residual feature $\\hat \\varepsilon _{i}$ by fitting the following equation for regression -\n\n$$X_{n,j} = \\alpha _{i} + \\sum \\limits _{j \\in \\{ \\bigcup _{l<k}\\ D_{l} \\} } \\beta _{i,j} X_{n,j} + \\varepsilon _{n,i}$$\n\nWhere $n = 1,....,N$ is the index of observations per feature. But if the degrees of freedom in the above regression is too low, one option is to use as regressors linear combinations of the features within each cluster by following a minimum variance weighting scheme so that only $K-1$ betas need to be estimated.\nThis transformation is not necessary if the silhouette scores clearly indicate that features belong to their respective clusters.",
"_____no_output_____"
]
],
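A minimal sketch of the residual transformation described above, under the assumption that plain OLS with sufficient degrees of freedom is acceptable: each feature in cluster k is regressed on the features of the clusters processed before it, and only the residual is kept. get_feature_clusters applies this kind of replacement automatically when it detects low-silhouette features, so the function below is purely illustrative.

```python
# Illustrative sketch of the residualisation step from the equation above.
from sklearn.linear_model import LinearRegression

def sketch_residualize(frame, ordered_clusters):
    out = frame.copy()
    processed = []                      # features from clusters l < k
    for members in ordered_clusters:
        if processed:                   # the first cluster has nothing to regress on
            for col in members:
                reg = LinearRegression().fit(out[processed], out[col])
                out[col] = out[col] - reg.predict(out[processed])
        processed.extend(list(members))
    return out
```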
[
[
"corr0, clstrs, silh = get_onc_clusters(X.corr(), repeat=3)",
"_____no_output_____"
],
[
"plt.figure(figsize=(16,9))\nsns.heatmap(corr0,cmap='viridis');",
"_____no_output_____"
],
[
"silh",
"_____no_output_____"
]
],
[
[
"As we can see there is very low correlation among clusters. Hence, we need not to tranform anything in this dataset. The silhouette score also confirm the same, as there no features have silhouette score below zero.",
"_____no_output_____"
],
[
"Now let us artificially generate a dataset that can introduce features with low silhouette score. Here the sigmaStd argument of get_classification_data will help us to generate a dataset with high substitution effect.",
"_____no_output_____"
]
],
[
[
"# We set the value of sigmaStd to 4 to introduce high substitution effect\nX_, y_ = get_classification_data(n_features=40, n_informative=5, n_redundant=30, n_samples=1000, sigma=5)",
"_____no_output_____"
],
[
"# Now lets check if we obtained our desired dataset \ncorr0, clstrs, silh = get_onc_clusters(X_.corr())",
"_____no_output_____"
],
[
"clstrs",
"_____no_output_____"
]
],
[
[
"Now, lets see if there is any features with low silhouette score. If yes then we can correct it with the transformation mentioned above (transformation is appiled automatically).",
"_____no_output_____"
]
],
[
[
"# This function has built-in detection property that detects the features with low silhouette score\n# and corrects it with transformation\nclusters = get_feature_clusters(X_, dependence_metric=dep_matrix, distance_metric=None,\n linkage_method=None, n_clusters=None)",
"3 feature/s found with low silhouette score Index(['N_0', 'N_4', 'R_0'], dtype='object'). Returning the transformed dataset\n"
]
],
[
[
"We have got the dataset with some features that has some negative silhouette score. Due to this all of noisy features are placed with the informative and redundent feature clusters. **This is the caveat of the ONC algorithm**",
"_____no_output_____"
]
],
[
[
"clusters",
"_____no_output_____"
]
],
[
[
"As we can see the composition after transformation has changed and now we have 3 clusters instead of 2. Though this is not perfect but it has done a must better job in clustering than the normal ONC algorithm. Also the get_feature_clusters function can detect the problem of low degree of freedom of the regression model used for generating the residual $\\hat \\varepsilon _{i}$ for replacing the orginal feature $X_{i}$ as mentioned above.",
"_____no_output_____"
],
[
"## Using Hierarchical Clustering ",
"_____no_output_____"
]
],
[
[
"dist_matrix = 'angular' # Angular distance matric\nlinkage = 'single' # Linkage method for hierarchical clustering\nclusters_ = get_feature_clusters(X, dependence_metric=dep_matrix, distance_metric=dist_matrix,\n linkage_method=linkage, n_clusters=None)",
"No feature/s found with low silhouette score. All features belongs to its respective clusters\n"
],
[
"clusters_",
"_____no_output_____"
]
],
[
[
"Using Heirarchical Clustering we get 6 clusters and 3 of them are with only single element and are non-informative features.",
"_____no_output_____"
],
[
"## Conclusion",
"_____no_output_____"
],
[
"At the end I would like say that the user can use different depedence matric including both correlation based and information theory based metric (like Information Variation which have the advantage of recognizing redundant features that are the result of nonlinear combinations of informative features). Also the user can use different linkage methods for hierarchical clustering or define the number of clusters.",
"_____no_output_____"
],
[
"## References",
"_____no_output_____"
],
[
"* Paper: https://papers.ssrn.com/sol3/papers.cfm?abstract_id=3517595\n* Book: Machine Learning for Asset Managers by Dr. Marcos Lopez De Prado",
"_____no_output_____"
]
]
] | [
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown"
] | [
[
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown",
"markdown",
"markdown",
"markdown"
],
[
"code",
"code",
"code",
"code"
],
[
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown",
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown",
"markdown",
"markdown",
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown",
"markdown",
"markdown",
"markdown",
"markdown"
]
] |
d099f73bde94354439b4ff7d0508e27e4fd5338f | 366,945 | ipynb | Jupyter Notebook | CSFI_removed_n575.ipynb | shmp0722/CSFI | 8e15e45aca7c9bad157d5fadd20967175bbc39c3 | [
"BSD-2-Clause"
] | null | null | null | CSFI_removed_n575.ipynb | shmp0722/CSFI | 8e15e45aca7c9bad157d5fadd20967175bbc39c3 | [
"BSD-2-Clause"
] | null | null | null | CSFI_removed_n575.ipynb | shmp0722/CSFI | 8e15e45aca7c9bad157d5fadd20967175bbc39c3 | [
"BSD-2-Clause"
] | null | null | null | 271.811111 | 30,890 | 0.910019 | [
[
[
"empty"
]
]
] | [
"empty"
] | [
[
"empty"
]
] |
d099f9b06e187266e1a2a03bbfc98d95b61b7e2b | 780,185 | ipynb | Jupyter Notebook | JitterWindspeedHumidity.ipynb | gil612/Datenanalyse-und-Business-Intelligence-1 | a2fb3bb105484b26b305e4b275c3d86af65a7685 | [
"MIT"
] | null | null | null | JitterWindspeedHumidity.ipynb | gil612/Datenanalyse-und-Business-Intelligence-1 | a2fb3bb105484b26b305e4b275c3d86af65a7685 | [
"MIT"
] | null | null | null | JitterWindspeedHumidity.ipynb | gil612/Datenanalyse-und-Business-Intelligence-1 | a2fb3bb105484b26b305e4b275c3d86af65a7685 | [
"MIT"
] | null | null | null | 3,769.009662 | 290,372 | 0.965595 | [
[
[
"import pandas as pd\nimport seaborn as sns\nfrom matplotlib import pyplot as plt\nimport datetime as dt\nimport numpy as np",
"_____no_output_____"
],
[
"df = pd.read_parquet('escooter_history.parquet', engine='pyarrow')",
"_____no_output_____"
],
[
"df.windspeed = pd.cut(df.windspeed,6,labels=[\"Calm\",\"Gentle breeze\",\"Moderate breeze\",\"Fresh breeze\",\"Strong breeze\", \"High wind\"])\ndf.humidity = pd.cut(df.humidity,[0,20,60,100],labels=[\"Uncomfortably dry\",\"Comfort range\",\"Uncomfortably wet\"])",
"_____no_output_____"
],
[
"df['rc_true'] = df['registered_customer']\ndf[\"rc_true\"] = df[\"rc_true\"].astype(int)\ndf[\"rc_false\"] = 1 - df[\"rc_true\"]\n\ndf_agg = df.groupby([df.datetime.dt.to_period('H')]).agg({\n 'holiday':'max',\n 'workingday':'max',\n 'weather' : 'max',\n 'temp' : 'max',\n 'atemp' : 'max',\n 'humidity': 'max',\n 'windspeed' : 'max',\n 'registered_customer': 'count',\n })\ndf_agg.columns = df_agg.columns.str.replace('registered_customer', 'all_customers')",
"_____no_output_____"
],
[
"# df_agg['hour'] = df_agg['datetime'].dt.hour\n# df_agg = df_agg.drop(df_agg[df_agg.hour < 6].index)\n# df_agg = df_agg.drop(columns=['hour'])",
"_____no_output_____"
],
[
"fig = plt.figure(figsize=(10,10))\nsns.stripplot(x=\"windspeed\", y=\"all_customers\", hue=\"workingday\", data=df_agg,jitter=0.4,size=5)",
"_____no_output_____"
],
[
"fig = plt.figure(figsize=(10,10))\nsns.stripplot(x=\"humidity\", y=\"all_customers\", hue=\"workingday\", data=df_agg,jitter=0.3,size=5)",
"_____no_output_____"
],
[
"fig = plt.figure(figsize=(10,10))\nsns.stripplot(x=\"windspeed\", y=\"all_customers\", hue=\"humidity\", data=df_agg,jitter=0.3,size=5)",
"_____no_output_____"
]
]
] | [
"code"
] | [
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
d099f9cf2ccb25194e2d073cf292974d8b4ec387 | 23,639 | ipynb | Jupyter Notebook | natural_language_joint_query_search/colab/unsplash_image_search.ipynb | g-luo/CLIP_Explainability | 0bcbac0e0afb33faf5061987ba2db22c77d6b294 | [
"Apache-2.0"
] | 3 | 2021-05-02T16:56:13.000Z | 2021-08-11T18:52:13.000Z | natural_language_joint_query_search/colab/unsplash_image_search.ipynb | g-luo/CLIP_Explainability | 0bcbac0e0afb33faf5061987ba2db22c77d6b294 | [
"Apache-2.0"
] | null | null | null | natural_language_joint_query_search/colab/unsplash_image_search.ipynb | g-luo/CLIP_Explainability | 0bcbac0e0afb33faf5061987ba2db22c77d6b294 | [
"Apache-2.0"
] | null | null | null | 36.200613 | 300 | 0.543678 | [
[
[
"# Unsplash Joint Query Search\n\nUsing this notebook you can search for images from the [Unsplash Dataset](https://unsplash.com/data) using natural language queries. The search is powered by OpenAI's [CLIP](https://github.com/openai/CLIP) neural network.\n\nThis notebook uses the precomputed feature vectors for almost 2 million images from the full version of the [Unsplash Dataset](https://unsplash.com/data). If you want to compute the features yourself, see [here](https://github.com/haltakov/natural-language-image-search#on-your-machine).\n\nThis project was mostly based on the [project](https://github.com/haltakov/natural-language-image-search) created by [Vladimir Haltakov](https://twitter.com/haltakov) and the full code is open-sourced on [GitHub](https://github.com/haofanwang/natural-language-joint-query-search).",
"_____no_output_____"
]
],
[
[
"!git clone https://github.com/haofanwang/natural-language-joint-query-search.git",
"Cloning into 'natural-language-joint-query-search'...\nremote: Enumerating objects: 116, done.\u001b[K\nremote: Counting objects: 100% (116/116), done.\u001b[K\nremote: Compressing objects: 100% (106/106), done.\u001b[K\nremote: Total 116 (delta 37), reused 43 (delta 5), pack-reused 0\u001b[K\nReceiving objects: 100% (116/116), 13.12 MiB | 29.52 MiB/s, done.\nResolving deltas: 100% (37/37), done.\n"
],
[
"cd natural-language-joint-query-search",
"/content/natural-language-joint-query-search\n"
]
],
[
[
"## Setup Environment\n\nIn this section we will setup the environment.",
"_____no_output_____"
],
[
"First we need to install CLIP and then upgrade the version of torch to 1.7.1 with CUDA support (by default CLIP installs torch 1.7.1 without CUDA). Google Colab currently has torch 1.7.0 which doesn't work well with CLIP.",
"_____no_output_____"
]
],
[
[
"!pip install torch==1.7.1+cu101 torchvision==0.8.2+cu101 -f https://download.pytorch.org/whl/torch_stable.html\n!pip install ftfy regex tqdm",
"Looking in links: https://download.pytorch.org/whl/torch_stable.html\nCollecting torch==1.7.1+cu101\n\u001b[?25l Downloading https://download.pytorch.org/whl/cu101/torch-1.7.1%2Bcu101-cp36-cp36m-linux_x86_64.whl (735.4MB)\n\u001b[K |████████████████████████████████| 735.4MB 24kB/s \n\u001b[33mWARNING: Retrying (Retry(total=4, connect=None, read=None, redirect=None, status=None)) after connection broken by 'ProtocolError('Connection aborted.', ConnectionResetError(104, 'Connection reset by peer'))': /simple/torchvision/\u001b[0m\n\u001b[?25hCollecting torchvision==0.8.2+cu101\n\u001b[?25l Downloading https://download.pytorch.org/whl/cu101/torchvision-0.8.2%2Bcu101-cp36-cp36m-linux_x86_64.whl (12.8MB)\n\u001b[K |████████████████████████████████| 12.8MB 114kB/s \n\u001b[?25hRequirement already satisfied: numpy in /usr/local/lib/python3.6/dist-packages (from torch==1.7.1+cu101) (1.19.5)\nRequirement already satisfied: dataclasses; python_version < \"3.7\" in /usr/local/lib/python3.6/dist-packages (from torch==1.7.1+cu101) (0.8)\nRequirement already satisfied: typing-extensions in /usr/local/lib/python3.6/dist-packages (from torch==1.7.1+cu101) (3.7.4.3)\nRequirement already satisfied: pillow>=4.1.1 in /usr/local/lib/python3.6/dist-packages (from torchvision==0.8.2+cu101) (7.0.0)\nInstalling collected packages: torch, torchvision\n Found existing installation: torch 1.7.0+cu101\n Uninstalling torch-1.7.0+cu101:\n Successfully uninstalled torch-1.7.0+cu101\n Found existing installation: torchvision 0.8.1+cu101\n Uninstalling torchvision-0.8.1+cu101:\n Successfully uninstalled torchvision-0.8.1+cu101\nSuccessfully installed torch-1.7.1+cu101 torchvision-0.8.2+cu101\nCollecting ftfy\n\u001b[?25l Downloading https://files.pythonhosted.org/packages/04/06/e5c80e2e0f979628d47345efba51f7ba386fe95963b11c594209085f5a9b/ftfy-5.9.tar.gz (66kB)\n\u001b[K |████████████████████████████████| 71kB 8.8MB/s \n\u001b[?25hRequirement already satisfied: regex in /usr/local/lib/python3.6/dist-packages (2019.12.20)\nRequirement already satisfied: tqdm in /usr/local/lib/python3.6/dist-packages (4.41.1)\nRequirement already satisfied: wcwidth in /usr/local/lib/python3.6/dist-packages (from ftfy) (0.2.5)\nBuilding wheels for collected packages: ftfy\n Building wheel for ftfy (setup.py) ... \u001b[?25l\u001b[?25hdone\n Created wheel for ftfy: filename=ftfy-5.9-cp36-none-any.whl size=46451 sha256=9ebbd9cc943e4a7d486233233aef6bcea6db5cb3fd6f1061bf945e202d4052f6\n Stored in directory: /root/.cache/pip/wheels/5e/2e/f0/b07196e8c929114998f0316894a61c752b63bfa3fdd50d2fc3\nSuccessfully built ftfy\nInstalling collected packages: ftfy\nSuccessfully installed ftfy-5.9\n"
]
],
[
[
"## Download the Precomputed Data\n\nIn this section the precomputed feature vectors for all photos are downloaded.",
"_____no_output_____"
],
[
"In order to compare the photos from the Unsplash dataset to a text query, we need to compute the feature vector of each photo using CLIP. \n\nWe need to download two files:\n* `photo_ids.csv` - a list of the photo IDs for all images in the dataset. The photo ID can be used to get the actual photo from Unsplash.\n* `features.npy` - a matrix containing the precomputed 512 element feature vector for each photo in the dataset.\n\nThe files are available on [Google Drive](https://drive.google.com/drive/folders/1WQmedVCDIQKA2R33dkS1f980YsJXRZ-q?usp=sharing).",
"_____no_output_____"
]
],
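Once the download has finished, a quick consistency check can confirm that the two files line up, with one photo ID per row of the feature matrix and 512-dimensional CLIP features. This is an optional sanity check added here for convenience, not part of the original workflow.

```python
# Optional sanity check (assumes the files above were saved under unsplash-dataset/).
import numpy as np
import pandas as pd

ids = pd.read_csv("unsplash-dataset/photo_ids.csv")
feats = np.load("unsplash-dataset/features.npy")
assert feats.shape == (len(ids), 512), f"unexpected feature matrix shape: {feats.shape}"
print(f"{len(ids)} photos, feature dimension {feats.shape[1]}")
```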
[
[
"from pathlib import Path\n\n# Create a folder for the precomputed features\n!mkdir unsplash-dataset\n\n# Download the photo IDs and the feature vectors\n!gdown --id 1FdmDEzBQCf3OxqY9SbU-jLfH_yZ6UPSj -O unsplash-dataset/photo_ids.csv\n!gdown --id 1L7ulhn4VeN-2aOM-fYmljza_TQok-j9F -O unsplash-dataset/features.npy\n\n# Download from alternative source, if the download doesn't work for some reason (for example download quota limit exceeded)\nif not Path('unsplash-dataset/photo_ids.csv').exists():\n !wget https://transfer.army/api/download/TuWWFTe2spg/EDm6KBjc -O unsplash-dataset/photo_ids.csv\n\nif not Path('unsplash-dataset/features.npy').exists():\n !wget https://transfer.army/api/download/LGXAaiNnMLA/AamL9PpU -O unsplash-dataset/features.npy",
"Downloading...\nFrom: https://drive.google.com/uc?id=1FdmDEzBQCf3OxqY9SbU-jLfH_yZ6UPSj\nTo: /content/natural-language-joint-query-search/unsplash-dataset/photo_ids.csv\n23.8MB [00:00, 111MB/s] \nDownloading...\nFrom: https://drive.google.com/uc?id=1L7ulhn4VeN-2aOM-fYmljza_TQok-j9F\nTo: /content/natural-language-joint-query-search/unsplash-dataset/features.npy\n2.03GB [00:40, 50.3MB/s]\n"
]
],
[
[
"## Define Functions\n\nSome important functions from CLIP for processing the data are defined here.",
"_____no_output_____"
],
[
"The `encode_search_query` function takes a text description and encodes it into a feature vector using the CLIP model.",
"_____no_output_____"
]
],
[
[
"def encode_search_query(search_query):\n with torch.no_grad():\n # Encode and normalize the search query using CLIP\n text_encoded, weight = model.encode_text(clip.tokenize(search_query).to(device))\n text_encoded /= text_encoded.norm(dim=-1, keepdim=True)\n\n # Retrieve the feature vector from the GPU and convert it to a numpy array\n return text_encoded.cpu().numpy()",
"_____no_output_____"
]
],
[
[
"The `find_best_matches` function compares the text feature vector to the feature vectors of all images and finds the best matches. The function returns the IDs of the best matching photos.",
"_____no_output_____"
]
],
[
[
"def find_best_matches(text_features, photo_features, photo_ids, results_count=3):\n # Compute the similarity between the search query and each photo using the Cosine similarity\n similarities = (photo_features @ text_features.T).squeeze(1)\n\n # Sort the photos by their similarity score\n best_photo_idx = (-similarities).argsort()\n\n # Return the photo IDs of the best matches\n return [photo_ids[i] for i in best_photo_idx[:results_count]]",
"_____no_output_____"
]
],
[
[
"We can load the pretrained public CLIP model.",
"_____no_output_____"
]
],
[
[
"import torch\n\nfrom CLIP.clip import clip\n\n# Load the open CLIP model\ndevice = \"cuda\" if torch.cuda.is_available() else \"cpu\"\nmodel, preprocess = clip.load(\"ViT-B/32\", device=device, jit=False)",
"100%|████████████████████████████████████████| 354M/354M [00:02<00:00, 138MiB/s]\n"
]
],
[
[
"We can now load the pre-extracted unsplash image features.\n\n",
"_____no_output_____"
]
],
[
[
"import pandas as pd\nimport numpy as np\n\n# Load the photo IDs\nphoto_ids = pd.read_csv(\"unsplash-dataset/photo_ids.csv\")\nphoto_ids = list(photo_ids['photo_id'])\n\n# Load the features vectors\nphoto_features = np.load(\"unsplash-dataset/features.npy\")\n\n# Print some statistics\nprint(f\"Photos loaded: {len(photo_ids)}\")",
"Photos loaded: 1981161\n"
]
],
[
[
"## Search Unsplash\n\n",
"_____no_output_____"
],
[
"Now we are ready to search the dataset using natural language. Check out the examples below and feel free to try out your own queries.\n\nIn this project, we support more types of searching than the [original project](https://github.com/haltakov/natural-language-image-search).\n\n1. Text-to-Image Search\n2. Image-to-Image Search\n3. Text+Text-to-Image Search\n4. Image+Text-to-Image Search\n\nNote: \n\n1. As the Unsplash API limit is hit from time to time, we don't display the image, but show the link to download the image.\n2. As the pretrained CLIP model is mainly trained with English texts, if you want to try with different language, please use Google translation API or NMT model to translate first.",
"_____no_output_____"
],
[
"### Text-to-Image Search",
"_____no_output_____"
],
[
"#### \"Tokyo Tower at night\"",
"_____no_output_____"
]
],
[
[
"search_query = \"Tokyo Tower at night.\"\n\ntext_features = encode_search_query(search_query)\n\n# Find the best matches\nbest_photo_ids = find_best_matches(text_features, photo_features, photo_ids, 5)\n\nfor photo_id in best_photo_ids:\n print(\"https://unsplash.com/photos/{}/download\".format(photo_id))",
"https://unsplash.com/photos/Hfjoa3qqytM/download\nhttps://unsplash.com/photos/9tOyu48-P7M/download\nhttps://unsplash.com/photos/OCgMGflYgVg/download\nhttps://unsplash.com/photos/msYlh78QagI/download\nhttps://unsplash.com/photos/UYmsWq6Cf1c/download\n"
]
],
[
[
"#### \"Two children are playing in the amusement park.\"",
"_____no_output_____"
]
],
[
[
"search_query = \"Two children are playing in the amusement park.\"\n\ntext_features = encode_search_query(search_query)\n\n# Find the best matches\nbest_photo_ids = find_best_matches(text_features, photo_features, photo_ids, 5)\n\nfor photo_id in best_photo_ids:\n print(\"https://unsplash.com/photos/{}/download\".format(photo_id))",
"https://unsplash.com/photos/VPq1DiHNShY/download\nhttps://unsplash.com/photos/nQlKkqq6qEw/download\nhttps://unsplash.com/photos/lgXRsUVWl88/download\nhttps://unsplash.com/photos/b10qqhvwWg4/download\nhttps://unsplash.com/photos/xUDUhI_qsKQ/download\n"
]
],
[
[
"### Image-to-Image Search",
"_____no_output_____"
]
],
[
[
"from PIL import Image\n\nsource_image = \"./images/borna-hrzina-8IPrifbjo-0-unsplash.jpg\"\nwith torch.no_grad():\n image_feature = model.encode_image(preprocess(Image.open(source_image)).unsqueeze(0).to(device))\n image_feature = (image_feature / image_feature.norm(dim=-1, keepdim=True)).cpu().numpy()\n\n# Find the best matches\nbest_photo_ids = find_best_matches(image_feature, photo_features, photo_ids, 5)\n\nfor photo_id in best_photo_ids:\n print(\"https://unsplash.com/photos/{}/download\".format(photo_id))",
"https://unsplash.com/photos/8IPrifbjo-0/download\nhttps://unsplash.com/photos/2Hzzw1qfVTQ/download\nhttps://unsplash.com/photos/q1gXY48Ej78/download\nhttps://unsplash.com/photos/OYaw40WnhSc/download\nhttps://unsplash.com/photos/DpeXitxtix8/download\n"
]
],
[
[
"### Text+Text-to-Image Search",
"_____no_output_____"
]
],
[
[
"search_query = \"red flower\"\nsearch_query_extra = \"blue sky\"\n\ntext_features = encode_search_query(search_query)\ntext_features_extra = encode_search_query(search_query_extra)\n\nmixed_features = text_features + text_features_extra\n\n# Find the best matches\nbest_photo_ids = find_best_matches(mixed_features, photo_features, photo_ids, 5)\n\nfor photo_id in best_photo_ids:\n print(\"https://unsplash.com/photos/{}/download\".format(photo_id))",
"https://unsplash.com/photos/NewdN4HJaWM/download\nhttps://unsplash.com/photos/r6DXsecvS4w/download\nhttps://unsplash.com/photos/Ye-PdCxCmEQ/download\nhttps://unsplash.com/photos/AFT4cSrnVZk/download\nhttps://unsplash.com/photos/qKBVUBtZJCU/download\n"
]
],
[
[
"### Image+Text-to-Image Search",
"_____no_output_____"
]
],
[
[
"source_image = \"./images/borna-hrzina-8IPrifbjo-0-unsplash.jpg\"\nsearch_text = \"cars\"\n\nwith torch.no_grad():\n image_feature = model.encode_image(preprocess(Image.open(source_image)).unsqueeze(0).to(device))\n image_feature = (image_feature / image_feature.norm(dim=-1, keepdim=True)).cpu().numpy()\n\ntext_feature = encode_search_query(search_text)\n\n# image + text\nmodified_feature = image_feature + text_feature\n\nbest_photo_ids = find_best_matches(modified_feature, photo_features, photo_ids, 5)\n \nfor photo_id in best_photo_ids:\n print(\"https://unsplash.com/photos/{}/download\".format(photo_id))",
"https://unsplash.com/photos/8IPrifbjo-0/download\nhttps://unsplash.com/photos/2Hzzw1qfVTQ/download\nhttps://unsplash.com/photos/6FpUtZtjFjM/download\nhttps://unsplash.com/photos/Qm8pvpJ-uGs/download\nhttps://unsplash.com/photos/c3ddbxzQtdM/download\n"
]
]
] | [
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] | [
[
"markdown"
],
[
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
]
] |
d099feeb004d4bac00cdc45da7a77317b6635467 | 791,149 | ipynb | Jupyter Notebook | Notebooks/RadarCOVID-Report/Daily/RadarCOVID-Report-2021-03-11.ipynb | pvieito/Radar-STATS | 9ff991a4db776259bc749a823ee6f0b0c0d38108 | [
"Apache-2.0"
] | 9 | 2020-10-14T16:58:32.000Z | 2021-10-05T12:01:56.000Z | Notebooks/RadarCOVID-Report/Daily/RadarCOVID-Report-2021-03-11.ipynb | pvieito/Radar-STATS | 9ff991a4db776259bc749a823ee6f0b0c0d38108 | [
"Apache-2.0"
] | 3 | 2020-10-08T04:48:35.000Z | 2020-10-10T20:46:58.000Z | Notebooks/RadarCOVID-Report/Daily/RadarCOVID-Report-2021-03-11.ipynb | Radar-STATS/Radar-STATS | 61d8b3529f6bbf4576d799e340feec5b183338a3 | [
"Apache-2.0"
] | 3 | 2020-09-27T07:39:26.000Z | 2020-10-02T07:48:56.000Z | 87.973869 | 141,896 | 0.724027 | [
[
[
"# RadarCOVID-Report",
"_____no_output_____"
],
[
"## Data Extraction",
"_____no_output_____"
]
],
[
[
"import datetime\nimport json\nimport logging\nimport os\nimport shutil\nimport tempfile\nimport textwrap\nimport uuid\n\nimport matplotlib.pyplot as plt\nimport matplotlib.ticker\nimport numpy as np\nimport pandas as pd\nimport pycountry\nimport retry\nimport seaborn as sns\n\n%matplotlib inline",
"_____no_output_____"
],
[
"current_working_directory = os.environ.get(\"PWD\")\nif current_working_directory:\n os.chdir(current_working_directory)\n\nsns.set()\nmatplotlib.rcParams[\"figure.figsize\"] = (15, 6)\n\nextraction_datetime = datetime.datetime.utcnow()\nextraction_date = extraction_datetime.strftime(\"%Y-%m-%d\")\nextraction_previous_datetime = extraction_datetime - datetime.timedelta(days=1)\nextraction_previous_date = extraction_previous_datetime.strftime(\"%Y-%m-%d\")\nextraction_date_with_hour = datetime.datetime.utcnow().strftime(\"%Y-%m-%d@%H\")\ncurrent_hour = datetime.datetime.utcnow().hour\nare_today_results_partial = current_hour != 23",
"_____no_output_____"
]
],
[
[
"### Constants",
"_____no_output_____"
]
],
[
[
"from Modules.ExposureNotification import exposure_notification_io\n\nspain_region_country_code = \"ES\"\ngermany_region_country_code = \"DE\"\n\ndefault_backend_identifier = spain_region_country_code\n\nbackend_generation_days = 7 * 2\ndaily_summary_days = 7 * 4 * 3\ndaily_plot_days = 7 * 4\ntek_dumps_load_limit = daily_summary_days + 1",
"_____no_output_____"
]
],
[
[
"### Parameters",
"_____no_output_____"
]
],
[
[
"environment_backend_identifier = os.environ.get(\"RADARCOVID_REPORT__BACKEND_IDENTIFIER\")\nif environment_backend_identifier:\n report_backend_identifier = environment_backend_identifier\nelse:\n report_backend_identifier = default_backend_identifier\nreport_backend_identifier",
"_____no_output_____"
],
[
"environment_enable_multi_backend_download = \\\n os.environ.get(\"RADARCOVID_REPORT__ENABLE_MULTI_BACKEND_DOWNLOAD\")\nif environment_enable_multi_backend_download:\n report_backend_identifiers = None\nelse:\n report_backend_identifiers = [report_backend_identifier]\n\nreport_backend_identifiers",
"_____no_output_____"
],
[
"environment_invalid_shared_diagnoses_dates = \\\n os.environ.get(\"RADARCOVID_REPORT__INVALID_SHARED_DIAGNOSES_DATES\")\nif environment_invalid_shared_diagnoses_dates:\n invalid_shared_diagnoses_dates = environment_invalid_shared_diagnoses_dates.split(\",\")\nelse:\n invalid_shared_diagnoses_dates = []\n\ninvalid_shared_diagnoses_dates",
"_____no_output_____"
]
],
[
[
"### COVID-19 Cases",
"_____no_output_____"
]
],
[
[
"report_backend_client = \\\n exposure_notification_io.get_backend_client_with_identifier(\n backend_identifier=report_backend_identifier)",
"_____no_output_____"
],
[
"@retry.retry(tries=10, delay=10, backoff=1.1, jitter=(0, 10))\ndef download_cases_dataframe():\n return pd.read_csv(\"https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/owid-covid-data.csv\")\n\nconfirmed_df_ = download_cases_dataframe()\nconfirmed_df_.iloc[0]",
"_____no_output_____"
],
[
"confirmed_df = confirmed_df_.copy()\nconfirmed_df = confirmed_df[[\"date\", \"new_cases\", \"iso_code\"]]\nconfirmed_df.rename(\n columns={\n \"date\": \"sample_date\",\n \"iso_code\": \"country_code\",\n },\n inplace=True)\n\ndef convert_iso_alpha_3_to_alpha_2(x):\n try:\n return pycountry.countries.get(alpha_3=x).alpha_2\n except Exception as e:\n logging.info(f\"Error converting country ISO Alpha 3 code '{x}': {repr(e)}\")\n return None\n\nconfirmed_df[\"country_code\"] = confirmed_df.country_code.apply(convert_iso_alpha_3_to_alpha_2)\nconfirmed_df.dropna(inplace=True)\nconfirmed_df[\"sample_date\"] = pd.to_datetime(confirmed_df.sample_date, dayfirst=True)\nconfirmed_df[\"sample_date\"] = confirmed_df.sample_date.dt.strftime(\"%Y-%m-%d\")\nconfirmed_df.sort_values(\"sample_date\", inplace=True)\nconfirmed_df.tail()",
"_____no_output_____"
],
[
"confirmed_days = pd.date_range(\n start=confirmed_df.iloc[0].sample_date,\n end=extraction_datetime)\nconfirmed_days_df = pd.DataFrame(data=confirmed_days, columns=[\"sample_date\"])\nconfirmed_days_df[\"sample_date_string\"] = \\\n confirmed_days_df.sample_date.dt.strftime(\"%Y-%m-%d\")\nconfirmed_days_df.tail()",
"_____no_output_____"
],
[
"def sort_source_regions_for_display(source_regions: list) -> list:\n if report_backend_identifier in source_regions:\n source_regions = [report_backend_identifier] + \\\n list(sorted(set(source_regions).difference([report_backend_identifier])))\n else:\n source_regions = list(sorted(source_regions))\n return source_regions",
"_____no_output_____"
],
[
"report_source_regions = report_backend_client.source_regions_for_date(\n date=extraction_datetime.date())\nreport_source_regions = sort_source_regions_for_display(\n source_regions=report_source_regions)\nreport_source_regions",
"_____no_output_____"
],
[
"def get_cases_dataframe(source_regions_for_date_function, columns_suffix=None):\n source_regions_at_date_df = confirmed_days_df.copy()\n source_regions_at_date_df[\"source_regions_at_date\"] = \\\n source_regions_at_date_df.sample_date.apply(\n lambda x: source_regions_for_date_function(date=x))\n source_regions_at_date_df.sort_values(\"sample_date\", inplace=True)\n source_regions_at_date_df[\"_source_regions_group\"] = source_regions_at_date_df. \\\n source_regions_at_date.apply(lambda x: \",\".join(sort_source_regions_for_display(x)))\n source_regions_at_date_df.tail()\n\n #%%\n\n source_regions_for_summary_df_ = \\\n source_regions_at_date_df[[\"sample_date\", \"_source_regions_group\"]].copy()\n source_regions_for_summary_df_.rename(columns={\"_source_regions_group\": \"source_regions\"}, inplace=True)\n source_regions_for_summary_df_.tail()\n\n #%%\n\n confirmed_output_columns = [\"sample_date\", \"new_cases\", \"covid_cases\"]\n confirmed_output_df = pd.DataFrame(columns=confirmed_output_columns)\n\n for source_regions_group, source_regions_group_series in \\\n source_regions_at_date_df.groupby(\"_source_regions_group\"):\n source_regions_set = set(source_regions_group.split(\",\"))\n confirmed_source_regions_set_df = \\\n confirmed_df[confirmed_df.country_code.isin(source_regions_set)].copy()\n confirmed_source_regions_group_df = \\\n confirmed_source_regions_set_df.groupby(\"sample_date\").new_cases.sum() \\\n .reset_index().sort_values(\"sample_date\")\n confirmed_source_regions_group_df = \\\n confirmed_source_regions_group_df.merge(\n confirmed_days_df[[\"sample_date_string\"]].rename(\n columns={\"sample_date_string\": \"sample_date\"}),\n how=\"right\")\n confirmed_source_regions_group_df[\"new_cases\"] = \\\n confirmed_source_regions_group_df[\"new_cases\"].clip(lower=0)\n confirmed_source_regions_group_df[\"covid_cases\"] = \\\n confirmed_source_regions_group_df.new_cases.rolling(7, min_periods=0).mean().round()\n confirmed_source_regions_group_df = \\\n confirmed_source_regions_group_df[confirmed_output_columns]\n confirmed_source_regions_group_df = confirmed_source_regions_group_df.replace(0, np.nan)\n confirmed_source_regions_group_df.fillna(method=\"ffill\", inplace=True)\n confirmed_source_regions_group_df = \\\n confirmed_source_regions_group_df[\n confirmed_source_regions_group_df.sample_date.isin(\n source_regions_group_series.sample_date_string)]\n confirmed_output_df = confirmed_output_df.append(confirmed_source_regions_group_df)\n\n result_df = confirmed_output_df.copy()\n result_df.tail()\n\n #%%\n\n result_df.rename(columns={\"sample_date\": \"sample_date_string\"}, inplace=True)\n result_df = confirmed_days_df[[\"sample_date_string\"]].merge(result_df, how=\"left\")\n result_df.sort_values(\"sample_date_string\", inplace=True)\n result_df.fillna(method=\"ffill\", inplace=True)\n result_df.tail()\n\n #%%\n\n result_df[[\"new_cases\", \"covid_cases\"]].plot()\n\n if columns_suffix:\n result_df.rename(\n columns={\n \"new_cases\": \"new_cases_\" + columns_suffix,\n \"covid_cases\": \"covid_cases_\" + columns_suffix},\n inplace=True)\n return result_df, source_regions_for_summary_df_",
"_____no_output_____"
],
[
"confirmed_eu_df, source_regions_for_summary_df = get_cases_dataframe(\n report_backend_client.source_regions_for_date)\nconfirmed_es_df, _ = get_cases_dataframe(\n lambda date: [spain_region_country_code],\n columns_suffix=spain_region_country_code.lower())",
"_____no_output_____"
]
],
[
[
"### Extract API TEKs",
"_____no_output_____"
]
],
[
[
"raw_zip_path_prefix = \"Data/TEKs/Raw/\"\nbase_backend_identifiers = [report_backend_identifier]\nmulti_backend_exposure_keys_df = \\\n exposure_notification_io.download_exposure_keys_from_backends(\n backend_identifiers=report_backend_identifiers,\n generation_days=backend_generation_days,\n fail_on_error_backend_identifiers=base_backend_identifiers,\n save_raw_zip_path_prefix=raw_zip_path_prefix)\nmulti_backend_exposure_keys_df[\"region\"] = multi_backend_exposure_keys_df[\"backend_identifier\"]\nmulti_backend_exposure_keys_df.rename(\n columns={\n \"generation_datetime\": \"sample_datetime\",\n \"generation_date_string\": \"sample_date_string\",\n },\n inplace=True)\nmulti_backend_exposure_keys_df.head()",
"WARNING:root:NoKeysFoundException(\"No exposure keys found on endpoint 'https://radarcovid.covid19.gob.es/dp3t/v2/gaen/exposed/?originCountries=PT' (parameters: {'origin_country': 'PT', 'endpoint_identifier_components': ['PT'], 'backend_identifier': 'PT@ES', 'server_endpoint_url': 'https://radarcovid.covid19.gob.es/dp3t'}).\")\n"
],
[
"early_teks_df = multi_backend_exposure_keys_df[\n multi_backend_exposure_keys_df.rolling_period < 144].copy()\nearly_teks_df[\"rolling_period_in_hours\"] = early_teks_df.rolling_period / 6\nearly_teks_df[early_teks_df.sample_date_string != extraction_date] \\\n .rolling_period_in_hours.hist(bins=list(range(24)))",
"_____no_output_____"
],
[
"early_teks_df[early_teks_df.sample_date_string == extraction_date] \\\n .rolling_period_in_hours.hist(bins=list(range(24)))",
"_____no_output_____"
],
[
"multi_backend_exposure_keys_df = multi_backend_exposure_keys_df[[\n \"sample_date_string\", \"region\", \"key_data\"]]\nmulti_backend_exposure_keys_df.head()",
"_____no_output_____"
],
[
"active_regions = \\\n multi_backend_exposure_keys_df.groupby(\"region\").key_data.nunique().sort_values().index.unique().tolist()\nactive_regions",
"_____no_output_____"
],
[
"multi_backend_summary_df = multi_backend_exposure_keys_df.groupby(\n [\"sample_date_string\", \"region\"]).key_data.nunique().reset_index() \\\n .pivot(index=\"sample_date_string\", columns=\"region\") \\\n .sort_index(ascending=False)\nmulti_backend_summary_df.rename(\n columns={\"key_data\": \"shared_teks_by_generation_date\"},\n inplace=True)\nmulti_backend_summary_df.rename_axis(\"sample_date\", inplace=True)\nmulti_backend_summary_df = multi_backend_summary_df.fillna(0).astype(int)\nmulti_backend_summary_df = multi_backend_summary_df.head(backend_generation_days)\nmulti_backend_summary_df.head()",
"_____no_output_____"
],
[
"def compute_keys_cross_sharing(x):\n teks_x = x.key_data_x.item()\n common_teks = set(teks_x).intersection(x.key_data_y.item())\n common_teks_fraction = len(common_teks) / len(teks_x)\n return pd.Series(dict(\n common_teks=common_teks,\n common_teks_fraction=common_teks_fraction,\n ))\n\nmulti_backend_exposure_keys_by_region_df = \\\n multi_backend_exposure_keys_df.groupby(\"region\").key_data.unique().reset_index()\nmulti_backend_exposure_keys_by_region_df[\"_merge\"] = True\nmulti_backend_exposure_keys_by_region_combination_df = \\\n multi_backend_exposure_keys_by_region_df.merge(\n multi_backend_exposure_keys_by_region_df, on=\"_merge\")\nmulti_backend_exposure_keys_by_region_combination_df.drop(\n columns=[\"_merge\"], inplace=True)\nif multi_backend_exposure_keys_by_region_combination_df.region_x.nunique() > 1:\n multi_backend_exposure_keys_by_region_combination_df = \\\n multi_backend_exposure_keys_by_region_combination_df[\n multi_backend_exposure_keys_by_region_combination_df.region_x !=\n multi_backend_exposure_keys_by_region_combination_df.region_y]\nmulti_backend_exposure_keys_cross_sharing_df = \\\n multi_backend_exposure_keys_by_region_combination_df \\\n .groupby([\"region_x\", \"region_y\"]) \\\n .apply(compute_keys_cross_sharing) \\\n .reset_index()\nmulti_backend_cross_sharing_summary_df = \\\n multi_backend_exposure_keys_cross_sharing_df.pivot_table(\n values=[\"common_teks_fraction\"],\n columns=\"region_x\",\n index=\"region_y\",\n aggfunc=lambda x: x.item())\nmulti_backend_cross_sharing_summary_df",
"<ipython-input-21-4e21708c19d8>:2: FutureWarning: `item` has been deprecated and will be removed in a future version\n teks_x = x.key_data_x.item()\n<ipython-input-21-4e21708c19d8>:3: FutureWarning: `item` has been deprecated and will be removed in a future version\n common_teks = set(teks_x).intersection(x.key_data_y.item())\n"
],
[
"multi_backend_without_active_region_exposure_keys_df = \\\n multi_backend_exposure_keys_df[multi_backend_exposure_keys_df.region != report_backend_identifier]\nmulti_backend_without_active_region = \\\n multi_backend_without_active_region_exposure_keys_df.groupby(\"region\").key_data.nunique().sort_values().index.unique().tolist()\nmulti_backend_without_active_region",
"_____no_output_____"
],
[
"exposure_keys_summary_df = multi_backend_exposure_keys_df[\n multi_backend_exposure_keys_df.region == report_backend_identifier]\nexposure_keys_summary_df.drop(columns=[\"region\"], inplace=True)\nexposure_keys_summary_df = \\\n exposure_keys_summary_df.groupby([\"sample_date_string\"]).key_data.nunique().to_frame()\nexposure_keys_summary_df = \\\n exposure_keys_summary_df.reset_index().set_index(\"sample_date_string\")\nexposure_keys_summary_df.sort_index(ascending=False, inplace=True)\nexposure_keys_summary_df.rename(columns={\"key_data\": \"shared_teks_by_generation_date\"}, inplace=True)\nexposure_keys_summary_df.head()",
"/opt/hostedtoolcache/Python/3.8.8/x64/lib/python3.8/site-packages/pandas/core/frame.py:4110: SettingWithCopyWarning: \nA value is trying to be set on a copy of a slice from a DataFrame\n\nSee the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy\n return super().drop(\n"
]
],
[
[
"### Dump API TEKs",
"_____no_output_____"
]
],
[
[
"tek_list_df = multi_backend_exposure_keys_df[\n [\"sample_date_string\", \"region\", \"key_data\"]].copy()\ntek_list_df[\"key_data\"] = tek_list_df[\"key_data\"].apply(str)\ntek_list_df.rename(columns={\n \"sample_date_string\": \"sample_date\",\n \"key_data\": \"tek_list\"}, inplace=True)\ntek_list_df = tek_list_df.groupby(\n [\"sample_date\", \"region\"]).tek_list.unique().reset_index()\ntek_list_df[\"extraction_date\"] = extraction_date\ntek_list_df[\"extraction_date_with_hour\"] = extraction_date_with_hour\n\ntek_list_path_prefix = \"Data/TEKs/\"\ntek_list_current_path = tek_list_path_prefix + f\"/Current/RadarCOVID-TEKs.json\"\ntek_list_daily_path = tek_list_path_prefix + f\"Daily/RadarCOVID-TEKs-{extraction_date}.json\"\ntek_list_hourly_path = tek_list_path_prefix + f\"Hourly/RadarCOVID-TEKs-{extraction_date_with_hour}.json\"\n\nfor path in [tek_list_current_path, tek_list_daily_path, tek_list_hourly_path]:\n os.makedirs(os.path.dirname(path), exist_ok=True)\n\ntek_list_base_df = tek_list_df[tek_list_df.region == report_backend_identifier]\ntek_list_base_df.drop(columns=[\"extraction_date\", \"extraction_date_with_hour\"]).to_json(\n tek_list_current_path,\n lines=True, orient=\"records\")\ntek_list_base_df.drop(columns=[\"extraction_date_with_hour\"]).to_json(\n tek_list_daily_path,\n lines=True, orient=\"records\")\ntek_list_base_df.to_json(\n tek_list_hourly_path,\n lines=True, orient=\"records\")\ntek_list_base_df.head()",
"_____no_output_____"
]
],
[
[
"### Load TEK Dumps",
"_____no_output_____"
]
],
[
[
"import glob\n\ndef load_extracted_teks(mode, region=None, limit=None) -> pd.DataFrame:\n extracted_teks_df = pd.DataFrame(columns=[\"region\"])\n file_paths = list(reversed(sorted(glob.glob(tek_list_path_prefix + mode + \"/RadarCOVID-TEKs-*.json\"))))\n if limit:\n file_paths = file_paths[:limit]\n for file_path in file_paths:\n logging.info(f\"Loading TEKs from '{file_path}'...\")\n iteration_extracted_teks_df = pd.read_json(file_path, lines=True)\n extracted_teks_df = extracted_teks_df.append(\n iteration_extracted_teks_df, sort=False)\n extracted_teks_df[\"region\"] = \\\n extracted_teks_df.region.fillna(spain_region_country_code).copy()\n if region:\n extracted_teks_df = \\\n extracted_teks_df[extracted_teks_df.region == region]\n return extracted_teks_df",
"_____no_output_____"
],
[
"daily_extracted_teks_df = load_extracted_teks(\n mode=\"Daily\",\n region=report_backend_identifier,\n limit=tek_dumps_load_limit)\ndaily_extracted_teks_df.head()",
"_____no_output_____"
],
[
"exposure_keys_summary_df_ = daily_extracted_teks_df \\\n .sort_values(\"extraction_date\", ascending=False) \\\n .groupby(\"sample_date\").tek_list.first() \\\n .to_frame()\nexposure_keys_summary_df_.index.name = \"sample_date_string\"\nexposure_keys_summary_df_[\"tek_list\"] = \\\n exposure_keys_summary_df_.tek_list.apply(len)\nexposure_keys_summary_df_ = exposure_keys_summary_df_ \\\n .rename(columns={\"tek_list\": \"shared_teks_by_generation_date\"}) \\\n .sort_index(ascending=False)\nexposure_keys_summary_df = exposure_keys_summary_df_\nexposure_keys_summary_df.head()",
"_____no_output_____"
]
],
[
[
"### Daily New TEKs",
"_____no_output_____"
]
],
[
[
"tek_list_df = daily_extracted_teks_df.groupby(\"extraction_date\").tek_list.apply(\n lambda x: set(sum(x, []))).reset_index()\ntek_list_df = tek_list_df.set_index(\"extraction_date\").sort_index(ascending=True)\ntek_list_df.head()",
"_____no_output_____"
],
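[
"# Note on the set arithmetic used below (sketch): tek_list here holds Python sets, so the\n# .diff() calls in the next cells apply set subtraction row by row. For each extraction_date\n# the result is roughly 'TEKs present in this dump but not in the previous one', e.g.\n# {'a', 'b', 'c'} - {'a', 'b'} -> {'c'}, which is what the new-TEK counts rely on.",
"_____no_output_____"
],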
[
"def compute_teks_by_generation_and_upload_date(date):\n day_new_teks_set_df = tek_list_df.copy().diff()\n try:\n day_new_teks_set = day_new_teks_set_df[\n day_new_teks_set_df.index == date].tek_list.item()\n except ValueError:\n day_new_teks_set = None\n if pd.isna(day_new_teks_set):\n day_new_teks_set = set()\n day_new_teks_df = daily_extracted_teks_df[\n daily_extracted_teks_df.extraction_date == date].copy()\n day_new_teks_df[\"shared_teks\"] = \\\n day_new_teks_df.tek_list.apply(lambda x: set(x).intersection(day_new_teks_set))\n day_new_teks_df[\"shared_teks\"] = \\\n day_new_teks_df.shared_teks.apply(len)\n day_new_teks_df[\"upload_date\"] = date\n day_new_teks_df.rename(columns={\"sample_date\": \"generation_date\"}, inplace=True)\n day_new_teks_df = day_new_teks_df[\n [\"upload_date\", \"generation_date\", \"shared_teks\"]]\n day_new_teks_df[\"generation_to_upload_days\"] = \\\n (pd.to_datetime(day_new_teks_df.upload_date) -\n pd.to_datetime(day_new_teks_df.generation_date)).dt.days\n day_new_teks_df = day_new_teks_df[day_new_teks_df.shared_teks > 0]\n return day_new_teks_df\n\nshared_teks_generation_to_upload_df = pd.DataFrame()\nfor upload_date in daily_extracted_teks_df.extraction_date.unique():\n shared_teks_generation_to_upload_df = \\\n shared_teks_generation_to_upload_df.append(\n compute_teks_by_generation_and_upload_date(date=upload_date))\nshared_teks_generation_to_upload_df \\\n .sort_values([\"upload_date\", \"generation_date\"], ascending=False, inplace=True)\nshared_teks_generation_to_upload_df.tail()",
"<ipython-input-29-827222b35590>:4: FutureWarning: `item` has been deprecated and will be removed in a future version\n day_new_teks_set = day_new_teks_set_df[\n"
],
[
"today_new_teks_df = \\\n shared_teks_generation_to_upload_df[\n shared_teks_generation_to_upload_df.upload_date == extraction_date].copy()\ntoday_new_teks_df.tail()",
"_____no_output_____"
],
[
"if not today_new_teks_df.empty:\n today_new_teks_df.set_index(\"generation_to_upload_days\") \\\n .sort_index().shared_teks.plot.bar()",
"_____no_output_____"
],
[
"generation_to_upload_period_pivot_df = \\\n shared_teks_generation_to_upload_df[\n [\"upload_date\", \"generation_to_upload_days\", \"shared_teks\"]] \\\n .pivot(index=\"upload_date\", columns=\"generation_to_upload_days\") \\\n .sort_index(ascending=False).fillna(0).astype(int) \\\n .droplevel(level=0, axis=1)\ngeneration_to_upload_period_pivot_df.head()",
"_____no_output_____"
],
[
"new_tek_df = tek_list_df.diff().tek_list.apply(\n lambda x: len(x) if not pd.isna(x) else None).to_frame().reset_index()\nnew_tek_df.rename(columns={\n \"tek_list\": \"shared_teks_by_upload_date\",\n \"extraction_date\": \"sample_date_string\",}, inplace=True)\nnew_tek_df.tail()",
"_____no_output_____"
],
[
"shared_teks_uploaded_on_generation_date_df = shared_teks_generation_to_upload_df[\n shared_teks_generation_to_upload_df.generation_to_upload_days == 0] \\\n [[\"upload_date\", \"shared_teks\"]].rename(\n columns={\n \"upload_date\": \"sample_date_string\",\n \"shared_teks\": \"shared_teks_uploaded_on_generation_date\",\n })\nshared_teks_uploaded_on_generation_date_df.head()",
"_____no_output_____"
],
[
"estimated_shared_diagnoses_df = shared_teks_generation_to_upload_df \\\n .groupby([\"upload_date\"]).shared_teks.max().reset_index() \\\n .sort_values([\"upload_date\"], ascending=False) \\\n .rename(columns={\n \"upload_date\": \"sample_date_string\",\n \"shared_teks\": \"shared_diagnoses\",\n })\ninvalid_shared_diagnoses_dates_mask = \\\n estimated_shared_diagnoses_df.sample_date_string.isin(invalid_shared_diagnoses_dates)\nestimated_shared_diagnoses_df[invalid_shared_diagnoses_dates_mask] = 0\nestimated_shared_diagnoses_df.head()",
"_____no_output_____"
]
],
[
[
"### Hourly New TEKs",
"_____no_output_____"
]
],
[
[
"hourly_extracted_teks_df = load_extracted_teks(\n mode=\"Hourly\", region=report_backend_identifier, limit=25)\nhourly_extracted_teks_df.head()",
"_____no_output_____"
],
[
"hourly_new_tek_count_df = hourly_extracted_teks_df \\\n .groupby(\"extraction_date_with_hour\").tek_list. \\\n apply(lambda x: set(sum(x, []))).reset_index().copy()\nhourly_new_tek_count_df = hourly_new_tek_count_df.set_index(\"extraction_date_with_hour\") \\\n .sort_index(ascending=True)\n\nhourly_new_tek_count_df[\"new_tek_list\"] = hourly_new_tek_count_df.tek_list.diff()\nhourly_new_tek_count_df[\"new_tek_count\"] = hourly_new_tek_count_df.new_tek_list.apply(\n lambda x: len(x) if not pd.isna(x) else 0)\nhourly_new_tek_count_df.rename(columns={\n \"new_tek_count\": \"shared_teks_by_upload_date\"}, inplace=True)\nhourly_new_tek_count_df = hourly_new_tek_count_df.reset_index()[[\n \"extraction_date_with_hour\", \"shared_teks_by_upload_date\"]]\nhourly_new_tek_count_df.head()",
"_____no_output_____"
],
[
"hourly_summary_df = hourly_new_tek_count_df.copy()\nhourly_summary_df.set_index(\"extraction_date_with_hour\", inplace=True)\nhourly_summary_df = hourly_summary_df.fillna(0).astype(int).reset_index()\nhourly_summary_df[\"datetime_utc\"] = pd.to_datetime(\n hourly_summary_df.extraction_date_with_hour, format=\"%Y-%m-%d@%H\")\nhourly_summary_df.set_index(\"datetime_utc\", inplace=True)\nhourly_summary_df = hourly_summary_df.tail(-1)\nhourly_summary_df.head()",
"_____no_output_____"
]
],
[
[
"### Official Statistics",
"_____no_output_____"
]
],
[
[
"import requests\nimport pandas.io.json\n\nofficial_stats_response = requests.get(\"https://radarcovid.covid19.gob.es/kpi/statistics/basics\")\nofficial_stats_response.raise_for_status()\nofficial_stats_df_ = pandas.io.json.json_normalize(official_stats_response.json())",
"_____no_output_____"
],
[
"official_stats_df = official_stats_df_.copy()\nofficial_stats_df[\"date\"] = pd.to_datetime(official_stats_df[\"date\"], dayfirst=True)\nofficial_stats_df.head()",
"_____no_output_____"
],
[
"official_stats_column_map = {\n \"date\": \"sample_date\",\n \"applicationsDownloads.totalAcummulated\": \"app_downloads_es_accumulated\",\n \"communicatedContagions.totalAcummulated\": \"shared_diagnoses_es_accumulated\",\n}\naccumulated_suffix = \"_accumulated\"\naccumulated_values_columns = \\\n list(filter(lambda x: x.endswith(accumulated_suffix), official_stats_column_map.values()))\ninterpolated_values_columns = \\\n list(map(lambda x: x[:-len(accumulated_suffix)], accumulated_values_columns))",
"_____no_output_____"
],
[
"official_stats_df = \\\n official_stats_df[official_stats_column_map.keys()] \\\n .rename(columns=official_stats_column_map)\nofficial_stats_df[\"extraction_date\"] = extraction_date\nofficial_stats_df.head()",
"_____no_output_____"
],
[
"official_stats_path = \"Data/Statistics/Current/RadarCOVID-Statistics.json\"\nprevious_official_stats_df = pd.read_json(official_stats_path, orient=\"records\", lines=True)\nprevious_official_stats_df[\"sample_date\"] = pd.to_datetime(previous_official_stats_df[\"sample_date\"], dayfirst=True)\nofficial_stats_df = official_stats_df.append(previous_official_stats_df)\nofficial_stats_df.head()",
"_____no_output_____"
],
[
"official_stats_df = official_stats_df[~(official_stats_df.shared_diagnoses_es_accumulated == 0)]\nofficial_stats_df.sort_values(\"extraction_date\", ascending=False, inplace=True)\nofficial_stats_df.drop_duplicates(subset=[\"sample_date\"], keep=\"first\", inplace=True)\nofficial_stats_df.head()",
"_____no_output_____"
],
[
"official_stats_stored_df = official_stats_df.copy()\nofficial_stats_stored_df[\"sample_date\"] = official_stats_stored_df.sample_date.dt.strftime(\"%Y-%m-%d\")\nofficial_stats_stored_df.to_json(official_stats_path, orient=\"records\", lines=True)",
"_____no_output_____"
],
[
"official_stats_df.drop(columns=[\"extraction_date\"], inplace=True)\nofficial_stats_df = confirmed_days_df.merge(official_stats_df, how=\"left\")\nofficial_stats_df.sort_values(\"sample_date\", ascending=False, inplace=True)\nofficial_stats_df.head()",
"_____no_output_____"
],
[
"official_stats_df[accumulated_values_columns] = \\\n official_stats_df[accumulated_values_columns] \\\n .astype(float).interpolate(limit_area=\"inside\")\nofficial_stats_df[interpolated_values_columns] = \\\n official_stats_df[accumulated_values_columns].diff(periods=-1)\nofficial_stats_df.drop(columns=\"sample_date\", inplace=True)\nofficial_stats_df.head()",
"_____no_output_____"
]
],
[
[
"### Data Merge",
"_____no_output_____"
]
],
[
[
"result_summary_df = exposure_keys_summary_df.merge(\n new_tek_df, on=[\"sample_date_string\"], how=\"outer\")\nresult_summary_df.head()",
"_____no_output_____"
],
[
"result_summary_df = result_summary_df.merge(\n shared_teks_uploaded_on_generation_date_df, on=[\"sample_date_string\"], how=\"outer\")\nresult_summary_df.head()",
"_____no_output_____"
],
[
"result_summary_df = result_summary_df.merge(\n estimated_shared_diagnoses_df, on=[\"sample_date_string\"], how=\"outer\")\nresult_summary_df.head()",
"_____no_output_____"
],
[
"result_summary_df = result_summary_df.merge(\n official_stats_df, on=[\"sample_date_string\"], how=\"outer\")\nresult_summary_df.head()",
"_____no_output_____"
],
[
"result_summary_df = confirmed_eu_df.tail(daily_summary_days).merge(\n result_summary_df, on=[\"sample_date_string\"], how=\"left\")\nresult_summary_df.head()",
"_____no_output_____"
],
[
"result_summary_df = confirmed_es_df.tail(daily_summary_days).merge(\n result_summary_df, on=[\"sample_date_string\"], how=\"left\")\nresult_summary_df.head()",
"_____no_output_____"
],
[
"result_summary_df[\"sample_date\"] = pd.to_datetime(result_summary_df.sample_date_string)\nresult_summary_df = result_summary_df.merge(source_regions_for_summary_df, how=\"left\")\nresult_summary_df.set_index([\"sample_date\", \"source_regions\"], inplace=True)\nresult_summary_df.drop(columns=[\"sample_date_string\"], inplace=True)\nresult_summary_df.sort_index(ascending=False, inplace=True)\nresult_summary_df.head()",
"_____no_output_____"
],
[
"with pd.option_context(\"mode.use_inf_as_na\", True):\n result_summary_df = result_summary_df.fillna(0).astype(int)\n result_summary_df[\"teks_per_shared_diagnosis\"] = \\\n (result_summary_df.shared_teks_by_upload_date / result_summary_df.shared_diagnoses).fillna(0)\n result_summary_df[\"shared_diagnoses_per_covid_case\"] = \\\n (result_summary_df.shared_diagnoses / result_summary_df.covid_cases).fillna(0)\n result_summary_df[\"shared_diagnoses_per_covid_case_es\"] = \\\n (result_summary_df.shared_diagnoses_es / result_summary_df.covid_cases_es).fillna(0)\n\nresult_summary_df.head(daily_plot_days)",
"_____no_output_____"
],
[
"def compute_aggregated_results_summary(days) -> pd.DataFrame:\n aggregated_result_summary_df = result_summary_df.copy()\n aggregated_result_summary_df[\"covid_cases_for_ratio\"] = \\\n aggregated_result_summary_df.covid_cases.mask(\n aggregated_result_summary_df.shared_diagnoses == 0, 0)\n aggregated_result_summary_df[\"covid_cases_for_ratio_es\"] = \\\n aggregated_result_summary_df.covid_cases_es.mask(\n aggregated_result_summary_df.shared_diagnoses_es == 0, 0)\n aggregated_result_summary_df = aggregated_result_summary_df \\\n .sort_index(ascending=True).fillna(0).rolling(days).agg({\n \"covid_cases\": \"sum\",\n \"covid_cases_es\": \"sum\",\n \"covid_cases_for_ratio\": \"sum\",\n \"covid_cases_for_ratio_es\": \"sum\",\n \"shared_teks_by_generation_date\": \"sum\",\n \"shared_teks_by_upload_date\": \"sum\",\n \"shared_diagnoses\": \"sum\",\n \"shared_diagnoses_es\": \"sum\",\n }).sort_index(ascending=False)\n\n with pd.option_context(\"mode.use_inf_as_na\", True):\n aggregated_result_summary_df = aggregated_result_summary_df.fillna(0).astype(int)\n aggregated_result_summary_df[\"teks_per_shared_diagnosis\"] = \\\n (aggregated_result_summary_df.shared_teks_by_upload_date /\n aggregated_result_summary_df.covid_cases_for_ratio).fillna(0)\n aggregated_result_summary_df[\"shared_diagnoses_per_covid_case\"] = \\\n (aggregated_result_summary_df.shared_diagnoses /\n aggregated_result_summary_df.covid_cases_for_ratio).fillna(0)\n aggregated_result_summary_df[\"shared_diagnoses_per_covid_case_es\"] = \\\n (aggregated_result_summary_df.shared_diagnoses_es /\n aggregated_result_summary_df.covid_cases_for_ratio_es).fillna(0)\n\n return aggregated_result_summary_df",
"_____no_output_____"
],
[
"aggregated_result_with_7_days_window_summary_df = compute_aggregated_results_summary(days=7)\naggregated_result_with_7_days_window_summary_df.head()",
"_____no_output_____"
],
[
"last_7_days_summary = aggregated_result_with_7_days_window_summary_df.to_dict(orient=\"records\")[1]\nlast_7_days_summary",
"_____no_output_____"
],
[
"aggregated_result_with_14_days_window_summary_df = compute_aggregated_results_summary(days=13)\nlast_14_days_summary = aggregated_result_with_14_days_window_summary_df.to_dict(orient=\"records\")[1]\nlast_14_days_summary",
"_____no_output_____"
]
],
[
[
"## Report Results",
"_____no_output_____"
]
],
[
[
"display_column_name_mapping = {\n \"sample_date\": \"Sample\\u00A0Date\\u00A0(UTC)\",\n \"source_regions\": \"Source Countries\",\n \"datetime_utc\": \"Timestamp (UTC)\",\n \"upload_date\": \"Upload Date (UTC)\",\n \"generation_to_upload_days\": \"Generation to Upload Period in Days\",\n \"region\": \"Backend\",\n \"region_x\": \"Backend\\u00A0(A)\",\n \"region_y\": \"Backend\\u00A0(B)\",\n \"common_teks\": \"Common TEKs Shared Between Backends\",\n \"common_teks_fraction\": \"Fraction of TEKs in Backend (A) Available in Backend (B)\",\n \"covid_cases\": \"COVID-19 Cases (Source Countries)\",\n \"shared_teks_by_generation_date\": \"Shared TEKs by Generation Date (Source Countries)\",\n \"shared_teks_by_upload_date\": \"Shared TEKs by Upload Date (Source Countries)\",\n \"shared_teks_uploaded_on_generation_date\": \"Shared TEKs Uploaded on Generation Date (Source Countries)\",\n \"shared_diagnoses\": \"Shared Diagnoses (Source Countries – Estimation)\",\n \"teks_per_shared_diagnosis\": \"TEKs Uploaded per Shared Diagnosis (Source Countries)\",\n \"shared_diagnoses_per_covid_case\": \"Usage Ratio (Source Countries)\",\n\n \"covid_cases_es\": \"COVID-19 Cases (Spain)\",\n \"app_downloads_es\": \"App Downloads (Spain – Official)\",\n \"shared_diagnoses_es\": \"Shared Diagnoses (Spain – Official)\",\n \"shared_diagnoses_per_covid_case_es\": \"Usage Ratio (Spain)\",\n}",
"_____no_output_____"
],
[
"summary_columns = [\n \"covid_cases\",\n \"shared_teks_by_generation_date\",\n \"shared_teks_by_upload_date\",\n \"shared_teks_uploaded_on_generation_date\",\n \"shared_diagnoses\",\n \"teks_per_shared_diagnosis\",\n \"shared_diagnoses_per_covid_case\",\n\n \"covid_cases_es\",\n \"app_downloads_es\",\n \"shared_diagnoses_es\",\n \"shared_diagnoses_per_covid_case_es\",\n]\n\nsummary_percentage_columns= [\n \"shared_diagnoses_per_covid_case_es\",\n \"shared_diagnoses_per_covid_case\",\n]",
"_____no_output_____"
]
],
[
[
"### Daily Summary Table",
"_____no_output_____"
]
],
[
[
"result_summary_df_ = result_summary_df.copy()\nresult_summary_df = result_summary_df[summary_columns]\nresult_summary_with_display_names_df = result_summary_df \\\n .rename_axis(index=display_column_name_mapping) \\\n .rename(columns=display_column_name_mapping)\nresult_summary_with_display_names_df",
"_____no_output_____"
]
],
[
[
"### Daily Summary Plots",
"_____no_output_____"
]
],
[
[
"result_plot_summary_df = result_summary_df.head(daily_plot_days)[summary_columns] \\\n .droplevel(level=[\"source_regions\"]) \\\n .rename_axis(index=display_column_name_mapping) \\\n .rename(columns=display_column_name_mapping)\nsummary_ax_list = result_plot_summary_df.sort_index(ascending=True).plot.bar(\n title=f\"Daily Summary\",\n rot=45, subplots=True, figsize=(15, 30), legend=False)\nax_ = summary_ax_list[0]\nax_.get_figure().tight_layout()\nax_.get_figure().subplots_adjust(top=0.95)\n_ = ax_.set_xticklabels(sorted(result_plot_summary_df.index.strftime(\"%Y-%m-%d\").tolist()))\n\nfor percentage_column in summary_percentage_columns:\n percentage_column_index = summary_columns.index(percentage_column)\n summary_ax_list[percentage_column_index].yaxis \\\n .set_major_formatter(matplotlib.ticker.PercentFormatter(1.0))",
"/opt/hostedtoolcache/Python/3.8.8/x64/lib/python3.8/site-packages/pandas/plotting/_matplotlib/tools.py:307: MatplotlibDeprecationWarning: \nThe rowNum attribute was deprecated in Matplotlib 3.2 and will be removed two minor releases later. Use ax.get_subplotspec().rowspan.start instead.\n layout[ax.rowNum, ax.colNum] = ax.get_visible()\n/opt/hostedtoolcache/Python/3.8.8/x64/lib/python3.8/site-packages/pandas/plotting/_matplotlib/tools.py:307: MatplotlibDeprecationWarning: \nThe colNum attribute was deprecated in Matplotlib 3.2 and will be removed two minor releases later. Use ax.get_subplotspec().colspan.start instead.\n layout[ax.rowNum, ax.colNum] = ax.get_visible()\n/opt/hostedtoolcache/Python/3.8.8/x64/lib/python3.8/site-packages/pandas/plotting/_matplotlib/tools.py:313: MatplotlibDeprecationWarning: \nThe rowNum attribute was deprecated in Matplotlib 3.2 and will be removed two minor releases later. Use ax.get_subplotspec().rowspan.start instead.\n if not layout[ax.rowNum + 1, ax.colNum]:\n/opt/hostedtoolcache/Python/3.8.8/x64/lib/python3.8/site-packages/pandas/plotting/_matplotlib/tools.py:313: MatplotlibDeprecationWarning: \nThe colNum attribute was deprecated in Matplotlib 3.2 and will be removed two minor releases later. Use ax.get_subplotspec().colspan.start instead.\n if not layout[ax.rowNum + 1, ax.colNum]:\n"
]
],
[
[
"### Daily Generation to Upload Period Table",
"_____no_output_____"
]
],
[
[
"display_generation_to_upload_period_pivot_df = \\\n generation_to_upload_period_pivot_df \\\n .head(backend_generation_days)\ndisplay_generation_to_upload_period_pivot_df \\\n .head(backend_generation_days) \\\n .rename_axis(columns=display_column_name_mapping) \\\n .rename_axis(index=display_column_name_mapping)",
"_____no_output_____"
],
[
"fig, generation_to_upload_period_pivot_table_ax = plt.subplots(\n figsize=(12, 1 + 0.6 * len(display_generation_to_upload_period_pivot_df)))\ngeneration_to_upload_period_pivot_table_ax.set_title(\n \"Shared TEKs Generation to Upload Period Table\")\nsns.heatmap(\n data=display_generation_to_upload_period_pivot_df\n .rename_axis(columns=display_column_name_mapping)\n .rename_axis(index=display_column_name_mapping),\n fmt=\".0f\",\n annot=True,\n ax=generation_to_upload_period_pivot_table_ax)\ngeneration_to_upload_period_pivot_table_ax.get_figure().tight_layout()",
"_____no_output_____"
]
],
[
[
"### Hourly Summary Plots ",
"_____no_output_____"
]
],
[
[
"hourly_summary_ax_list = hourly_summary_df \\\n .rename_axis(index=display_column_name_mapping) \\\n .rename(columns=display_column_name_mapping) \\\n .plot.bar(\n title=f\"Last 24h Summary\",\n rot=45, subplots=True, legend=False)\nax_ = hourly_summary_ax_list[-1]\nax_.get_figure().tight_layout()\nax_.get_figure().subplots_adjust(top=0.9)\n_ = ax_.set_xticklabels(sorted(hourly_summary_df.index.strftime(\"%Y-%m-%d@%H\").tolist()))",
"_____no_output_____"
]
],
[
[
"### Publish Results",
"_____no_output_____"
]
],
[
[
"github_repository = os.environ.get(\"GITHUB_REPOSITORY\")\nif github_repository is None:\n github_repository = \"pvieito/Radar-STATS\"\n\ngithub_project_base_url = \"https://github.com/\" + github_repository\n\ndisplay_formatters = {\n display_column_name_mapping[\"teks_per_shared_diagnosis\"]: lambda x: f\"{x:.2f}\" if x != 0 else \"\",\n display_column_name_mapping[\"shared_diagnoses_per_covid_case\"]: lambda x: f\"{x:.2%}\" if x != 0 else \"\",\n display_column_name_mapping[\"shared_diagnoses_per_covid_case_es\"]: lambda x: f\"{x:.2%}\" if x != 0 else \"\",\n}\ngeneral_columns = \\\n list(filter(lambda x: x not in display_formatters, display_column_name_mapping.values()))\ngeneral_formatter = lambda x: f\"{x}\" if x != 0 else \"\"\ndisplay_formatters.update(dict(map(lambda x: (x, general_formatter), general_columns)))\n\ndaily_summary_table_html = result_summary_with_display_names_df \\\n .head(daily_plot_days) \\\n .rename_axis(index=display_column_name_mapping) \\\n .rename(columns=display_column_name_mapping) \\\n .to_html(formatters=display_formatters)\nmulti_backend_summary_table_html = multi_backend_summary_df \\\n .head(daily_plot_days) \\\n .rename_axis(columns=display_column_name_mapping) \\\n .rename(columns=display_column_name_mapping) \\\n .rename_axis(index=display_column_name_mapping) \\\n .to_html(formatters=display_formatters)\n\ndef format_multi_backend_cross_sharing_fraction(x):\n if pd.isna(x):\n return \"-\"\n elif round(x * 100, 1) == 0:\n return \"\"\n else:\n return f\"{x:.1%}\"\n\nmulti_backend_cross_sharing_summary_table_html = multi_backend_cross_sharing_summary_df \\\n .rename_axis(columns=display_column_name_mapping) \\\n .rename(columns=display_column_name_mapping) \\\n .rename_axis(index=display_column_name_mapping) \\\n .to_html(\n classes=\"table-center\",\n formatters=display_formatters,\n float_format=format_multi_backend_cross_sharing_fraction)\nmulti_backend_cross_sharing_summary_table_html = \\\n multi_backend_cross_sharing_summary_table_html \\\n .replace(\"<tr>\",\"<tr style=\\\"text-align: center;\\\">\")\n\nextraction_date_result_summary_df = \\\n result_summary_df[result_summary_df.index.get_level_values(\"sample_date\") == extraction_date]\nextraction_date_result_hourly_summary_df = \\\n hourly_summary_df[hourly_summary_df.extraction_date_with_hour == extraction_date_with_hour]\n\ncovid_cases = \\\n extraction_date_result_summary_df.covid_cases.item()\nshared_teks_by_generation_date = \\\n extraction_date_result_summary_df.shared_teks_by_generation_date.item()\nshared_teks_by_upload_date = \\\n extraction_date_result_summary_df.shared_teks_by_upload_date.item()\nshared_diagnoses = \\\n extraction_date_result_summary_df.shared_diagnoses.item()\nteks_per_shared_diagnosis = \\\n extraction_date_result_summary_df.teks_per_shared_diagnosis.item()\nshared_diagnoses_per_covid_case = \\\n extraction_date_result_summary_df.shared_diagnoses_per_covid_case.item()\n\nshared_teks_by_upload_date_last_hour = \\\n extraction_date_result_hourly_summary_df.shared_teks_by_upload_date.sum().astype(int)\n\ndisplay_source_regions = \", \".join(report_source_regions)\nif len(report_source_regions) == 1:\n display_brief_source_regions = report_source_regions[0]\nelse:\n display_brief_source_regions = f\"{len(report_source_regions)} 🇪🇺\"",
"<ipython-input-67-0a0cb8e530af>:55: FutureWarning: `item` has been deprecated and will be removed in a future version\n extraction_date_result_summary_df.covid_cases.item()\n<ipython-input-67-0a0cb8e530af>:57: FutureWarning: `item` has been deprecated and will be removed in a future version\n extraction_date_result_summary_df.shared_teks_by_generation_date.item()\n<ipython-input-67-0a0cb8e530af>:59: FutureWarning: `item` has been deprecated and will be removed in a future version\n extraction_date_result_summary_df.shared_teks_by_upload_date.item()\n<ipython-input-67-0a0cb8e530af>:61: FutureWarning: `item` has been deprecated and will be removed in a future version\n extraction_date_result_summary_df.shared_diagnoses.item()\n<ipython-input-67-0a0cb8e530af>:63: FutureWarning: `item` has been deprecated and will be removed in a future version\n extraction_date_result_summary_df.teks_per_shared_diagnosis.item()\n<ipython-input-67-0a0cb8e530af>:65: FutureWarning: `item` has been deprecated and will be removed in a future version\n extraction_date_result_summary_df.shared_diagnoses_per_covid_case.item()\n"
],
[
"def get_temporary_image_path() -> str:\n return os.path.join(tempfile.gettempdir(), str(uuid.uuid4()) + \".png\")\n\ndef save_temporary_plot_image(ax):\n if isinstance(ax, np.ndarray):\n ax = ax[0]\n media_path = get_temporary_image_path()\n ax.get_figure().savefig(media_path)\n return media_path\n\ndef save_temporary_dataframe_image(df):\n import dataframe_image as dfi\n df = df.copy()\n df_styler = df.style.format(display_formatters)\n media_path = get_temporary_image_path()\n dfi.export(df_styler, media_path)\n return media_path",
"_____no_output_____"
],
[
"summary_plots_image_path = save_temporary_plot_image(\n ax=summary_ax_list)\nsummary_table_image_path = save_temporary_dataframe_image(\n df=result_summary_with_display_names_df)\nhourly_summary_plots_image_path = save_temporary_plot_image(\n ax=hourly_summary_ax_list)\nmulti_backend_summary_table_image_path = save_temporary_dataframe_image(\n df=multi_backend_summary_df)\ngeneration_to_upload_period_pivot_table_image_path = save_temporary_plot_image(\n ax=generation_to_upload_period_pivot_table_ax)",
"_____no_output_____"
]
],
[
[
"### Save Results",
"_____no_output_____"
]
],
[
[
"report_resources_path_prefix = \"Data/Resources/Current/RadarCOVID-Report-\"\nresult_summary_df.to_csv(\n report_resources_path_prefix + \"Summary-Table.csv\")\nresult_summary_df.to_html(\n report_resources_path_prefix + \"Summary-Table.html\")\nhourly_summary_df.to_csv(\n report_resources_path_prefix + \"Hourly-Summary-Table.csv\")\nmulti_backend_summary_df.to_csv(\n report_resources_path_prefix + \"Multi-Backend-Summary-Table.csv\")\nmulti_backend_cross_sharing_summary_df.to_csv(\n report_resources_path_prefix + \"Multi-Backend-Cross-Sharing-Summary-Table.csv\")\ngeneration_to_upload_period_pivot_df.to_csv(\n report_resources_path_prefix + \"Generation-Upload-Period-Table.csv\")\n_ = shutil.copyfile(\n summary_plots_image_path,\n report_resources_path_prefix + \"Summary-Plots.png\")\n_ = shutil.copyfile(\n summary_table_image_path,\n report_resources_path_prefix + \"Summary-Table.png\")\n_ = shutil.copyfile(\n hourly_summary_plots_image_path,\n report_resources_path_prefix + \"Hourly-Summary-Plots.png\")\n_ = shutil.copyfile(\n multi_backend_summary_table_image_path,\n report_resources_path_prefix + \"Multi-Backend-Summary-Table.png\")\n_ = shutil.copyfile(\n generation_to_upload_period_pivot_table_image_path,\n report_resources_path_prefix + \"Generation-Upload-Period-Table.png\")",
"_____no_output_____"
]
],
[
[
"### Publish Results as JSON",
"_____no_output_____"
]
],
[
[
"def generate_summary_api_results(df: pd.DataFrame) -> list:\n api_df = df.reset_index().copy()\n api_df[\"sample_date_string\"] = \\\n api_df[\"sample_date\"].dt.strftime(\"%Y-%m-%d\")\n api_df[\"source_regions\"] = \\\n api_df[\"source_regions\"].apply(lambda x: x.split(\",\"))\n return api_df.to_dict(orient=\"records\")\n\nsummary_api_results = \\\n generate_summary_api_results(df=result_summary_df)\ntoday_summary_api_results = \\\n generate_summary_api_results(df=extraction_date_result_summary_df)[0]\n\nsummary_results = dict(\n backend_identifier=report_backend_identifier,\n source_regions=report_source_regions,\n extraction_datetime=extraction_datetime,\n extraction_date=extraction_date,\n extraction_date_with_hour=extraction_date_with_hour,\n last_hour=dict(\n shared_teks_by_upload_date=shared_teks_by_upload_date_last_hour,\n shared_diagnoses=0,\n ),\n today=today_summary_api_results,\n last_7_days=last_7_days_summary,\n last_14_days=last_14_days_summary,\n daily_results=summary_api_results)\n\nsummary_results = \\\n json.loads(pd.Series([summary_results]).to_json(orient=\"records\"))[0]\n\nwith open(report_resources_path_prefix + \"Summary-Results.json\", \"w\") as f:\n json.dump(summary_results, f, indent=4)",
"_____no_output_____"
]
],
[
[
"### Publish on README",
"_____no_output_____"
]
],
[
[
"with open(\"Data/Templates/README.md\", \"r\") as f:\n readme_contents = f.read()\n\nreadme_contents = readme_contents.format(\n extraction_date_with_hour=extraction_date_with_hour,\n github_project_base_url=github_project_base_url,\n daily_summary_table_html=daily_summary_table_html,\n multi_backend_summary_table_html=multi_backend_summary_table_html,\n multi_backend_cross_sharing_summary_table_html=multi_backend_cross_sharing_summary_table_html,\n display_source_regions=display_source_regions)\n\nwith open(\"README.md\", \"w\") as f:\n f.write(readme_contents)",
"_____no_output_____"
]
],
[
[
"### Publish on Twitter",
"_____no_output_____"
]
],
[
[
"enable_share_to_twitter = os.environ.get(\"RADARCOVID_REPORT__ENABLE_PUBLISH_ON_TWITTER\")\ngithub_event_name = os.environ.get(\"GITHUB_EVENT_NAME\")\n\nif enable_share_to_twitter and github_event_name == \"schedule\" and \\\n (shared_teks_by_upload_date_last_hour or not are_today_results_partial):\n import tweepy\n\n twitter_api_auth_keys = os.environ[\"RADARCOVID_REPORT__TWITTER_API_AUTH_KEYS\"]\n twitter_api_auth_keys = twitter_api_auth_keys.split(\":\")\n auth = tweepy.OAuthHandler(twitter_api_auth_keys[0], twitter_api_auth_keys[1])\n auth.set_access_token(twitter_api_auth_keys[2], twitter_api_auth_keys[3])\n\n api = tweepy.API(auth)\n\n summary_plots_media = api.media_upload(summary_plots_image_path)\n summary_table_media = api.media_upload(summary_table_image_path)\n generation_to_upload_period_pivot_table_image_media = api.media_upload(generation_to_upload_period_pivot_table_image_path)\n media_ids = [\n summary_plots_media.media_id,\n summary_table_media.media_id,\n generation_to_upload_period_pivot_table_image_media.media_id,\n ]\n\n if are_today_results_partial:\n today_addendum = \" (Partial)\"\n else:\n today_addendum = \"\"\n\n def format_shared_diagnoses_per_covid_case(value) -> str:\n if value == 0:\n return \"–\"\n return f\"≤{value:.2%}\"\n\n display_shared_diagnoses_per_covid_case = \\\n format_shared_diagnoses_per_covid_case(value=shared_diagnoses_per_covid_case)\n display_last_14_days_shared_diagnoses_per_covid_case = \\\n format_shared_diagnoses_per_covid_case(value=last_14_days_summary[\"shared_diagnoses_per_covid_case\"])\n display_last_14_days_shared_diagnoses_per_covid_case_es = \\\n format_shared_diagnoses_per_covid_case(value=last_14_days_summary[\"shared_diagnoses_per_covid_case_es\"])\n\n status = textwrap.dedent(f\"\"\"\n #RadarCOVID – {extraction_date_with_hour}\n\n Today{today_addendum}:\n - Uploaded TEKs: {shared_teks_by_upload_date:.0f} ({shared_teks_by_upload_date_last_hour:+d} last hour)\n - Shared Diagnoses: ≤{shared_diagnoses:.0f}\n - Usage Ratio: {display_shared_diagnoses_per_covid_case}\n\n Last 14 Days:\n - Usage Ratio (Estimation): {display_last_14_days_shared_diagnoses_per_covid_case}\n - Usage Ratio (Official): {display_last_14_days_shared_diagnoses_per_covid_case_es}\n\n Info: {github_project_base_url}#documentation\n \"\"\")\n status = status.encode(encoding=\"utf-8\")\n api.update_status(status=status, media_ids=media_ids)",
"_____no_output_____"
]
]
] | [
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] | [
[
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
]
] |
d099ff11b535290254080ff96c7692607d15e312 | 10,721 | ipynb | Jupyter Notebook | Week - 6/UMS with JSON/UMS with JSON.ipynb | AshishJangra27/Data-Science-Specialization | 638e490fdd00e57fa8cdc7fcbc946307babd6d0a | [
"Apache-2.0"
] | null | null | null | Week - 6/UMS with JSON/UMS with JSON.ipynb | AshishJangra27/Data-Science-Specialization | 638e490fdd00e57fa8cdc7fcbc946307babd6d0a | [
"Apache-2.0"
] | null | null | null | Week - 6/UMS with JSON/UMS with JSON.ipynb | AshishJangra27/Data-Science-Specialization | 638e490fdd00e57fa8cdc7fcbc946307babd6d0a | [
"Apache-2.0"
] | null | null | null | 23.055914 | 499 | 0.431956 | [
[
[
"## 1. Adding Student Details ",
"_____no_output_____"
]
],
[
[
"import time\nimport numpy as np\nfrom json import loads, dumps\n\ndata = {}\nhistory = {}",
"_____no_output_____"
],
[
"reg_no = str(input('Enter your registraion no: '))\n\nname = str(input('Name : '))\nmail = str(input('Mail-ID : '))\nphone = str(input('Phone No : '))\nsection = str(input('Section : '))\n\n\ndct = {}\ndct['name'] = name\ndct['mail'] = mail\ndct['phone'] = phone\ndct['section'] = section\ndata[reg_no] = dct",
"Enter your registraion no: 11602257\nName : Saksham\nMail-ID : [email protected]\nPhone No : 7643853423\nSection : K1632\n"
],
[
"data",
"_____no_output_____"
]
],
[
[
"## Saving student details in JSON file",
"_____no_output_____"
]
],
[
[
"from json import loads, dumps",
"_____no_output_____"
],
[
"type(data)",
"_____no_output_____"
],
[
"txt = dumps(data)",
"_____no_output_____"
],
[
"txt",
"_____no_output_____"
],
[
"fd = open('data.json','w')\n\nfd.write(txt)\n\nfd.close()",
"_____no_output_____"
]
],
[
[
"## Loading the data from JSON",
"_____no_output_____"
]
],
[
[
"fd = open('data.json','r')\n\ntxt = fd.read()\n\nfd.close()",
"_____no_output_____"
]
],
[
[
"## Adding user details in JSON Directly",
"_____no_output_____"
]
],
[
[
"fd = open('data.json','r')\ntxt = fd.read()\nfd.close()\ndata = loads(txt)\n\n\nreg_no = str(input('Enter your registraion no: '))\nname = str(input('Name : '))\nmail = str(input('Mail-ID : '))\nphone = str(input('Phone No : '))\nsection = str(input('Section : '))\n\n\ndct = {}\ndct['name'] = name\ndct['mail'] = mail\ndct['phone'] = phone\ndct['section'] = section\ndata[reg_no] = dct\n\n\ntxt = dumps(data)\nfd = open('data.json','w')\nfd.write(txt)\nfd.close()",
"Enter your registraion no: 11602256\nName : Sahil\nMail-ID : [email protected]\nPhone No : 857346957834\nSection : K1632\n"
]
],
[
[
"## Get User Details based on Reg No",
"_____no_output_____"
]
],
[
[
"fd = open('data.json','r')\ntxt = fd.read()\nfd.close()\ndata = loads(txt)\n\nuser_reg = str(input('Enter the registration no: '))\n\nprint('-'*35)\nprint('Name : ', data[user_reg]['name'])\nprint('Mail : ', data[user_reg]['mail'])\nprint('Phone : ', data[user_reg]['phone'])\nprint('Section : ', data[user_reg]['section'])\nprint('-'*35)",
"Enter the registration no: 11602258\n-----------------------------------\nName : Shivam\nMail : [email protected]\nPhone : 8735497534\nSection : K1632\n-----------------------------------\n"
]
],
[
[
"## Get User Details based on Name",
"_____no_output_____"
]
],
[
[
"fd = open('data.json','r')\ntxt = fd.read()\nfd.close()\n\ndata = loads(txt)",
"_____no_output_____"
],
[
"name = input('Enter the name: ')\n\nfor key in data.keys():\n if(name.lower() == data[key]['name'].lower()):\n \n print('-'*35)\n print(\"Registration No : \", key)\n print('Name : ', data[key]['name'])\n print('Mail : ', data[key]['mail'])\n print('Phone : ', data[key]['phone'])\n print('Section : ', data[key]['section'])\n print('-'*35)",
"Enter the name: rohit\n-----------------------------------\nRegistration No : 11602255\nName : Rohit\nMail : [email protected]\nPhone : 85739465343\nSection : K1632\n-----------------------------------\n"
]
],
[
[
"## Saving Search History in JSON",
"_____no_output_____"
]
],
[
[
"fd = open('data.json','r')\ntxt = fd.read()\nfd.close()\ndata = loads(txt)\n\nname = input('Enter the name: ')\nfor key in data.keys():\n if(name.lower() == data[key]['name'].lower()): \n print('-'*35)\n print(\"Registration No : \", key)\n print('Name : ', data[key]['name'])\n print('Mail : ', data[key]['mail'])\n print('Phone : ', data[key]['phone'])\n print('Section : ', data[key]['section'])\n print('-'*35)\n\nif (name in history.keys()): \n history[name]['frequency'] += 1\n history[name]['time'] = time.ctime()\nelse: \n log = {}\n log['time'] = time.ctime()\n log['frequency'] = 1\n \nhistory[name] = log",
"Enter the name: Ashish\n-----------------------------------\nRegistration No : 11602259\nName : Ashish\nMail : [email protected]\nPhone : 8457694375\nSection : K1632\n-----------------------------------\n"
],
[
"txt = dumps(history)\n\nfd = open('History.json','w')\nfd.write(txt)\nfd.close()",
"_____no_output_____"
]
]
] | [
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] | [
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
]
] |
d09a0260f6a607c69227a5c41d15eb6046709adc | 42,716 | ipynb | Jupyter Notebook | sspredict/test/test_edge.ipynb | DS-Wen/SSPredict | 663f693405b066d4b93751c8374d9f5412c501ee | [
"MIT"
] | 2 | 2019-03-02T06:32:05.000Z | 2021-04-30T04:18:24.000Z | sspredict/test/test_edge.ipynb | DS-Wen/SSPredict | 663f693405b066d4b93751c8374d9f5412c501ee | [
"MIT"
] | null | null | null | sspredict/test/test_edge.ipynb | DS-Wen/SSPredict | 663f693405b066d4b93751c8374d9f5412c501ee | [
"MIT"
] | 1 | 2019-05-12T11:46:24.000Z | 2019-05-12T11:46:24.000Z | 132.247678 | 16,512 | 0.864758 | [
[
[
"import numpy as np\nimport matplotlib.pyplot as plt",
"_____no_output_____"
]
],
[
[
"# BCC and FCC ",
"_____no_output_____"
]
],
[
[
"def average_quantities(E_list,V_list,S_list,Comp_list):\n average_E_list=np.empty(len(Comp_list))\n average_S_list=np.empty(len(Comp_list))\n average_V_list=np.empty(len(Comp_list))\n average_b_list=np.empty(len(Comp_list))\n average_nu_list=np.empty(len(Comp_list))\n delta_Vn_list=np.empty([len(Comp_list),len(E_list)])\n for i in range(len(Comp_list)):\n c = Comp_list[i]\n #print(c)\n avg_E = np.dot(E_list,c)\n avg_S = np.dot(S_list,c)\n avg_nu = avg_E/(2*avg_S)-1\n avg_V = np.dot(V_list,c)\n delta_Vn = V_list-avg_V\n avg_b = (4*avg_V)**(1/3)/(2**0.5)\n average_E_list[i]=(avg_E)\n average_S_list[i]=(avg_S)\n average_V_list[i]=(avg_V)\n average_b_list[i]=(avg_b)\n average_nu_list[i]=(avg_nu)\n delta_Vn_list[i,:]=(delta_Vn)\n return average_E_list,average_S_list,average_V_list,average_b_list,average_nu_list,delta_Vn_list\n\ndef curtin_BCC(average_S_list,average_V_list,average_b_list,average_nu_list,delta_Vn_list,Comp_list,T,ep):\n kc = 1.38064852*10**(-23) #J/K\n J2eV=6.2415093433*10**18\n ep0 = 10**4 \n aver_S = average_S_list\n aver_b = average_b_list\n \n sum_cndVn_b6_list = np.empty(len(Comp_list))\n \n dEb_list=np.empty(len(Comp_list))\n Ty0_list=np.empty(len(Comp_list))\n delta_ss_list=np.empty(len(Comp_list))\n for i in range(len(Comp_list)):\n c = Comp_list[i]\n #print(delta_Vn_list[i,:])\n #print(delta_Vn_list[i,:]**2)\n sum_cndVn_b6 = np.dot(c,delta_Vn_list[i,:]**2)/average_b_list[i]**6\n #print(sum_cndVn_b6)\n sum_cndVn_b6_list[i]=sum_cndVn_b6\n q_nu = ((1 + average_nu_list)/(1 - average_nu_list))\n dEb = 2.00 * 0.123**(1/3) * aver_S * aver_b**3 * q_nu**(2/3) * sum_cndVn_b6**(1/3)\n Ty0 = 0.040 * 0.123**(-1/3) * aver_S * q_nu**(4/3) * sum_cndVn_b6**(2/3)\n Ty_T = Ty0 * (1 - ((kc*T)/(dEb) * np.log(ep0/ep))**(2/3) )\n if Ty_T<=Ty0/2:\n Ty_T = Ty0 * np.exp(-1/0.55* kc*T/dEb*np.log(ep0/ep))\n delta_ss = 3.06*Ty_T\n dEb_list[i]=dEb\n Ty0_list[i]=Ty0\n delta_ss_list[i]=delta_ss\n return dEb_list, Ty0_list, delta_ss_list\n\ndef curtin_BCC_old(average_S_list,average_V_list,average_b_list,average_nu_list,delta_Vn_list,Comp_list,T,ep):\n kc = 1.38064852*10**(-23) #J/K\n J2eV=6.2415093433*10**18\n ep0 = 10**4 \n aver_S = average_S_list\n aver_b = average_b_list\n \n sum_cndVn_b6_list = np.empty(len(Comp_list))\n \n dEb_list=np.empty(len(Comp_list))\n Ty0_list=np.empty(len(Comp_list))\n delta_ss_list=np.empty(len(Comp_list))\n for i in range(len(Comp_list)):\n c = Comp_list[i]\n #print(delta_Vn_list[i,:])\n #print(delta_Vn_list[i,:]**2)\n sum_cndVn_b6 = np.dot(c,delta_Vn_list[i,:]**2)/average_b_list[i]**6\n #print(sum_cndVn_b6)\n sum_cndVn_b6_list[i]=sum_cndVn_b6\n q_nu = ((1 + average_nu_list)/(1 - average_nu_list))\n dEb = 2.00 * 0.123**(1/3) * aver_S * aver_b**3 * q_nu**(2/3) * sum_cndVn_b6**(1/3)\n Ty0 = 0.040 * 0.123**(-1/3) * aver_S * q_nu**(4/3) * sum_cndVn_b6**(2/3)\n Ty_T = Ty0 * (1 - ((kc*T)/(dEb) * np.log(ep0/ep))**(2/3) )\n \n delta_ss = 3.06*Ty_T\n dEb_list[i]=dEb\n Ty0_list[i]=Ty0\n delta_ss_list[i]=delta_ss\n return dEb_list, Ty0_list, delta_ss_list",
"_____no_output_____"
],
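[
"# Reference sketch of the relations the functions above implement (read directly from the\n# code; mu = average shear modulus aver_S, b = average Burgers vector aver_b,\n# q = (1 + nu)/(1 - nu), s = sum_n c_n*dV_n**2 / b**6 with dV_n = V_n - V_avg):\n#   dEb   = 2.00  * 0.123**(1/3)  * mu * b**3 * q**(2/3) * s**(1/3)\n#   Ty0   = 0.040 * 0.123**(-1/3) * mu        * q**(4/3) * s**(2/3)\n#   Ty(T) = Ty0 * (1 - (kc*T/dEb * ln(ep0/ep))**(2/3))\n# curtin_BCC switches to Ty(T) = Ty0 * exp(-(1/0.55) * kc*T/dEb * ln(ep0/ep)) once\n# Ty(T) <= Ty0/2, while curtin_BCC_old keeps the first form; both use delta_ss = 3.06*Ty(T).",
"_____no_output_____"
],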
[
"# Mo-Ta-Nb\nV_list=np.array([15.941,18.345,18.355])*1e-30\nE_list=np.array([326.78,170.02,69.389])*1e9\nS_list=np.array([126.4,62.8,24.2])*1e9\nComp_list = np.array([[0.75,0.,0.25]])\nep = 1e-3\nT = 1573",
"_____no_output_____"
],
[
"average_E_list,average_S_list,average_V_list,average_b_list,average_nu_list,delta_Vn_list= average_quantities(E_list,V_list,S_list,Comp_list)",
"_____no_output_____"
],
[
"dEb_list, Ty0_list, delta_ss_list=curtin_BCC(average_S_list,average_V_list,average_b_list,average_nu_list,delta_Vn_list,Comp_list,T,ep)\ndEb_list2, Ty0_list2, delta_ss_list2=curtin_BCC_old(average_S_list,average_V_list,average_b_list,average_nu_list,delta_Vn_list,Comp_list,T,ep)\n\n",
"0.001995898788797323\n0.001995898788797323\n"
],
[
"T_list = np.linspace(0,1600,170)\n",
"_____no_output_____"
],
[
"dEb_list_comp0 = np.empty(len(T_list))\nTy0_list_comp0 = np.empty(len(T_list))\ndelta_ss_list_comp0 = np.empty(len(T_list))\ndEb_list_comp0_old = np.empty(len(T_list))\nTy0_list_comp0_old = np.empty(len(T_list))\ndelta_ss_list_comp0_old = np.empty(len(T_list))\nfor i in range(len(T_list)):\n T = T_list[i]\n dEb_list, Ty0_list, delta_ss_list=curtin_BCC(average_S_list,average_V_list,average_b_list,average_nu_list,delta_Vn_list,Comp_list,T,ep)\n dEb_list_comp0[i]=(dEb_list[0])\n Ty0_list_comp0[i]=(Ty0_list[0])\n delta_ss_list_comp0[i]=(delta_ss_list[0]/1e6)\n \n dEb_list2, Ty0_list2, delta_ss_list2=curtin_BCC_old(average_S_list,average_V_list,average_b_list,average_nu_list,delta_Vn_list,Comp_list,T,ep)\n dEb_list_comp0_old[i]=(dEb_list2[0])\n Ty0_list_comp0_old[i]=(Ty0_list2[0])\n delta_ss_list_comp0_old[i]=(delta_ss_list2[0]/1e6)",
"_____no_output_____"
],
[
"plt.plot(T_list,delta_ss_list_comp0)\nplt.plot(T_list,delta_ss_list_comp0_old)",
"_____no_output_____"
],
[
"Comp_list = np.array([[0.1,0.00,0.9]])\naverage_E_list,average_S_list,average_V_list,average_b_list,average_nu_list,delta_Vn_list= average_quantities(E_list,V_list,S_list,Comp_list)\n\ndEb_list, Ty0_list, delta_ss_list=curtin_BCC(average_S_list,average_V_list,average_b_list,average_nu_list,delta_Vn_list,Comp_list,T,ep)\nT_list = np.linspace(0,1600,170)\n\ndEb_list_comp0 = np.empty(len(T_list))\nTy0_list_comp0 = np.empty(len(T_list))\ndelta_ss_list_comp0 = np.empty(len(T_list))\ndEb_list_comp0_old = np.empty(len(T_list))\nTy0_list_comp0_old = np.empty(len(T_list))\ndelta_ss_list_comp0_old = np.empty(len(T_list))\nfor i in range(len(T_list)):\n T = T_list[i]\n dEb_list, Ty0_list, delta_ss_list=curtin_BCC(average_S_list,average_V_list,average_b_list,average_nu_list,delta_Vn_list,Comp_list,T,ep)\n dEb_list_comp0[i]=(dEb_list[0])\n Ty0_list_comp0[i]=(Ty0_list[0])\n delta_ss_list_comp0[i]=(delta_ss_list[0]/1e6)\n \n dEb_list2, Ty0_list2, delta_ss_list2=curtin_BCC_old(average_S_list,average_V_list,average_b_list,average_nu_list,delta_Vn_list,Comp_list,T,ep)\n dEb_list_comp0_old[i]=(dEb_list2[0])\n Ty0_list_comp0_old[i]=(Ty0_list2[0])\n delta_ss_list_comp0_old[i]=(delta_ss_list2[0]/1e6)\nplt.plot(T_list,delta_ss_list_comp0)\nplt.plot(T_list,delta_ss_list_comp0_old)",
"_____no_output_____"
]
]
] | [
"code",
"markdown",
"code"
] | [
[
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
d09a0f87017a35b987f8e5129262a511c4ce19be | 5,417 | ipynb | Jupyter Notebook | 4_8_Vehicle_Motion_and_Calculus/Implement an Accelerometer.ipynb | mustafa1adel/CVND_Localization_Exercises | bff1f6879458b78d89a5fdce79a6e7da434f3a2a | [
"MIT"
] | 121 | 2018-06-05T02:46:52.000Z | 2022-03-23T10:11:21.000Z | 4_8_Vehicle_Motion_and_Calculus/Implement an Accelerometer.ipynb | mustafa1adel/CVND_Localization_Exercises | bff1f6879458b78d89a5fdce79a6e7da434f3a2a | [
"MIT"
] | 5 | 2021-03-19T01:13:24.000Z | 2022-03-11T23:49:57.000Z | 4_8_Vehicle_Motion_and_Calculus/Implement an Accelerometer.ipynb | mustafa1adel/CVND_Localization_Exercises | bff1f6879458b78d89a5fdce79a6e7da434f3a2a | [
"MIT"
] | 133 | 2018-06-01T02:38:57.000Z | 2021-12-27T18:53:58.000Z | 27.497462 | 209 | 0.584456 | [
[
[
"# Implement an Accelerometer\nIn this notebook you will define your own `get_derivative_from_data` function and use it to differentiate position data ONCE to get velocity information and then again to get acceleration information.\n\nIn part 1 I will demonstrate what this process looks like and then in part 2 you'll implement the function yourself.",
"_____no_output_____"
],
[
"-----",
"_____no_output_____"
],
[
"## Part 1 - Reminder and Demonstration",
"_____no_output_____"
]
],
[
[
"# run this cell for required imports\n\nfrom helpers import process_data\nfrom helpers import get_derivative_from_data as solution_derivative\nfrom matplotlib import pyplot as plt",
"_____no_output_____"
],
[
"# load the parallel park data\nPARALLEL_PARK_DATA = process_data(\"parallel_park.pickle\")\n\n# get the relevant columns\ntimestamps = [row[0] for row in PARALLEL_PARK_DATA]\ndisplacements = [row[1] for row in PARALLEL_PARK_DATA]\n\n# calculate first derivative\nspeeds = solution_derivative(displacements, timestamps)\n\n# plot\nplt.title(\"Position and Velocity vs Time\")\nplt.xlabel(\"Time (seconds)\")\nplt.ylabel(\"Position (blue) and Speed (orange)\")\nplt.scatter(timestamps, displacements)\nplt.scatter(timestamps[1:], speeds)\nplt.show()",
"_____no_output_____"
]
],
[
[
"But you just saw that acceleration is the derivative of velocity... which means we can use the same derivative function to calculate acceleration!",
"_____no_output_____"
]
],
[
[
"# calculate SECOND derivative \naccelerations = solution_derivative(speeds, timestamps[1:])\n\n# plot (note the slicing of timestamps from 2 --> end)\nplt.scatter(timestamps[2:], accelerations)\nplt.show()",
"_____no_output_____"
]
],
[
[
"As you can see, this parallel park motion consisted of four segments with different (but constant) acceleration. We can plot all three quantities at once like this:\n\n",
"_____no_output_____"
]
],
[
[
"plt.title(\"x(t), v(t), a(t)\")\nplt.xlabel(\"Time (seconds)\")\nplt.ylabel(\"x (blue), v (orange), a (green)\")\nplt.scatter(timestamps, displacements)\nplt.scatter(timestamps[1:], speeds)\nplt.scatter(timestamps[2:], accelerations)\nplt.show()",
"_____no_output_____"
]
],
[
[
"----",
"_____no_output_____"
],
[
"## Part 2 - Implement it yourself!",
"_____no_output_____"
]
],
[
[
"def get_derivative_from_data(position_data, time_data):\n # TODO - try your best to implement this code yourself!\n # if you get really stuck feel free to go back\n # to the previous notebook for a hint.\n return ",
"_____no_output_____"
],
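[
"# One possible implementation, shown only as a sketch (the function name is illustrative\n# so it does not overwrite your own attempt above): estimate the derivative with finite\n# differences between consecutive samples. The result has one fewer entry than the inputs.\ndef get_derivative_from_data_example(position_data, time_data):\n    derivatives = []\n    for i in range(1, len(position_data)):\n        dx = position_data[i] - position_data[i - 1]\n        dt = time_data[i] - time_data[i - 1]\n        derivatives.append(dx / dt)\n    return derivatives",
"_____no_output_____"
],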
[
"# Testing part 1 - visual testing of first derivative\n# compare this output to the corresponding graph above.\nspeeds = get_derivative_from_data(displacements, timestamps)\n\nplt.title(\"Position and Velocity vs Time\")\nplt.xlabel(\"Time (seconds)\")\nplt.ylabel(\"Position (blue) and Speed (orange)\")\nplt.scatter(timestamps, displacements)\nplt.scatter(timestamps[1:], speeds)\nplt.show()",
"_____no_output_____"
],
[
"# Testing part 2 - visual testing of second derivative\n# compare this output to the corresponding graph above.\nspeeds = get_derivative_from_data(displacements, timestamps)\naccelerations = get_derivative_from_data(speeds, timestamps[1:])\n\nplt.title(\"x(t), v(t), a(t)\")\nplt.xlabel(\"Time (seconds)\")\nplt.ylabel(\"x (blue), v (orange), a (green)\")\nplt.scatter(timestamps, displacements)\nplt.scatter(timestamps[1:], speeds)\nplt.scatter(timestamps[2:], accelerations)\nplt.show()",
"_____no_output_____"
]
]
] | [
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] | [
[
"markdown",
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code",
"code"
]
] |
d09a1ec1e1f5114c29baa429bf7fcc1ef2f664bc | 26,785 | ipynb | Jupyter Notebook | BSSN/Psi4Cartesianvalidation/Tutorial-ETK_thorn-WeylScal4NRPD.ipynb | philchang/nrpytutorial | a69d90777b2519192e3c53a129fe42827224faa3 | [
"BSD-2-Clause"
] | 66 | 2018-06-26T22:18:09.000Z | 2022-02-09T21:12:33.000Z | BSSN/Psi4Cartesianvalidation/Tutorial-ETK_thorn-WeylScal4NRPD.ipynb | philchang/nrpytutorial | a69d90777b2519192e3c53a129fe42827224faa3 | [
"BSD-2-Clause"
] | 14 | 2020-02-13T16:09:29.000Z | 2021-11-12T14:59:59.000Z | BSSN/Psi4Cartesianvalidation/Tutorial-ETK_thorn-WeylScal4NRPD.ipynb | philchang/nrpytutorial | a69d90777b2519192e3c53a129fe42827224faa3 | [
"BSD-2-Clause"
] | 30 | 2019-01-09T09:57:51.000Z | 2022-03-08T18:45:08.000Z | 48.001792 | 980 | 0.625126 | [
[
[
"<script async src=\"https://www.googletagmanager.com/gtag/js?id=UA-59152712-8\"></script>\n<script>\n window.dataLayer = window.dataLayer || [];\n function gtag(){dataLayer.push(arguments);}\n gtag('js', new Date());\n\n gtag('config', 'UA-59152712-8');\n</script>\n\n# Weyl Scalars and Invariants: An Introduction to Einstein Toolkit Diagnostic Thorns\n\n## Author: Patrick Nelson & Zach Etienne\n### Formatting improvements courtesy Brandon Clark\n\n[comment]: <> (Abstract: TODO)\n\n**Notebook Status:** <font color='green'><b> Validated </b></font>\n\n**Validation Notes:** Numerical results from this module have been confirmed to agree with the trusted WeylScal4 Einstein Toolkit thorn to roundoff error.\n\n### NRPy+ Source Code for this module: \n* [WeylScal4NRPD/WeylScalars_Cartesian.py](../edit/WeylScal4NRPD/WeylScalars_Cartesian.py)\n* [WeylScal4NRPD/WeylScalarInvariants_Cartesian.py](../edit/WeylScal4NRPD/WeylScalarInvariants_Cartesian.py)\n\nwhich are fully documented in the NRPy+ [Tutorial-WeylScalars-Cartesian](Tutorial-WeylScalars-Cartesian.ipynb) module on using NRPy+ to construct the Weyl scalars and invariants as SymPy expressions.\n\n## Introduction:\nIn the [previous tutorial notebook](Tutorial-WeylScalars-Cartesian.ipynb), we constructed within SymPy full expressions for the real and imaginary components of all five Weyl scalars $\\psi_0$, $\\psi_1$, $\\psi_2$, $\\psi_3$, and $\\psi_4$ as well as the Weyl invariants. So that we can easily access these expressions, we have ported the Python code needed to generate the Weyl scalar SymPy expressions to [WeylScal4NRPD/WeylScalars_Cartesian.py](../edit/WeylScal4NRPD/WeylScalars_Cartesian.py), and the Weyl invariant SymPy expressions to [WeylScal4NRPD/WeylScalarInvariants_Cartesian.py](../edit/WeylScal4NRPD/WeylScalarInvariants_Cartesian.py).\n\nHere we will work through the steps necessary to construct an Einstein Toolkit diagnostic thorn (module), starting from these SymPy expressions, which computes these expressions using ADMBase gridfunctions as input. This tutorial is in two steps:\n\n1. Call on NRPy+ to convert the SymPy expressions for the Weyl Scalars and associated Invariants into one C-code kernel for each.\n1. Write the C code and build up the needed Einstein Toolkit infrastructure (i.e., the .ccl files).",
"_____no_output_____"
],
[
"<a id='toc'></a>\n\n# Table of Contents\n$$\\label{toc}$$\n\nThis notebook is organized as follows\n\n1. [Step 1](#nrpy): Call on NRPy+ to convert the SymPy expressions for the Weyl scalars and associated invariants into one C-code kernel for each\n1. [Step 2](#etk): Interfacing with the Einstein Toolkit\n 1. [Step 2.a](#etkc): Constructing the Einstein Toolkit C-code calling functions that include the C code kernels\n 1. [Step 2.b](#cclfiles): CCL files - Define how this module interacts and interfaces with the larger Einstein Toolkit infrastructure\n 1. [Step 2.c](#etk_list): Add the C file to Einstein Toolkit compilation list\n1. [Step 3](#latex_pdf_output): Output this notebook to $\\LaTeX$-formatted PDF file",
"_____no_output_____"
],
[
"<a id='nrpy'></a>\n\n# Step 1: Call on NRPy+ to convert the SymPy expressions for the Weyl scalars and associated invariants into one C-code kernel for each \\[Back to [top](#toc)\\]\n$$\\label{nrpy}$$\n\n<font color='red'><b>WARNING</b></font>: It takes some time to generate the CSE-optimized C code kernels for these quantities, especially the Weyl scalars... expect 5 minutes on a modern computer.",
"_____no_output_____"
]
],
[
[
"from outputC import * # NRPy+: Core C code output module\nimport finite_difference as fin # NRPy+: Finite difference C code generation module\nimport NRPy_param_funcs as par # NRPy+: Parameter interface\nimport grid as gri # NRPy+: Functions having to do with numerical grids\nimport indexedexp as ixp # NRPy+: Symbolic indexed expression (e.g., tensors, vectors, etc.) support\nimport reference_metric as rfm # NRPy+: Reference metric support\nimport cmdline_helper as cmd # NRPy+: Multi-platform Python command-line interface\nimport loop as lp # NRPy+: loop infrasructure\nimport shutil, os, sys, time # Standard Python modules for multiplatform OS-level functions, benchmarking\n\n# Step 1: Set the coordinate system for the numerical grid to Cartesian.\npar.set_parval_from_str(\"reference_metric::CoordSystem\",\"Cartesian\")\nrfm.reference_metric() # Create ReU, ReDD needed for rescaling B-L initial data, generating BSSN RHSs, etc.\n\n# Step 2: Set the finite differencing order to FD_order to 4\npar.set_parval_from_str(\"finite_difference::FD_CENTDERIVS_ORDER\", 4)\n\n# Step 3: Create output directories\n!mkdir WeylScal4NRPD 2>/dev/null # 2>/dev/null: Don't throw an error or warning if the directory already exists.\n!mkdir WeylScal4NRPD/src 2>/dev/null # 2>/dev/null: Don't throw an error or warning if the directory already exists.\n\n# Step 4: Generate symbolic expressions\n# Since we are writing an Einstein Toolkit thorn, we must set our memory access style to \"ETK\".\npar.set_parval_from_str(\"grid::GridFuncMemAccess\",\"ETK\")\nimport BSSN.Psi4_tetrads as BP4t\npar.set_parval_from_str(\"BSSN.Psi4_tetrads::TetradChoice\",\"QuasiKinnersley\")\n#par.set_parval_from_str(\"BSSN.Psi4_tetrads::UseCorrectUnitNormal\",\"True\")\nimport BSSN.Psi4 as BP4\nprint(\"Generating symbolic expressions for psi4...\")\nstart = time.time()\nBP4.Psi4()\nend = time.time()\nprint(\"(BENCH) Finished psi4 symbolic expressions in \"+str(end-start)+\" seconds.\")\n\npsi4r = gri.register_gridfunctions(\"AUX\",\"psi4r\")\npsi4r0pt = gri.register_gridfunctions(\"AUX\",\"psi4r0pt\")\npsi4r1pt = gri.register_gridfunctions(\"AUX\",\"psi4r1pt\")\npsi4r2pt = gri.register_gridfunctions(\"AUX\",\"psi4r2pt\")\n\n# Construct RHSs:\npsi4r_lhrh = [lhrh(lhs=gri.gfaccess(\"out_gfs\",\"psi4r\"),rhs=BP4.psi4_re_pt[0]+BP4.psi4_re_pt[1]+BP4.psi4_re_pt[2]),\n lhrh(lhs=gri.gfaccess(\"out_gfs\",\"psi4r0pt\"),rhs=BP4.psi4_re_pt[0]),\n lhrh(lhs=gri.gfaccess(\"out_gfs\",\"psi4r1pt\"),rhs=BP4.psi4_re_pt[1]),\n lhrh(lhs=gri.gfaccess(\"out_gfs\",\"psi4r2pt\"),rhs=BP4.psi4_re_pt[2])]\n\n# Generating the CSE is the slowest\n# operation in this notebook, and much of the CSE\n# time is spent sorting CSE expressions. Disabling\n# this sorting makes the C codegen 3-4x faster,\n# but the tradeoff is that every time this is\n# run, the CSE patterns will be different \n# (though they should result in mathematically \n# *identical* expressions). 
You can expect \n# roundoff-level differences as a result.\nstart = time.time()\nprint(\"Generating C code kernel for psi4r...\")\npsi4r_CcodeKernel = fin.FD_outputC(\"returnstring\",psi4r_lhrh,params=\"outCverbose=False,CSE_sorting=none\")\nend = time.time()\nprint(\"(BENCH) Finished psi4r C code kernel generation in \"+str(end-start)+\" seconds.\")\npsi4r_looped = lp.loop([\"i2\",\"i1\",\"i0\"],[\"2\",\"2\",\"2\"],[\"cctk_lsh[2]-2\",\"cctk_lsh[1]-2\",\"cctk_lsh[0]-2\"],\\\n [\"1\",\"1\",\"1\"],[\"#pragma omp parallel for\",\"\",\"\"],\"\",\"\"\"\n const CCTK_REAL xx0 = xGF[CCTK_GFINDEX3D(cctkGH, i0,i1,i2)];\n const CCTK_REAL xx1 = yGF[CCTK_GFINDEX3D(cctkGH, i0,i1,i2)];\n const CCTK_REAL xx2 = zGF[CCTK_GFINDEX3D(cctkGH, i0,i1,i2)];\n\"\"\"+psi4r_CcodeKernel)\nwith open(\"WeylScal4NRPD/src/WeylScal4NRPD_psi4r.h\", \"w\") as file:\n file.write(str(psi4r_looped))",
"Generating symbolic expressions for psi4...\n(BENCH) Finished psi4 symbolic expressions in 1.7516753673553467 seconds.\nGenerating C code kernel for psi4r...\n(BENCH) Finished psi4r C code kernel generation in 40.00490069389343 seconds.\n"
]
],
[
[
"<a id='etk'></a>\n\n# Step 2: Interfacing with the Einstein Toolkit \\[Back to [top](#toc)\\]\n$$\\label{etk}$$\n",
"_____no_output_____"
],
[
"<a id='etkc'></a>\n\n## Step 2.a: Constructing the Einstein Toolkit calling functions that include the C code kernels \\[Back to [top](#toc)\\]\n$$\\label{etkc}$$\n\nNow that we have generated the C code kernels (`WeylScal4NRPD_psis.h` and `WeylScal4NRPD_invars.h`) express the Weyl scalars and invariants as CSE-optimized finite-difference expressions, we next need to write the C code functions that incorporate these kernels and are called by the Einstein Toolkit scheduler.",
"_____no_output_____"
]
],
[
[
"%%writefile WeylScal4NRPD/src/WeylScal4NRPD.c\n\n#include <math.h>\n#include <stdio.h>\n#include <stdlib.h>\n#include <string.h>\n#include \"cctk.h\"\n#include \"cctk_Arguments.h\"\n#include \"cctk_Parameters.h\"\n\nvoid WeylScal4NRPD_calc_psi4r(const cGH* restrict const cctkGH,const int *cctk_lsh,const int *cctk_nghostzones,\nconst CCTK_REAL invdx0,const CCTK_REAL invdx1,const CCTK_REAL invdx2,\nconst CCTK_REAL *xGF,const CCTK_REAL *yGF,const CCTK_REAL *zGF,\nconst CCTK_REAL *hDD00GF,const CCTK_REAL *hDD01GF,const CCTK_REAL *hDD02GF,const CCTK_REAL *hDD11GF,const CCTK_REAL *hDD12GF,const CCTK_REAL *hDD22GF,\nconst CCTK_REAL *aDD00GF,const CCTK_REAL *aDD01GF,const CCTK_REAL *aDD02GF,const CCTK_REAL *aDD11GF,const CCTK_REAL *aDD12GF,const CCTK_REAL *aDD22GF,\nconst CCTK_REAL *trKGF,const CCTK_REAL *cfGF,\nCCTK_REAL *psi4rGF,\nCCTK_REAL *psi4r0ptGF,\nCCTK_REAL *psi4r1ptGF,\nCCTK_REAL *psi4r2ptGF) {\n\n DECLARE_CCTK_PARAMETERS;\n\n#include \"WeylScal4NRPD_psi4r.h\"\n\n}\n\nextern void WeylScal4NRPD_mainfunction(CCTK_ARGUMENTS) {\n\n DECLARE_CCTK_PARAMETERS;\n DECLARE_CCTK_ARGUMENTS;\n\n if(cctk_iteration % WeylScal4NRPD_calc_every != 0) { return; }\n \n const CCTK_REAL invdx0 = 1.0 / (CCTK_DELTA_SPACE(0));\n const CCTK_REAL invdx1 = 1.0 / (CCTK_DELTA_SPACE(1));\n const CCTK_REAL invdx2 = 1.0 / (CCTK_DELTA_SPACE(2));\n\n /* Now, to calculate psi4: */\n WeylScal4NRPD_calc_psi4r(cctkGH,cctk_lsh,cctk_nghostzones,\n invdx0,invdx1,invdx2,\n x,y,z,\n hDD00GF,hDD01GF,hDD02GF,hDD11GF,hDD12GF,hDD22GF,\n aDD00GF,aDD01GF,aDD02GF,aDD11GF,aDD12GF,aDD22GF,\n trKGF,cfGF,\n psi4rGF,\n psi4r0ptGF,psi4r1ptGF,psi4r2ptGF);\n}",
"Overwriting WeylScal4NRPD/src/WeylScal4NRPD.c\n"
],
[
"# First we convert from ADM to BSSN, as is required to convert initial data \n# (given using) ADM quantities, to the BSSN evolved variables\nimport BSSN.ADM_Numerical_Spherical_or_Cartesian_to_BSSNCurvilinear as atob\nIDhDD,IDaDD,IDtrK,IDvetU,IDbetU,IDalpha,IDcf,IDlambdaU = \\\n atob.Convert_Spherical_or_Cartesian_ADM_to_BSSN_curvilinear(\"Cartesian\",\"DoNotOutputADMInputFunction\",os.path.join(\"WeylScal4NRPD\",\"src\"))\n\n# Store the original list of registered gridfunctions; we'll want to unregister\n# all the *SphorCart* gridfunctions after we're finished with them below.\norig_glb_gridfcs_list = []\nfor gf in gri.glb_gridfcs_list:\n orig_glb_gridfcs_list.append(gf)\n\nalphaSphorCart = gri.register_gridfunctions( \"AUXEVOL\", \"alphaSphorCart\")\nbetaSphorCartU = ixp.register_gridfunctions_for_single_rank1(\"AUXEVOL\", \"betaSphorCartU\")\nBSphorCartU = ixp.register_gridfunctions_for_single_rank1(\"AUXEVOL\", \"BSphorCartU\")\ngammaSphorCartDD = ixp.register_gridfunctions_for_single_rank2(\"AUXEVOL\", \"gammaSphorCartDD\", \"sym01\")\nKSphorCartDD = ixp.register_gridfunctions_for_single_rank2(\"AUXEVOL\", \"KSphorCartDD\", \"sym01\")\n\n# ADM to BSSN conversion, used for converting ADM initial data into a form readable by this thorn.\n# ADM to BSSN, Part 1: Set up function call and pointers to ADM gridfunctions\noutstr = \"\"\"\n#include <math.h>\n\n#include \"cctk.h\"\n#include \"cctk_Arguments.h\"\n#include \"cctk_Parameters.h\"\n\nvoid WeylScal4NRPD_ADM_to_BSSN(CCTK_ARGUMENTS) {\n DECLARE_CCTK_ARGUMENTS;\n DECLARE_CCTK_PARAMETERS;\n\n CCTK_REAL *alphaSphorCartGF = alp;\n\"\"\"\n# It's ugly if we output code in the following ordering, so we'll first\n# output to a string and then sort the string to beautify the code a bit.\noutstrtmp = []\nfor i in range(3):\n outstrtmp.append(\" CCTK_REAL *betaSphorCartU\"+str(i)+\"GF = beta\"+chr(ord('x')+i)+\";\\n\")\n# outstrtmp.append(\" CCTK_REAL *BSphorCartU\"+str(i)+\"GF = dtbeta\"+chr(ord('x')+i)+\";\\n\")\n for j in range(i,3):\n outstrtmp.append(\" CCTK_REAL *gammaSphorCartDD\"+str(i)+str(j)+\"GF = g\"+chr(ord('x')+i)+chr(ord('x')+j)+\";\\n\")\n outstrtmp.append(\" CCTK_REAL *KSphorCartDD\"+str(i)+str(j)+\"GF = k\"+chr(ord('x')+i)+chr(ord('x')+j)+\";\\n\")\noutstrtmp.sort()\nfor line in outstrtmp:\n outstr += line\n\n# ADM to BSSN, Part 2: Set up ADM to BSSN conversions for BSSN gridfunctions that do not require\n# finite-difference derivatives (i.e., all gridfunctions except lambda^i (=Gamma^i \n# in non-covariant BSSN)):\n# h_{ij}, a_{ij}, trK, vet^i=beta^i,bet^i=B^i, cf (conformal factor), and alpha\nall_but_lambdaU_expressions = [\n lhrh(lhs=gri.gfaccess(\"in_gfs\",\"hDD00\"),rhs=IDhDD[0][0]),\n lhrh(lhs=gri.gfaccess(\"in_gfs\",\"hDD01\"),rhs=IDhDD[0][1]),\n lhrh(lhs=gri.gfaccess(\"in_gfs\",\"hDD02\"),rhs=IDhDD[0][2]),\n lhrh(lhs=gri.gfaccess(\"in_gfs\",\"hDD11\"),rhs=IDhDD[1][1]),\n lhrh(lhs=gri.gfaccess(\"in_gfs\",\"hDD12\"),rhs=IDhDD[1][2]),\n lhrh(lhs=gri.gfaccess(\"in_gfs\",\"hDD22\"),rhs=IDhDD[2][2]),\n lhrh(lhs=gri.gfaccess(\"in_gfs\",\"aDD00\"),rhs=IDaDD[0][0]),\n lhrh(lhs=gri.gfaccess(\"in_gfs\",\"aDD01\"),rhs=IDaDD[0][1]),\n lhrh(lhs=gri.gfaccess(\"in_gfs\",\"aDD02\"),rhs=IDaDD[0][2]),\n lhrh(lhs=gri.gfaccess(\"in_gfs\",\"aDD11\"),rhs=IDaDD[1][1]),\n lhrh(lhs=gri.gfaccess(\"in_gfs\",\"aDD12\"),rhs=IDaDD[1][2]),\n lhrh(lhs=gri.gfaccess(\"in_gfs\",\"aDD22\"),rhs=IDaDD[2][2]),\n lhrh(lhs=gri.gfaccess(\"in_gfs\",\"trK\"),rhs=IDtrK),\n lhrh(lhs=gri.gfaccess(\"in_gfs\",\"vetU0\"),rhs=IDvetU[0]),\n 
lhrh(lhs=gri.gfaccess(\"in_gfs\",\"vetU1\"),rhs=IDvetU[1]),\n lhrh(lhs=gri.gfaccess(\"in_gfs\",\"vetU2\"),rhs=IDvetU[2]),\n lhrh(lhs=gri.gfaccess(\"in_gfs\",\"alpha\"),rhs=IDalpha),\n lhrh(lhs=gri.gfaccess(\"in_gfs\",\"cf\"),rhs=IDcf)]\n\noutCparams = \"preindent=1,outCfileaccess=a,outCverbose=False,includebraces=False\"\nall_but_lambdaU_outC = fin.FD_outputC(\"returnstring\",all_but_lambdaU_expressions, outCparams)\noutstr += lp.loop([\"i2\",\"i1\",\"i0\"],[\"0\",\"0\",\"0\"],[\"cctk_lsh[2]\",\"cctk_lsh[1]\",\"cctk_lsh[0]\"],\n [\"1\",\"1\",\"1\"],[\"#pragma omp parallel for\",\"\",\"\"],\" \",all_but_lambdaU_outC)\n\noutstr += \"} // END void WeylScal4NRPD_ADM_to_BSSN(CCTK_ARGUMENTS)\\n\"\n\nwith open(\"WeylScal4NRPD/src/ADM_to_BSSN.c\", \"w\") as file:\n file.write(str(outstr))",
"_____no_output_____"
]
],
[
[
"<a id='cclfiles'></a>\n\n## Step 2.b: CCL files - Define how this module interacts and interfaces with the larger Einstein Toolkit infrastructure \\[Back to [top](#toc)\\]\n$$\\label{cclfiles}$$\n\nWriting a module (\"thorn\") within the Einstein Toolkit requires that three \"ccl\" files be constructed, all in the root directory of the thorn:\n\n1.`interface.ccl`: defines the gridfunction groups needed, and provides keywords denoting what this thorn provides and what it should inherit from other thorns.\n1. `param.ccl`: specifies free parameters within the thorn.\n1. `schedule.ccl`: allocates storage for gridfunctions, defines how the thorn's functions should be scheduled in a broader simulation, and specifies the regions of memory written to or read from gridfunctions.\n\nLet's start with `interface.ccl`. The [official Einstein Toolkit (Cactus) documentation](http://einsteintoolkit.org/usersguide/UsersGuide.html) defines what must/should be included in an `interface.ccl` file [**here**](http://einsteintoolkit.org/usersguide/UsersGuidech12.html#x17-178000D2.2). ",
"_____no_output_____"
]
],
[
[
"%%writefile WeylScal4NRPD/interface.ccl\n\n# With \"implements\", we give our thorn its unique name.\nimplements: WeylScal4NRPD\n\n# By \"inheriting\" other thorns, we tell the Toolkit that we \n# will rely on variables/function that exist within those\n# functions. \ninherits: admbase Boundary Grid methodoflines\n\n# Tell the Toolkit that we want the various Weyl scalars \n# and invariants to be visible to other thorns by using \n# the keyword \"public\". Note that declaring these \n# gridfunctions *does not* allocate memory for them;\n# that is done by the schedule.ccl file.\npublic:\nCCTK_REAL NRPyPsi4_group type=GF timelevels=3 tags='tensortypealias=\"Scalar\" tensorweight=0 tensorparity=1'\n{\n psi4rGF,psi4r0ptGF,psi4r1ptGF,psi4r2ptGF, psi4iGF\n} \"Psi4_group\"\n\nCCTK_REAL evol_variables type = GF Timelevels=3\n{\n aDD00GF,aDD01GF,aDD02GF,aDD11GF,aDD12GF,aDD22GF,alphaGF,cfGF,hDD00GF,hDD01GF,hDD02GF,hDD11GF,hDD12GF,hDD22GF,trKGF,vetU0GF,vetU1GF,vetU2GF\n} \"BSSN evolved gridfunctions, sans lambdaU and partial t beta\"\n",
"Overwriting WeylScal4NRPD/interface.ccl\n"
]
],
[
[
"We will now write the file `param.ccl`. This file allows the listed parameters to be set at runtime. We also give allowed ranges and default values for each parameter. More information on this file's syntax can be found in the [official Einstein Toolkit documentation](http://einsteintoolkit.org/usersguide/UsersGuidech12.html#x17-183000D2.3). \n\nThe first parameter specifies how many time levels need to be stored. Generally when using the ETK's adaptive-mesh refinement (AMR) driver [Carpet](https://carpetcode.org/), three timelevels are needed so that the diagnostic quantities can be properly interpolated and defined across refinement boundaries. \n\nThe second parameter determines how often we will calculate $\\psi_4$, and the third parameter indicates whether just $\\psi_4$, all Weyl scalars, or all Weyl scalars and invariants are going to be output. The third parameter is currently specified entirely within NRPy+, so by this point it is *not* a free parameter. Thus it is not quite correct to include it in this list of *free* parameters (FIXME).",
"_____no_output_____"
]
],
[
[
"%%writefile WeylScal4NRPD/param.ccl\n\nrestricted:\nCCTK_INT timelevels \"Number of active timelevels\" STEERABLE=RECOVER\n{\n 0:3 :: \"\"\n} 3\n\nrestricted:\nCCTK_INT WeylScal4NRPD_calc_every \"WeylScal4_psi4_calc_Nth_calc_every\" STEERABLE=ALWAYS\n{\n *:* :: \"\"\n} 1",
"Overwriting WeylScal4NRPD/param.ccl\n"
]
],
[
[
"Finally, we will write the file `schedule.ccl`; its official documentation is found [here](http://einsteintoolkit.org/usersguide/UsersGuidech12.html#x17-186000D2.4). This file dictates when the various parts of the thorn will be run. We first assign storage for both the real and imaginary components of $\\psi_4$, and then specify that we want our code run in the `MoL_PseudoEvolution` schedule group (consistent with the original `WeylScal4` Einstein Toolkit thorn), after the ADM variables are set. At this step, we declare that we will be writing code in C. We also specify the gridfunctions that we wish to read in from memory--in our case, we need all the components of $K_{ij}$ (the spatial extrinsic curvature) and $\\gamma_{ij}$ (the physical [as opposed to conformal] 3-metric), in addition to the coordinate values. Note that the ETK adopts the widely-used convention that components of $\\gamma_{ij}$ are prefixed in the code with $\\text{g}$ and not $\\gamma$.",
"_____no_output_____"
]
],
[
[
"%%writefile WeylScal4NRPD/schedule.ccl\n\nSTORAGE: NRPyPsi4_group[3], evol_variables[3]\nSTORAGE: ADMBase::metric[3], ADMBase::curv[3], ADMBase::lapse[3], ADMBase::shift[3]\n\nschedule group WeylScal4NRPD_group in MoL_PseudoEvolution after ADMBase_SetADMVars\n{\n} \"Schedule WeylScal4NRPD group\"\n\nschedule WeylScal4NRPD_ADM_to_BSSN in WeylScal4NRPD_group before weylscal4_mainfunction\n{\n LANG: C\n} \"Convert ADM into BSSN variables\"\n\n\nschedule WeylScal4NRPD_mainfunction in WeylScal4NRPD_group after WeylScal4NRPD_ADM_to_BSSN\n{\n LANG: C\n} \"Call WeylScal4NRPD main function\"",
"Overwriting WeylScal4NRPD/schedule.ccl\n"
]
],
[
[
"<a id='etk_list'></a>\n\n## Step 2.c: Tell the Einstein Toolkit to compile the C code \\[Back to [top](#toc)\\]\n$$\\label{etk_list}$$\n\nThe `make.code.defn` lists the source files that need to be compiled. Naturally, this thorn has only the one C file $-$ written above $-$ to compile:",
"_____no_output_____"
]
],
[
[
"%%writefile WeylScal4NRPD/src/make.code.defn\n\nSRCS = WeylScal4NRPD.c ADM_to_BSSN.c",
"Overwriting WeylScal4NRPD/src/make.code.defn\n"
]
],
[
[
"<a id='latex_pdf_output'></a>\n\n# Step 3: Output this notebook to $\\LaTeX$-formatted PDF file \\[Back to [top](#toc)\\]\n$$\\label{latex_pdf_output}$$\n\nThe following code cell converts this Jupyter notebook into a proper, clickable $\\LaTeX$-formatted PDF file. After the cell is successfully run, the generated PDF may be found in the root NRPy+ tutorial directory, with filename\n[Tutorial-ETK_thorn-Weyl_Scalars_and_Spacetime_Invariants.pdf](Tutorial-ETK_thorn-Weyl_Scalars_and_Spacetime_Invariants.pdf) (Note that clicking on this link may not work; you may need to open the PDF file through another means.)",
"_____no_output_____"
]
],
[
[
"import cmdline_helper as cmd # NRPy+: Multi-platform Python command-line interface\ncmd.output_Jupyter_notebook_to_LaTeXed_PDF(\"Tutorial-ETK_thorn-WeylScal4NRPD\")",
"Created Tutorial-ETK_thorn-WeylScal4NRPD.tex, and compiled LaTeX file to PDF file Tutorial-ETK_thorn-WeylScal4NRPD.pdf\n"
]
]
] | [
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] | [
[
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
]
] |
d09a332e8f99a01a82d76306aaea3db7034aa508 | 27,501 | ipynb | Jupyter Notebook | ETL Pipeline Preparation.ipynb | alserranor/DisasterResponse | 51f3e16cd65ee131e864f08768d50410776bfda0 | [
"MIT"
] | null | null | null | ETL Pipeline Preparation.ipynb | alserranor/DisasterResponse | 51f3e16cd65ee131e864f08768d50410776bfda0 | [
"MIT"
] | null | null | null | ETL Pipeline Preparation.ipynb | alserranor/DisasterResponse | 51f3e16cd65ee131e864f08768d50410776bfda0 | [
"MIT"
] | null | null | null | 47.010256 | 465 | 0.451474 | [
[
[
"empty"
]
]
] | [
"empty"
] | [
[
"empty"
]
] |
d09a33f01f81fb4575812622d8795108ad1c293c | 17,058 | ipynb | Jupyter Notebook | docs/simpl_instructions/eeg_objects.ipynb | UBC-MDS/simpl_eeg_capstone | 141ffafcada42098703628f68780077344dd4944 | [
"MIT"
] | null | null | null | docs/simpl_instructions/eeg_objects.ipynb | UBC-MDS/simpl_eeg_capstone | 141ffafcada42098703628f68780077344dd4944 | [
"MIT"
] | 35 | 2021-06-08T22:31:52.000Z | 2021-06-29T20:25:42.000Z | docs/simpl_instructions/eeg_objects.ipynb | UBC-MDS/simpl_eeg_capstone | 141ffafcada42098703628f68780077344dd4944 | [
"MIT"
] | null | null | null | 30.624776 | 373 | 0.576386 | [
[
[
"# Creating EEG Objects",
"_____no_output_____"
],
[
"## Epoch Creation\n<a id=\"intro\"></a>",
"_____no_output_____"
]
],
[
[
"from simpl_eeg import eeg_objects",
"_____no_output_____"
]
],
[
[
"<br>",
"_____no_output_____"
],
[
"### Module Overview",
"_____no_output_____"
],
[
"The `eeg_objects` module contains helper classes for storing and manipulating relevant information regarding epochs to pass to other package functions. It contains two classes. Typically you will only you use the `eeg_objects.Epochs` directly, which by default contains a `eeg_objects.EEG_File` object in the `eeg_file` attribute. \nBelow are the docstrings for the two classes:",
"_____no_output_____"
]
],
[
[
"# Class for reading and importing EEG files\nhelp(eeg_objects.EEG_File)",
"_____no_output_____"
],
[
"# Class for storing, generating, and adjusting epoch objects\nhelp(eeg_objects.Epochs)",
"_____no_output_____"
]
],
[
[
"<br>",
"_____no_output_____"
],
[
"### Define parameters",
"_____no_output_____"
],
[
"The only required parameter to create an epoch object is the `folder_path` for the experiment of interest, however additional parameters may be used to customize your epoch object. \n\n- `file_name`\n - If you specify a `file_name`, and the file exists in the `folder_path` directory, then it will be used as the main data file for the epoch.\n - If you do not specify a `file_name` then the alphabetical first file with a supported main file type in `folder_path` will be automatically loaded.\n\n\n- `events_file`\n - If you specify an `events_file`, and the file exists in the `folder_path` directory, then it will be used as the events data file for the epoch.\n - If you do not specify an `events_file` then the alphabetical first file with a supported events file type in `folder_path` will be automatically loaded.\n - If you try to load an `events_file` (automatically or manually) with over 5,000 events or if the final column in the loaded dictionary does not contain a numerical value in its first index (both forms of error catching) then the file will be rejected and will not be loaded.\n - If you want to force no events data to be loaded you can pass and `events_file` of `None`.\n \n\n- `montage`\n - If you specify a `montage`, it will load a standard montage with the specified name into the epoch data.\n - If montage data already exists in the main data file and a `montage` is provided the original data overwritten in the epoch object.\n - If you do not specify a `montage` and montage data already exists in the main data then it will be used instead.\n - If you do not specify a `montage` and montage data does not exist in the main data then one attempt will be made to load a \"easycap-M1\" montage. If this fails then no montage information will be loaded.\n - If you want to force no `montage` to be loaded data to be loaded you can pass and `events_file` of `None`.\n\n\n- `start_second`\n - If you specify a `start_second`, a single epoch will be generated with an impact event at the specified second.\n - If you do not specify a `start_second`, epochs will be automatically generated using the impact times found in the `impact locations.mat` file in the selected `experiment_folder`. \n\n\n- `tmin`\n - specifies the number of seconds before the impact to use,\n\n\n- `tmax`\n - specifies the number of seconds after the impact.",
"_____no_output_____"
]
],
[
[
"# path to the experiment folder\nfolder_path = \"../../data/109\"\n\n# the name of the main data file to load (optional)\nfile_name = \"fixica.set\"\n\n# the name of the events file to load (optional)\nevents_file = \"impact locations.mat\"\n\n# the montage type to load (optional)\nmontage = None\n\n# number of seconds before the impact, should be a negative number for before impact (optional)\ntmin = -1\n\n# number of seconds after the impact (optional)\ntmax = 1\n\n# if creating a custom epoch, select a starting second (optional)\nstart_second = None",
"_____no_output_____"
]
],
[
[
"<br>",
"_____no_output_____"
],
[
"### Create epoched data",
"_____no_output_____"
],
[
"The following data formats are currently supported. Note that due to limited availability of test files not all formats have been fully tested (see Notes).\n\n| | Main File | Secondary File | Events File | Notes |\n|-----------------------|-----------|----------------|-------------|---------------------------------------------------------|\n| EEGLAB | .set | .fdt | .mat | |\n| BrainVision | .vhdr | .eeg | .vmrk | |\n| European data format | .edf | N/A | N/A | |\n| BioSemi data format | .bdf | N/A | N/A | Montage has not be successfully loaded with test files. |\n| General data format | .gdf | N/A | N/A | Events have not be successfully loaded with test files. |\n| Neuroscan CNT | .cnt | N/A | N/A | Montage has not be successfully loaded with test files. |\n| eXimia | .nxe | N/A | N/A | Events have not be successfully loaded with test files. |\n| Nihon Kohden EEG data | .eeg | .pnt AND .21e | .log | Montage has not be successfully loaded with test files. |",
"_____no_output_____"
],
[
"- A **main file** represents the lead file used to load in your EEG data. This is the file that may be passed as your `file_name`.\n\n- A **secondary file** contains some secondary information for some data types. They will be automatically loaded to when the main file is loaded.\n\n- A **events file** contains a list of the annotations associated with events in your EEG data. This is the file that may be passed as your `events_file`.\n\n- A **montage** must exist in your epoch in order to visualize it. This contains information about your node locations in 3D space. A complete list of usable montages is available here: https://mne.tools/dev/generated/mne.channels.make_standard_montage.html.",
"_____no_output_____"
],
[
"You can create epoched data using the `Epochs` class.",
"_____no_output_____"
]
],
[
[
"epochs = eeg_objects.Epochs(\n folder_path = folder_path,\n file_name = file_name,\n events_file = events_file,\n montage = montage,\n tmin = tmin,\n tmax = tmax,\n start_second = start_second\n)",
"_____no_output_____"
]
],
[
[
"The generated epoch data is found within the `all_epochs` attribute. Here we are generating epochs with automatically detected impact times, so we can see that there are multiple events.",
"_____no_output_____"
]
],
[
[
"epochs.all_epochs",
"_____no_output_____"
]
],
[
[
"If instead we create epochs with a custom start second, we will only create a single epoch with an impact the given `start_second`.",
"_____no_output_____"
]
],
[
[
"start_second = 15 # record event at second 15\ncustom_epoch = eeg_objects.Epochs(folder_path, tmin, tmax, start_second) \n\ncustom_epoch.all_epochs",
"_____no_output_____"
]
],
[
[
"#### Get information about epochs",
"_____no_output_____"
],
[
"In addition to the epochs contained in the `all_epochs` attribute, the `Epoch` object also contains information about the file used and has a selected epoch for quick access. ",
"_____no_output_____"
]
],
[
[
"eeg_file = epochs.eeg_file\nprint(eeg_file.folder_path) # experiment folder path\nprint(eeg_file.experiment) # experiment number\nprint(eeg_file.raw) # raw data\nprint(eeg_file.file_source) # primary data file the EEG data was loaded from\nprint(eeg_file.events_source) # source file of events\nprint(eeg_file.montage_source) # source of the montage (may be pre-set montage name)\nprint(eeg_file.events) # impact times",
"_____no_output_____"
]
],
[
[
"#### Select specific epoch",
"_____no_output_____"
],
[
"If you have a specific epoch of interest you can specify it with the `get_epoch` method. You can retrieve it later by accessing the `epoch` attribute.",
"_____no_output_____"
]
],
[
[
"nth_epoch = 5 # the epoch of interest to select, the 6th impact\nsingle_epoch = epochs.get_epoch(nth_epoch)\nsingle_epoch",
"_____no_output_____"
],
[
"epochs.epoch",
"_____no_output_____"
]
],
[
[
"#### Getting an evoked object",
"_____no_output_____"
],
[
"You can also use the `get_epoch` method to retrieve an evoked object, which represents an averaging of each event in your epoch. Note that evoked data is its own type of object and is not guaranteed to work with every function in this package.",
"_____no_output_____"
]
],
[
[
"evoked = epochs.get_epoch(\"evoked\")\ntype(evoked)",
"_____no_output_____"
],
[
"evoked.info",
"_____no_output_____"
]
],
[
[
"#### Decimate the epoch (optional)\nTo reduce the size of the selected epoch you can choose to skip a selected number of time steps by calling the `skip_n_steps` method. If `use_single=True` (the default), it will only be run on the current selected epoch from the previous step, contained in the `epoch` attribute. Otherwise it will run on all the epochs contained within the `all_epochs` attribute.\n\nSkipping steps will greatly reduce animation times for the other functions in the package. The greater the number of steps skipped, the fewer the frames to animate. In the example below we are reducing the selected epoch from 4097 time steps to 81 time steps. ",
"_____no_output_____"
]
],
[
[
"single_epoch.get_data().shape",
"_____no_output_____"
],
[
"num_steps = 50\nsmaller_epoch = epochs.skip_n_steps(num_steps)\nsmaller_epoch.get_data().shape",
"_____no_output_____"
]
],
[
[
"#### Average the epoch (optional)\nTo reduce the size of the selected epoch you can choose to average a selected number of time steps by calling the `average_n_steps` method. It will be run on the current selected epoch from the previous step, contained in the `epoch` attribute.\n\nAveraging works the same way as decimating above, but instead of simply ignoring records between steps it takes an average. ",
"_____no_output_____"
]
],
[
[
"num_steps = 50\naverage_epoch = epochs.average_n_steps(num_steps)\naverage_epoch.get_data()",
"_____no_output_____"
]
],
[
[
"### MNE functions",
"_____no_output_____"
],
[
"Now that you have access epoched data, you can use the `simpl_eeg` package functions as well as any [MNE functions](https://mne.tools/stable/generated/mne.Epochs.html) which act on `mne.epoch` objects. Below are some useful examples for the MNE objects contained within the object we created. ",
"_____no_output_____"
],
[
"#### Raw data\nhttps://mne.tools/stable/generated/mne.io.Raw.html",
"_____no_output_____"
]
],
[
[
"raw = epochs.eeg_file.raw\nraw.info",
"_____no_output_____"
],
[
"raw.plot_psd();",
"_____no_output_____"
]
],
[
[
"#### Epoch data",
"_____no_output_____"
]
],
[
[
"# first 3 epochs\nepochs.all_epochs.plot(n_epochs=3);",
"_____no_output_____"
],
[
"# specific epoch\nepochs.epoch.plot();",
"_____no_output_____"
],
[
"# specific epoch with steps skipped\nepochs.skip_n_steps(100).plot();",
"_____no_output_____"
]
]
] | [
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] | [
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown",
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown",
"markdown",
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
]
] |
d09a3b1cc5b6c4330aa72f20ac6a9273351a49da | 2,431 | ipynb | Jupyter Notebook | 1-micropython.ipynb | SebastianRoll/NDC-MicroPython | b8a366b02b5eb18b6f2a37f35050bf62cc95dae9 | [
"MIT"
] | 1 | 2018-09-29T12:28:11.000Z | 2018-09-29T12:28:11.000Z | 1-micropython.ipynb | SebastianRoll/NDC-MicroPython | b8a366b02b5eb18b6f2a37f35050bf62cc95dae9 | [
"MIT"
] | null | null | null | 1-micropython.ipynb | SebastianRoll/NDC-MicroPython | b8a366b02b5eb18b6f2a37f35050bf62cc95dae9 | [
"MIT"
] | 1 | 2019-06-09T17:05:51.000Z | 2019-06-09T17:05:51.000Z | 26.714286 | 592 | 0.60798 | [
[
[
"# MicroPython\n\n\n<img src=\"https://upload.wikimedia.org/wikipedia/commons/thumb/4/4e/Micropython-logo.svg/765px-Micropython-logo.svg.png\" alt=\"Drawing\" style=\"width: 200px; align:left;\"/>",
"_____no_output_____"
],
[
"- 256kB\n- hardware modules\n- Created by Damien George\n- Backed by Kickstarter campaign 2013\n- MIT license",
"_____no_output_____"
],
[
"## micropython-lib\n\n[list of package](https://pypi.org/search/?q=micropython)\n\nMicroPython is (usually) distributed as a single executable/binary file with just few builtin modules. There is no extensive standard library comparable with CPython. Instead, there is a related, but separate project micropython-lib which provides implementations for many modules from CPython’s standard library. However, large subset of these modules require POSIX-like environment (Linux, FreeBSD, MacOS, etc.; Windows may be partially supported), and thus would work or make sense only with MicroPython Unix port. Some subset of modules is however usable for baremetal ports too.\n\nUnlike monolithic CPython stdlib, micropython-lib modules are intended to be installed individually - either using manual copying or using upip.",
"_____no_output_____"
],
[
"## Package management\n\nupip",
"_____no_output_____"
],
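[
"A minimal, hypothetical example of using upip (not part of the original notes): the package name and the assumption that the board already has a working network connection are illustrative only.",
"_____no_output_____"
],
[
"# Hypothetical sketch: install a micropython-lib package from the REPL with upip.\n# 'micropython-umqtt.simple' is only an example package name; the board needs\n# a working network connection first.\nimport upip\nupip.install('micropython-umqtt.simple')\n\n# Equivalent from the Unix port command line:\n#   micropython -m upip install micropython-umqtt.simple",
"_____no_output_____"
],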
[
"### machine\n\nhttps://docs.micropython.org/en/latest/esp8266/library/machine.html\n\nmachine.deepsleep()",
"_____no_output_____"
],
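[
"A minimal, hypothetical sketch of the machine module (not from the original notes): the pin number is board-dependent and the deep-sleep wake-up configuration is only outlined in comments.",
"_____no_output_____"
],
[
"# Hypothetical sketch: toggle a GPIO pin, then enter deep sleep.\n# Pin 2 is only an example; on many ESP8266 boards it drives the onboard LED.\nfrom machine import Pin\nimport machine\n\nled = Pin(2, Pin.OUT)\nled.value(1)   # drive the pin high\nled.value(0)   # drive the pin low\n\n# Enter deep sleep; on the ESP8266 a wake-up interval is configured via the\n# RTC alarm before calling deepsleep().\nmachine.deepsleep()",
"_____no_output_____"
]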
]
] | [
"markdown"
] | [
[
"markdown",
"markdown",
"markdown",
"markdown",
"markdown"
]
] |
d09a44265010849d542900e623bb4ec4e8d19c20 | 45,505 | ipynb | Jupyter Notebook | IBM_AI/4_Pytorch/8.4.2_NeuralNetworkswithMomentum_v2.ipynb | merula89/cousera_notebooks | caa529a7abd3763d26f3f2add7c3ab508fbb9bd2 | [
"MIT"
] | null | null | null | IBM_AI/4_Pytorch/8.4.2_NeuralNetworkswithMomentum_v2.ipynb | merula89/cousera_notebooks | caa529a7abd3763d26f3f2add7c3ab508fbb9bd2 | [
"MIT"
] | null | null | null | IBM_AI/4_Pytorch/8.4.2_NeuralNetworkswithMomentum_v2.ipynb | merula89/cousera_notebooks | caa529a7abd3763d26f3f2add7c3ab508fbb9bd2 | [
"MIT"
] | null | null | null | 76.222781 | 27,568 | 0.806043 | [
[
[
"<a href=\"http://cocl.us/pytorch_link_top\">\n <img src=\"https://s3-api.us-geo.objectstorage.softlayer.net/cf-courses-data/CognitiveClass/DL0110EN/notebook_images%20/Pytochtop.png\" width=\"750\" alt=\"IBM Product \" />\n</a> \n",
"_____no_output_____"
],
[
"<img src=\"https://s3-api.us-geo.objectstorage.softlayer.net/cf-courses-data/CognitiveClass/DL0110EN/notebook_images%20/cc-logo-square.png\" width=\"200\" alt=\"cognitiveclass.ai logo\" />",
"_____no_output_____"
],
[
"<h1>Neural Networks with Momentum</h1>",
"_____no_output_____"
],
[
"<h2>Table of Contents</h2>\n<p>In this lab, you will see how different values for the momentum parameters affect the convergence rate of a neural network.</p>\n\n<ul>\n<li><a href=\"#Model\">Neural Network Module and Function for Training</a></li>\n<li><a href=\"#Train\">Train Different Neural Networks Model different values for the Momentum Parameter</a></li>\n<li><a href=\"#Result\">Compare Results of Different Momentum Terms</a></li>\n</ul>\n<p>Estimated Time Needed: <strong>25 min</strong></p>\n\n<hr>",
"_____no_output_____"
],
[
"<h2>Preparation</h2>",
"_____no_output_____"
],
[
"We'll need the following libraries: ",
"_____no_output_____"
]
],
[
[
"# Import the libraries for this lab\n\nimport matplotlib.pyplot as plt \nimport numpy as np\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nfrom matplotlib.colors import ListedColormap\nfrom torch.utils.data import Dataset, DataLoader\n\ntorch.manual_seed(1)\nnp.random.seed(1)",
"_____no_output_____"
]
],
[
[
"Functions used to plot:",
"_____no_output_____"
]
],
[
[
"# Define a function for plot the decision region\n\ndef plot_decision_regions_3class(model, data_set):\n cmap_light = ListedColormap(['#FFAAAA', '#AAFFAA','#00AAFF'])\n cmap_bold = ListedColormap(['#FF0000', '#00FF00','#00AAFF'])\n X=data_set.x.numpy()\n y=data_set.y.numpy()\n h = .02\n x_min, x_max = X[:, 0].min() - 0.1 , X[:, 0].max() + 0.1 \n y_min, y_max = X[:, 1].min() - 0.1 , X[:, 1].max() + 0.1 \n xx, yy = np.meshgrid(np.arange(x_min, x_max, h),np.arange(y_min, y_max, h))\n XX=torch.torch.Tensor(np.c_[xx.ravel(), yy.ravel()])\n _,yhat=torch.max(model(XX),1)\n yhat=yhat.numpy().reshape(xx.shape)\n plt.pcolormesh(xx, yy, yhat, cmap=cmap_light)\n plt.plot(X[y[:]==0,0], X[y[:]==0,1], 'ro', label='y=0')\n plt.plot(X[y[:]==1,0], X[y[:]==1,1], 'go', label='y=1')\n plt.plot(X[y[:]==2,0], X[y[:]==2,1], 'o', label='y=2')\n plt.title(\"decision region\")\n plt.legend()",
"_____no_output_____"
]
],
[
[
"Create the dataset class ",
"_____no_output_____"
]
],
[
[
"# Create the dataset class\n\nclass Data(Dataset):\n \n # modified from: http://cs231n.github.io/neural-networks-case-study/\n # Constructor\n def __init__(self, K=3, N=500):\n D = 2\n X = np.zeros((N * K, D)) # data matrix (each row = single example)\n y = np.zeros(N * K, dtype='uint8') # class labels\n for j in range(K):\n ix = range(N * j, N * (j + 1))\n r = np.linspace(0.0, 1, N) # radius\n t = np.linspace(j * 4, (j + 1) * 4, N) + np.random.randn(N) * 0.2 # theta\n X[ix] = np.c_[r * np.sin(t), r * np.cos(t)]\n y[ix] = j\n \n self.y = torch.from_numpy(y).type(torch.LongTensor)\n self.x = torch.from_numpy(X).type(torch.FloatTensor)\n self.len = y.shape[0]\n \n # Getter\n def __getitem__(self, index): \n return self.x[index], self.y[index]\n \n # Get Length\n def __len__(self):\n return self.len\n \n # Plot the diagram\n def plot_data(self):\n plt.plot(self.x[self.y[:] == 0, 0].numpy(), self.x[self.y[:] == 0, 1].numpy(), 'o', label=\"y=0\")\n plt.plot(self.x[self.y[:] == 1, 0].numpy(), self.x[self.y[:] == 1, 1].numpy(), 'ro', label=\"y=1\")\n plt.plot(self.x[self.y[:] == 2, 0].numpy(),self.x[self.y[:] == 2, 1].numpy(), 'go',label=\"y=2\")\n plt.legend()",
"_____no_output_____"
]
],
[
[
"<!--Empty Space for separating topics-->",
"_____no_output_____"
],
[
"<h2 id=\"Model\">Neural Network Module and Function for Training</h2>",
"_____no_output_____"
],
[
"Create Neural Network Module using <code>ModuleList()</code>",
"_____no_output_____"
]
],
[
[
"# Create dataset object\n\nclass Net(nn.Module):\n \n # Constructor\n def __init__(self, Layers):\n super(Net, self).__init__()\n self.hidden = nn.ModuleList()\n for input_size, output_size in zip(Layers, Layers[1:]):\n self.hidden.append(nn.Linear(input_size, output_size))\n \n # Prediction\n def forward(self, activation):\n L = len(self.hidden)\n for (l, linear_transform) in zip(range(L), self.hidden):\n if l < L - 1:\n activation = F.relu(linear_transform(activation)) \n else:\n activation = linear_transform(activation)\n return activation",
"_____no_output_____"
]
],
[
[
"Create the function for training the model.",
"_____no_output_____"
]
],
[
[
"# Define the function for training the model\n\ndef train(data_set, model, criterion, train_loader, optimizer, epochs=100):\n LOSS = []\n ACC = []\n for epoch in range(epochs):\n for x, y in train_loader:\n optimizer.zero_grad()\n yhat = model(x)\n loss = criterion(yhat, y)\n optimizer.zero_grad()\n loss.backward()\n optimizer.step()\n LOSS.append(loss.item())\n ACC.append(accuracy(model,data_set))\n \n results ={\"Loss\":LOSS, \"Accuracy\":ACC}\n fig, ax1 = plt.subplots()\n color = 'tab:red'\n ax1.plot(LOSS,color=color)\n ax1.set_xlabel('epoch', color=color)\n ax1.set_ylabel('total loss', color=color)\n ax1.tick_params(axis = 'y', color=color)\n \n ax2 = ax1.twinx() \n color = 'tab:blue'\n ax2.set_ylabel('accuracy', color=color) # we already handled the x-label with ax1\n ax2.plot(ACC, color=color)\n ax2.tick_params(axis='y', color=color)\n fig.tight_layout() # otherwise the right y-label is slightly clipped\n \n plt.show()\n return results",
"_____no_output_____"
]
],
[
[
"Define a function used to calculate accuracy.",
"_____no_output_____"
]
],
[
[
"# Define a function for calculating accuracy\n\ndef accuracy(model, data_set):\n _, yhat = torch.max(model(data_set.x), 1)\n return (yhat == data_set.y).numpy().mean()",
"_____no_output_____"
]
],
[
[
"<!--Empty Space for separating topics-->",
"_____no_output_____"
],
[
"<h2 id=\"Train\">Train Different Networks Model different values for the Momentum Parameter</h2>",
"_____no_output_____"
],
[
"Crate a dataset object using <code>Data</code>",
"_____no_output_____"
]
],
[
[
"# Create the dataset and plot it\n\ndata_set = Data()\ndata_set.plot_data()\ndata_set.y = data_set.y.view(-1)",
"_____no_output_____"
]
],
[
[
"Dictionary to contain different cost and accuracy values for each epoch for different values of the momentum parameter.",
"_____no_output_____"
]
],
[
[
"# Initialize a dictionary to contain the cost and accuracy\n\nResults = {\"momentum 0\": {\"Loss\": 0, \"Accuracy:\": 0}, \"momentum 0.1\": {\"Loss\": 0, \"Accuracy:\": 0}}",
"_____no_output_____"
]
],
[
[
"Create a network to classify three classes with 1 hidden layer with 50 neurons and a momentum value of zero.",
"_____no_output_____"
]
],
[
[
"# Train a model with 1 hidden layer and 50 neurons\n\nLayers = [2, 50, 3]\nmodel = Net(Layers)\nlearning_rate = 0.10\noptimizer = torch.optim.SGD(model.parameters(), lr=learning_rate)\ntrain_loader = DataLoader(dataset=data_set, batch_size=20)\ncriterion = nn.CrossEntropyLoss()\nResults[\"momentum 0\"] = train(data_set, model, criterion, train_loader, optimizer, epochs=100)\nplot_decision_regions_3class(model, data_set)",
"_____no_output_____"
]
],
[
[
"Create a network to classify three classes with 1 hidden layer with 50 neurons and a momentum value of 0.1.\n\n\n",
"_____no_output_____"
]
],
[
[
"# Train a model with 1 hidden layer and 50 neurons with 0.1 momentum\n\nLayers = [2, 50, 3]\nmodel = Net(Layers)\nlearning_rate = 0.10\noptimizer = torch.optim.SGD(model.parameters(), lr=learning_rate, momentum=0.1)\ntrain_loader = DataLoader(dataset=data_set, batch_size=20)\ncriterion = nn.CrossEntropyLoss()\nResults[\"momentum 0.1\"] = train(data_set, model, criterion, train_loader, optimizer, epochs=100)\nplot_decision_regions_3class(model, data_set)",
"_____no_output_____"
]
],
[
[
"\nCreate a network to classify three classes with 1 hidden layer with 50 neurons and a momentum value of 0.2.",
"_____no_output_____"
]
],
[
[
"# Train a model with 1 hidden layer and 50 neurons with 0.2 momentum\n\nLayers = [2, 50, 3]\nmodel = Net(Layers)\nlearning_rate = 0.10\noptimizer = torch.optim.SGD(model.parameters(), lr=learning_rate, momentum=0.2)\ntrain_loader = DataLoader(dataset=data_set, batch_size=20)\ncriterion = nn.CrossEntropyLoss()\nResults[\"momentum 0.2\"] = train(data_set, model, criterion, train_loader, optimizer, epochs=100)\nplot_decision_regions_3class(model, data_set)",
"_____no_output_____"
]
],
[
[
"Create a network to classify three classes with 1 hidden layer with 50 neurons and a momentum value of 0.4.",
"_____no_output_____"
]
],
[
[
"# Train a model with 1 hidden layer and 50 neurons with 0.4 momentum\n\nLayers = [2, 50, 3]\nmodel = Net(Layers)\nlearning_rate = 0.10\noptimizer = torch.optim.SGD(model.parameters(), lr=learning_rate, momentum=0.4)\ntrain_loader = DataLoader(dataset=data_set, batch_size=20)\ncriterion = nn.CrossEntropyLoss()\nResults[\"momentum 0.4\"] = train(data_set, model, criterion, train_loader, optimizer, epochs=100)\nplot_decision_regions_3class(model, data_set)",
"_____no_output_____"
]
],
[
[
"Create a network to classify three classes with 1 hidden layer with 50 neurons and a momentum value of 0.5.",
"_____no_output_____"
]
],
[
[
"# Train a model with 1 hidden layer and 50 neurons with 0.5 momentum\n\nLayers = [2, 50, 3]\nmodel = Net(Layers)\nlearning_rate = 0.10\noptimizer = torch.optim.SGD(model.parameters(), lr=learning_rate, momentum=0.5)\ntrain_loader = DataLoader(dataset=data_set, batch_size=20)\ncriterion = nn.CrossEntropyLoss()\nResults[\"momentum 0.5\"] = train(data_set, model, criterion, train_loader, optimizer, epochs=100)\nplot_decision_regions_3class(model,data_set)",
"_____no_output_____"
]
],
[
[
"<!--Empty Space for separating topics-->",
"_____no_output_____"
],
[
"<h2 id=\"Result\">Compare Results of Different Momentum Terms</h2>",
"_____no_output_____"
],
[
"The plot below compares results of different momentum terms. We see that in general. The Cost decreases proportionally to the momentum term, but larger momentum terms lead to larger oscillations. While the momentum term decreases faster, it seems that a momentum term of 0.2 reaches the smallest value for the cost. ",
"_____no_output_____"
]
],
[
[
"# Plot the Loss result for each term\n\nfor key, value in Results.items():\n plt.plot(value['Loss'],label=key)\n plt.legend()\n plt.xlabel('epoch')\n plt.ylabel('Total Loss or Cost')",
"_____no_output_____"
]
],
[
[
"The accuracy seems to be proportional to the momentum term.",
"_____no_output_____"
]
],
[
[
"# Plot the Accuracy result for each term\n\nfor key, value in Results.items():\n plt.plot(value['Accuracy'],label=key)\n plt.legend()\n plt.xlabel('epoch')\n plt.ylabel('Accuracy')",
"_____no_output_____"
]
],
[
[
"<!--Empty Space for separating topics-->",
"_____no_output_____"
],
[
"<a href=\"http://cocl.us/pytorch_link_bottom\">\n <img src=\"https://s3-api.us-geo.objectstorage.softlayer.net/cf-courses-data/CognitiveClass/DL0110EN/notebook_images%20/notebook_bottom%20.png\" width=\"750\" alt=\"PyTorch Bottom\" />\n</a>",
"_____no_output_____"
],
[
"<h2>About the Authors:</h2> \n\n<a href=\"https://www.linkedin.com/in/joseph-s-50398b136/\">Joseph Santarcangelo</a> has a PhD in Electrical Engineering, his research focused on using machine learning, signal processing, and computer vision to determine how videos impact human cognition. Joseph has been working for IBM since he completed his PhD. ",
"_____no_output_____"
],
[
"Other contributors: <a href=\"https://www.linkedin.com/in/michelleccarey/\">Michelle Carey</a>, <a href=\"www.linkedin.com/in/jiahui-mavis-zhou-a4537814a\">Mavis Zhou</a>",
"_____no_output_____"
],
[
"<hr>",
"_____no_output_____"
],
[
"Copyright © 2018 <a href=\"cognitiveclass.ai?utm_source=bducopyrightlink&utm_medium=dswb&utm_campaign=bdu\">cognitiveclass.ai</a>. This notebook and its source code are released under the terms of the <a href=\"https://bigdatauniversity.com/mit-license/\">MIT License</a>.",
"_____no_output_____"
]
]
] | [
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown"
] | [
[
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown"
]
] |
d09a49997ccb2b43c46bbc2ad250b569c3308184 | 4,167 | ipynb | Jupyter Notebook | notebooks/UC_2018/integrating_workforce_demo_theatre/UC 2018 - 6 - Monitor Assignments And Update SQLite DB.ipynb | airyadriana/workforce-scripts | 85e5b1e706df91c9fa1a7a301e1288daf689b0d7 | [
"Apache-2.0"
] | 1 | 2020-03-11T15:27:59.000Z | 2020-03-11T15:27:59.000Z | notebooks/UC_2018/integrating_workforce_demo_theatre/UC 2018 - 6 - Monitor Assignments And Update SQLite DB.ipynb | airyadriana/workforce-scripts | 85e5b1e706df91c9fa1a7a301e1288daf689b0d7 | [
"Apache-2.0"
] | null | null | null | notebooks/UC_2018/integrating_workforce_demo_theatre/UC 2018 - 6 - Monitor Assignments And Update SQLite DB.ipynb | airyadriana/workforce-scripts | 85e5b1e706df91c9fa1a7a301e1288daf689b0d7 | [
"Apache-2.0"
] | null | null | null | 28.737931 | 476 | 0.594912 | [
[
[
"# Monitor Assignments and Update SQLite Table with Changes\nIn this example, the \"Sidewalk Repair\" assignments will be monitored. When a sidewalk as been repaired, the corresponding work order will be updated in the SQLite table to be marked as \"Completed\".",
"_____no_output_____"
]
],
[
[
"import sqlite3\nfrom datetime import datetime, timedelta\nimport time\nimport pandas as pd\nfrom arcgis.gis import GIS\nfrom arcgis.apps import workforce",
"_____no_output_____"
]
],
[
[
"### Connect to Organization and Get the Project\nConnect to ArcGIS Online and get the Project with assignments.",
"_____no_output_____"
]
],
[
[
"gis = GIS(\"https://arcgis.com\", \"workforce_scripts\")\nitem = gis.content.get(\"1f7b42024da544f6b1e557889e858ac6\")\nproject = workforce.Project(item)",
"_____no_output_____"
]
],
[
[
"### Connect to the SQLite Database and Review the Work Orders\nLet's review what the work order table looks like.",
"_____no_output_____"
]
],
[
[
"connection = sqlite3.connect(\"work_orders\")\ndf = pd.read_sql_query(\"select * from work_orders\", connection)\ndf",
"_____no_output_____"
]
],
[
[
"### Monitor the Project for Completed Assignments\nLet's run a loop that will check for \"Completed\" \"Sidewalk Repair\" assignments. When an assignment is returned from ArcGIS Online, let's change the value of it's status in the SQLite table from \"Backlog\" to \"Completed\". This is accomplished by leveraging the \"work_order_id\" field to lightweight-join the SQLite table to the workforce assignments feature service. When running the following section, complete a \"Sidewalk Repair\" Assignment on the mobile app.",
"_____no_output_____"
]
],
[
[
"processed_orders = [\"-1\"]\n# Run in a loop (for demo only)\nfor i in range(0, 12):\n print(\"Waiting...\")\n time.sleep(5)\n where_clause = f\"status=3 AND assignmentType=2 AND workOrderId NOT IN ({','.join(processed_orders)})\"\n print(f\"Checking for updates... {where_clause}\")\n assignments = project.assignments.search(where_clause)\n for assignment in assignments:\n cur = connection.cursor()\n values = ('Completed', assignment.notes, assignment.work_order_id,)\n cur.execute(\"update work_orders set status=?, notes=? where id=?\", values)\n connection.commit()\n processed_orders.append(assignment.work_order_id)\n print(\"Completed Assignment Processed\")",
"_____no_output_____"
]
],
[
[
"### Verify the Changes\nLet's verify that the changes were actually written to the SQLite table.",
"_____no_output_____"
]
],
[
[
"df = pd.read_sql_query(\"select * from work_orders\", connection)\ndf",
"_____no_output_____"
]
]
] | [
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] | [
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
]
] |
d09a4e7998b9701b4c086c5bd712a873eb218dad | 264,416 | ipynb | Jupyter Notebook | examples/Basics/Multiscale_Basics_Tutorial.ipynb | Coilm/hystorian | 1aa3aa6c76fa62e72bd1e936772eef738b85b602 | [
"CC-BY-4.0"
] | null | null | null | examples/Basics/Multiscale_Basics_Tutorial.ipynb | Coilm/hystorian | 1aa3aa6c76fa62e72bd1e936772eef738b85b602 | [
"CC-BY-4.0"
] | null | null | null | examples/Basics/Multiscale_Basics_Tutorial.ipynb | Coilm/hystorian | 1aa3aa6c76fa62e72bd1e936772eef738b85b602 | [
"CC-BY-4.0"
] | null | null | null | 775.41349 | 154,816 | 0.951452 | [
[
[
"# Multiscale Basics Tutorial\n\n*By R. Bulanadi, 28/01/20*\n\n***\nWhile Project Multiscale is currently very powerful, it has a slight learning curve to understand the required functions for basic use. This notebook has been written to teach the basics of using Project Multiscale functions, by binarising the Phase channels of microscopy data obtained from a Cypher Asylum AFM.",
"_____no_output_____"
],
[
"To use Project Multiscale, the Multiscale package must be loaded. Load it as below, being sure to change the directory to lead to your Multiscale package.",
"_____no_output_____"
]
],
[
[
"import sys\nsys.path.insert(0, '../../') #Change to your Multiscale Directory\nfrom multiscale.processing import twodim\nfrom multiscale.processing import core as pt\nfrom multiscale.processing import plot as msplt\nimport multiscale.io",
"Matplotlib_scalebar was not found, please install the package.\n"
]
],
[
[
"We will now convert our raw data (`.ibw` format) into the `.hdf5` format used by Project Multiscale. First, we will set the name of both our raw `.ibw` file, and the new `.hdf5` file.",
"_____no_output_____"
]
],
[
[
"original_filename = 'SD_P4_zB5_050mV_-2550mV_0002.ibw'\nfilename = original_filename.split('.')[0]+'.hdf5'",
"_____no_output_____"
]
],
[
[
"The `multiscale.io` package handles file conversion. In general, one can call `multiscale.io.read_file.tohdf5` to convert the data type.\n\n*If the data type is not currently compatible, either code a conversion function or ask Loic/Ralph/Iaroslav.*",
"_____no_output_____"
]
],
[
[
"multiscale.io.read_file.tohdf5(original_filename)",
"file successfully converted\n"
]
],
[
[
"If you open the newly produced file `SD_P4_zB5_050mV_-2550mV_0002` in HDFView, you will see four folders:\n1. **`datasets`** contains the main converted data from the .ibw files. It contains a subfolder for each of the original scans (in this case, only one), and each of these subfolders contain the 8 data channels obtained from the raw data.\n2. **`metadata`** contains all other data obtained from the .ibw files, except for the image itself, such as the scan rate or tip voltage.\n3. **`process`** is currently empty, but will eventually contain the results of our subsequent processing.\n4. **`type`** indicates the original filetype of the data - that is, 'ibw'.\n\n**Warning: HDFView prevents Python from operating on open .hdf5 files. Make sure to close the open files before proceeding!**\n\n***",
"_____no_output_____"
],
[
"Before we do any processing, let's just check if things work. The function `msplt.save_image` lets us save an image from an array - however, our array is stored in the `.hdf5` file, and Python does not currently know about it. To use `msplt.save_image` then, we call it using the `pt.m_apply` function.\n\nIn short, `pt.m_apply` lets us pass the location of the files within the `.hdf5` file, instead of an actual array. This makes handling several datasets much easier. For now, the main function call of `pt.m_apply` is of the format:\n\n`m_apply(filename, function, in_paths)`\n\n1. **`filename`** The name of the `.hdf5` file we are using. We set this earlier to be `'SD_P4_zB5_050mV_-2550mV_0002.hdf5'`\n2. **`function`** The function we are applying. In this case, we are going to use the function `msplt.save_image`.\n3. **`in_paths`** This is the path (or paths) to the data within the `.hdf5` file. If you look in HDFView, you can see the file directory. In this case, let's look at the `Phase1Trace` channel in `datasets`. We will thus set this argument to `'datasets/SD_P4_zB5_050mV_-2550mV_0002/Phase1Trace'`\n\n**Note:** Other arguments exist, but are beyond this scope. See Intermediate or Programming tutorials for more detail\n\n",
"_____no_output_____"
]
],
[
[
"pt.m_apply(filename, msplt.save_image, 'datasets/SD_P4_zB5_050mV_-2550mV_0002/Phase2Retrace', image_name = 'Original_Phase', show=True)",
"_____no_output_____"
]
],
[
[
"You might notice we added extra arguments to `m_apply`. In general, if `m_apply` is given extra arguments, these arguments are passed to the subfunction: in this case, `msplt.save_image`. Thus, `msplt.save_image` knows to set `image_name` to `'Original_Phase'`, and to set `show` to `True`. You should now also see the image saved in this fiel directory; if you want, you could change this by changing the variable `saving_path`\n\n***",
"_____no_output_____"
],
[
"Now that we have something to compare to, we can begin processing. We are going to linearise the phase of this image (that is, transform the phase, which is currently an angle between -90 and 270, and wrapping at that limit) to a number between 0 and 1. To do this, we are going to use the function phase_linearisation, which we will again call using `m_apply`:",
"_____no_output_____"
]
],
[
[
"pt.m_apply(filename, twodim.phase_linearisation, 'datasets/SD_P4_zB5_050mV_-2550mV_0002/Phase2Retrace')\nprint('Linearisation Complete!')",
"Linearisation Complete!\n"
]
],
[
[
"If you open HDFView right now, you should see a new folder in `process` called `001-phase_linearisation` which contains the newly linearised data. If an error did occur at some point, you might also see other files of the form `abc-phase_linearisation`, where abc is some number. Don't worry; simply mark the correct (or incorrect) ones, and change the path names of the next function calls to ensure it goes to the correct folder.\n\n***",
"_____no_output_____"
],
[
"Now that the data is linearised, we can now binarise it. This is simply a threshold function. This is called very similarly to the last function, except for the different function call, and the different path location. Feel free to look at the code itself in the `twodim` subpackage if y7ou want to see how this code works, or if you want to pass it other arguments.",
"_____no_output_____"
]
],
[
[
"pt.m_apply(filename, twodim.phase_binarisation, 'process/001-phase_linearisation/SD_P4_zB5_050mV_-2550mV_0002/Phase2Retrace')\nprint('Binarisation Complete!')",
"Binarisation Complete!\n"
]
],
[
[
"Finally, we can view our final image. This requires the `msplt.save_image` function, which we used earlier.",
"_____no_output_____"
]
],
[
[
"pt.m_apply(filename, msplt.save_image, 'process/002-phase_binarisation/SD_P4_zB5_050mV_-2550mV_0002/Phase2Retrace', image_name = 'Binarised_Phase', show=True)",
"_____no_output_____"
]
],
[
[
"If we want to, we can also go back and see the intermediate, linearised phase:",
"_____no_output_____"
]
],
[
[
"pt.m_apply(filename, msplt.save_image, 'process/001-phase_linearisation/SD_P4_zB5_050mV_-2550mV_0002/Phase2Retrace', image_name = 'Linearised_Phase', show=True)",
"_____no_output_____"
]
],
[
[
"This ends the basic multiscale tutorial. As shown so far, Multiscale allows you to keep track of all of your variables and intermediate steps. Since they are saved permanently to the `.hdf5` file, they will remain so long as you don't delete it. Any function that works with arrays can also be passed directly into m_apply, and you also gain access to the current repository of functions.\n\nIf you want to apply on multiple datafiles concurrently, or use more complicated functions that require thus (such as distortion correction) please see the Intermediate tutorial. If you want to use Multiscale in more depth after, please check the Programming tutorial.\n\n***",
"_____no_output_____"
],
[
"## Troubleshooting\n\n**OSError: Unable to create file**\n\nClose the file in HDFView!\n\n**KeyError: 'Unable to open object (component not found)'**\n\nMake sure your `in_path` is correct. Open the file, and make sure that all your process numbers (ie, the 002) is the same as in your function call.",
"_____no_output_____"
]
]
] | [
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown"
] | [
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
]
] |
d09a501e74a2ce39346d93c52eac9de95c53d199 | 12,852 | ipynb | Jupyter Notebook | examples/.ipynb_checkpoints/Slide-Seq-checkpoint.ipynb | Mr-Milk/SpatialTis | bcdc6df5213b8b256cbe4c9a7c0f3b5e6d3c56b6 | [
"Apache-2.0"
] | 10 | 2020-07-14T13:27:35.000Z | 2021-11-24T21:41:30.000Z | examples/.ipynb_checkpoints/Slide-Seq-checkpoint.ipynb | Mr-Milk/SpatialTis | bcdc6df5213b8b256cbe4c9a7c0f3b5e6d3c56b6 | [
"Apache-2.0"
] | 21 | 2021-01-10T09:39:25.000Z | 2022-03-12T01:04:52.000Z | examples/.ipynb_checkpoints/Slide-Seq-checkpoint.ipynb | Mr-Milk/SpatialTis | bcdc6df5213b8b256cbe4c9a7c0f3b5e6d3c56b6 | [
"Apache-2.0"
] | null | null | null | 76.047337 | 1,812 | 0.617336 | [
[
[
"import anndata as ad\nimport spatialtis as st\nimport spatialtis.plotting as sp",
"_____no_output_____"
],
[
"data = ad.read_h5ad('../data/slide-seq.h5ad')\ndata",
"_____no_output_____"
],
[
"st.Config.view()",
"_____no_output_____"
],
[
"st.Config.centroid_key = 'centroid'\nst.Config.roi_key = 'roi'",
"_____no_output_____"
],
[
"st.spatial_autocorr(data)",
"_____no_output_____"
]
]
] | [
"code"
] | [
[
"code",
"code",
"code",
"code",
"code"
]
] |
d09a641ff90ef8c35714592b5a325c544ede0c90 | 617,289 | ipynb | Jupyter Notebook | Exercises/4_Bayesian_Interference/Bayesian_Inference.ipynb | psnx/artificial-intelligence | 136385d4572c9bb520fed1044afb84b93f4ba0bf | [
"MIT"
] | null | null | null | Exercises/4_Bayesian_Interference/Bayesian_Inference.ipynb | psnx/artificial-intelligence | 136385d4572c9bb520fed1044afb84b93f4ba0bf | [
"MIT"
] | null | null | null | Exercises/4_Bayesian_Interference/Bayesian_Inference.ipynb | psnx/artificial-intelligence | 136385d4572c9bb520fed1044afb84b93f4ba0bf | [
"MIT"
] | null | null | null | 87.397565 | 928 | 0.672005 | [
[
[
"## Our Mission ##\n\nSpam detection is one of the major applications of Machine Learning in the interwebs today. Pretty much all of the major email service providers have spam detection systems built in and automatically classify such mail as 'Junk Mail'. \n\nIn this mission we will be using the Naive Bayes algorithm to create a model that can classify SMS messages as spam or not spam, based on the training we give to the model. It is important to have some level of intuition as to what a spammy text message might look like. Often they have words like 'free', 'win', 'winner', 'cash', 'prize' and the like in them as these texts are designed to catch your eye and in some sense tempt you to open them. Also, spam messages tend to have words written in all capitals and also tend to use a lot of exclamation marks. To the human recipient, it is usually pretty straightforward to identify a spam text and our objective here is to train a model to do that for us!\n\nBeing able to identify spam messages is a binary classification problem as messages are classified as either 'Spam' or 'Not Spam' and nothing else. Also, this is a supervised learning problem, as we will be feeding a labelled dataset into the model, that it can learn from, to make future predictions. \n\n# Overview\n\nThis project has been broken down in to the following steps: \n\n- Step 0: Introduction to the Naive Bayes Theorem\n- Step 1.1: Understanding our dataset\n- Step 1.2: Data Preprocessing\n- Step 2.1: Bag of Words (BoW)\n- Step 2.2: Implementing BoW from scratch\n- Step 2.3: Implementing Bag of Words in scikit-learn\n- Step 3.1: Training and testing sets\n- Step 3.2: Applying Bag of Words processing to our dataset.\n- Step 4.1: Bayes Theorem implementation from scratch\n- Step 4.2: Naive Bayes implementation from scratch\n- Step 5: Naive Bayes implementation using scikit-learn\n- Step 6: Evaluating our model\n- Step 7: Conclusion\n\n**Note**: If you need help with a step, you can find the solution notebook by clicking on the Jupyter logo in the top left of the notebook.",
"_____no_output_____"
],
[
"### Step 0: Introduction to the Naive Bayes Theorem ###\n\nBayes Theorem is one of the earliest probabilistic inference algorithms. It was developed by Reverend Bayes (which he used to try and infer the existence of God no less), and still performs extremely well for certain use cases. \n\nIt's best to understand this theorem using an example. Let's say you are a member of the Secret Service and you have been deployed to protect the Democratic presidential nominee during one of his/her campaign speeches. Being a public event that is open to all, your job is not easy and you have to be on the constant lookout for threats. So one place to start is to put a certain threat-factor for each person. So based on the features of an individual, like age, whether the person is carrying a bag, looks nervous, etc., you can make a judgment call as to whether that person is a viable threat. \n\nIf an individual ticks all the boxes up to a level where it crosses a threshold of doubt in your mind, you can take action and remove that person from the vicinity. Bayes Theorem works in the same way, as we are computing the probability of an event (a person being a threat) based on the probabilities of certain related events (age, presence of bag or not, nervousness of the person, etc.). \n\nOne thing to consider is the independence of these features amongst each other. For example if a child looks nervous at the event then the likelihood of that person being a threat is not as much as say if it was a grown man who was nervous. To break this down a bit further, here there are two features we are considering, age AND nervousness. Say we look at these features individually, we could design a model that flags ALL persons that are nervous as potential threats. However, it is likely that we will have a lot of false positives as there is a strong chance that minors present at the event will be nervous. Hence by considering the age of a person along with the 'nervousness' feature we would definitely get a more accurate result as to who are potential threats and who aren't. \n\nThis is the 'Naive' bit of the theorem where it considers each feature to be independent of each other which may not always be the case and hence that can affect the final judgement.\n\nIn short, Bayes Theorem calculates the probability of a certain event happening (in our case, a message being spam) based on the joint probabilistic distributions of certain other events (in our case, the appearance of certain words in a message). We will dive into the workings of Bayes Theorem later in the mission, but first, let us understand the data we are going to work with.",
"_____no_output_____"
],
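For reference, the rule itself can be written as

$$P(A \mid B) = \frac{P(B \mid A)\,P(A)}{P(B)}$$

where, for this mission, $A$ is the event that a message is spam and $B$ is the evidence given by the words the message contains.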
[
"### Step 1.1: Understanding our dataset ### \n\n\nWe will be using a dataset originally compiled and posted on the UCI Machine Learning repository which has a very good collection of datasets for experimental research purposes. If you're interested, you can review the [abstract](https://archive.ics.uci.edu/ml/datasets/SMS+Spam+Collection) and the original [compressed data file](https://archive.ics.uci.edu/ml/machine-learning-databases/00228/) on the UCI site. For this exercise, however, we've gone ahead and downloaded the data for you.\n\n\n **Here's a preview of the data:** \n\n<img src=\"images/dqnb.png\" height=\"1242\" width=\"1242\">\n\nThe columns in the data set are currently not named and as you can see, there are 2 columns. \n\nThe first column takes two values, 'ham' which signifies that the message is not spam, and 'spam' which signifies that the message is spam. \n\nThe second column is the text content of the SMS message that is being classified.",
"_____no_output_____"
],
[
**Instructions:**">
">**Instructions:**\n* Import the dataset into a pandas dataframe using the **read_table** method. The file has already been downloaded, and you can access it using the filepath 'smsspamcollection/SMSSpamCollection'. Because this is a tab-separated dataset, we will be using '\\t' as the value for the 'sep' argument, which specifies this format.\n* Also, set the column names by passing the list ['label', 'sms_message'] to the 'names' argument of read_table().\n* Print the first five rows of the dataframe with the new column names.",
"_____no_output_____"
]
],
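A minimal sketch of one way to follow these instructions (it mirrors the steps above and is not necessarily identical to the official solution notebook):

```python
import pandas as pd

# Read the tab-separated file and assign the two column names.
df = pd.read_table('smsspamcollection/SMSSpamCollection',
                   sep='\t',
                   names=['label', 'sms_message'])

# Print the first five rows with the new column names.
print(df.head())
```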
[
[
"# '!' allows you to run bash commands from jupyter notebook.\nprint(\"List all the files in the current directory\\n\")\n!ls\n# The required data table can be found under smsspamcollection/SMSSpamCollection\nprint(\"\\n List all the files inside the smsspamcollection directory\\n\")\n!ls smsspamcollection",
"List all the files in the current directory\n\nBayesian_Inference.ipynb\t images\nBayesian_Inference_solution.ipynb smsspamcollection\n\n List all the files inside the smsspamcollection directory\n\nreadme\tSMSSpamCollection\n"
],
[
"!cat smsspamcollection/SMSSpamCollection",
"ham\tGo until jurong point, crazy.. Available only in bugis n great world la e buffet... Cine there got amore wat...\r\nham\tOk lar... Joking wif u oni...\r\nspam\tFree entry in 2 a wkly comp to win FA Cup final tkts 21st May 2005. Text FA to 87121 to receive entry question(std txt rate)T&C's apply 08452810075over18's\r\nham\tU dun say so early hor... U c already then say...\r\nham\tNah I don't think he goes to usf, he lives around here though\r\nspam\tFreeMsg Hey there darling it's been 3 week's now and no word back! I'd like some fun you up for it still? Tb ok! XxX std chgs to send, £1.50 to rcv\r\nham\tEven my brother is not like to speak with me. They treat me like aids patent.\r\nham\tAs per your request 'Melle Melle (Oru Minnaminunginte Nurungu Vettam)' has been set as your callertune for all Callers. Press *9 to copy your friends Callertune\r\nspam\tWINNER!! As a valued network customer you have been selected to receivea £900 prize reward! To claim call 09061701461. Claim code KL341. Valid 12 hours only.\r\nspam\tHad your mobile 11 months or more? U R entitled to Update to the latest colour mobiles with camera for Free! Call The Mobile Update Co FREE on 08002986030\r\nham\tI'm gonna be home soon and i don't want to talk about this stuff anymore tonight, k? I've cried enough today.\r\nspam\tSIX chances to win CASH! From 100 to 20,000 pounds txt> CSH11 and send to 87575. Cost 150p/day, 6days, 16+ TsandCs apply Reply HL 4 info\r\nspam\tURGENT! You have won a 1 week FREE membership in our £100,000 Prize Jackpot! Txt the word: CLAIM to No: 81010 T&C www.dbuk.net LCCLTD POBOX 4403LDNW1A7RW18\r\nham\tI've been searching for the right words to thank you for this breather. I promise i wont take your help for granted and will fulfil my promise. You have been wonderful and a blessing at all times.\r\nham\tI HAVE A DATE ON SUNDAY WITH WILL!!\r\nspam\tXXXMobileMovieClub: To use your credit, click the WAP link in the next txt message or click here>> http://wap. xxxmobilemovieclub.com?n=QJKGIGHJJGCBL\r\nham\tOh k...i'm watching here:)\r\nham\tEh u remember how 2 spell his name... Yes i did. He v naughty make until i v wet.\r\nham\tFine if thats the way u feel. Thats the way its gota b\r\nspam\tEngland v Macedonia - dont miss the goals/team news. Txt ur national team to 87077 eg ENGLAND to 87077 Try:WALES, SCOTLAND 4txt/ú1.20 POBOXox36504W45WQ 16+\r\nham\tIs that seriously how you spell his name?\r\nham\tI‘m going to try for 2 months ha ha only joking\r\nham\tSo ü pay first lar... Then when is da stock comin...\r\nham\tAft i finish my lunch then i go str down lor. Ard 3 smth lor. U finish ur lunch already?\r\nham\tFfffffffff. Alright no way I can meet up with you sooner?\r\nham\tJust forced myself to eat a slice. I'm really not hungry tho. This sucks. Mark is getting worried. He knows I'm sick when I turn down pizza. Lol\r\nham\tLol your always so convincing.\r\nham\tDid you catch the bus ? Are you frying an egg ? Did you make a tea? Are you eating your mom's left over dinner ? Do you feel my Love ?\r\nham\tI'm back & we're packing the car now, I'll let you know if there's room\r\nham\tAhhh. Work. I vaguely remember that! What does it feel like? Lol\r\nham\tWait that's still not all that clear, were you not sure about me being sarcastic or that that's why x doesn't want to live with us\r\nham\tYeah he got in at 2 and was v apologetic. n had fallen out and she was actin like spoilt child and he got caught up in that. Till 2! But we won't go there! Not doing too badly cheers. You? 
\r\nham\tK tell me anything about you.\r\nham\tFor fear of fainting with the of all that housework you just did? Quick have a cuppa\r\nspam\tThanks for your subscription to Ringtone UK your mobile will be charged £5/month Please confirm by replying YES or NO. If you reply NO you will not be charged\r\nham\tYup... Ok i go home look at the timings then i msg ü again... Xuhui going to learn on 2nd may too but her lesson is at 8am\r\nham\tOops, I'll let you know when my roommate's done\r\nham\tI see the letter B on my car\r\nham\tAnything lor... U decide...\r\nham\tHello! How's you and how did saturday go? I was just texting to see if you'd decided to do anything tomo. Not that i'm trying to invite myself or anything!\r\nham\tPls go ahead with watts. I just wanted to be sure. Do have a great weekend. Abiola\r\nham\tDid I forget to tell you ? I want you , I need you, I crave you ... But most of all ... I love you my sweet Arabian steed ... Mmmmmm ... Yummy\r\nspam\t07732584351 - Rodger Burns - MSG = We tried to call you re your reply to our sms for a free nokia mobile + free camcorder. Please call now 08000930705 for delivery tomorrow\r\nham\tWHO ARE YOU SEEING?\r\nham\tGreat! I hope you like your man well endowed. I am <#> inches...\r\nham\tNo calls..messages..missed calls\r\nham\tDidn't you get hep b immunisation in nigeria.\r\nham\tFair enough, anything going on?\r\nham\tYeah hopefully, if tyler can't do it I could maybe ask around a bit\r\nham\tU don't know how stubborn I am. I didn't even want to go to the hospital. I kept telling Mark I'm not a weak sucker. Hospitals are for weak suckers.\r\nham\tWhat you thinked about me. First time you saw me in class.\r\nham\tA gram usually runs like <#> , a half eighth is smarter though and gets you almost a whole second gram for <#>\r\nham\tK fyi x has a ride early tomorrow morning but he's crashing at our place tonight\r\nham\tWow. I never realized that you were so embarassed by your accomodations. I thought you liked it, since i was doing the best i could and you always seemed so happy about \"the cave\". I'm sorry I didn't and don't have more to give. I'm sorry i offered. I'm sorry your room was so embarassing.\r\nspam\tSMS. ac Sptv: The New Jersey Devils and the Detroit Red Wings play Ice Hockey. Correct or Incorrect? End? Reply END SPTV\r\nham\tDo you know what Mallika Sherawat did yesterday? Find out now @ <URL>\r\nspam\tCongrats! 1 year special cinema pass for 2 is yours. call 09061209465 now! C Suprman V, Matrix3, StarWars3, etc all 4 FREE! bx420-ip4-5we. 150pm. Dont miss out! \r\nham\tSorry, I'll call later in meeting.\r\nham\tTell where you reached\r\nham\tYes..gauti and sehwag out of odi series.\r\nham\tYour gonna have to pick up a $1 burger for yourself on your way home. I can't even move. Pain is killing me.\r\nham\tHa ha ha good joke. Girls are situation seekers.\r\nham\tIts a part of checking IQ\r\nham\tSorry my roommates took forever, it ok if I come by now?\r\nham\tOk lar i double check wif da hair dresser already he said wun cut v short. He said will cut until i look nice.\r\nspam\tAs a valued customer, I am pleased to advise you that following recent review of your Mob No. you are awarded with a £1500 Bonus Prize, call 09066364589\r\nham\tToday is \"song dedicated day..\" Which song will u dedicate for me? Send this to all ur valuable frnds but first rply me...\r\nspam\tUrgent UR awarded a complimentary trip to EuroDisinc Trav, Aco&Entry41 Or £1000. To claim txt DIS to 87121 18+6*£1.50(moreFrmMob. 
ShrAcomOrSglSuplt)10, LS1 3AJ\r\nspam\tDid you hear about the new \"Divorce Barbie\"? It comes with all of Ken's stuff!\r\nham\tI plane to give on this month end.\r\nham\tWah lucky man... Then can save money... Hee...\r\nham\tFinished class where are you.\r\nham\tHI BABE IM AT HOME NOW WANNA DO SOMETHING? XX\r\nham\tK..k:)where are you?how did you performed?\r\nham\tU can call me now...\r\nham\tI am waiting machan. Call me once you free.\r\nham\tThats cool. i am a gentleman and will treat you with dignity and respect.\r\nham\tI like you peoples very much:) but am very shy pa.\r\nham\tDoes not operate after <#> or what\r\nham\tIts not the same here. Still looking for a job. How much do Ta's earn there.\r\nham\tSorry, I'll call later\r\nham\tK. Did you call me just now ah? \r\nham\tOk i am on the way to home hi hi\r\nham\tYou will be in the place of that man\r\nham\tYup next stop.\r\nham\tI call you later, don't have network. If urgnt, sms me.\r\nham\tFor real when u getting on yo? I only need 2 more tickets and one more jacket and I'm done. I already used all my multis.\r\nham\tYes I started to send requests to make it but pain came back so I'm back in bed. Double coins at the factory too. I gotta cash in all my nitros.\r\nham\tI'm really not up to it still tonight babe\r\nham\tEla kano.,il download, come wen ur free..\r\nham\tYeah do! Don‘t stand to close tho- you‘ll catch something!\r\nham\tSorry to be a pain. Is it ok if we meet another night? I spent late afternoon in casualty and that means i haven't done any of y stuff42moro and that includes all my time sheets and that. Sorry. \r\nham\tSmile in Pleasure Smile in Pain Smile when trouble pours like Rain Smile when sum1 Hurts U Smile becoz SOMEONE still Loves to see u Smiling!!\r\nspam\tPlease call our customer service representative on 0800 169 6031 between 10am-9pm as you have WON a guaranteed £1000 cash or £5000 prize!\r\nham\tHavent planning to buy later. I check already lido only got 530 show in e afternoon. U finish work already?\r\nspam\tYour free ringtone is waiting to be collected. Simply text the password \"MIX\" to 85069 to verify. Get Usher and Britney. FML, PO Box 5249, MK17 92H. 450Ppw 16\r\nham\tWatching telugu movie..wat abt u?\r\nham\ti see. When we finish we have loads of loans to pay\r\nham\tHi. Wk been ok - on hols now! Yes on for a bit of a run. Forgot that i have hairdressers appointment at four so need to get home n shower beforehand. Does that cause prob for u?\"\r\nham\tI see a cup of coffee animation\r\nham\tPlease don't text me anymore. I have nothing else to say.\r\nham\tOkay name ur price as long as its legal! Wen can I pick them up? Y u ave x ams xx\r\nham\tI'm still looking for a car to buy. And have not gone 4the driving test yet.\r\nham\tAs per your request 'Melle Melle (Oru Minnaminunginte Nurungu Vettam)' has been set as your callertune for all Callers. Press *9 to copy your friends Callertune\r\nham\twow. You're right! I didn't mean to do that. I guess once i gave up on boston men and changed my search location to nyc, something changed. Cuz on my signin page it still says boston.\r\nham\tUmma my life and vava umma love you lot dear\r\nham\tThanks a lot for your wishes on my birthday. Thanks you for making my birthday truly memorable.\r\nham\tAight, I'll hit you up when I get some cash\r\nham\tHow would my ip address test that considering my computer isn't a minecraft server\r\nham\tI know! Grumpy old people. My mom was like you better not be lying. 
Then again I am always the one to play jokes...\r\nham\tDont worry. I guess he's busy.\r\nham\tWhat is the plural of the noun research?\r\nham\tGoing for dinner.msg you after.\r\nham\tI'm ok wif it cos i like 2 try new things. But i scared u dun like mah. Cos u said not too loud.\r\nspam\tGENT! We are trying to contact you. Last weekends draw shows that you won a £1000 prize GUARANTEED. Call 09064012160. Claim Code K52. Valid 12hrs only. 150ppm\r\nham\tWa, ur openin sentence very formal... Anyway, i'm fine too, juz tt i'm eatin too much n puttin on weight...Haha... So anythin special happened?\r\nham\tAs I entered my cabin my PA said, '' Happy B'day Boss !!''. I felt special. She askd me 4 lunch. After lunch she invited me to her apartment. We went there.\r\nspam\tYou are a winner U have been specially selected 2 receive £1000 or a 4* holiday (flights inc) speak to a live operator 2 claim 0871277810910p/min (18+) \r\nham\tGoodo! Yes we must speak friday - egg-potato ratio for tortilla needed! \r\nham\tHmm...my uncle just informed me that he's paying the school directly. So pls buy food.\r\nspam\tPRIVATE! Your 2004 Account Statement for 07742676969 shows 786 unredeemed Bonus Points. To claim call 08719180248 Identifier Code: 45239 Expires\r\nspam\tURGENT! Your Mobile No. was awarded £2000 Bonus Caller Prize on 5/9/03 This is our final try to contact U! Call from Landline 09064019788 BOX42WR29C, 150PPM\r\nham\there is my new address -apples&pairs&all that malarky\r\nspam\tTodays Voda numbers ending 7548 are selected to receive a $350 award. If you have a match please call 08712300220 quoting claim code 4041 standard rates app\r\nham\tI am going to sao mu today. Will be done only at 12 \r\nham\tÜ predict wat time ü'll finish buying?\r\nham\tGood stuff, will do.\r\nham\tJust so that you know,yetunde hasn't sent money yet. I just sent her a text not to bother sending. So its over, you dont have to involve yourself in anything. I shouldn't have imposed anything on you in the first place so for that, i apologise.\r\nham\tAre you there in room.\r\nham\tHEY GIRL. HOW R U? HOPE U R WELL ME AN DEL R BAK! AGAIN LONG TIME NO C! GIVE ME A CALL SUM TIME FROM LUCYxx\r\nham\tK..k:)how much does it cost?\r\nham\tI'm home.\r\nham\tDear, will call Tmorrow.pls accomodate.\r\nham\tFirst answer my question.\r\nspam\tSunshine Quiz Wkly Q! Win a top Sony DVD player if u know which country the Algarve is in? Txt ansr to 82277. £1.50 SP:Tyrone\r\nspam\tWant 2 get laid tonight? Want real Dogging locations sent direct 2 ur mob? Join the UK's largest Dogging Network bt Txting GRAVEL to 69888! Nt. ec2a. 31p.msg@150p\r\nham\tI only haf msn. It's [email protected]\r\nham\tHe is there. You call and meet him\r\nham\tNo no. I will check all rooms befor activities\r\nspam\tYou'll not rcv any more msgs from the chat svc. For FREE Hardcore services text GO to: 69988 If u get nothing u must Age Verify with yr network & try again\r\nham\tGot c... I lazy to type... I forgot ü in lect... I saw a pouch but like not v nice...\r\nham\tK, text me when you're on the way\r\nham\tSir, Waiting for your mail.\r\nham\tA swt thought: \"Nver get tired of doing little things 4 lovable persons..\" Coz..somtimes those little things occupy d biggest part in their Hearts.. Gud ni8\r\nham\tI know you are. Can you pls open the back?\r\nham\tYes see ya not on the dot\r\nham\tWhats the staff name who is taking class for us?\r\nspam\tFreeMsg Why haven't you replied to my text? I'm Randy, sexy, female and live local. Luv to hear from u. 
Netcollex Ltd 08700621170150p per msg reply Stop to end\r\nham\tUmmma.will call after check in.our life will begin from qatar so pls pray very hard.\r\nham\tK..i deleted my contact that why?\r\nham\tSindu got job in birla soft ..\r\nham\tThe wine is flowing and i'm i have nevering..\r\nham\tYup i thk cine is better cos no need 2 go down 2 plaza mah.\r\nham\tOk... Ur typical reply...\r\nham\tAs per your request 'Melle Melle (Oru Minnaminunginte Nurungu Vettam)' has been set as your callertune for all Callers. Press *9 to copy your friends Callertune\r\nham\tYou are everywhere dirt, on the floor, the windows, even on my shirt. And sometimes when i open my mouth, you are all that comes flowing out. I dream of my world without you, then half my chores are out too. A time of joy for me, lots of tv shows i.ll see. But i guess like all things you just must exist, like rain, hail and mist, and when my time here is done, you and i become one.\r\nham\tAaooooright are you at work?\r\nham\tI'm leaving my house now...\r\nham\tHello, my love. What are you doing? Did you get to that interview today? Are you you happy? Are you being a good boy? Do you think of me?Are you missing me ?\r\nspam\tCustomer service annoncement. You have a New Years delivery waiting for you. Please call 07046744435 now to arrange delivery\r\nspam\tYou are a winner U have been specially selected 2 receive £1000 cash or a 4* holiday (flights inc) speak to a live operator 2 claim 0871277810810\r\nham\tKeep yourself safe for me because I need you and I miss you already and I envy everyone that see's you in real life\r\nham\tNew car and house for my parents.:)i have only new job in hand:)\r\nham\tI'm so in love with you. I'm excited each day i spend with you. You make me so happy.\r\nspam\t-PLS STOP bootydelious (32/F) is inviting you to be her friend. Reply YES-434 or NO-434 See her: www.SMS.ac/u/bootydelious STOP? Send STOP FRND to 62468\r\nspam\tBangBabes Ur order is on the way. U SHOULD receive a Service Msg 2 download UR content. If U do not, GoTo wap. bangb. tv on UR mobile internet/service menu\r\nham\tI place all ur points on e cultures module already.\r\nspam\tURGENT! We are trying to contact you. Last weekends draw shows that you have won a £900 prize GUARANTEED. Call 09061701939. Claim code S89. Valid 12hrs only\r\nham\tHi frnd, which is best way to avoid missunderstding wit our beloved one's?\r\nham\tGreat escape. I fancy the bridge but needs her lager. See you tomo \r\nham\tYes :)it completely in out of form:)clark also utter waste.\r\nham\tSir, I need AXIS BANK account no and bank address.\r\nham\tHmmm.. Thk sure got time to hop ard... Ya, can go 4 free abt... Muz call u to discuss liao... \r\nham\tWhat time you coming down later? \r\nham\tBloody hell, cant believe you forgot my surname Mr . Ill give u a clue, its spanish and begins with m... \r\nham\tWell, i'm gonna finish my bath now. Have a good...fine night.\r\nham\tLet me know when you've got the money so carlos can make the call\r\nham\tU still going to the mall?\r\nham\tTurns out my friends are staying for the whole show and won't be back til ~ <#> , so feel free to go ahead and smoke that $ <#> worth\r\nham\tText her. If she doesnt reply let me know so i can have her log in\r\nham\tHi! You just spoke to MANEESHA V. We'd like to know if you were satisfied with the experience. Reply Toll Free with Yes or No.\r\nham\tYou lifted my hopes with the offer of money. I am in need. Especially when the end of the month approaches and it hurts my studying. 
Anyways have a gr8 weekend\r\nham\tLol no. U can trust me.\r\nham\tok. I am a gentleman and will treat you with dignity and respect.\r\nham\tHe will, you guys close?\r\nham\tGoing on nothing great.bye\r\nham\tHello handsome ! Are you finding that job ? Not being lazy ? Working towards getting back that net for mummy ? Where's my boytoy now ? Does he miss me ?\r\nham\tHaha awesome, be there in a minute\r\nspam\tPlease call our customer service representative on FREEPHONE 0808 145 4742 between 9am-11pm as you have WON a guaranteed £1000 cash or £5000 prize!\r\nham\tHave you got Xmas radio times. If not i will get it now\r\nham\tI jus reached home. I go bathe first. But my sis using net tell u when she finishes k...\r\nspam\tAre you unique enough? Find out from 30th August. www.areyouunique.co.uk\r\nham\tI'm sorry. I've joined the league of people that dont keep in touch. You mean a great deal to me. You have been a friend at all times even at great personal cost. Do have a great week.|\r\nham\tHi :)finally i completed the course:)\r\nham\tIt will stop on itself. I however suggest she stays with someone that will be able to give ors for every stool.\r\nham\tHow are you doing? Hope you've settled in for the new school year. Just wishin you a gr8 day\r\nham\tGud mrng dear hav a nice day\r\nham\tDid u got that persons story\r\nham\tis your hamster dead? Hey so tmr i meet you at 1pm orchard mrt? \r\nham\tHi its Kate how is your evening? I hope i can see you tomorrow for a bit but i have to bloody babyjontet! Txt back if u can. :) xxx\r\nham\tFound it, ENC <#> , where you at?\r\nham\tI sent you <#> bucks\r\nham\tHello darlin ive finished college now so txt me when u finish if u can love Kate xxx\r\nham\tYour account has been refilled successfully by INR <DECIMAL> . Your KeralaCircle prepaid account balance is Rs <DECIMAL> . Your Transaction ID is KR <#> .\r\nham\tGoodmorning sleeping ga.\r\nham\tU call me alter at 11 ok.\r\nham\tÜ say until like dat i dun buy ericsson oso cannot oredi lar...\r\nham\tAs I entered my cabin my PA said, '' Happy B'day Boss !!''. I felt special. She askd me 4 lunch. After lunch she invited me to her apartment. We went there.\r\nham\tAight yo, dats straight dogg\r\nham\tYou please give us connection today itself before <DECIMAL> or refund the bill\r\nham\tBoth :) i shoot big loads so get ready!\r\nham\tWhat's up bruv, hope you had a great break. Do have a rewarding semester.\r\nham\tHome so we can always chat\r\nham\tK:)k:)good:)study well.\r\nham\tYup... How ü noe leh...\r\nham\tSounds great! Are you home now?\r\nham\tFinally the match heading towards draw as your prediction.\r\nham\tTired. I haven't slept well the past few nights.\r\nham\tEasy ah?sen got selected means its good..\r\nham\tI have to take exam with march 3\r\nham\tYeah you should. I think you can use your gt atm now to register. Not sure but if there's anyway i can help let me know. But when you do be sure you are ready.\r\nham\tOk no prob. Take ur time.\r\nham\tThere is os called ubandu which will run without installing in hard disk...you can use that os to copy the important files in system and give it to repair shop..\r\nham\tSorry, I'll call later\r\nham\tU say leh... Of course nothing happen lar. Not say v romantic jus a bit only lor. I thk e nite scenery not so nice leh.\r\nspam\t500 New Mobiles from 2004, MUST GO! Txt: NOKIA to No: 89545 & collect yours today!From ONLY £1 www.4-tc.biz 2optout 087187262701.50gbp/mtmsg18\r\nham\tWould really appreciate if you call me. 
Just need someone to talk to.\r\nspam\tWill u meet ur dream partner soon? Is ur career off 2 a flyng start? 2 find out free, txt HORO followed by ur star sign, e. g. HORO ARIES\r\nham\tHey company elama po mudyadhu.\r\nham\tLife is more strict than teacher... Bcoz Teacher teaches lesson & then conducts exam, But Life first conducts Exam & then teaches Lessons. Happy morning. . .\r\nham\tDear good morning now only i am up\r\nham\tGet down in gandhipuram and walk to cross cut road. Right side <#> street road and turn at first right.\r\nham\tDear we are going to our rubber place\r\nham\tSorry battery died, yeah I'm here\r\nham\tYes:)here tv is always available in work place..\r\nspam\tText & meet someone sexy today. U can find a date or even flirt its up to U. Join 4 just 10p. REPLY with NAME & AGE eg Sam 25. 18 -msg recd@thirtyeight pence\r\nham\tI have printed it oh. So <#> come upstairs\r\nham\tOr ill be a little closer like at the bus stop on the same street\r\nham\tWhere are you?when wil you reach here?\r\nham\tNew Theory: Argument wins d SITUATION, but loses the PERSON. So dont argue with ur friends just.. . . . kick them & say, I'm always correct.!\r\nspam\tU 447801259231 have a secret admirer who is looking 2 make contact with U-find out who they R*reveal who thinks UR so special-call on 09058094597\r\nham\tTomarrow final hearing on my laptop case so i cant.\r\nham\tPLEASSSSSSSEEEEEE TEL ME V AVENT DONE SPORTSx\r\nham\tOkay. No no, just shining on. That was meant to be signing, but that sounds better.\r\nham\tAlthough i told u dat i'm into baig face watches now but i really like e watch u gave cos it's fr u. Thanx 4 everything dat u've done today, i'm touched...\r\nham\tU don't remember that old commercial?\r\nham\tToo late. I said i have the website. I didn't i have or dont have the slippers\r\nham\tI asked you to call him now ok\r\nham\tKallis wont bat in 2nd innings.\r\nham\tIt didnt work again oh. Ok goodnight then. I.ll fix and have it ready by the time you wake up. You are very dearly missed have a good night sleep.\r\nspam\tCongratulations ur awarded 500 of CD vouchers or 125gift guaranteed & Free entry 2 100 wkly draw txt MUSIC to 87066 TnCs www.Ldew.com1win150ppmx3age16\r\nham\tRanjith cal drpd Deeraj and deepak 5min hold\r\nham\tWen ur lovable bcums angry wid u, dnt take it seriously.. Coz being angry is d most childish n true way of showing deep affection, care n luv!.. kettoda manda... Have nice day da.\r\nham\tWhat you doing?how are you?\r\nham\tUps which is 3days also, and the shipping company that takes 2wks. The other way is usps which takes a week but when it gets to lag you may have to bribe nipost to get your stuff.\r\nham\tI'm back, lemme know when you're ready\r\nham\tDon't necessarily expect it to be done before you get back though because I'm just now headin out\r\nham\tMmm so yummy babe ... Nice jolt to the suzy\r\nham\tWhere are you lover ? I need you ...\r\nspam\tWe tried to contact you re your reply to our offer of a Video Handset? 750 anytime networks mins? UNLIMITED TEXT? Camcorder? Reply or call 08000930705 NOW\r\nham\tI‘m parked next to a MINI!!!! When are you coming in today do you think?\r\nham\tYup\r\nham\tAnyway i'm going shopping on my own now. Cos my sis not done yet. Dun disturb u liao.\r\nham\tMY NO. IN LUTON 0125698789 RING ME IF UR AROUND! H*\r\nspam\tHey I am really horny want to chat or see me naked text hot to 69698 text charged at 150pm to unsubscribe text stop 69698\r\nham\tWhy you Dint come with us.\r\nham\tSame. 
Wana plan a trip sometme then\r\nham\tNot sure yet, still trying to get a hold of him\r\nspam\tUr ringtone service has changed! 25 Free credits! Go to club4mobiles.com to choose content now! Stop? txt CLUB STOP to 87070. 150p/wk Club4 PO Box1146 MK45 2WT\r\nham\tThe evo. I just had to download flash. Jealous?\r\nspam\tRingtone Club: Get the UK singles chart on your mobile each week and choose any top quality ringtone! This message is free of charge.\r\nham\tCome to mu, we're sorting out our narcotics situation\r\nham\tNight has ended for another day, morning has come in a special way. May you smile like the sunny rays and leaves your worries at the blue blue bay.\r\nspam\tHMV BONUS SPECIAL 500 pounds of genuine HMV vouchers to be won. Just answer 4 easy questions. Play Now! Send HMV to 86688 More info:www.100percent-real.com\r\nham\tUsf I guess, might as well take 1 car\r\nham\tNo objection. My bf not coming.\r\nham\tThanx...\r\nham\tTell rob to mack his gf in the theater\r\nham\tAwesome, I'll see you in a bit\r\nham\tJust sent it. So what type of food do you like?\r\nham\tAll done? All handed in? Celebrations in full swing yet?\r\nham\tYou got called a tool?\r\nham\t\"Wen u miss someone, the person is definitely special for u..... But if the person is so special, why to miss them, just Keep-in-touch\" gdeve..\r\nham\tOk. I asked for money how far\r\nham\tOkie...\r\nham\tYeah I think my usual guy's still passed out from last night, if you get ahold of anybody let me know and I'll throw down\r\nham\tK, I might come by tonight then if my class lets out early\r\nham\tOk..\r\nham\thi baby im cruisin with my girl friend what r u up 2? give me a call in and hour at home if thats alright or fone me on this fone now love jenny xxx\r\nham\tMy life Means a lot to me, Not because I love my life, But because I love the people in my life, The world calls them friends, I call them my World:-).. Ge:-)..\r\nham\tDear,shall mail tonite.busy in the street,shall update you tonite.things are looking ok.varunnathu edukkukayee raksha ollu.but a good one in real sense.\r\nham\tHey you told your name to gautham ah?\r\nham\tHaf u found him? I feel so stupid da v cam was working.\r\nham\tOops. 4 got that bit.\r\nham\tAre you this much buzy\r\nham\tI accidentally deleted the message. Resend please.\r\nspam\tT-Mobile customer you may now claim your FREE CAMERA PHONE upgrade & a pay & go sim card for your loyalty. Call on 0845 021 3680.Offer ends 28thFeb.T&C's apply\r\nham\tUnless it's a situation where YOU GO GURL would be more appropriate\r\nham\tHurt me... Tease me... Make me cry... But in the end of my life when i die plz keep one rose on my grave and say STUPID I MISS U.. HAVE A NICE DAY BSLVYL\r\nham\tI cant pick the phone right now. Pls send a message\r\nham\tNeed a coffee run tomo?Can't believe it's that time of week already\r\nham\tAwesome, I remember the last time we got somebody high for the first time with diesel :V\r\nham\tShit that is really shocking and scary, cant imagine for a second. Def up for night out. Do u think there is somewhere i could crash for night, save on taxi?\r\nham\tOh and by the way you do have more food in your fridge! Want to go out for a meal tonight? \r\nham\tHe is a womdarfull actor\r\nspam\tSMS. ac Blind Date 4U!: Rodds1 is 21/m from Aberdeen, United Kingdom. Check Him out http://img. sms. ac/W/icmb3cktz8r7!-4 no Blind Dates send HIDE\r\nham\tYup... From what i remb... I think should be can book... \r\nham\tJos ask if u wana meet up?\r\nham\tLol yes. 
Our friendship is hanging on a thread cause u won't buy stuff.\r\nspam\tTheMob> Check out our newest selection of content, Games, Tones, Gossip, babes and sport, Keep your mobile fit and funky text WAP to 82468\r\nham\tWhere are the garage keys? They aren't on the bookshelf\r\nham\tToday is ACCEPT DAY..U Accept me as? Brother Sister Lover Dear1 Best1 Clos1 Lvblefrnd Jstfrnd Cutefrnd Lifpartnr Belovd Swtheart Bstfrnd No rply means enemy\r\nspam\tThink ur smart ? Win £200 this week in our weekly quiz, text PLAY to 85222 now!T&Cs WinnersClub PO BOX 84, M26 3UZ. 16+. GBP1.50/week\r\nham\tHe says he'll give me a call when his friend's got the money but that he's definitely buying before the end of the week\r\nham\tHi the way I was with u 2day, is the normal way&this is the real me. UR unique&I hope I know u 4 the rest of mylife. Hope u find wot was lost.\r\nham\tYou made my day. Do have a great day too.\r\nham\tK.k:)advance happy pongal.\r\nham\tHmmm... Guess we can go 4 kb n power yoga... Haha, dunno we can tahan power yoga anot... Thk got lo oso, forgot liao...\r\nham\tNot really dude, have no friends i'm afraid :(\r\nspam\tDecember only! Had your mobile 11mths+? You are entitled to update to the latest colour camera mobile for Free! Call The Mobile Update Co FREE on 08002986906\r\nham\tCoffee cake, i guess...\r\nham\tMerry Christmas to you too babe, i love ya *kisses*\r\nham\tHey... Why dont we just go watch x men and have lunch... Haha \r\nham\tcud u tell ppl im gona b a bit l8 cos 2 buses hav gon past cos they were full & im still waitin 4 1. Pete x\r\nham\tThat would be great. We'll be at the Guild. Could meet on Bristol road or somewhere - will get in touch over weekend. Our plans take flight! Have a good week\r\nham\tNo problem. How are you doing?\r\nham\tNo calls..messages..missed calls\r\nham\tHi da:)how is the todays class?\r\nham\tI'd say that's a good sign but, well, you know my track record at reading women\r\nham\tCool, text me when you're parked\r\nham\tI'm reading the text i just sent you. Its meant to be a joke. So read it in that light\r\nham\tK.k:)apo k.good movie.\r\nham\tMaybe i could get book out tomo then return it immediately ..? Or something.\r\nspam\tCall Germany for only 1 pence per minute! Call from a fixed line via access number 0844 861 85 85. No prepayment. Direct access!\r\nham\tAny chance you might have had with me evaporated as soon as you violated my privacy by stealing my phone number from your employer's paperwork. Not cool at all. Please do not contact me again or I will report you to your supervisor.\r\nspam\tValentines Day Special! Win over £1000 in our quiz and take your partner on the trip of a lifetime! Send GO to 83600 now. 150p/msg rcvd. CustCare:08718720201.\r\nham\tTa-Daaaaa! I am home babe, are you still up ?\r\nham\tCool. So how come you havent been wined and dined before?\r\nham\tJust sleeping..and surfing\r\nham\tSorry, I'll call later\r\nham\tU calling me right? Call my hand phone...\r\nham\tOk that's great thanx a lot.\r\nham\tI take it the post has come then! You must have 1000s of texts now! Happy reading. My one from wiv hello caroline at the end is my favourite. Bless him\r\nham\tWhere u been hiding stranger?\r\nham\tAm not interested to do like that.\r\nham\tMy sister cleared two round in birla soft yesterday.\r\nham\tGudnite....tc...practice going on\r\nham\tDis is yijue. I jus saw ur mail. In case huiming havent sent u my num. Dis is my num.\r\nham\tOne small prestige problem now.\r\nspam\tFancy a shag? I do.Interested? 
sextextuk.com txt XXUK SUZY to 69876. Txts cost 1.50 per msg. TnCs on website. X\r\nham\tJust checking in on you. Really do miss seeing Jeremiah. Do have a great month\r\nham\tNah can't help you there, I've never had an iphone\r\nham\tIf you're not in my car in an hour and a half I'm going apeshit\r\nham\tTODAY is Sorry day.! If ever i was angry with you, if ever i misbehaved or hurt you? plz plz JUST SLAP URSELF Bcoz, Its ur fault, I'm basically GOOD\r\nham\tYo you guys ever figure out how much we need for alcohol? Jay and I are trying to figure out how much we can safely spend on weed\r\nham\t<#> ISH MINUTES WAS 5 MINUTES AGO. WTF.\r\nham\tThank You for calling.Forgot to say Happy Onam to you Sirji.I am fine here and remembered you when i met an insurance person.Meet You in Qatar Insha Allah.Rakhesh, ex Tata AIG who joined TISSCO,Tayseer.\r\nspam\tCongratulations ur awarded 500 of CD vouchers or 125gift guaranteed & Free entry 2 100 wkly draw txt MUSIC to 87066 TnCs www.Ldew.com1win150ppmx3age16\r\nspam\tUr cash-balance is currently 500 pounds - to maximize ur cash-in now send CASH to 86688 only 150p/msg. CC: 08708800282 HG/Suite342/2Lands Row/W1J6HL\r\nham\tI'm an actor. When i work, i work in the evening and sleep late. Since i'm unemployed at the moment, i ALWAYS sleep late. When you're unemployed, every day is saturday.\r\nham\tHello! Just got here, st andrews-boy its a long way! Its cold. I will keep you posted\r\nham\tHa ha cool cool chikku chikku:-):-DB-)\r\nham\tOh ok no prob..\r\nham\tCheck audrey's status right now\r\nham\tBusy here. Trying to finish for new year. I am looking forward to finally meeting you...\r\nham\tGood afternoon sunshine! How dawns that day ? Are we refreshed and happy to be alive? Do we breathe in the air and smile ? I think of you, my love ... As always\r\nham\tWell i know Z will take care of me. So no worries.\r\nspam\tUpdate_Now - Xmas Offer! Latest Motorola, SonyEricsson & Nokia & FREE Bluetooth! Double Mins & 1000 Txt on Orange. Call MobileUpd8 on 08000839402 or call2optout/F4Q=\r\nspam\tHere is your discount code RP176781. To stop further messages reply stop. www.regalportfolio.co.uk. Customer Services 08717205546\r\nham\tWat uniform? In where get?\r\nham\tCool, text me when you're ready\r\nham\tHello my boytoy ... Geeee I miss you already and I just woke up. I wish you were here in bed with me, cuddling me. I love you ...\r\nham\tI will spoil you in bed as well :)\r\nham\tI'm going for bath will msg you next <#> min..\r\nham\tI cant keep talking to people if am not sure i can pay them if they agree to price. So pls tell me what you want to really buy and how much you are willing to pay\r\nspam\tThanks for your Ringtone Order, Reference T91. You will be charged GBP 4 per week. You can unsubscribe at anytime by calling customer services on 09057039994\r\nham\tCan you say what happen\r\nham\tYou could have seen me..i did't recognise you Face.:)\r\nham\tWell there's not a lot of things happening in Lindsay on New years *sighs* Some bars in Ptbo and the blue heron has something going\r\nham\tKeep my payasam there if rinu brings\r\nham\tI taught that Ranjith sir called me. So only i sms like that. Becaus hes verifying about project. Prabu told today so only pa dont mistake me..\r\nham\tI guess that's why you re worried. You must know that there's a way the body repairs itself. And i'm quite sure you shouldn't worry. We'll take it slow. First the tests, they will guide when your ovulation is then just relax. 
Nothing you've said is a reason to worry but i.ll keep on followin you up.\r\nham\tYeah sure, give me a couple minutes to track down my wallet\r\nham\tHey leave it. not a big deal:-) take care.\r\nham\tHey i will be late ah... Meet you at 945+\r\nspam\tDouble mins and txts 4 6months FREE Bluetooth on Orange. Available on Sony, Nokia Motorola phones. Call MobileUpd8 on 08000839402 or call2optout/N9DX\r\nham\tIt took Mr owl 3 licks\r\nham\tCustomer place i will call you.\r\nham\tMm that time you dont like fun\r\nspam\t4mths half price Orange line rental & latest camera phones 4 FREE. Had your phone 11mths ? Call MobilesDirect free on 08000938767 to update now! or2stoptxt\r\nham\tYup having my lunch buffet now.. U eat already?\r\nham\tHuh so late... Fr dinner?\r\nham\tHey so this sat are we going for the intro pilates only? Or the kickboxing too? \r\nham\tMorning only i can ok.\r\nham\tYes i think so. I am in office but my lap is in room i think thats on for the last few days. I didnt shut that down\r\nham\tPick you up bout 7.30ish? What time are and that going?\r\nham\tFrom here after The performance award is calculated every two month.not for current one month period..\r\nham\tWas actually sleeping and still might when u call back. So a text is gr8. You rock sis. Will send u a text wen i wake.\r\nham\tYou are always putting your business out there. You put pictures of your ass on facebook. You are one of the most open people i've ever met. Why would i think a picture of your room would hurt you, make you feel violated.\r\nham\tGood evening Sir, Al Salam Wahleykkum.sharing a happy news.By the grace of God, i got an offer from Tayseer,TISSCO and i joined.Hope you are fine.Inshah Allah,meet you sometime.Rakhesh,visitor from India.\r\nham\tHmmm...k...but i want to change the field quickly da:-)i wanna get system administrator or network administrator..\r\nspam\tFREE RINGTONE text FIRST to 87131 for a poly or text GET to 87131 for a true tone! Help? 0845 2814032 16 after 1st free, tones are 3x£150pw to e£nd txt stop\r\nham\tDear how is chechi. Did you talk to her\r\nham\tThe hair cream has not been shipped.\r\nham\tNone of that's happening til you get here though\r\nham\tYep, the great loxahatchee xmas tree burning of <#> starts in an hour\r\nham\tHaha get used to driving to usf man, I know a lot of stoners\r\nham\tAll was well until slightly disastrous class this pm with my fav darlings! Hope day off ok. Coffee wld be good as can't stay late tomorrow. Same time + place as always?\r\nham\tHello! Good week? Fancy a drink or something later?\r\nham\tHeadin towards busetop\r\nham\tMessage:some text missing* Sender:Name Missing* *Number Missing *Sent:Date missing *Missing U a lot thats y everything is missing sent via fullonsms.com\r\nham\tCome by our room at some point so we can iron out the plan for this weekend\r\nham\tCos i want it to be your thing\r\nham\tOkies... I'll go yan jiu too... We can skip ard oso, go cine den go mrt one, blah blah blah... \r\nham\tBring home some Wendy =D\r\nspam\t100 dating service cal;l 09064012103 box334sk38ch\r\nham\tWhatsup there. Dont u want to sleep\r\nham\tAlright i have a new goal now\r\nspam\tFREE entry into our £250 weekly competition just text the word WIN to 80086 NOW. 18 T&C www.txttowin.co.uk\r\nham\tAlright, I'll head out in a few minutes, text me where to meet you\r\nspam\tSend a logo 2 ur lover - 2 names joined by a heart. Txt LOVE NAME1 NAME2 MOBNO eg LOVE ADAM EVE 07123456789 to 87077 Yahoo! 
POBox36504W45WQ TxtNO 4 no ads 150p\r\nham\tYes:)from last week itself i'm taking live call.\r\nspam\tSomeone has contacted our dating service and entered your phone because they fancy you! To find out who it is call from a landline 09111032124 . PoBox12n146tf150p\r\nham\tSiva is in hostel aha:-.\r\nspam\tURGENT! Your Mobile number has been awarded with a £2000 prize GUARANTEED. Call 09058094455 from land line. Claim 3030. Valid 12hrs only\r\nham\tSend this to ur friends and receive something about ur voice..... How is my speaking expression? 1.childish 2.naughty 3.Sentiment 4.rowdy 5.ful of attitude 6.romantic 7.shy 8.Attractive 9.funny <#> .irritating <#> .lovable. reply me..\r\nham\tOk. She'll be ok. I guess\r\nham\taathi..where are you dear..\r\nham\tAny pain on urination any thing else?\r\nham\t7 at esplanade.. Do ü mind giving me a lift cos i got no car today..\r\nham\tI wnt to buy a BMW car urgently..its vry urgent.but hv a shortage of <#> Lacs.there is no source to arng dis amt. <#> lacs..thats my prob\r\nham\tAt home watching tv lor.\r\nham\tDoes she usually take fifteen fucking minutes to respond to a yes or no question\r\nspam\tCongrats! Nokia 3650 video camera phone is your Call 09066382422 Calls cost 150ppm Ave call 3mins vary from mobiles 16+ Close 300603 post BCM4284 Ldn WC1N3XX\r\nham\tBooked ticket for pongal?\r\nham\tYou available now? I'm like right around hillsborough & <#> th\r\nham\tThe message sent is askin for <#> dollars. Shoul i pay <#> or <#> ?\r\nham\tAsk g or iouri, I've told the story like ten times already\r\nham\tHow long does applebees fucking take\r\nham\tHi hope u get this txt~journey hasnt been gd,now about 50 mins late I think.\r\nham\tBut i have to. I like to have love and arrange.\r\nham\tYes..he is really great..bhaji told kallis best cricketer after sachin in world:).very tough to get out.\r\nham\tYou were supposed to wake ME up >:(\r\nham\tOic... I saw him too but i tot he din c me... I found a group liao...\r\nham\tSorry, I'll call later\r\nham\t\"HEY HEY WERETHE MONKEESPEOPLE SAY WE MONKEYAROUND! HOWDY GORGEOUS, HOWU DOIN? FOUNDURSELF A JOBYET SAUSAGE?LOVE JEN XXX\"\r\nham\tSorry, my battery died, I can come by but I'm only getting a gram for now, where's your place?\r\nham\tWell done, blimey, exercise, yeah, i kinda remember wot that is, hmm. \r\nham\tI wont get concentration dear you know you are my mind and everything :-)\r\nham\tLOL ... Have you made plans for new years?\r\nham\t10 min later k...\r\nham\thanks lotsly!\r\nham\tThanks for this hope you had a good day today\r\nham\tK:)k:)what are detail you want to transfer?acc no enough?\r\nham\tOk i will tell her to stay out. Yeah its been tough but we are optimistic things will improve this month.\r\nspam\tLoan for any purpose £500 - £75,000. Homeowners + Tenants welcome. Have you been previously refused? We can still help. Call Free 0800 1956669 or text back 'help'\r\nham\tSi si. I think ill go make those oreo truffles.\r\nham\tLOOK AT AMY URE A BEAUTIFUL, INTELLIGENT WOMAN AND I LIKE U A LOT. I KNOW U DONT LIKE ME LIKE THAT SO DONT WORRY.\r\nham\tI hope you that's the result of being consistently intelligent and kind. Start asking him about practicum links and keep your ears open and all the best. ttyl\r\nham\t1.20 that call cost. Which i guess isnt bad. Miss ya, need ya, want ya, love ya\r\nham\tGoing thru a very different feeling.wavering decisions and coping up with the same is the same individual.time will heal everything i believe.\r\nham\tWhere did u go? 
Raw SMS spam-collection data: a long run of tab-separated records of the form `label\tmessage`, where `label` is either `ham` or `spam`. In this dump the record separators appear as literal `\r\n` sequences rather than actual line breaks. Representative rows:

ham\tSorry, I'll call later ok bye
spam\tUpgrdCentre Orange customer, you may now claim your FREE CAMERA PHONE upgrade for your loyalty. Call now on 0207 153 9153. Offer ends 26th July. T&C's apply. Opt-out available
Lets know you are ok.\r\nham\tWot u up 2 u weirdo?\r\nham\tCan do lor...\r\nham\tDont put your phone on silent mode ok\r\nham\tCan i meet ü at 5.. As 4 where depends on where ü wan 2 in lor..\r\nham\tWaiting 4 my tv show 2 start lor... U leh still busy doing ur report?\r\nham\tOh ho. Is this the first time u use these type of words\r\nham\tAm I the only one who doesn't stalk profiles?\r\nham\tEver green quote ever told by Jerry in cartoon \"A Person Who Irritates u Always Is the one Who Loves u Vry Much But Fails to Express It...!..!! :-) :-) gud nyt\r\nham\tYes i thought so. Thanks.\r\nham\tBut if she.s drinkin i'm ok.\r\nham\tJust wondering, the others just took off\r\nham\tNight has ended for another day, morning has come in a special way. May you smile like the sunny rays and leaves your worries at the blue blue bay. Gud mrng\r\nham\tWhat do you do, my dog ? Must I always wait till the end of your day to have word from you ? Did you run out of time on your cell already?\r\nham\tHappy new year to u too!\r\nham\tHey...Great deal...Farm tour 9am to 5pm $95/pax, $50 deposit by 16 May\r\nham\tEat jap done oso aft ur lect wat... Ü got lect at 12 rite... \r\nham\tHey babe! I saw you came online for a second and then you disappeared, what happened ?\r\nham\tDa my birthdate in certificate is in april but real date is today. But dont publish it. I shall give you a special treat if you keep the secret. Any way thanks for the wishes\r\nham\tHappy birthday... May all ur dreams come true...\r\nham\tAiyah u did ok already lar. E nydc at wheellock?\r\nham\tTELL HER I SAID EAT SHIT.\r\nham\tSure! I am driving but will reach my destination soon.\r\nham\tK so am I, how much for an 8th? Fifty?\r\nham\tYour daily text from me – a favour this time\r\nham\tGreat to hear you are settling well. So what's happenin wit ola?\r\nham\tThose cocksuckers. If it makes you feel better ipads are worthless garbage novelty items and you should feel bad for even wanting one\r\nham\tI tot u reach liao. He said t-shirt.\r\nham\tFRAN I DECIDED 2 GO N E WAY IM COMPLETELY BROKE AN KNACKERED I GOT UP BOUT 3 C U 2MRW LOVE JANX P.S THIS IS MY DADS FONE, -NO CREDIT\r\nham\tI cant pick the phone right now. Pls send a message\r\nham\tYour right! I'll make the appointment right now.\r\nham\tDesignation is software developer and may be she get chennai:)\r\nspam\tEnjoy the jamster videosound gold club with your credits for 2 new videosounds+2 logos+musicnews! get more fun from jamster.co.uk! 16+only Help? call: 09701213186 \r\nspam\tGet 3 Lions England tone, reply lionm 4 mono or lionp 4 poly. 4 more go 2 www.ringtones.co.uk, the original n best. Tones 3GBP network operator rates apply\r\nham\tI jokin oni lar.. Ü busy then i wun disturb ü.\r\nham\tOk, be careful ! Don't text and drive !\r\nham\tI.ll always be there, even if its just in spirit. I.ll get a bb soon. Just trying to be sure i need it.\r\nham\tU r too much close to my heart. If u go away i will be shattered. Plz stay with me.\r\nham\tI love u 2 babe! R u sure everything is alrite. Is he being an idiot? Txt bak girlie\r\nham\tHow abt making some of the pics bigger?\r\nham\tGot but got 2 colours lor. One colour is quite light n e other is darker lor. Actually i'm done she's styling my hair now.\r\nham\tWhenevr ur sad, Whenevr ur gray, Remembr im here 2 listn 2 watevr u wanna say, Jus walk wid me a little while,& I promise I'll bring back ur smile.:-)\r\nham\tWhy nothing. 
Ok anyway give me treat\r\nspam\tWin the newest “Harry Potter and the Order of the Phoenix (Book 5) reply HARRY, answer 5 questions - chance to be the first among readers!\r\nham\tOk...\r\nham\tCorrect. So how was work today\r\nham\tJust sent again. Do you scream and moan in bed, princess?\r\nham\tI wake up long ago already... Dunno, what other thing?\r\nham\tOh just getting even with u.... u?\r\nham\tI thk 50 shd be ok he said plus minus 10.. Did ü leave a line in between paragraphs?\r\nham\tCan you call me plz. Your number shows out of coveragd area. I have urgnt call in vasai & have to reach before 4'o clock so call me plz\r\nham\tYeah jay's sort of a fucking retard\r\nham\tSorry, was in the bathroom, sup\r\nspam\tUr balance is now £500. Ur next question is: Who sang 'Uptown Girl' in the 80's ? 2 answer txt ur ANSWER to 83600. Good luck!\r\nham\tMy exam is for february 4. Wish you a great day.\r\nham\tI dont know what to do to come out of this so only am ask questions like this dont mistake me.\r\nham\tAight no rush, I'll ask jay\r\nham\tGood Morning plz call me sir\r\nham\tIt's ok lar. U sleep early too... Nite...\r\nham\tOh... Icic... K lor, den meet other day...\r\nham\tOh ! A half hour is much longer in Syria than Canada, eh ? Wow you must get SO much more work done in a day than us with all that extra time ! *grins*\r\nham\tSometimes we put walls around our hearts,not just to be safe from getting hurt.. But to find out who cares enough to break the walls & get closer.. GOODNOON:)\r\nham\tSweet, we may or may not go to 4U to meet carlos so gauge patty's interest in that\r\nham\tThen she buying today? Ü no need to c meh...\r\nham\tAight sorry I take ten years to shower. What's the plan?\r\nham\tEvery monday..nxt week vl be completing..\r\nham\tMight ax well im there.\r\nham\tJust chill for another 6hrs. If you could sleep the pain is not a surgical emergency so see how it unfolds. Okay\r\nham\tYeah I'll try to scrounge something up\r\nham\tCrazy ar he's married. Ü like gd looking guys not me. My frens like say he's korean leona's fave but i dun thk he is. Aft some thinking mayb most prob i'll go.\r\nham\tWere somewhere on Fredericksburg\r\nham\tQue pases un buen tiempo or something like that\r\nham\tIs it ok if I stay the night here? Xavier has a sleeping bag and I'm getting tired\r\nham\tShe doesnt need any test.\r\nham\tNothing much, chillin at home. Any super bowl plan?\r\nspam\tFREE2DAY sexy St George's Day pic of Jordan!Txt PIC to 89080 dont miss out, then every wk a saucy celeb!4 more pics c PocketBabe.co.uk 0870241182716 £3/wk\r\nham\tBugis oso near wat... \r\nham\tYo theres no class tmrw right?\r\nham\tLet Ur Heart Be Ur Compass Ur Mind Ur Map Ur Soul Ur Guide And U Will Never loose in world....gnun - Sent via WAY2SMS.COM\r\nham\tGoodnight, sleep well da please take care pa. Please.\r\nham\tBaaaaabe! I misss youuuuu ! Where are you ? I have to go and teach my class at 5 ...\r\nham\tConvey my regards to him\r\nham\tU ned to convince him tht its not possible witot hurting his feeling its the main\r\nham\tGood afternoon loverboy ! How goes you day ? Any luck come your way? I think of you, sweetie and send my love across the sea to make you smile and happy\r\nham\tIf i start sending blackberry torch to nigeria will you find buyer for me?like 4a month. And tell dad not to buy bb from anyone oh.\r\nham\t<#> %of pple marry with their lovers... becz they hav gud undrstndng dat avoids problems. i sent dis 2 u, u wil get gud news on friday by d person you like. 
And tomorrow will be the best day of your life. Dont break this chain. If you break you will suffer. send this to <#> frnds in <#> mins whn u read...\r\nham\tYo dude guess who just got arrested the other day\r\nham\tShuhui say change 2 suntec steamboat? U noe where? Where r u now?\r\nham\tWhat does the dance river do?\r\nham\tYetunde, i'm sorry but moji and i seem too busy to be able to go shopping. Can you just please find some other way to get what you wanted us to get. Please forgive me. You can reply free via yahoo messenger.\r\nham\tHey i will be really pretty late... You want to go for the lesson first? I will join you. I'm only reaching tp mrt\r\nspam\tHOT LIVE FANTASIES call now 08707509020 Just 20p per min NTT Ltd, PO Box 1327 Croydon CR9 5WB 0870..k\r\nham\tBbq this sat at mine from 6ish. Ur welcome 2 come\r\nham\tI don't know, same thing that's wrong everyso often, he panicks starts goin on bout not bein good enough …\r\nham\tAlright. I'm out--have a good night!\r\nham\tDid you try making another butt.\r\nham\tHope you are feeling great. Pls fill me in. Abiola\r\nham\tI though we shd go out n have some fun so bar in town or something – sound ok?\r\nham\t1) Go to write msg 2) Put on Dictionary mode 3)Cover the screen with hand, 4)Press <#> . 5)Gently remove Ur hand.. Its interesting..:)\r\nspam\tBears Pic Nick, and Tom, Pete and ... Dick. In fact, all types try gay chat with photo upload call 08718730666 (10p/min). 2 stop texts call 08712460324\r\nspam\t500 New Mobiles from 2004, MUST GO! Txt: NOKIA to No: 89545 & collect yours today!From ONLY £1 www.4-tc.biz 2optout 087187262701.50gbp/mtmsg18 TXTAUCTION\r\nham\tWe're finally ready fyi\r\nham\tAuntie huai juan never pick up her phone\r\nspam\tDouble Mins & Double Txt & 1/2 price Linerental on Latest Orange Bluetooth mobiles. Call MobileUpd8 for the very latest offers. 08000839402 or call2optout/LF56\r\nham\tYa tel, wats ur problem..\r\nspam\tNo. 1 Nokia Tone 4 ur mob every week! Just txt NOK to 87021. 1st Tone FREE ! so get txtin now and tell ur friends. 150p/tone. 16 reply HL 4info\r\nham\ti dnt wnt to tlk wid u\r\nham\tWe spend our days waiting for the ideal path to appear in front of us.. But what we forget is.. \"paths are made by walking.. not by waiting..\" Goodnight!\r\nham\tIts ok my arm is feeling weak cuz i got a shot so we can go another time\r\nham\tPlease reserve ticket on saturday eve from chennai to thirunelvali and again from tirunelvali to chennai on sunday eve...i already see in net..no ticket available..i want to book ticket through tackle ..\r\nham\tStorming msg: Wen u lift d phne, u say \"HELLO\" Do u knw wt is d real meaning of HELLO?? . . . It's d name of a girl..! . . . Yes.. And u knw who is dat girl?? \"Margaret Hello\" She is d girlfrnd f Grahmbell who invnted telphone... . . . . Moral:One can 4get d name of a person, bt not his girlfrnd... G o o d n i g h t . . .@\r\nham\tThat's ok. I popped in to ask bout something and she said you'd been in. Are you around tonght wen this girl comes?\r\nham\tAll e best 4 ur exam later.\r\nham\tHope ur head doesn't hurt 2 much ! Am ploughing my way through a pile of ironing ! Staying in with a chinky tonight come round if you like.\r\nham\tOh k.i think most of wi and nz players unsold.\r\nham\tHaha... Where got so fast lose weight, thk muz go 4 a month den got effect... Gee,later we go aust put bk e weight.\r\nham\tI wonder how you got online, my love ? Had you gone to the net cafe ? Did you get your phone recharged ? Were you on a friends net ? 
I think of you, boytoy\r\nham\tHaha just kidding, papa needs drugs\r\nham\tThk shld b can... Ya, i wana go 4 lessons... Haha, can go for one whole stretch...\r\nham\tOh ok..\r\nham\tR we still meeting 4 dinner tonight?\r\nham\tThats cool! I am a gentleman and will treat you with dignity and respect.\r\nham\tShall i start from hear.\r\nham\tThen we wait 4 u lor... No need 2 feel bad lar...\r\nham\tNo did you check? I got his detailed message now\r\nham\tYou have registered Sinco as Payee. Log in at icicibank.com and enter URN <#> to confirm. Beware of frauds. Do NOT share or disclose URN to anyone.\r\nham\tNo, I decided that only people who care about stuff vote and caring about stuff is for losers\r\nham\tKaiez... Enjoy ur tuition... Gee... Thk e second option sounds beta... I'll go yan jiu den msg u...\r\nham\tYou have registered Sinco as Payee. Log in at icicibank.com and enter URN <#> to confirm. Beware of frauds. Do NOT share or disclose URN to anyone.\r\nham\tcool. We will have fun practicing making babies!\r\nham\tActually getting ready to leave the house.\r\nham\tK..k..any special today?\r\nspam\tURGENT, IMPORTANT INFORMATION FOR O2 USER. TODAY IS YOUR LUCKY DAY! 2 FIND OUT WHY LOG ONTO HTTP://WWW.URAWINNER.COM THERE IS A FANTASTIC SURPRISE AWAITING FOR YOU\r\nham\tThen we gotta do it after that\r\nham\tI've got ten bucks, jay is being noncomittal\r\nham\tWhere at were hungry too\r\nham\tPls speak to that customer machan.\r\nham\tsomewhere out there beneath the pale moon light someone think in of u some where out there where dreams come true... goodnite & sweet dreams\r\nham\tWen ur lovable bcums angry wid u, dnt take it seriously.. Coz being angry is d most childish n true way of showing deep affection, care n luv!.. kettoda manda... Have nice day da.\r\nspam\tDear U've been invited to XCHAT. This is our final attempt to contact u! Txt CHAT to 86688 150p/MsgrcvdHG/Suite342/2Lands/Row/W1J6HL LDN 18 yrs\r\nham\tSo wats ur opinion abt him and how abt is character?\r\nham\tJay is snickering and tells me that x is totally fucking up the chords as we speak\r\nham\tNo..few hours before.went to hair cut .\r\nham\tNo wonder... Cos i dun rem seeing a silver car... But i thk i saw a black one...\r\nham\tLmao. Take a pic and send it to me.\r\nham\t\"Speak only when you feel your words are better than the silence...\" Gud mrng:-)\r\nham\tNo. She's currently in scotland for that.\r\nham\tDo you work all this week ?\r\nspam\tCongratulations ur awarded either £500 of CD gift vouchers & Free entry 2 our £100 weekly draw txt MUSIC to 87066 TnCs www.Ldew.com 1 win150ppmx3age16\r\nham\tLol great now im getting hungry.\r\nham\tYes.. now only saw your message..\r\nham\tI'll be at mu in like <#> seconds\r\nham\tOk...\r\nham\tTHING R GOOD THANX GOT EXAMS IN MARCH IVE DONE NO REVISION? IS FRAN STILL WITH BOYF? IVE GOTTA INTERVIW 4 EXETER BIT WORRIED!x\r\nham\tTell you what, if you make a little spreadsheet and track whose idea it was to smoke to determine who \"smokes too much\" for the entire month of february, I'll come up\r\nspam\tFor sale - arsenal dartboard. Good condition but no doubles or trebles!\r\nham\tDon't look back at the building because you have no coat and i don't want you to get more sick. Just hurry home and wear a coat to the gym!!!\r\nham\tMy painful personal thought- \"I always try to keep everybody happy all the time. But nobody recognises me when i am alone\"\r\nham\tThanks for ve lovely wisheds. You rock\r\nham\tYou intrepid duo you! Have a great time and see you both soon. 
\r\nham\tI asked sen to come chennai and search for job.\r\nham\tDad went out oredi... \r\nham\tI jus hope its true that missin me cos i'm really missin him! You haven't done anything to feel guilty about, yet.\r\nham\tWat so late still early mah. Or we juz go 4 dinner lor. Aiya i dunno...\r\nham\tArms fine, how's Cardiff and uni? \r\nham\tIn fact when do you leave? I think addie goes back to school tues or wed\r\nham\tCool breeze... Bright sun... Fresh flower... Twittering birds... All these waiting to wish u: \"GOODMORNING & HAVE A NICE DAY\" :)\r\nham\tYa:)going for restaurant..\r\nham\tIts ok., i just askd did u knw tht no?\r\nspam\tFree 1st week entry 2 TEXTPOD 4 a chance 2 win 40GB iPod or £250 cash every wk. Txt POD to 84128 Ts&Cs www.textpod.net custcare 08712405020.\r\nham\tThose ducking chinchillas\r\nham\tI am in a marriage function\r\nham\tLooks like u wil b getting a headstart im leaving here bout 2.30ish but if u r desperate for my company I could head in earlier-we were goin to meet in rummer.\r\nham\tDon‘t give a flying monkeys wot they think and I certainly don‘t mind. Any friend of mine and all that!\r\nspam\tAs a registered optin subscriber ur draw 4 £100 gift voucher will be entered on receipt of a correct ans to 80062 Whats No1 in the BBC charts\r\nham\tsay thanks2. \r\nham\tMsg me when rajini comes.\r\nham\tYa! when are ü taking ure practical lessons? I start in june.. \r\nham\tThat's good, because I need drugs\r\nham\tStupid.its not possible\r\nham\tCan ü all decide faster cos my sis going home liao..\r\nspam\tSummers finally here! Fancy a chat or flirt with sexy singles in yr area? To get MATCHED up just reply SUMMER now. Free 2 Join. OptOut txt STOP Help08714742804\r\nham\tU sleeping now.. Or you going to take? Haha.. I got spys wat.. Me online checking n replying mails lor..\r\nspam\tCLAIRE here am havin borin time & am now alone U wanna cum over 2nite? Chat now 09099725823 hope 2 C U Luv CLAIRE xx Calls£1/minmoremobsEMSPOBox45PO139WA\r\nham\tFighting with the world is easy, u either win or lose bt fightng with some1 who is close to u is dificult if u lose - u lose if u win - u still lose.\r\nspam\tBought one ringtone and now getting texts costing 3 pound offering more tones etc\r\nham\tYalru lyfu astne chikku.. Bt innu mundhe lyf ali halla ke bilo (marriage)program edhae, so lyf is nt yet ovr chikku..ali vargu lyfu meow meow:-D\r\nham\tKinda. First one gets in at twelve! Aah. Speak tomo\r\nspam\t09066362231 URGENT! Your mobile No 07xxxxxxxxx won a £2,000 bonus caller prize on 02/06/03! this is the 2nd attempt to reach YOU! call 09066362231 ASAP!\r\nham\tOk good then i later come find ü... C lucky i told ü to go earlier... Later pple take finish ü no more again...\r\nham\tWat makes u thk i'll fall down. But actually i thk i'm quite prone 2 falls. Lucky my dad at home i ask him come n fetch me already.\r\nspam\tYOU 07801543489 are guaranteed the latests Nokia Phone, a 40GB iPod MP3 player or a £500 prize! Txt word:COLLECT to No:83355! TC-LLC NY-USA 150p/Mt msgrcvd18+\r\nham\tYour account has been refilled successfully by INR <DECIMAL> . Your KeralaCircle prepaid account balance is Rs <DECIMAL> . Your Transaction ID is KR <#> .\r\nham\tI wont touch you with out your permission.\r\nspam\tHi its LUCY Hubby at meetins all day Fri & I will B alone at hotel U fancy cumin over? 
Pls leave msg 2day 09099726395 Lucy x Calls£1/minMobsmoreLKPOBOX177HP51FL\r\nham\t7 wonders in My WORLD 7th You 6th Ur style 5th Ur smile 4th Ur Personality 3rd Ur Nature 2nd Ur SMS and 1st \"Ur Lovely Friendship\"... good morning dear\r\nham\tTake some small dose tablet for fever\r\nham\tOh. U must have taken your REAL Valentine out shopping first.\r\nham\tJust sent you an email – to an address with incomm in it, is that right?\r\nham\tWill do, you gonna be at blake's all night? I might be able to get out of here a little early\r\nham\tFriendship is not a game to play, It is not a word to say, It doesn\\'t start on March and ends on May, It is tomorrow, yesterday, today and e\r\nham\tNice. Wait...should you be texting right now? I'm not gonna pay your ticket, ya know!\r\nham\tI'm watching lotr w my sis dis aft. So u wan 2 meet me 4 dinner at nite a not?\r\nham\tWhy you keeping me away like this\r\nham\tI think its far more than that but find out. Check google maps for a place from your dorm.\r\nham\tMy trip was ok but quite tiring lor. Uni starts today but it's ok 4 me cos i'm not taking any modules but jus concentrating on my final yr project.\r\nham\tHave you always been saying welp?\r\nham\tI'm a guy, browsin is compulsory\r\nham\tOk...\r\nham\tPurity of friendship between two is not about smiling after reading the forwarded message..Its about smiling just by seeing the name. Gud evng musthu\r\nham\tSorry, I'll call later\r\nham\t(I should add that I don't really care and if you can't I can at least get this dude to fuck off but hey, your money if you want it)\r\nham\tHello lover! How goes that new job? Are you there now? Are you happy? Do you think of me? I wake, my slave and send you a teasing kiss from across the sea\r\nham\tI told your number to gautham..\r\nham\tTell them no need to investigate about me anywhere.\r\nham\tOk i juz receive..\r\nham\tCant believe i said so many things to you this morning when all i really wanted to say was good morning, i love you! Have a beautiful morning. See you in the library later.\r\nspam\tYour account has been credited with 500 FREE Text Messages. To activate, just txt the word: CREDIT to No: 80488 T&Cs www.80488.biz\r\nham\tIn the end she might still vomit but its okay. Not everything will come out.\r\nham\tHow are you with moneY...as in to you...money aint a thing....how are you sha!\r\nham\tIt has everything to do with the weather. Keep extra warm. Its a cold but nothing serious. Pls lots of vitamin c\r\nham\tHey gals.. Anyone of u going down to e driving centre tmr?\r\nham\tI'm always on yahoo messenger now. Just send the message to me and i.ll get it you may have to send it in the mobile mode sha but i.ll get it. And will reply.\r\nham\tI'm putting it on now. It should be ready for <TIME> \r\nham\tTime n Smile r the two crucial things in our life. Sometimes time makes us to forget smile, and sometimes someone's smile makes us to forget time gud noon\r\nspam\tSMS. ac JSco: Energy is high, but u may not know where 2channel it. 2day ur leadership skills r strong. Psychic? Reply ANS w/question. End? Reply END JSCO\r\nham\tHost-based IDPS for linux systems.\r\nspam\tHOT LIVE FANTASIES call now 08707509020 Just 20p per min NTT Ltd, PO Box 1327 Croydon CR9 5WB 0870 is a national rate call\r\nham\tDon no da:)whats you plan?\r\nham\tIll be there on <#> ok.\r\nham\tOh my God. I'm almost home\r\nham\tTotal video converter free download type this in google search:)\r\nspam\tThanks for the Vote. 
Now sing along with the stars with Karaoke on your mobile. For a FREE link just reply with SING now.\r\nham\tWen ur lovable bcums angry wid u, dnt take it seriously.. Coz being angry is d most childish n true way of showing deep affection, care n luv!.. kettoda manda... Have nice day da.\r\nham\tSounds like something that someone testing me would sayy\r\nham\tWhen u love someone Dont make them to love u as much as u do. But Love them so much that they dont want to be loved by anyone except you... Gud nit.\r\nham\tPete,is this your phone still? Its Jenny from college and Leanne.what are you up to now?:)\r\nham\tOops sorry. Just to check that you don't mind picking me up tomo at half eight from station. Would that be ok?\r\nham\tHey sweet, I was wondering when you had a moment if you might come to me ? I want to send a file to someone but it won't go over yahoo for them because their connection sucks, remember when you set up that page for me to go to and download the format disc ? Could you tell me how to do that ? Or do you know some other way to download big files ? Because they can download stuff directly from the internet. Any help would be great, my prey ... *teasing kiss*\r\nham\tHows the champ just leaving glasgow!\r\nham\tK:)all the best:)congrats...\r\nham\tI wonder if you'll get this text?\r\nham\tI need to come home and give you some good lovin...\r\nspam\tOur brand new mobile music service is now live. The free music player will arrive shortly. Just install on your phone to browse content from the top artists.\r\nham\tShall i ask one thing if you dont mistake me.\r\nham\tCheck wid corect speling i.e. Sarcasm\r\nspam\tURGENT! Your Mobile No was awarded a £2,000 Bonus Caller Prize on 1/08/03! This is our 2nd attempt to contact YOU! Call 0871-4719-523 BOX95QU BT National Rate\r\nham\tAre you angry with me. What happen dear\r\nham\tI thk u dun haf 2 hint in e forum already lor... Cos i told ron n darren is going 2 tell shuhui.\r\nham\tYup ok thanx...\r\nham\tHi:)cts employee how are you?\r\nham\tPls pls find out from aunt nike.\r\nham\tWow ... I love you sooo much, you know ? I can barely stand it ! I wonder how your day goes and if you are well, my love ... I think of you and miss you\r\nham\tNo screaming means shouting..\r\nham\tHey what happen de. Are you alright.\r\nham\tShould I have picked up a receipt or something earlier\r\nham\tI think chennai well settled?\r\nham\tOh dang! I didn't mean o send that to you! Lol!\r\nham\tUnfortunately i've just found out that we have to pick my sister up from the airport that evening so don't think i'll be going out at all. We should try to go out one of th\r\nham\tHorrible bf... I now v hungry...\r\nham\tRemember on that day..\r\nspam\tYou have won a Nokia 7250i. This is what you get when you win our FREE auction. To take part send Nokia to 86021 now. HG/Suite342/2Lands Row/W1JHL 16+\r\nham\tHow's it feel? Mr. Your not my real Valentine just my yo Valentine even tho u hardly play!!\r\nham\tAll sounds good. Fingers . Makes it difficult to type\r\nham\tMidnight at the earliest\r\nham\tYou're not sure that I'm not trying to make xavier smoke because I don't want to smoke after being told I smoke too much?\r\nham\tK come to nordstrom when you're done\r\nham\tDo u konw waht is rael FRIENDSHIP Im gving yuo an exmpel: Jsut ese tihs msg.. Evrey splleing of tihs msg is wrnog.. Bt sitll yuo can raed it wihtuot ayn mitsake.. GOODNIGHT & HAVE A NICE SLEEP..SWEET DREAMS..\r\nham\tNow press conference da:)\r\nspam\tHello from Orange. 
For 1 month's free access to games, news and sport, plus 10 free texts and 20 photo messages, reply YES. Terms apply: www.orange.co.uk/ow\r\nham\tAfter completed degree. There is no use in joining finance.\r\nham\tGood afternoon, my love ! Any job prospects ? Are you missing me ? What do you do ? Are you being lazy and bleak, hmmm ? Or happy and filled with my love ?\r\nham\tShant disturb u anymore... Jia you...\r\nham\tBishan lar nearer... No need buy so early cos if buy now i gotta park my car...\r\nham\tMe, i dont know again oh\r\nham\tDude sux for snake. He got old and raiden got buff\r\nham\tHe says hi and to get your ass back to south tampa (preferably at a kegger)\r\nham\tIn e msg jus now. U said thanks for gift.\r\nham\tU too...\r\nham\tOk how you dear. Did you call chechi\r\nham\tYeah we do totes. When u wanna?\r\nham\tOk i found dis pierre cardin one which looks normal costs 20 its on sale.\r\nham\tGood sleep is about rhythm. The person has to establish a rhythm that the body will learn and use. If you want to know more :-)\r\nham\tWat r u doing?\r\nham\tMessage from . I am at Truro Hospital on ext. You can phone me here. as I have a phone by my side\r\nham\tSingle line with a big meaning::::: \"Miss anything 4 ur \"Best Life\" but, don't miss ur best life for anything... Gud nyt...\r\nham\tJust got some gas money, any chance you and the gang want to go on a grand nature adventure?\r\nham\tDnt worry...use ice pieces in a cloth pack.also take 2 tablets.\r\nham\tDude just saw a parked car with its sunroof popped up. Sux\r\nham\tGet ready to put on your excellent sub face :)\r\nham\tTmrw. Im finishing 9 doors\r\nham\tThe <#> g that i saw a few days ago, the guy wants sell wifi only for <#> and with 3g for <#> . That's why i blanked him.\r\nham\tI am late. I will be there at\r\nham\twhatever, im pretty pissed off.\r\nham\tToday is ACCEPT DAY..U Accept me as? Brother Sister Lover Dear1 Best1 Clos1 Lvblefrnd Jstfrnd Cutefrnd Lifpartnr Belovd Swtheart Bstfrnd No rply means enemy\r\nham\tI dont have that much image in class.\r\nham\tNo:-)i got rumour that you going to buy apartment in chennai:-)\r\nham\tNear kalainar tv office.thenampet\r\nspam\tUr cash-balance is currently 500 pounds - to maximize ur cash-in now send GO to 86688 only 150p/msg. CC 08718720201 HG/Suite342/2Lands Row/W1J6HL\r\nspam\tSMS AUCTION - A BRAND NEW Nokia 7250 is up 4 auction today! Auction is FREE 2 join & take part! Txt NOKIA to 86021 now! HG/Suite342/2Lands Row/W1J6HL\r\nham\tMy sis is catching e show in e afternoon so i'm not watching w her. So c u wan 2 watch today or tmr lor.\r\nham\tSounds gd... Haha... Can... Wah, u yan jiu so fast liao...\r\nham\tNo. To be nosy I guess. Idk am I over reacting if I'm freaked?\r\nham\tRemember all those whom i hurt during days of satanic imposter in me.need to pay a price,so be it.may destiny keep me going and as u said pray that i get the mind to get over the same.\r\nham\tHow to Make a girl Happy? It's not at all difficult to make girls happy. U only need to be... 1. A friend 2. Companion 3. Lover 4. Chef . . . <#> . Good listener <#> . Organizer <#> . Good boyfriend <#> . Very clean <#> . Sympathetic <#> . Athletic <#> . Warm . . . <#> . Courageous <#> . Determined <#> . True <#> . Dependable <#> . Intelligent . . . <#> . Psychologist <#> . Pest exterminator <#> . Psychiatrist <#> . Healer . . <#> . Stylist <#> . Driver . . Aaniye pudunga venaam..\r\nham\tWhy is that, princess? 
I bet the brothas are all chasing you!\r\nham\tI shall book chez jules for half eight, if that's ok with you?\r\nham\tHhahhaahahah rofl wtf nig was leonardo in your room or something\r\nham\tYep, at derek's house now, see you Sunday <3\r\nham\tIt's cool, let me know before it kicks off around <#> , I'll be out and about all day\r\nham\tSorry, I'll call later\r\nham\tI was wondering if it would be okay for you to call uncle john and let him know that things are not the same in nigeria as they r here. That <#> dollars is 2years sent and that you know its a strain but i plan to pay back every dime he gives. Every dime so for me to expect anything from you is not practical. Something like that.\r\nham\tThere are no other charges after transfer charges and you can withdraw anyhow you like\r\nham\tDont search love, let love find U. Thats why its called falling in love, bcoz U dont force yourself, U just fall and U know there is smeone to hold U... BSLVYL\r\nham\tAt 4. Let's go to bill millers\r\nham\tI love you. You set my soul on fire. It is not just a spark. But it is a flame. A big rawring flame. XoXo\r\nham\tSomewhr someone is surely made 4 u. And God has decided a perfect time to make u meet dat person. . . . till den, . . . . . Enjoy ur crushes..!!!;-)\r\nham\tThat's my honeymoon outfit. :)\r\nham\tWill it help if we propose going back again tomorrow\r\nspam\tPRIVATE! Your 2003 Account Statement for shows 800 un-redeemed S. I. M. points. Call 08719899230 Identifier Code: 41685 Expires 07/11/04\r\nham\tNever blame a day in ur life. Good days give u happiness. Bad days give u experience. Both are essential in life! All are Gods blessings! good morning.:\r\nham\tPls confirm the time to collect the cheque.\r\nspam\tAs a Registered Subscriber yr draw 4 a £100 gift voucher will b entered on receipt of a correct ans. When are the next olympics. Txt ans to 80062\r\nspam\tURGENT! Your Mobile number has been awarded with a £2000 prize GUARANTEED. Call 09061790121 from land line. Claim 3030. Valid 12hrs only 150ppm\r\nham\tDaddy will take good care of you :)\r\nham\tYeah probably, I still gotta check out with leo\r\nham\tK.then any other special?\r\nham\tCarlos is taking his sweet time as usual so let me know when you and patty are done/want to smoke and I'll tell him to haul ass\r\nham\tOk pa. Nothing problem:-)\r\nham\tHave you heard about that job? I'm going to that wildlife talk again tonight if u want2come. Its that2worzels and a wizzle or whatever it is?! \r\nham\tGod picked up a flower and dippeditinaDEW, lovingly touched itwhichturnedinto u, and the he gifted tomeandsaid,THIS FRIEND IS 4U\r\nham\tWhen you came to hostel.\r\nham\tOk no prob... I'll come after lunch then...\r\nham\tJus telling u dat i'll b leaving 4 shanghai on 21st instead so we'll haf more time 2 meet up cya...\r\nham\tAre your freezing ? Are you home yet ? Will you remember to kiss your mom in the morning? Do you love me ? Do you think of me ? Are you missing me yet ?\r\nham\tYou all ready for * big day tomorrow?\r\nham\tI'll probably be around mu a lot\r\nham\t645\r\nspam\tRT-KIng Pro Video Club>> Need help? [email protected] or call 08701237397 You must be 16+ Club credits redeemable at www.ringtoneking.co.uk! Enjoy!\r\nham\tThnx dude. u guys out 2nite?\r\nham\tMe sef dey laugh you. 
Meanwhile how's my darling anjie!\r\nham\tMm i had my food da from out\r\nham\tK, makes sense, btw carlos is being difficult so you guys are gonna smoke while I go pick up the second batch and get gas\r\nham\tDid u download the fring app?\r\nham\tThe 2 oz guy is being kinda flaky but one friend is interested in picking up $ <#> worth tonight if possible\r\nham\tFriends that u can stay on fb chat with\r\nham\tFuck babe, I miss you sooooo much !! I wish you were here to sleep with me ... My bed is so lonely ... I go now, to sleep ... To dream of you, my love ...\r\nham\tLiving is very simple.. Loving is also simple.. Laughing is too simple.. Winning is tooo simple.. But, being 'SIMPLE' is very difficult.. Gud nte.:-\r\nspam\tU have a secret admirer who is looking 2 make contact with U-find out who they R*reveal who thinks UR so special-call on 09058094599\r\nham\tAh, well that confuses things, doesn‘t it?\r\nspam\t500 free text msgs. Just text ok to 80488 and we'll credit your account\r\nham\tHi Dear Call me its urgnt. I don't know whats your problem. You don't want to work or if you have any other problem at least tell me. Wating for your reply.\r\nham\tDear how you. Are you ok?\r\nspam\tYou have been selected to stay in 1 of 250 top British hotels - FOR NOTHING! Holiday Worth £350! To Claim, Call London 02072069400. Bx 526, SW73SS\r\nham\tYes princess! I want to make you happy...\r\nham\tSounds like you have many talents! would you like to go on a dinner date next week?\r\nham\tI am going to film 2day da. At 6pm. Sorry da.\r\nham\tWe not watching movie already. Xy wants 2 shop so i'm shopping w her now.\r\nham\tHello my little party animal! I just thought I'd buzz you as you were with your friends ...*grins*... Reminding you were loved and send a naughty adoring kiss\r\nham\tYesterday its with me only . Now am going home.\r\nspam\tEerie Nokia tones 4u, rply TONE TITLE to 8007 eg TONE DRACULA to 8007 Titles: GHOST, ADDAMSFA, MUNSTERS, EXORCIST, TWILIGHT www.getzed.co.uk POBox36504W45WQ 150p\r\nham\tYou have come into my life and brought the sun ..Shiny down on me, warming my heart. Putting a constant smile on my face ... Making me feel loved and cared for\r\nham\tNo shit, but I wasn't that surprised, so I went and spent the evening with that french guy I met in town here and we fooled around a bit but I didn't let him fuck me\r\nspam\t0A$NETWORKS allow companies to bill for SMS, so they are responsible for their \"suppliers\", just as a shop has to give a guarantee on what they sell. B. G.\r\nham\tGreat comedy..cant stop laughing da:)\r\nspam\tFreeMsg:Feelin kinda lnly hope u like 2 keep me company! Jst got a cam moby wanna c my pic?Txt or reply DATE to 82242 Msg150p 2rcv Hlp 08712317606 stop to 82242\r\nham\tAlright, we're all set here, text the man\r\nham\tHi , where are you? We're at and they're not keen to go out i kind of am but feel i shouldn't so can we go out tomo, don't mind do you?\r\nham\tSleeping nt feeling well\r\nham\tU WILL SWITCH YOUR FONE ON DAMMIT!!\r\nham\tIndia have to take lead:)\r\nham\tI.ll post her out l8r. In class\r\nham\tThts wat Wright Brother did to fly..\r\nham\tEvening * v good if somewhat event laden. Will fill you in, don't you worry … Head * ok but throat * wrecked. See you at six then!\r\nham\tIf u laugh really loud.. If u talk spontaneously.. If u dont care what others feel.. U are probably with your dear & best friends.. 
GOODEVENING Dear..:)\r\nham\tITS A LAPTOP TAKE IT WITH YOU.\r\nham\tI dont have any of your file in my bag..i was in work when you called me.i 'll tell you if i find anything in my room.\r\nham\tI wan but too early lei... Me outside now wun b home so early... Neva mind then...\r\nspam\tFor ur chance to win a £250 cash every wk TXT: ACTION to 80608. T's&C's www.movietrivia.tv custcare 08712405022, 1x150p/wk\r\nham\tI was at bugis juz now wat... But now i'm walking home oredi... Ü so late then reply... I oso saw a top dat i like but din buy... Where r ü now? \r\nham\tWishing you and your family Merry \"X\" mas and HAPPY NEW Year in advance..\r\nham\tAt 7 we will go ok na.\r\nham\tYes I posted a couple of pics on fb. There's still snow outside too. I'm just waking up :)\r\nham\tS:-)if we have one good partnership going we will take lead:)\r\nspam\tRGENT! This is the 2nd attempt to contact U!U have WON £1250 CALL 09071512433 b4 050703 T&CsBCM4235WC1N3XX. callcost 150ppm mobilesvary. max£7. 50\r\nham\tYeah, where's your class at?\r\nham\tNo just send to you. Bec you in temple na.\r\nham\tYou aren't coming home between class, right? I need to work out and shower!\r\nspam\tHi if ur lookin 4 saucy daytime fun wiv busty married woman Am free all next week Chat now 2 sort time 09099726429 JANINExx Calls£1/minMobsmoreLKPOBOX177HP51FL\r\nham\tS but mostly not like that.\r\nham\tÜ v ma fan...\r\nham\tDunno cos i was v late n when i reach they inside already... But we ate spageddies lor... It's e gals who r laughing at me lor...\r\nham\tGuess who spent all last night phasing in and out of the fourth dimension\r\nham\tSo now my dad is gonna call after he gets out of work and ask all these crazy questions.\r\nham\tYes..but they said its IT.,\r\nham\tVery hurting n meaningful lines ever: \"I compromised everything for my love, But at d end my love compromised me for everything:-(\".. Gud mornin:-)\r\nham\tLmao!nice 1\r\nham\tGlad to see your reply.\r\nspam\tURGENT! We are trying to contact U. Todays draw shows that you have won a £800 prize GUARANTEED. Call 09050001295 from land line. Claim A21. Valid 12hrs only\r\nspam\tMonthly password for wap. mobsi.com is 391784. Use your wap phone not PC.\r\nham\tNah dub but je still buff\r\nham\tPainful words- \"I thought being Happy was the most toughest thing on Earth... But, the toughest is acting Happy with all unspoken pain inside..\"\r\nham\tYeah, that's fine! It's £6 to get in, is that ok? \r\nham\tLol where do u come up with these ideas?\r\nham\tSo many people seems to be special at first sight, But only very few will remain special to you till your last sight.. Maintain them till life ends.. Sh!jas\r\nham\tToday is \"song dedicated day..\" Which song will u dedicate for me? Send this to all ur valuable frnds but first rply me...\r\nham\tOkay... We wait ah\r\nham\tY lei?\r\nham\tHI BABE U R MOST LIKELY TO BE IN BED BUT IM SO SORRY ABOUT TONIGHT! I REALLY WANNA SEE U TOMORROW SO CALL ME AT 9. LOVE ME XXX\r\nham\tAlready am squatting is the new way of walking\r\nham\tDo you want bold 2 or bb torch\r\nham\tCramps stopped. Going back to sleep\r\nspam\ttodays vodafone numbers ending with 0089(my last four digits) are selected to received a £350 award. If your number matches please call 09063442151 to claim your £350 award\r\nspam\tFree Top ringtone -sub to weekly ringtone-get 1st week free-send SUBPOLY to 81618-?3 per week-stop sms-08718727870\r\nham\tNan sonathaya soladha. Why boss?\r\nham\tBring tat cd don forget\r\nspam\tSunshine Quiz Wkly Q! 
Win a top Sony DVD player if u know which country the Algarve is in? Txt ansr to 82277. £1.50 SP:Tyrone\r\nham\tI don't know but I'm raping dudes at poker\r\nham\tWeightloss! No more girl friends. Make loads of money on ebay or something. And give thanks to God.\r\nham\tWas gr8 to see that message. So when r u leaving? Congrats dear. What school and wat r ur plans.\r\nham\tÜ eatin later but i'm eatin wif my frens now lei... Ü going home first? \r\nham\tFinish already... Yar they keep saying i mushy... I so embarrassed ok...\r\nham\tSorry man, my stash ran dry last night and I can't pick up more until sunday\r\nham\tHai priya are you right. What doctor said pa. Where are you.\r\nspam\tFree msg. Sorry, a service you ordered from 81303 could not be delivered as you do not have sufficient credit. Please top up to receive the service.\r\nham\tOk...\r\nham\tPlease ask mummy to call father\r\nham\tCan come my room but cannot come my house cos my house still messy... Haha...\r\nham\tI have lost 10 kilos as of today!\r\nham\tJust taste fish curry :-P\r\nham\tWhat can i do? Might accidant tookplace between somewhere ghodbandar rd. Traffic moves slovely. So plz slip & don't worry.\r\nham\tYun ah.now ü wkg where?btw if ü go nus sc. Ü wana specialise in wad?\r\nham\tYes! I am a one woman man! Please tell me your likes and dislikes in bed...\r\nham\tWas doing my test earlier. I appreciate you. Will call you tomorrow.\r\nham\tHow's my loverboy doing ? What does he do that keeps him from coming to his Queen, hmmm ? Doesn't he ache to speak to me ? Miss me desparately ?\r\nham\tU meet other fren dun wan meet me ah... Muz b a guy rite...\r\nham\t(No promises on when though, haven't even gotten dinner yet)\r\nham\tI got your back! Do you have any dislikes in bed?\r\nham\to turns out i had stereo love on mi phone under the unknown album.\r\nspam\tHard LIVE 121 chat just 60p/min. Choose your girl and connect LIVE. Call 09094646899 now! Cheap Chat UK's biggest live service. VU BCM1896WC1N3XX\r\nham\tYeah I don't see why not\r\nham\tAsking do u knw them or nt? May be ur frnds or classmates?\r\nham\tSorry about earlier. Putting out fires.Are you around to talk after 9? Or do you actually have a life, lol!\r\nspam\tWOW! The Boys R Back. TAKE THAT 2007 UK Tour. Win VIP Tickets & pre-book with VIP Club. Txt CLUB to 81303. Trackmarque Ltd info@vipclub4u.\r\nham\tAs in missionary hook up, doggy hook up, standing...|\r\nham\tThen u better go sleep.. Dun disturb u liao.. U wake up then msg me lor..\r\nham\tFighting with the world is easy, u either win or lose bt fightng with some1 who is close to u is dificult if u lose - u lose if u win - u still lose.\r\nham\tAm watching house – very entertaining – am getting the whole hugh laurie thing – even with the stick – indeed especially with the stick.\r\nham\tThought praps you meant another one. Goodo! I'll look tomorrow \r\nham\tHi Jon, Pete here, Ive bin 2 Spain recently & hav sum dinero left, Bill said u or ur rents mayb interested in it, I hav 12,000pes, so around £48, tb, James.\r\nham\tThere bold 2 <#> . Is that yours\r\nham\tYou know there is. I shall speak to you in <#> minutes then\r\nham\t\"ALRITE HUNNY!WOT U UP 2 2NITE? DIDNT END UP GOIN DOWN TOWN JUS DA PUB INSTEAD! JUS CHILLIN AT DA MO IN ME BEDROOM!LOVE JEN XXX.\"\r\nham\tI went to project centre\r\nham\tAs per your request 'Maangalyam (Alaipayuthe)' has been set as your callertune for all Callers. 
Press *9 to copy your friends Callertune\r\nham\tLol yeah at this point I guess not\r\nham\tDoing project w frens lor. \r\nham\tLol. Well quality aint bad at all so i aint complaining\r\nham\tK, can that happen tonight?\r\nspam\tHi, this is Mandy Sullivan calling from HOTMIX FM...you are chosen to receive £5000.00 in our Easter Prize draw.....Please telephone 09041940223 to claim before 29/03/05 or your prize will be transferred to someone else....\r\nham\tI think we're going to finn's now, come\r\nham\tWhy tired what special there you had\r\nham\tI will come tomorrow di\r\nham\tI cant pick the phone right now. Pls send a message\r\nham\tK go and sleep well. Take rest:-).\r\nham\tU guys never invite me anywhere :(\r\nspam\tUR GOING 2 BAHAMAS! CallFREEFONE 08081560665 and speak to a live operator to claim either Bahamas cruise of£2000 CASH 18+only. To opt out txt X to 07786200117\r\nham\tI can do that! I want to please you both inside and outside the bedroom...\r\nham\t\"EY! CALM DOWNON THEACUSATIONS.. ITXT U COS IWANA KNOW WOTU R DOIN AT THEW/END... HAVENTCN U IN AGES..RING ME IF UR UP4 NETHING SAT.LOVE J XXX.\"\r\nham\tI love to wine and dine my lady!\r\nspam\tSomeone has conacted our dating service and entered your phone because they fancy you!To find out who it is call from landline 09111030116. PoBox12n146tf15\r\nham\tIm cool ta luv but v.tired 2 cause i have been doin loads of planning all wk, we have got our social services inspection at the nursery! Take care & spk sn x.\r\nham\tI don know account details..i will ask my mom and send you.my mom is out of reach now.\r\nham\tI think u have the wrong number.\r\nham\tFeel Yourself That You Are Always Happy.. Slowly It Becomes Your Habit & Finally It Becomes Part Of Your Life.. Follow It.. Happy Morning & Have A Happy Day:)\r\nham\tDO NOT B LATE LOVE MUM\r\nham\tGot it..mail panren paru..\r\nham\t* Was thinking about chuckin ur red green n black trainners 2 save carryin them bac on train\r\nham\tGive one miss from that number please\r\nham\tJus came back fr lunch wif my sis only. U leh?\r\nham\tHow is your schedule next week? I am out of town this weekend.\r\nham\tReally good:)dhanush rocks once again:)\r\nham\tLmao ok I wont be needing u to do my hair anymore.\r\nham\tMiss ya, need ya, want ya, love ya.\r\nham\tSorry i'm not free...\r\nham\tDo u ever get a song stuck in your head for no reason and it won't go away til u listen to it like 5 times?\r\nham\tNt yet chikku..simple habba..hw abt u?\r\nham\tGot ur mail Dileep.thank you so muchand look forward to lots of support...very less contacts here,remember one venugopal you mentioned.tomorrow if not late,i shall try to come up till there.goodnight dear.\r\nham\tSometimes Heart Remembrs someone Very much... Forgets someone soon... Bcoz Heart will not like everyone. But liked ones will be Remembered Everytime... BSLVYL\r\nham\tJoy's father is John. Then John is the NAME of Joy's father. Mandan\r\nspam\tHi 07734396839 IBH Customer Loyalty Offer: The NEW NOKIA6600 Mobile from ONLY £10 at TXTAUCTION!Txt word:START to No:81151 & get Yours Now!4T&\r\nham\tHi this is yijue... It's regarding the 3230 textbook it's intro to algorithms second edition... I'm selling it for $50...\r\nspam\tSMS AUCTION You have won a Nokia 7250i. This is what you get when you win our FREE auction. To take part send Nokia to 86021 now. HG/Suite342/2Lands Row/W1JHL 16+\r\nham\tK, want us to come by now?\r\nham\tHow. Its a little difficult but its a simple way to enter this place\r\nham\tHa... 
Both of us doing e same thing. But i got tv 2 watch. U can thk of where 2 go tonight or u already haf smth in mind...\r\nham\tDont show yourself. How far. Put new pictures up on facebook.\r\nham\tWatching tv now. I got new job :)\r\nham\tGood afternoon sexy buns! How goes the job search ? I wake and you are my first thought as always, my love. I wish your fine and happy and know I adore you!\r\nham\tI'm not coming over, do whatever you want\r\nham\tIts ok chikku, and its my 1 of favourite song..:-)\r\nham\tDid u see what I posted on your Facebook?\r\nspam\tCall FREEPHONE 0800 542 0578 now!\r\nspam\tBuy Space Invaders 4 a chance 2 win orig Arcade Game console. Press 0 for Games Arcade (std WAP charge) See o2.co.uk/games 4 Terms + settings. No purchase\r\nham\t7 wonders in My WORLD 7th You 6th Ur style 5th Ur smile 4th Ur Personality 3rd Ur Nature 2nd Ur SMS and 1st \"Ur Lovely Friendship\"... good morning dear\r\nspam\tLoan for any purpose £500 - £75,000. Homeowners + Tenants welcome. Have you been previously refused? We can still help. Call Free 0800 1956669 or text back 'help'\r\nspam\tBIG BROTHER ALERT! The computer has selected u for 10k cash or #150 voucher. Call 09064018838. NTT PO Box CRO1327 18+ BT Landline Cost 150ppm mobiles vary\r\nham\t;-( oh well, c u later\r\nham\tMy uncles in Atlanta. Wish you guys a great semester.\r\nham\tNo dear i do have free messages without any recharge. Hi hi hi\r\nham\tDont search love, let love find U. Thats why its called falling in love, bcoz U dont force yourself, U just fall and U know there is smeone to hold U... BSLVYL\r\nham\tI dun believe u. I thk u told him.\r\nham\tDo you know why god created gap between your fingers..? So that, One who is made for you comes & fills those gaps by holding your hand with LOVE..!\r\nham\tYes:)sura in sun tv.:)lol.\r\nham\tArun can u transfr me d amt\r\nham\tTakin a shower now but yeah I'll leave when I'm done\r\nham\tAm not working but am up to eyes in philosophy so will text u later when a bit more free for chat...\r\nham\tU havent lost me ill always b here 4u.i didnt intend 2 hurt u but I never knew how u felt about me when Iwas+marine&thats what itried2tell urmom.i careabout u\r\nspam\tWIN: We have a winner! Mr. T. Foley won an iPod! More exciting prizes soon, so keep an eye on ur mobile or visit www.win-82050.co.uk\r\nham\tYou bad girl. I can still remember them\r\nham\tHow much i gave to you. Morning.\r\nham\tI hope your alright babe? I worry that you might have felt a bit desparate when you learned the job was a fake ? I am here waiting when you come back, my love\r\nham\tHey, can you tell me blake's address? Carlos wanted me to meet him there but I got lost and he's not answering his phone\r\nham\tCan i get your opinion on something first?\r\nham\tThat one week leave i put know that time. Why.\r\nham\tIf we hit it off, you can move in with me :)\r\nham\texcellent. I spent <#> years in the Air Force. Iraq and afghanistan. I am stable and honest. do you like traveling?\r\nham\tI wanna watch that movie\r\nham\tOk lor thanx... Ü in school?\r\nham\tI'm in class. Did you get my text.\r\nham\tThe bus leaves at <#> \r\nham\tGod bless.get good sleep my dear...i will pray!\r\nspam\tTodays Voda numbers ending 1225 are selected to receive a £50award. If you have a match please call 08712300220 quoting claim code 3100 standard rates app \r\nham\tDo have a nice day today. I love you so dearly.\r\nham\tAiyo a bit pai seh ü noe... Scared he dun rem who i am then die... Hee... 
But he become better lookin oredi leh...\r\nham\tAight, I'll ask a few of my roommates\r\nham\tNow, whats your house # again ? And do you have any beer there ?\r\nham\tDo ü all wan 2 meet up n combine all the parts? How's da rest of da project going?\r\nham\t\"Getting tickets 4 walsall tue 6 th march. My mate is getting me them on sat. ill pay my treat. Want 2 go. Txt bak .Terry\"\r\nham\tYes we are chatting too.\r\nham\tHI ITS JESS I DONT KNOW IF YOU ARE AT WORK BUT CALL ME WHEN U CAN IM AT HOME ALL EVE. XXX\r\nham\tSian... Aft meeting supervisor got work 2 do liao... U working now?\r\nham\tAre you going to write ccna exam this week??\r\nham\tWell i will watch shrek in 3D!!B)\r\nham\tAm i that much dirty fellow?\r\nham\tDunno dat's wat he told me. Ok lor...\r\nham\tI'll probably be by tomorrow (or even later tonight if something's going on)\r\nham\tI couldn't say no as he is a dying man and I feel sad for him so I will go and I just wanted you to know I would probably be gone late into your night\r\nham\tIf you're thinking of lifting me one then no.\r\nham\tSame as u... Dun wan... Y u dun like me already ah... Wat u doing now? Still eating?\r\nham\tSent me ur email id soon\r\nham\tWat makes some people dearer is not just de happiness dat u feel when u meet them but de pain u feel when u miss dem!!!\r\nham\tDude. What's up. How Teresa. Hope you have been okay. When i didnt hear from these people, i called them and they had received the package since dec <#> . Just thot you'ld like to know. Do have a fantastic year and all the best with your reading. Plus if you can really really Bam first aid for Usmle, then your work is done.\r\nham\tHey gorgeous man. My work mobile number is. Have a good one babe. Squishy Mwahs.\r\nham\tMay i call You later Pls\r\nspam\tHottest pics straight to your phone!! See me getting Wet and Wanting, just for you xx Text PICS to 89555 now! txt costs 150p textoperator g696ga 18 XxX\r\nham\tThat's the way you should stay oh.\r\nham\tHello- thanx for taking that call. I got a job! Starts on monday!\r\nham\tWhat time is ur flight tmr?\r\nham\tWhen should I come over?\r\nham\tI have a rather prominent bite mark on my right cheek\r\nham\t* Will be september by then!\r\nham\tAre you wet right now?\r\nham\tAnd how's your husband.\r\nspam\tHack Chat. Get backdoor entry into 121 chat rooms at a fraction of the cost. Reply NEO69 or call 09050280520, to subscribe 25p pm. DPS, Bcm box 8027 Ldn, wc1n3xx\r\nham\tAre we doing the norm tomorrow? I finish just a 4.15 cos of st tests. Need to sort library stuff out at some point tomo - got letter from today - access til end march so i better get move on!\r\nham\tYeah. I got a list with only u and Joanna if I'm feeling really anti social\r\nham\tI am in your office na.\r\nham\t\"Are you comingdown later?\"\r\nham\tSuper da:)good replacement for murali\r\nham\tDa is good good player.why he is unsold.\r\nham\tHi. || Do u want | to join me with sts later? || Meeting them at five. || Call u after class.\r\nham\tIts on in engalnd! But telly has decided it won't let me watch it and mia and elliot were kissing! Damn it!\r\nspam\tFREE NOKIA Or Motorola with upto 12mths 1/2price linerental, 500 FREE x-net mins&100txt/mth FREE B'tooth*. Call Mobileupd8 on 08001950382 or call 2optout/D3WV\r\nham\tI dont want to hear philosophy. Just say what happen\r\nham\tYou got job in wipro:)you will get every thing in life in 2 or 3 years.\r\nham\tThen cant get da laptop? My matric card wif ü lei... \r\nham\tDunno da next show aft 6 is 850. 
Toa payoh got 650.\r\nspam\tThis is the 2nd time we have tried 2 contact u. U have won the 750 Pound prize. 2 claim is easy, call 08718726970 NOW! Only 10p per min. BT-national-rate \r\nham\tI just made some payments so dont have that much. Sorry. Would you want it fedex or the other way.\r\nham\tThey did't play one day last year know even though they have very good team.. Like india.\r\nham\tK.:)you are the only girl waiting in reception ah?\r\nham\tSay this slowly.? GOD,I LOVE YOU & I NEED YOU,CLEAN MY HEART WITH YOUR BLOOD.Send this to Ten special people & u c miracle tomorrow, do it,pls,pls do it...\r\nham\tI hate when she does this. She turns what should be a fun shopping trip into an annoying day of how everything would look in her house.\r\nham\tSir, i am waiting for your call.\r\nham\tWhat's up. Do you want me to come online?\r\nham\tIt could work, we'll reach a consensus at the next meeting\r\nham\tAiyah then i wait lor. Then u entertain me. Hee...\r\nham\tThe last thing i ever wanted to do was hurt you. And i didn't think it would have. You'd laugh, be embarassed, delete the tag and keep going. But as far as i knew, it wasn't even up. The fact that you even felt like i would do it to hurt you shows you really don't know me at all. It was messy wednesday, but it wasn't bad. The problem i have with it is you HAVE the time to clean it, but you choose not to. You skype, you take pictures, you sleep, you want to go out. I don't mind a few things here and there, but when you don't make the bed, when you throw laundry on top of it, when i can't have a friend in the house because i'm embarassed that there's underwear and bras strewn on the bed, pillows on the floor, that's something else. You used to be good about at least making the bed.\r\nham\tI'll let you know when it kicks in\r\nham\tYou call him now ok i said call him\r\nham\tCall to the number which is available in appointment. And ask to connect the call to waheed fathima.\r\nham\tOr ü go buy wif him then i meet ü later can?\r\nham\tMmmm ... Fuck ... Not fair ! You know my weaknesses ! *grins* *pushes you to your knee's* *exposes my belly and pulls your head to it* Don't forget ... I know yours too *wicked smile*\r\nham\tToday my system sh get ready.all is well and i am also in the deep well\r\nham\tMom wants to know where you at\r\nham\tAight, I'll text you when I'm back\r\nham\tDont know supports ass and srt i thnk. I think ps3 can play through usb too\r\nham\tOh ok i didnt know what you meant. Yep i am baby jontin\r\nspam\tYou have WON a guaranteed £1000 cash or a £2000 prize.To claim yr prize call our customer service representative on\r\nspam\tWould you like to see my XXX pics they are so hot they were nearly banned in the uk!\r\nspam\tHMV BONUS SPECIAL 500 pounds of genuine HMV vouchers to be won. Just answer 4 easy questions. Play Now! Send HMV to 86688 More info:www.100percent-real.com\r\nham\tWatching tv now. I got new job :)\r\nham\tThis pen thing is beyond a joke. Wont a Biro do? Don't do a masters as can't do this ever again! \r\nham\tI AM AT A PARTY WITH ALEX NICHOLS\r\nspam\tU have a secret admirer who is looking 2 make contact with U-find out who they R*reveal who thinks UR so special-call on 09058094594\r\nham\tJust seeing your missed call my dear brother. Do have a gr8 day.\r\nham\tOk.. Ü finishing soon?\r\nham\tSorry, I can't help you on this.\r\nham\tCome to me, slave. Your doing it again ... Going into your shell and unconsciously avoiding me ... You are making me unhappy :-(\r\nham\tI love your ass! 
Do you enjoy doggy style? :)\r\nham\tI think asking for a gym is the excuse for lazy people. I jog.\r\nspam\tDear 0776xxxxxxx U've been invited to XCHAT. This is our final attempt to contact u! Txt CHAT to 86688 150p/MsgrcvdHG/Suite342/2Lands/Row/W1J6HL LDN 18yrs \r\nspam\tUrgent! Please call 09061743811 from landline. Your ABTA complimentary 4* Tenerife Holiday or £5000 cash await collection SAE T&Cs Box 326 CW25WX 150ppm\r\nham\tNo. On the way home. So if not for the long dry spell the season would have been over\r\nham\tI gotta collect da car at 6 lei.\r\nham\tOk but knackered. Just came home and went to sleep! Not good at this full time work lark.\r\nham\tProbably earlier than that if the station's where I think it is\r\nspam\tCALL 09090900040 & LISTEN TO EXTREME DIRTY LIVE CHAT GOING ON IN THE OFFICE RIGHT NOW TOTAL PRIVACY NO ONE KNOWS YOUR [sic] LISTENING 60P MIN 24/7MP 0870753331018+\r\nham\tGood Morning plz call me sir\r\nspam\tFreeMsg Hey U, i just got 1 of these video/pic fones, reply WILD to this txt & ill send U my pics, hurry up Im so bored at work xxx (18 150p/rcvd STOP2stop)\r\nham\tUh, heads up we don't have THAT much left\r\nham\tI tot u outside cos darren say u come shopping. Of course we nice wat. We jus went sim lim look at mp3 player.\r\nham\tAight, sounds good. When do you want me to come down?\r\nham\tWat would u like 4 ur birthday?\r\nham\tI love working from home :)\r\nham\tAnd miss vday the parachute and double coins??? U must not know me very well...\r\nham\tSorry, I'll call later\r\nham\tMy sister got placed in birla soft da:-)\r\nspam\tFree entry in 2 a weekly comp for a chance to win an ipod. Txt POD to 80182 to get entry (std txt rate) T&C's apply 08452810073 for details 18+\r\nham\tWah... Okie okie... Muz make use of e unlimited... Haha... \r\nham\tThere're some people by mu, I'm at the table by lambda\r\nham\tAnd stop being an old man. You get to build snowman snow angels and snowball fights.\r\nham\tELLO BABE U OK?\r\nham\tHello beautiful r u ok? I've kinda ad a row wiv and he walked out the pub?? I wanted a night wiv u Miss u \r\nham\tThen u going ikea str aft dat?\r\nham\tBecoz its <#> jan whn al the post ofice is in holiday so she cn go fr the post ofice...got it duffer\r\nham\tLol grr my mom is taking forever with my prescription. Pharmacy is like 2 minutes away. Ugh.\r\nham\tFor real tho this sucks. I can't even cook my whole electricity is out. And I'm hungry.\r\nham\tYou want to go? \r\nspam\tNew TEXTBUDDY Chat 2 horny guys in ur area 4 just 25p Free 2 receive Search postcode or at gaytextbuddy.com. TXT ONE name to 89693. 08715500022 rpl Stop 2 cnl\r\nham\tIts not that time of the month nor mid of the time?\r\nham\tFffff. Can you text kadeem or are you too far gone\r\nham\tWe not leaving yet. Ok lor then we go elsewhere n eat. U thk...\r\nham\tIs fujitsu s series lifebook good?\r\nham\tYar i wanted 2 scold u yest but late already... I where got zhong se qing you? If u ask me b4 he ask me then i'll go out w u all lor. N u still can act so real.\r\nham\tDont know you bring some food\r\nham\tNo current and food here. I am alone also\r\nham\tI'll be in sch fr 4-6... I dun haf da book in sch... It's at home...\r\nham\tHello. They are going to the village pub at 8 so either come here or there accordingly. Ok?\r\nham\tOk\r\nham\tWe don call like <#> times oh. No give us hypertension oh.\r\nham\tDont give a monkeys wot they think and i certainly don't mind. Any friend of mine&all that! 
Just don't sleep wiv , that wud be annoyin!\r\nham\tOmg it could snow here tonite!\r\nspam\tCall from 08702490080 - tells u 2 call 09066358152 to claim £5000 prize. U have 2 enter all ur mobile & personal details @ the prompts. Careful!\r\nspam\tFree 1st week entry 2 TEXTPOD 4 a chance 2 win 40GB iPod or £250 cash every wk. Txt VPOD to 81303 Ts&Cs www.textpod.net custcare 08712405020.\r\nham\tCarry on not disturbing both of you\r\nham\tWhat pa tell me.. I went to bath:-)\r\nham\tJus finished avatar nigro\r\nham\tR u over scratching it?\r\nham\tHope you are having a great day.\r\nham\tDid either of you have any idea's? Do you know of anyplaces doing something?\r\nham\tMy planning usually stops at \"find hella weed, smoke hella weed\"\r\nham\tThe fact that you're cleaning shows you know why i'm upset. Your priority is constantly \"what i want to do,\" not \"what i need to do.\"\r\nham\tExcellent! Are you ready to moan and scream in ecstasy?\r\nspam\tMore people are dogging in your area now. Call 09090204448 and join like minded guys. Why not arrange 1 yourself. There's 1 this evening. A£1.50 minAPN LS278BB\r\nham\tDude avatar 3d was imp. At one point i thought there were actually flies in the room and almost tried hittng one as a reflex\r\nspam\tWELL DONE! Your 4* Costa Del Sol Holiday or £5000 await collection. Call 09050090044 Now toClaim. SAE, TCs, POBox334, Stockport, SK38xh, Cost£1.50/pm, Max10mins\r\nham\tK...k:)why cant you come here and search job:)\r\nham\tI got lousy sleep. I kept waking up every 2 hours to see if my cat wanted to come in. I worry about him when its cold :(\r\nham\tYeah, I'll leave in a couple minutes & let you know when I get to mu\r\nham\tCan ü call me at 10:10 to make sure dat i've woken up...\r\nham\tHey we can go jazz power yoga hip hop kb and yogasana \r\nham\tThe battery is for mr adewale my uncle. Aka Egbon\r\nham\tI cant pick the phone right now. Pls send a message\r\nham\tWait 2 min..stand at bus stop\r\nham\tOh ic. I thought you meant mary jane.\r\nham\tHaha... Really oh no... How? Then will they deduct your lesson tmr?\r\nham\tNah im goin 2 the wrks with j wot bout u?\r\nham\tThen just eat a shit and wait for ur monkey face bitch.......... U asshole..................\r\nham\tGood night. Am going to sleep.\r\nham\tAight I'll grab something to eat too, text me when you're back at mu\r\nham\tK...k:)why cant you come here and search job:)\r\nham\tTake something for pain. If it moves however to any side in the next 6hrs see a doctor.\r\nham\tLol ... Oh no babe, I wont be sliding into your place after midnight, but thanks for the invite\r\nham\tHowz that persons story\r\nspam\tGuess what! Somebody you know secretly fancies you! Wanna find out who it is? Give us a call on 09065394973 from Landline DATEBox1282EssexCM61XN 150p/min 18\r\nham\tLOL that would be awesome payback.\r\nspam\tit to 80488. Your 500 free text messages are valid until 31 December 2005.\r\nham\tYes :)it completely in out of form:)clark also utter waste.\r\nham\tHoneybee Said: *I'm d Sweetest in d World* God Laughed & Said: *Wait,U Havnt Met d Person Reading This Msg* MORAL: Even GOD Can Crack Jokes! GM+GN+GE+GN:)\r\nham\tThanks. It was only from tescos but quite nice. All gone now. Speak soon \r\nham\tWhat's a feathery bowa? Is that something guys have that I don't know about?\r\nham\tEven i cant close my eyes you are in me our vava playing umma :-D\r\nham\t2 laptop... I noe infra but too slow lar... 
I wan fast one\r\nspam\tYou have won a guaranteed £200 award or even £1000 cashto claim UR award call free on 08000407165 (18+) 2 stop getstop on 88222 PHP\r\nham\tNvm it's ok...\r\nham\tEnjoy ur life. . Good night\r\nham\tYes but can we meet in town cos will go to gep and then home. You could text at bus stop. And don't worry we'll have finished by march … ish!\r\nham\tI had askd u a question some hours before. Its answer\r\nham\tThats cool. Where should i cum? On you or in you? :)\r\nham\tDelhi and chennai still silent.\r\nham\tLol alright i was thinkin that too haha\r\nspam\tReply to win £100 weekly! Where will the 2006 FIFA World Cup be held? Send STOP to 87239 to end service\r\nham\tNo I'm in the same boat. Still here at my moms. Check me out on yo. I'm half naked.\r\nham\tShhhhh nobody is supposed to know!\r\nham\tSorry, I'll call later\r\nham\tSorry, I'll call later in meeting any thing related to trade please call Arul. <#> \r\nham\tHey i will be late... i'm at amk. Need to drink tea or coffee\r\nham\tI wnt to buy a BMW car urgently..its vry urgent.but hv a shortage of <#> Lacs.there is no source to arng dis amt. <#> lacs..thats my prob\r\nspam\tUrgent! Please call 09061743810 from landline. Your ABTA complimentary 4* Tenerife Holiday or #5000 cash await collection SAE T&Cs Box 326 CW25WX 150 ppm\r\nham\tThe length is e same but e top shorter n i got a fringe now. I thk i'm not going liao. Too lazy. Dun wan 2 distract u also.\r\nham\tS..antha num corrct dane\r\nham\tNo calls..messages..missed calls\r\nham\tSorry, I'll call later\r\nham\tThe basket's gettin full so I might be by tonight\r\nham\tHI DARLIN IVE JUST GOT BACK AND I HAD A REALLY NICE NIGHT AND THANKS SO MUCH FOR THE LIFT SEE U TOMORROW XXX\r\nham\tNo other Valentines huh? The proof is on your fb page. Ugh I'm so glad I really DIDN'T watch your rupaul show you TOOL!\r\nspam\tFree tones Hope you enjoyed your new content. text stop to 61610 to unsubscribe. help:08712400602450p Provided by tones2you.co.uk\r\nham\tEh den sat u book e kb liao huh...\r\nham\tHave you been practising your curtsey?\r\nham\tShall i come to get pickle\r\nham\tLol boo I was hoping for a laugh\r\nham\t\"YEH I AM DEF UP4 SOMETHING SAT,JUST GOT PAYED2DAY & I HAVBEEN GIVEN A£50 PAY RISE 4MY WORK & HAVEBEEN MADE PRESCHOOLCO-ORDINATOR 2I AM FEELINGOOD LUV\"\r\nham\tWell, I have to leave for my class babe ... You never came back to me ... :-( ... Hope you have a nice sleep, my love\r\nham\tLMAO where's your fish memory when I need it?\r\nham\tBut i'll b going 2 sch on mon. My sis need 2 take smth.\r\nham\tIdea will soon get converted to live:)\r\nspam\tTheMob>Yo yo yo-Here comes a new selection of hot downloads for our members to get for FREE! Just click & open the next link sent to ur fone...\r\nham\tS....s...india going to draw the series after many years in south african soil..\r\nham\tGoodmorning, today i am late for <DECIMAL> min.\r\nham\tCan't take any major roles in community outreach. You rock mel\r\nham\tShopping lor. Them raining mah hard 2 leave orchard.\r\nham\tHi here. have birth at on the to at 8lb 7oz. Mother and baby doing brilliantly.\r\nham\tSee the forwarding message for proof\r\nham\tI can't keep going through this. It was never my intention to run you out, but if you choose to do that rather than keep the room clean so *I* don't have to say no to visitors, then maybe that's the best choice. 
Yes, I wanted you to be embarassed, so maybe you'd feel for once how I feel when i have a friend who wants to drop buy and i have to say no, as happened this morning. I've tried everything. I don't know what else to do.\r\nham\tDunno lei... I thk mum lazy to go out... I neva ask her yet...\r\nham\tDo whatever you want. You know what the rules are. We had a talk earlier this week about what had to start happening, you showing responsibility. Yet, every week it's can i bend the rule this way? What about that way? Do whatever. I'm tired of having thia same argument with you every week. And a <#> movie DOESNT inlude the previews. You're still getting in after 1.\r\nham\tBeautiful Truth against Gravity.. Read carefully: \"Our heart feels light when someone is in it.. But it feels very heavy when someone leaves it..\" GOODMORNING\r\nspam\tGreat News! Call FREEFONE 08006344447 to claim your guaranteed £1000 CASH or £2000 gift. Speak to a live operator NOW!\r\nham\tAmbrith..madurai..met u in arun dha marrge..remembr?\r\nham\tJust re read it and I have no shame but tell me how he takes it and if he runs I will blame u 4 ever!! Not really 4 ever just a long time \r\nham\tPrincess, is your kitty shaved or natural?\r\nham\tBetter than bb. If he wont use it, his wife will or them doctor\r\nham\tYa it came a while ago\r\nham\tFrom tomorrow onwards eve 6 to 3 work.\r\nham\tAnything lor but toa payoh got place 2 walk meh...\r\nham\tI don't have anybody's number, I still haven't thought up a tactful way to ask alex\r\nspam\tU can WIN £100 of Music Gift Vouchers every week starting NOW Txt the word DRAW to 87066 TsCs www.ldew.com SkillGame,1Winaweek, age16.150ppermessSubscription\r\nham\tIs there any movie theatre i can go to and watch unlimited movies and just pay once?\r\nham\tU having lunch alone? I now so bored...\r\nham\tYes obviously, but you are the eggs-pert and the potato head… Speak soon! \r\nham\tNah man, my car is meant to be crammed full of people\r\nham\tNo got new job at bar in airport on satsgettin 4.47per hour but means no lie in! keep in touch\r\nham\tKallis is ready for bat in 2nd innings\r\nham\tThanx but my birthday is over already.\r\nham\tUgh y can't u just apologize, admit u were wrong and ask me to take u back?\r\nham\tI noe la... U wana pei bf oso rite... K lor, other days den...\r\nham\tYes, i'm small kid.. And boost is the secret of my energy..\r\nham\tIM GONNA MISS U SO MUCH\r\nham\tIs avatar supposed to have subtoitles\r\nham\tSimply sitting and watching match in office..\r\nham\tYou can jot down things you want to remember later.\r\nham\tOh sorry please its over\r\nham\tHey are we going for the lo lesson or gym? \r\nham\tDont pack what you can buy at any store.like cereals. If you must pack food, pack gari or something 9ja that you will miss.\r\nham\tYou always make things bigger than they are\r\nham\tÜ dun wan to watch infernal affair?\r\nham\tMe not waking up until 4 in the afternoon, sup\r\nspam\t4mths half price Orange line rental & latest camera phones 4 FREE. Had your phone 11mths ? Call MobilesDirect free on 08000938767 to update now! or2stoptxt\r\nham\tI can send you a pic if you like :)\r\nham\tOkay... I booked all already... Including the one at bugis.\r\nham\tAight fuck it, I'll get it later\r\nham\tNo de. But call me after some time. Ill tell you k\r\nham\tSo dont use hook up any how\r\nham\tHow much is blackberry bold2 in nigeria.\r\nham\tHi where you. You in home or calicut?\r\nham\tHey darlin.. i can pick u up at college if u tell me wen & where 2 mt.. 
love Pete xx\r\nspam\tCall 09094100151 to use ur mins! Calls cast 10p/min (mob vary). Service provided by AOM, just GBP5/month. AOM Box61,M60 1ER until u stop. Ages 18+ only!\r\nham\tOh... I was thkin of goin yogasana at 10 den no nd to go at 3 den can rush to parco 4 nb... Okie lor, u call me when ready...\r\nham\tY so late but i need to go n get da laptop...\r\nham\tSir, I am waiting for your mail.\r\nham\t.Please charge my mobile when you get up in morning.\r\nham\tNothing, i got msg frm tht unknown no..\r\nham\tUgh fuck it I'm resubbing to eve\r\nham\tHe didn't see his shadow. We get an early spring yay\r\nham\tI did. One slice and one breadstick. Lol\r\nham\tHey ! I want you ! I crave you ! I miss you ! I need you ! I love you, Ahmad Saeed al Hallaq ...\r\nham\tIs there any training tomorrow?\r\nspam\tURGENT! Your mobile No *********** WON a £2,000 Bonus Caller Prize on 02/06/03! This is the 2nd attempt to reach YOU! Call 09066362220 ASAP! BOX97N7QP, 150ppm\r\nham\tPass dis to all ur contacts n see wat u get! Red;i'm in luv wid u. Blue;u put a smile on my face. Purple;u r realy hot. Pink;u r so swt. Orange;i thnk i lyk u. Green;i realy wana go out wid u. Yelow;i wnt u bck. Black;i'm jealous of u. Brown;i miss you Nw plz giv me one color\r\nham\tCos daddy arranging time c wat time fetch ü mah...\r\nham\tThen. You are eldest know.\r\nham\tWho's there say hi to our drugdealer\r\nham\tIts hard to believe things like this. All can say lie but think twice before saying anything to me.\r\nspam\tEerie Nokia tones 4u, rply TONE TITLE to 8007 eg TONE DRACULA to 8007 Titles: GHOST, ADDAMSFA, MUNSTERS, EXORCIST, TWILIGHT www.getzed.co.uk POBox36504W45WQ 150p \r\nspam\tSexy Singles are waiting for you! Text your AGE followed by your GENDER as wither M or F E.G.23F. For gay men text your AGE followed by a G. e.g.23G.\r\nham\tGood night my dear.. Sleepwell&Take care\r\nham\tThat is wondarfull song\r\nspam\tFreeMsg: Claim ur 250 SMS messages-Text OK to 84025 now!Use web2mobile 2 ur mates etc. Join Txt250.com for 1.50p/wk. T&C BOX139, LA32WU. 16 . Remove txtX or stop\r\nham\tYar lor actually we quite fast... Cos da ge slow wat... Haha...\r\nham\tMust come later.. I normally bathe him in da afternoon mah..\r\nham\tTrust me. Even if isn't there, its there.\r\nham\tHey hun-onbus goin 2 meet him. He wants 2go out 4a meal but I donyt feel like it cuz have 2 get last bus home!But hes sweet latelyxxx\r\nspam\t85233 FREE>Ringtone!Reply REAL\r\nham\tI can take you at like noon\r\nham\tWhere is it. Is there any opening for mca.\r\nham\tI'm aight. Wat's happening on your side.\r\nham\tI'm done oredi...\r\nham\tyou are sweet as well, princess. Please tell me your likes and dislikes in bed...\r\nham\tHow are you. Wish you a great semester\r\nham\tMoji i love you more than words. Have a rich day\r\nham\tDude how do you like the buff wind.\r\nham\t\"alright babe, justthought id sayhey! how u doin?nearly the endof me wk offdam nevamind!We will have 2Hook up sn if uwant m8? loveJen x.\"\r\nspam\tWell done ENGLAND! Get the official poly ringtone or colour flag on yer mobile! text TONE or FLAG to 84199 NOW! Opt-out txt ENG STOP. Box39822 W111WX £1.50\r\nham\tNo i'm not. I can't give you everything you want and need. You actually could do better for yourself on yor own--you've got more money than i do. I can't get work, i can't get a man, i can't pay the rent, i can't even fill my fucking gas tank. yes, i'm stressed and depressed. 
I didn't even call home for thanksgiving cuz i'll have to tell them i,m up to nothing.\r\nham\tS:-)kallis wont play in first two odi:-)\r\nham\tThen get some cash together and I'll text jason\r\nham\tOh, my love, it's soooo good to hear from you. Omg I missed you so much today. I'm sorry your having problems with the provider but thank you for tming me\r\nspam\tFinal Chance! Claim ur £150 worth of discount vouchers today! Text YES to 85023 now! SavaMob, member offers mobile! T Cs SavaMob POBOX84, M263UZ. £3.00 Subs 16\r\nspam\tPRIVATE! Your 2004 Account Statement for 07742676969 shows 786 unredeemed Bonus Points. To claim call 08719180248 Identifier Code: 45239 Expires\r\nham\tProbably, want to pick up more?\r\nham\tI'm done...\r\nham\tAre you the cutest girl in the world or what\r\nham\tNo dice, art class 6 thru 9 :( thanks though. Any idea what time I should come tomorrow?\r\nspam\tSMS SERVICES. for your inclusive text credits, pls goto www.comuk.net login= ***** unsubscribe with STOP. no extra charge. help:08700469649. PO BOX420. IP4 5WE\r\nham\tOh Howda gud gud.. Mathe en samachara chikku:-)\r\nham\tI thk 530 lor. But dunno can get tickets a not. Wat u doing now?\r\nham\tAudrie lousy autocorrect\r\nham\tIts a site to simulate the test. It just gives you very tough questions to test your readiness.\r\nham\tAnyway seriously hit me up when you're back because otherwise I have to light up with armand and he always has shit and/or is vomiting\r\nham\tI fetch yun or u fetch?\r\nham\tThank you. I like you as well...\r\nham\tHmmm ... And imagine after you've come home from that having to rub my feet, make me dinner and help me get ready for my date ! Are you sure your ready for that kind of life ?\r\nspam\tFREE2DAY sexy St George's Day pic of Jordan!Txt PIC to 89080 dont miss out, then every wk a saucy celeb!4 more pics c PocketBabe.co.uk 0870241182716 £3/wk\r\nham\tLara said she can loan me <#> .\r\nham\tDo we have any spare power supplies\r\nham\tYar he quite clever but aft many guesses lor. He got ask me 2 bring but i thk darren not so willing 2 go. Aiya they thk leona still not attach wat.\r\nspam\tYou are a winner you have been specially selected to receive £1000 cash or a £2000 award. Speak to a live operator to claim call 087123002209am-7pm. Cost 10p\r\nham\tYeah, don't go to bed, I'll be back before midnight\r\nspam\tSunshine Hols. To claim ur med holiday send a stamped self address envelope to Drinks on Us UK, PO Box 113, Bray, Wicklow, Eire. Quiz Starts Saturday! Unsub Stop\r\nham\tWell I wasn't available as I washob nobbing with last night so they had to ask Nickey Platt instead of me!;\r\nham\tIt's that time of the week again, ryan\r\nham\tWish u many many returns of the day.. Happy birthday vikky..\r\nspam\tU can WIN £100 of Music Gift Vouchers every week starting NOW Txt the word DRAW to 87066 TsCs www.Idew.com SkillGame, 1Winaweek, age16. 150ppermessSubscription\r\nham\tI hope you know I'm still mad at you.\r\nham\tArgh my 3g is spotty, anyway the only thing I remember from the research we did was that province and sterling were the only problem-free places we looked at\r\nham\tIn xam hall boy asked girl Tell me the starting term for dis answer I can den manage on my own After lot of hesitation n lookin around silently she said THE! intha ponnungale ipaditan;)\r\nham\tDo you know when the result.\r\nspam\t+123 Congratulations - in this week's competition draw u have won the £1450 prize to claim just call 09050002311 b4280703. T&Cs/stop SMS 08718727868. 
Over 18 only 150ppm\r\nham\tBeautiful Truth against Gravity.. Read carefully: \"Our heart feels light when someone is in it.. But it feels very heavy when someone leaves it..\" GOOD NIGHT\r\nham\tSorry im getting up now, feel really bad- totally rejected that kinda me thing.\r\nham\tYou do got a shitload of diamonds though\r\nham\tTessy..pls do me a favor. Pls convey my birthday wishes to Nimya..pls dnt forget it. Today is her birthday Shijas\r\nham\tWell I'm going to be an aunty!\r\nham\tMine here like all fr china then so noisy.\r\nham\tLater i guess. I needa do mcat study too.\r\nham\tS...from the training manual it show there is no tech process:)its all about password reset and troubleshooting:)\r\nspam\tYour B4U voucher w/c 27/03 is MARSMS. Log onto www.B4Utele.com for discount credit. To opt out reply stop. Customer care call 08717168528\r\nham\tSpoke with uncle john today. He strongly feels that you need to sacrifice to keep me here. He's going to call you. When he does, i beg you to just listen. Dont make any promises or make it clear things are not easy. And i need you to please let us work things out. As long as i keep expecting help, my creativity will be stifled so pls just keep him happy, no promises on your part.\r\nham\tIf he started searching he will get job in few days.he have great potential and talent.\r\nham\tCarlos took a while (again), we leave in a minute\r\nham\tWell done and ! luv ya all \r\nham\tThen why you came to hostel.\r\nham\tK still are you loving me.\r\nham\tBut i juz remembered i gotta bathe my dog today..\r\nham\tAfter the drug she will be able to eat.\r\nham\tAlright took the morphine. Back in yo.\r\nham\tYou see the requirements please\r\nham\tYou stayin out of trouble stranger!!saw Dave the other day hes sorted now!still with me bloke when u gona get a girl MR!ur mum still Thinks we will get 2GETHA! \r\nspam\tFreeMsg: Hey - I'm Buffy. 25 and love to satisfy men. Home alone feeling randy. Reply 2 C my PIX! QlynnBV Help08700621170150p a msg Send stop to stop txts\r\nspam\tSunshine Hols. To claim ur med holiday send a stamped self address envelope to Drinks on Us UK, PO Box 113, Bray, Wicklow, Eire. Quiz Starts Saturday! Unsub Stop\r\nham\tSo can collect ur laptop?\r\nham\tOk. Can be later showing around 8-8:30 if you want + cld have drink before. Wld prefer not to spend money on nosh if you don't mind, as doing that nxt wk.\r\nham\tI will once i get home\r\nham\tWaaaat?? Lololo ok next time then!\r\nham\tThe table's occupied, I'm waiting by the tree\r\nham\tI surely dont forgot to come:)i will always be in touch in with you:-)\r\nham\tHi kindly give us back our documents which we submitted for loan from STAPATI\r\nham\tI dont have i shall buy one dear\r\nham\tOh god i am happy to see your message after 3 days\r\nham\tWhat year. And how many miles.\r\nham\tHey cutie. How goes it? Here in WALES its kinda ok. There is like hills and shit but i still avent killed myself. \r\nham\tSad story of a Man - Last week was my b'day. My Wife did'nt wish me. My Parents forgot n so did my Kids . I went to work. Even my Colleagues did not wish. As I entered my cabin my PA said, '' Happy B'day Boss !!''. I felt special. She askd me 4 lunch. After lunch she invited me to her apartment. We went there. She said,'' do u mind if I go into the bedroom for a minute ? '' ''OK'', I sed in a sexy mood. She came out 5 minuts latr wid a cake...n My Wife, My Parents, My Kidz, My Friends n My Colleagues. All screaming.. SURPRISE !! and I was waiting on the sofa.. ... ..... 
' NAKED...!\r\nham\tI think you should go the honesty road. Call the bank tomorrow. Its the tough decisions that make us great people.\r\nspam\tFREE for 1st week! No1 Nokia tone 4 ur mob every week just txt NOKIA to 87077 Get txting and tell ur mates. zed POBox 36504 W45WQ norm150p/tone 16+\r\nham\tNo. Its not specialisation. Can work but its slave labor. Will look for it this month sha cos no shakara 4 beggar.\r\nham\tIs she replying. Has boye changed his phone number\r\nham\t1) Go to write msg 2) Put on Dictionary mode 3)Cover the screen with hand, 4)Press <#> . 5)Gently remove Ur hand.. Its interesting..:)\r\nham\thi my darlin im on my way to London and we have just been smashed into by another driver! and have a big dent! im really missing u what have u been up to? xxx\r\nham\tNothing really, just making sure everybody's up to speed\r\nham\tI'm not coming home 4 dinner.\r\nham\tThank you. And by the way, I just lost.\r\nham\tYes.he have good crickiting mind\r\nham\tThx. All will be well in a few months\r\nspam\tShop till u Drop, IS IT YOU, either 10K, 5K, £500 Cash or £100 Travel voucher, Call now, 09064011000. NTT PO Box CR01327BT fixedline Cost 150ppm mobile vary\r\nham\t\"CAN I PLEASE COME UP NOW IMIN TOWN.DONTMATTER IF URGOIN OUTL8R,JUST REALLYNEED 2DOCD.PLEASE DONTPLEASE DONTIGNORE MYCALLS,U NO THECD ISV.IMPORTANT TOME 4 2MORO\"\r\nham\tI wont. So wat's wit the guys\r\nham\tYavnt tried yet and never played original either\r\nham\tHiya, had a good day? Have you spoken to since the weekend?\r\nham\tSee? I thought it all through\r\nham\tI'm at work. Please call\r\nham\tget ready to moan and scream :)\r\nham\tOh k :)why you got job then whats up?\r\nham\tI don,t think so. You don't need to be going out that late on a school night. ESPECIALLY when the one class you have is the one you missed last wednesday and probably failed a test in on friday\r\nham\tAnd popping <#> ibuprofens was no help.\r\nham\tBabe ! How goes that day ? What are you doing ? Where are you ? I sip my cappuccino and think of you, my love ... I send a kiss to you from across the sea\r\nham\tOk.\r\nham\tPS U no ur a grown up now right?\r\nham\tChinatown got porridge, claypot rice, yam cake, fishhead beehoon... Either we eat cheap den go cafe n tok or go nydc or somethin...\r\nham\tI know a few people I can hit up and fuck to the yes\r\nham\tPurity of friendship between two is not about smiling after reading the forwarded message..Its about smiling just by seeing the name. Gud evng\r\nham\tSo is there anything specific I should be doing with regards to jaklin or what because idk what the fuck\r\nham\tOh god. I'm gonna Google nearby cliffs now.\r\nspam\tFREE camera phones with linerental from 4.49/month with 750 cross ntwk mins. 1/2 price txt bundle deals also avble. Call 08001950382 or call2optout/J MF\r\nham\tYup i shd haf ard 10 pages if i add figures... Ü all got how many pages?\r\nham\tOoh, 4got, i'm gonna start belly dancing in moseley weds 6.30 if u want 2 join me, they have a cafe too. \r\nham\tThankyou so much for the call. I appreciate your care.\r\nham\tCongrats ! Treat pending.i am not on mail for 2 days.will mail once thru.Respect mother at home.check mails.\r\nham\tI called but no one pick up e phone. I ask both of them already they said ok.\r\nham\tHi my email address has changed now it is \r\nham\tV-aluable. A-ffectionate. L-oveable. E-ternal. N-oble. T-ruthful. I-ntimate. N-atural. E-namous. Happy \"VALENTINES DAY\" in advance\r\nham\tNot much, just some textin'. 
How bout you?\r\nham\tBring it if you got it\r\nham\tI'm in a movie. Call me 4 wat?\r\nham\tNot sure I have the stomach for it ...\r\nham\tHaha... can... But i'm having dinner with my cousin...\r\nham\tA boy was late 2 home. His father: \"POWER OF FRNDSHIP\"\r\nham\t(And my man carlos is definitely coming by mu tonight, no excuses)\r\nham\tsoon you will have the real thing princess! Do i make you wet? :)\r\nham\tRaji..pls do me a favour. Pls convey my Birthday wishes to Nimya. Pls. Today is her birthday.\r\nham\tHaha, my legs and neck are killing me and my amigos are hoping to end the night with a burn, think I could swing by in like an hour?\r\nspam\tURGENT! Your mobile No 07xxxxxxxxx won a £2,000 bonus caller prize on 02/06/03! this is the 2nd attempt to reach YOU! call 09066362231 ASAP! BOX97N7QP, 150PPM\r\nham\tUsually the body takes care of it buy making sure it doesnt progress. Can we pls continue this talk on saturday.\r\nspam\tURGENT!! Your 4* Costa Del Sol Holiday or £5000 await collection. Call 09050090044 Now toClaim. SAE, TC s, POBox334, Stockport, SK38xh, Cost£1.50/pm, Max10mins\r\nham\tHmm well, night night \r\nham\tJust wanted to say holy shit you guys weren't kidding about this bud\r\nham\tJust gettin a bit arty with my collages at the mo, well tryin 2 ne way! Got a roast in a min lovely i shall enjoy that!\r\nham\tThis is one of the days you have a billion classes, right?\r\nham\tGoodmorning, today i am late for 2hrs. Because of back pain.\r\nham\tOk then i'll let him noe later n ask him call u tmr...\r\nham\tPrabha..i'm soryda..realy..frm heart i'm sory\r\nham\tOK i'm waliking ard now... Do u wan me 2 buy anything go ur house?\r\nham\t* Will have two more cartons off u and is very pleased with shelves\r\nham\tNice talking to you! please dont forget my pix :) i want to see all of you...\r\nspam\tYou have WON a guaranteed £1000 cash or a £2000 prize. To claim yr prize call our customer service representative on 08714712379 between 10am-7pm Cost 10p\r\nham\tBut really quite funny lor wat... Then u shd haf run shorter distance wat...\r\nham\tI notice you like looking in the shit mirror youre turning into a right freak\r\nham\tGreat. I was getting worried about you. Just know that a wonderful and caring person like you will have only the best in life. Know that u r wonderful and God's love is yours.\r\nspam\tThanks for your ringtone order, ref number K718. Your mobile will be charged £4.50. Should your tone not arrive please call customer services on 09065069120\r\nham\tI prefer my free days... Tues, wed, fri oso can... Ü ask those workin lor...\r\nham\tAlrite jod hows the revision goin? Keris bin doin a smidgin. N e way u wanna cum over after college?xx\r\nham\tIf you have belive me. Come to my home.\r\nham\tOh k.k..where did you take test?\r\nham\tThose were my exact intentions\r\nham\thaha but no money leh... Later got to go for tuition... Haha and looking for empty slots for driving lessons\r\nham\tHey... Thk we juz go accordin to wat we discussed yest lor, except no kb on sun... Cos there's nt much lesson to go if we attend kb on sat...\r\nham\tK, wen ur free come to my home and also tel vikky i hav sent mail to him also.. Better come evening il be free today aftr 6pm..:-)\r\nham\tNothing just getting msgs by dis name wit different no's..\r\nham\tGood Morning plz call me sir\r\nham\tWhat's your room number again? 
Wanna make sure I'm knocking on the right door\r\nham\t\"Si.como no?!listened2the plaid album-quite gd&the new air1 which is hilarious-also boughtbraindancea comp.ofstuff on aphexs ;abel,u hav2hear it!c u sn xxxx\"\r\nham\tPls tell nelson that the bb's are no longer comin. The money i was expecting aint coming\r\nham\tGive her something to drink, if she takes it and doesn't vomit then you her temp might drop. If she unmits however let me know.\r\nham\tThink you sent the text to the home phone. That cant display texts. If you still want to send it his number is\r\nham\tEvery day i use to sleep after <#> so only.\r\nham\tK I'll call you when I'm close\r\nham\tU buy newspapers already?\r\nham\tNope wif my sis lor... Aft bathing my dog then i can bathe... Looks like it's going 2 rain soon.\r\nham\tBoo I'm on my way to my moms. She's making tortilla soup. Yummmm\r\nham\tNo management puzzeles.\r\nham\tHow did you find out in a way that didn't include all of these details\r\nspam\tHi ya babe x u 4goten bout me?' scammers getting smart..Though this is a regular vodafone no, if you respond you get further prem rate msg/subscription. Other nos used also. Beware!\r\nspam\tBack 2 work 2morro half term over! Can U C me 2nite 4 some sexy passion B4 I have 2 go back? Chat NOW 09099726481 Luv DENA Calls £1/minMobsmoreLKPOBOX177HP51FL\r\nham\twill you like to be spoiled? :)\r\nspam\tThanks for your ringtone order, ref number R836. Your mobile will be charged £4.50. Should your tone not arrive please call customer services on 09065069154\r\nham\tI am getting threats from your sales executive Shifad as i raised complaint against him. Its an official message.\r\nham\thope things went well at 'doctors' ;) reminds me i still need 2go.did u c d little thing i left in the lounge?\r\nham\tDen wat will e schedule b lk on sun?\r\nham\tLol enjoy role playing much?\r\nham\tOk. Me watching tv too.\r\nham\tI just lov this line: \"Hurt me with the truth, I don't mind,i wil tolerat.bcs ur my someone..... But, Never comfort me with a lie\" gud ni8 and sweet dreams\r\nham\tJust checked out, heading out to drop off my stuff now\r\nham\tHere got lots of hair dresser fr china.\r\nham\tSad story of a Man - Last week was my b'day. My Wife did'nt wish me. My Parents forgot n so did my Kids . I went to work. Even my Colleagues did not wish.\r\nham\tIll call you evening ill some ideas.\r\nspam\tSplashMobile: Choose from 1000s of gr8 tones each wk! This is a subscrition service with weekly tones costing 300p. U have one credit - kick back and ENJOY\r\nham\tDid you show him and wot did he say or could u not c him 4 dust?\r\nham\tIt should take about <#> min\r\nspam\tNot heard from U4 a while. Call 4 rude chat private line 01223585334 to cum. Wan 2C pics of me gettin shagged then text PIX to 8552. 2End send STOP 8552 SAM xxx\r\nham\tOk . . now i am in bus. . If i come soon i will come otherwise tomorrow\r\nham\tI cant pick the phone right now. Pls send a message\r\nspam\t<Forwarded from 88877>FREE entry into our £250 weekly comp just send the word ENTER to 88877 NOW. 18 T&C www.textcomp.com\r\nham\tFinish liao... U?\r\nspam\t88066 FROM 88066 LOST 3POUND HELP\r\nham\tHaha i think i did too\r\nham\tU know we watchin at lido?\r\nham\tLife spend with someone for a lifetime may be meaningless but a few moments spent with someone who really love you means more than life itself..\r\nham\tHaha awesome, I've been to 4u a couple times. Who all's coming?\r\nham\tCold. Dont be sad dear\r\nham\tThink I could stop by in like an hour or so? 
My roommate's looking to stock up for a trip\r\nham\tIs that on the telly? No its Brdget Jones!\r\nham\tLove you aathi..love u lot..\r\nham\tHello! How r u? Im bored. Inever thought id get bored with the tv but I am. Tell me something exciting has happened there? Anything! =/\r\nham\tHmm...Bad news...Hype park plaza $700 studio taken...Only left 2 bedrm-$900...\r\nham\tSorry, I'll call later in meeting\r\nham\tR ü comin back for dinner?\r\nham\tI hav almost reached. Call, i m unable to connect u.\r\nham\tWhom you waited for yesterday\r\nham\tI reach home safe n sound liao...\r\nham\tVelly good, yes please!\r\nham\tHi, wkend ok but journey terrible. Wk not good as have huge back log of marking to do\r\nham\tI have had two more letters from . I will copy them for you cos one has a message for you. Speak soon\r\nham\tAlex knows a guy who sells mids but he's down in south tampa and I don't think I could set it up before like 8\r\nham\tDont you have message offer\r\nspam\tHad your mobile 11mths ? Update for FREE to Oranges latest colour camera mobiles & unlimited weekend calls. Call Mobile Upd8 on freefone 08000839402 or 2StopTx\r\nham\tHEY THERE BABE, HOW U DOIN? WOT U UP 2 2NITE LOVE ANNIE X.\r\nham\tRemind me how to get there and I shall do so\r\nham\t:-( that's not v romantic!\r\nham\tHello. Damn this christmas thing. I think i have decided to keep this mp3 that doesnt work.\r\nspam\tYou have 1 new message. Please call 08718738034.\r\nham\tHI DARLIN IM MISSIN U HOPE YOU ARE HAVING A GOOD TIME. WHEN ARE U BACK AND WHAT TIME IF U CAN GIVE ME A CALL AT HOME. JESS XX\r\nspam\t<Forwarded from 21870000>Hi - this is your Mailbox Messaging SMS alert. You have 4 messages. You have 21 matches. Please call back on 09056242159 to retrieve your messages and matches\r\nham\tDraw va?i dont think so:)\r\nham\tDont pick up d call when something important is There to tell. Hrishi\r\nspam\tCongrats! 1 year special cinema pass for 2 is yours. call 09061209465 now! C Suprman V, Matrix3, StarWars3, etc all 4 FREE! bx420-ip4-5we. 150pm. Dont miss out!\r\nham\tNothin comes to my mind. Ü help me buy hanger lor. Ur laptop not heavy?\r\nham\t<#> , that's all? Guess that's easy enough\r\nham\tWe can make a baby in yo tho\r\nham\tShould I tell my friend not to come round til like <#> ish?\r\nham\tFriendship poem: Dear O Dear U R Not Near But I Can Hear Dont Get Fear Live With Cheer No More Tear U R Always my Dear. Gud ni8\r\nham\tStill in the area of the restaurant. Ill try to come back soon\r\nham\tAight that'll work, thanks\r\nspam\tWIN a year supply of CDs 4 a store of ur choice worth £500 & enter our £100 Weekly draw txt MUSIC to 87066 Ts&Cs www.Ldew.com.subs16+1win150ppmx3\r\nspam\tMoby Pub Quiz.Win a £100 High Street prize if u know who the new Duchess of Cornwall will be? Txt her first name to 82277.unsub STOP £1.50 008704050406 SP Arrow\r\nham\tI have 2 sleeping bags, 1 blanket and paper and phone details. Anything else?\r\nspam\tYou have won a Nokia 7250i. This is what you get when you win our FREE auction. To take part send Nokia to 86021 now. HG/Suite342/2Lands Row/W1JHL 16+ \r\nspam\tCongratulations! Thanks to a good friend U have WON the £2,000 Xmas prize. 2 claim is easy, just call 08718726971 NOW! Only 10p per minute. 
BT-national-rate.\r\nspam\[email protected] (More games from TheDailyDraw) Dear Helen, Dozens of Free Games - with great prizesWith..\r\nham\tSo what do you guys do.\r\nham\tAlso that chat was awesome but don't make it regular unless you can see her in person\r\nham\tThat's significant but dont worry.\r\nham\tThat's cause your old. I live to be high.\r\nham\tWaqt se pehle or naseeb se zyada kisi ko kuch nahi milta,Zindgi wo nahi he jo hum sochte hai Zindgi wo hai jo ham jeetey hai..........\r\nham\tOn the way to office da..\r\nham\tIn which place do you want da.\r\nham\tThis pain couldn't have come at a worse time.\r\nham\tOk...\r\nham\tShould I be stalking u?\r\nham\tSorry dude. Dont know how i forgot. Even after Dan reminded me. Sorry. Hope you guys had fun.\r\nham\tOk lor.\r\nham\tApps class varaya elaya.\r\nham\tThe Xmas story is peace.. The Xmas msg is love.. The Xmas miracle is jesus.. Hav a blessed month ahead & wish U Merry Xmas...\r\nspam\tURGENT! Your mobile number *************** WON a £2000 Bonus Caller prize on 10/06/03! This is the 2nd attempt to reach you! Call 09066368753 ASAP! Box 97N7QP, 150ppm\r\nham\tThat day you asked about anand number. Why:-)\r\nham\tAm surfing online store. For offers do you want to buy any thing.\r\nham\tLong beach lor. Expected... U having dinner now?\r\nham\tAt home by the way\r\nham\tWe are both fine. Thanks\r\nham\tWhat happen to her tell the truth\r\nham\tDo you like Italian food?\r\nham\tWhich is weird because I know I had it at one point\r\nham\t\"Aww you must be nearly dead!Well Jez isComing over toDo some workAnd that whillTake forever!\"\r\nham\tTell your friends what you plan to do on Valentines day @ <URL>\r\nham\tAlright, see you in a bit\r\nham\tCheers for the message Zogtorius. Ive been staring at my phone for an age deciding whether to text or not.\r\nham\tI will take care of financial problem.i will help:)\r\nham\tTell dear what happen to you. Why you talking to me like an alian\r\nspam\tDouble your mins & txts on Orange or 1/2 price linerental - Motorola and SonyEricsson with B/Tooth FREE-Nokia FREE Call MobileUpd8 on 08000839402 or2optout/HV9D\r\nham\t1) Go to write msg 2) Put on Dictionary mode 3)Cover the screen with hand, 4)Press <#> . 5)Gently remove Ur hand.. Its interesting..:)\r\nham\tOkie...\r\nham\tHi this is yijue, can i meet u at 11 tmr?\r\nham\tIts posible dnt live in <#> century cm frwd n thnk different\r\nham\tBut i dint slept in afternoon.\r\nham\tThat seems unnecessarily affectionate\r\nham\tYar else i'll thk of all sorts of funny things.\r\nham\tYou will be in the place of that man\r\nspam\tDownload as many ringtones as u like no restrictions, 1000s 2 choose. U can even send 2 yr buddys. Txt Sir to 80082 £3 \r\nham\tThats cool. How was your day?\r\nspam\tPlease CALL 08712402902 immediately as there is an urgent message waiting for you.\r\nham\tR we going with the <#> bus?\r\nham\tHello, my love ! How went your day ? Are you alright ? I think of you, my sweet and send a jolt to your heart to remind you ... I LOVE YOU! Can you hear it ? I screamed it across the sea for all the world to hear. Ahmad al Hallaq is loved ! and owned ! *possessive passionate kiss*\r\nham\tNo..he joined today itself.\r\nham\tOkay same with me. 
Well thanks for the clarification\r\nham\tI'll talk to the others and probably just come early tomorrow then\r\nspam\tSpook up your mob with a Halloween collection of a logo & pic message plus a free eerie tone, txt CARD SPOOK to 8007 zed 08701417012150p per logo/pic\r\nham\tHad the money issue weigh me down but thanks to you, I can breathe easier now. I.ll make sure you dont regret it. Thanks.\r\nham\tHi. I'm sorry i missed your call. Can you pls call back.\r\nham\tHow are you doing? Hope you've settled in for the new school year. Just wishin you a gr8 day\r\nspam\tFantasy Football is back on your TV. Go to Sky Gamestar on Sky Active and play £250k Dream Team. Scoring starts on Saturday, so register now!SKY OPT OUT to 88088\r\nham\tOk then no need to tell me anything i am going to sleep good night\r\nham\tOk try to do week end course in coimbatore.\r\nspam\tTone Club: Your subs has now expired 2 re-sub reply MONOC 4 monos or POLYC 4 polys 1 weekly @ 150p per week Txt STOP 2 stop This msg free Stream 0871212025016\r\nham\tV nice! Off 2 sheffield tom 2 air my opinions on categories 2 b used 2 measure ethnicity in next census. Busy transcribing. :-)\r\nham\tIf you r @ home then come down within 5 min\r\nham\tA Boy loved a gal. He propsd bt she didnt mind. He gv lv lttrs, Bt her frnds threw thm. Again d boy decided 2 aproach d gal , dt time a truck was speeding towards d gal. Wn it was about 2 hit d girl,d boy ran like hell n saved her. She asked 'hw cn u run so fast?' D boy replied \"Boost is d secret of my energy\" n instantly d girl shouted \"our energy\" n Thy lived happily 2gthr drinking boost evrydy Moral of d story:- I hv free msgs:D;): gud ni8\r\nham\tThat day ü say ü cut ur hair at paragon, is it called hair sense? Do ü noe how much is a hair cut? \r\nham\tHmm, too many of them unfortunately... Pics obviously arent hot cakes. Its kinda fun tho\r\nham\tWatching tv lor... Y she so funny we bluff her 4 wat. Izzit because she thk it's impossible between us?\r\nspam\tXMAS Prize draws! We are trying to contact U. Todays draw shows that you have won a £2000 prize GUARANTEED. Call 09058094565 from land line. Valid 12hrs only\r\nham\tDunno lei he neva say...\r\nham\tThanx 4 2day! U r a goodmate I THINK UR RITE SARY! ASUSUAL!1 U CHEERED ME UP! LOVE U FRANYxxxxx\r\nham\tI'm on my way home. Went to change batt 4 my watch then go shop a bit lor.\r\nspam\tYES! The only place in town to meet exciting adult singles is now in the UK. Txt CHAT to 86688 now! 150p/Msg.\r\nham\tHi, Mobile no. <#> has added you in their contact list on www.fullonsms.com It s a great place to send free sms to people For more visit fullonsms.com\r\nham\tGood evening Sir, hope you are having a nice day. I wanted to bring it to your notice that I have been late in paying rent for the past few months and have had to pay a $ <#> charge. I felt it would be inconsiderate of me to nag about something you give at great cost to yourself and that's why i didnt speak up. I however am in a recession and wont be able to pay the charge this month hence my askin well ahead of month's end. Can you please help. Thank you for everything.\r\nham\tIf i let you do this, i want you in the house by 8am.\r\nham\tBest line said in Love: . \"I will wait till the day I can forget u Or The day u realize that u cannot forget me.\"... Gn\r\nham\tI will reach before ten morning\r\nham\tYour pussy is perfect!\r\nham\tSorry, I'll call later\r\nspam\tSomeone has contacted our dating service and entered your phone becausethey fancy you! 
To find out who it is call from a landline 09058098002. PoBox1, W14RG 150p\r\nham\tNo message..no responce..what happend?\r\nham\tAlso where's the piece\r\nham\twiskey Brandy Rum Gin Beer Vodka Scotch Shampain Wine \"KUDI\"yarasu dhina vaazhthukkal. ..\r\nham\tBoo. How's things? I'm back at home and a little bored already :-(\r\nham\tFirst has she gained more than <#> kg since she took in. Second has she done the blood sugar tests. If she has and its ok and her blood pressure is within normal limits then no worries\r\nham\tPICK UR FONE UP NOW U DUMB?\r\nham\tThanks da thangam, i feel very very happy dear. I also miss you da.\r\nham\tOkey doke. I'm at home, but not dressed cos laying around ill! Speak to you later bout times and stuff. \r\nham\tI don't run away frm u... I walk slowly & it kills me that u don't care enough to stop me...\r\nham\tBabe, I'm back ... Come back to me ...\r\nham\tWell you told others you'd marry them...\r\nham\tNeshanth..tel me who r u?\r\nham\tYO YO YO BYATCH WHASSUP?\r\nham\tOh... Kay... On sat right?\r\nham\tHi! This is Roger from CL. How are you?\r\nspam\tBabe: U want me dont u baby! Im nasty and have a thing 4 filthyguys. Fancy a rude time with a sexy bitch. How about we go slo n hard! Txt XXX SLO(4msgs)\r\nham\tOh oh... Wasted... Den muz chiong on sat n sun liao...\r\nham\tJesus christ bitch I'm trying to give you drugs answer your fucking phone\r\nham\tPlease give it 2 or i will pick it up on Tuesday evening about 8 if that is ok.\r\nham\tI'm meeting Darren...\r\nham\tOne of best dialogue in cute reltnship..!! \"Wen i Die, Dont Come Near My Body..!! Bcoz My Hands May Not Come 2 Wipe Ur Tears Off That Time..!Gud ni8\r\nham\tSolve d Case : A Man Was Found Murdered On <DECIMAL> . <#> AfterNoon. 1,His wife called Police. 2,Police questioned everyone. 3,Wife: Sir,I was sleeping, when the murder took place. 4.Cook: I was cooking. 5.Gardener: I was picking vegetables. 6.House-Maid: I went 2 d post office. 7.Children: We went 2 play. 8.Neighbour: We went 2 a marriage. Police arrested d murderer Immediately. Who's It? Reply With Reason, If U r Brilliant.\r\nham\tDear where you will be when i reach there\r\nham\tAww that's the first time u said u missed me without asking if I missed u first. You DO love me! :)\r\nham\tOk... Thanx... Gd nite 2 ü too...\r\nham\tCome to me right now, Ahmad\r\nspam\tI don't know u and u don't know me. Send CHAT to 86688 now and let's find each other! Only 150p/Msg rcvd. HG/Suite342/2Lands/Row/W1J6HL LDN. 18 years or over.\r\nham\tLol please do. Actually send a pic of yourself right now. I wanna see. Pose with a comb and hair dryer or something.\r\nham\tO was not into fps then.\r\nham\tHuh means computational science... Y they like dat one push here n there...\r\nham\tCould you not read me, my Love ? I answered you\r\nham\tOh... Lk tt den we take e one tt ends at cine lor... Dun wan yogasana oso can... \r\nham\tMadam,regret disturbance.might receive a reference check from DLF Premarica.kindly be informed.Rgds,Rakhesh,Kerala.\r\nspam\tSMS SERVICES For your inclusive text credits pls gotto www.comuk.net login 3qxj9 unsubscribe with STOP no extra charge help 08702840625 comuk.220cm2 9AE\r\nham\tOic... Then better quickly go bathe n settle down...\r\nham\tErr... Cud do. I'm going to at 8pm. I haven't got a way to contact him until then.\r\nham\tA bloo bloo bloo I'll miss the first bowl\r\nham\tLmao but its so fun...\r\nham\tOh k k:)but he is not a big hitter.anyway good\r\nham\tHey!!! I almost forgot ... Happy B-day babe ! 
SMS Spam Collection corpus (raw data blob): tab-separated records, each of the form `<label>\t<message>` where the label is `ham` or `spam`.
Do you mind <#> times per night?\r\nspam\t44 7732584351, Do you want a New Nokia 3510i colour phone DeliveredTomorrow? With 300 free minutes to any mobile + 100 free texts + Free Camcorder reply or call 08000930705.\r\nham\ttap & spile at seven. * Is that pub on gas st off broad st by canal. Ok?\r\nham\tOk then i come n pick u at engin?\r\nham\tWhich is why i never wanted to tell you any of this. Which is why i'm so short with you and on-edge as of late.\r\nham\tRaviyog Peripherals bhayandar east\r\nham\tK actually can you guys meet me at the sunoco on howard? It should be right on the way\r\nspam\tYou have 1 new voicemail. Please call 08719181513.\r\nham\tMOON has come to color your dreams, STARS to make them musical and my SMS to give you warm and Peaceful Sleep. Good Night\r\nham\tJust finished eating. Got u a plate. NOT leftovers this time.\r\nham\tThanx a lot...\r\nham\tHurry home u big butt. Hang up on your last caller if u have to. Food is done and I'm starving. Don't ask what I cooked.\r\nham\tLol your right. What diet? Everyday I cheat anyway. I'm meant to be a fatty :(\r\nham\tIts a great day. Do have yourself a beautiful one.\r\nham\tWhat happened in interview?\r\nham\tSolve d Case : A Man Was Found Murdered On <DECIMAL> . <#> AfterNoon. 1,His wife called Police. 2,Police questioned everyone. 3,Wife: Sir,I was sleeping, when the murder took place. 4.Cook: I was cooking. 5.Gardener: I was picking vegetables. 6.House-Maid: I went 2 d post office. 7.Children: We went 2 play. 8.Neighbour: We went 2 a marriage. Police arrested d murderer Immediately. Who's It? Reply With Reason, If U r Brilliant.\r\nham\tBadrith is only for chennai:)i will surely pick for us:)no competition for him.\r\nham\tI tot it's my group mate... Lucky i havent reply... Wat time do ü need to leave... \r\nham\tHey you around? I've got enough for a half + the ten I owe you\r\nham\tHey tmr maybe can meet you at yck\r\nham\tALRITE SAM ITS NIC JUST CHECKIN THAT THIS IS UR NUMBER-SO IS IT?T.B*\r\nham\tThey are just making it easy to pay back. I have <#> yrs to say but i can pay back earlier. You get?\r\nham\tNot to worry. I'm sure you'll get it.\r\nham\tThe gas station is like a block away from my house, you'll drive right by it since armenia ends at swann and you have to take howard\r\nspam\tSomeone U know has asked our dating service 2 contact you! Cant Guess who? CALL 09058097189 NOW all will be revealed. POBox 6, LS15HB 150p \r\nspam\tCamera - You are awarded a SiPix Digital Camera! call 09061221066 fromm landline. Delivery within 28 days\r\nham\tMy tuition is at 330. Hm we go for the 1120 to 1205 one? Do you mind?\r\nham\tI'm not smoking while people use \"wylie smokes too much\" to justify ruining my shit\r\nham\tDear good morning how you feeling dear\r\nham\tA little. Meds say take once every 8 hours. It's only been 5 but pain is back. So I took another. Hope I don't die\r\nham\tBeautiful tomorrow never comes.. When it comes, it's already TODAY.. In the hunt of beautiful tomorrow don't waste your wonderful TODAY.. GOODMORNING:)\r\nham\tDunno lei ü all decide lor. How abt leona? Oops i tot ben is going n i msg him.\r\nham\tHi there. We have now moved in2 our pub . Would be great 2 c u if u cud come up.\r\nspam\tTodays Voda numbers ending 5226 are selected to receive a ?350 award. If you hava a match please call 08712300220 quoting claim code 1131 standard rates app \r\nspam\tThis message is free. Welcome to the new & improved Sex & Dogging club! To unsubscribe from this service reply STOP. 
msgs@150p 18 only\r\nham\tHoneybee Said: *I'm d Sweetest in d World* God Laughed & Said: *Wait,U Havnt Met d Person Reading This Msg* MORAL: Even GOD Can Crack Jokes! GM+GN+GE+GN:)\r\nham\tJust do what ever is easier for you\r\nspam\tRCT' THNQ Adrian for U text. Rgds Vatian\r\nham\tStop calling everyone saying I might have cancer. My throat hurts to talk. I can't be answering everyones calls. If I get one more call I'm not babysitting on Monday\r\nham\tIt'll be tough, but I'll do what I have to\r\nham\tIM GONNAMISSU SO MUCH!!I WOULD SAY IL SEND U A POSTCARD BUTTHERES ABOUTAS MUCH CHANCE OF MEREMEMBERIN ASTHERE IS OFSI NOT BREAKIN HIS CONTRACT!! LUV Yaxx\r\nham\tEe msg na poortiyagi odalebeku: Hanumanji 7 name 1-Hanuman 2-Bajarangabali 3-Maruti 4-Pavanaputra 5-Sankatmochan 6-Ramaduth 7-Mahaveer ee 7 name <#> janarige ivatte kalisidare next saturday olage ondu good news keluviri...! Maretare inde 1 dodda problum nalli siguviri idu matra <#> % true.. Don't neglet.\r\nham\tHI DARLIN I FINISH AT 3 DO U 1 2 PICK ME UP OR MEET ME? TEXT BACK ON THIS NUMBER LUV KATE XXX\r\nham\tSet a place for me in your heart and not in your mind, as the mind easily forgets but the heart will always remember. Wish you Happy Valentines Day!\r\nham\tBut i'm surprised she still can guess right lor...\r\nham\tOkie ü wan meet at bishan? Cos me at bishan now. I'm not driving today.\r\nham\tOh ho. Is this the first time u use these type of words\r\nham\tHI DARLIN HOW WAS WORK DID U GET INTO TROUBLE? IJUST TALKED TO YOUR MUM ALL MORNING! I HAD A REALLY GOOD TIME LAST NIGHT IM GOIN OUT SOON BUT CALL ME IF U CAN\r\nham\tI know you are serving. I mean what are you doing now.\r\nham\tHuh... Hyde park not in mel ah, opps, got confused... Anyway, if tt's e best choice den we juz have to take it...\r\nham\tOh gei. That happend to me in tron. Maybe ill dl it in 3d when its out\r\nspam\tFREE MESSAGE Activate your 500 FREE Text Messages by replying to this message with the word FREE For terms & conditions, visit www.07781482378.com\r\nham\tI know girls always safe and selfish know i got it pa. Thank you. good night.\r\nham\tNo worries, hope photo shoot went well. have a spiffing fun at workage. \r\nham\tI'm freezing and craving ice. Fml\r\nham\tKay... Since we are out already \r\nham\tEh sorry leh... I din c ur msg. Not sad already lar. Me watching tv now. U still in office?\r\nham\tYo im right by yo work\r\nham\tOk darlin i supose it was ok i just worry too much.i have to do some film stuff my mate and then have to babysit again! But you can call me there.xx\r\nham\tShe said,'' do u mind if I go into the bedroom for a minute ? '' ''OK'', I sed in a sexy mood. She came out 5 minuts latr wid a cake...n My Wife,\r\nham\tI don wake since. I checked that stuff and saw that its true no available spaces. Pls call the embassy or send a mail to them.\r\nham\tNope... Juz off from work...\r\nham\tHuh so fast... Dat means u havent finished painting?\r\nham\t what number do u live at? Is it 11?\r\nham\tNo we put party 7 days a week and study lightly, I think we need to draw in some custom checkboxes so they know we're hardcore\r\nham\tSac will score big hundred.he is set batsman:-)\r\nham\tSend me yetty's number pls.\r\nham\tHow much it will cost approx . Per month.\r\nham\tOk... The theory test? when are ü going to book? I think it's on 21 may. Coz thought wanna go out with jiayin. But she isnt free\r\nspam\tYou are being contacted by our dating service by someone you know! To find out who it is, call from a land line 09050000928. 
PoBox45W2TG150P\r\nham\tThat's fine, have him give me a call if he knows what he wants or has any questions\r\nham\tSorry, got a late start, we're on the way\r\nham\tThen u go back urself lor...\r\nham\tI AM AT THE GAS STATION. GO THERE.\r\nham\tK, if u bored up just come to my home..\r\nham\tBabe !!!! I LOVE YOU !!!! *covers your face in kisses*\r\nham\tLike I made him throw up when we were smoking in our friend's car one time, it was awesome\r\nham\tStill i have not checked it da. . .\r\nham\tYou will go to walmart. I.ll stay.\r\nham\tI haven't forgotten you, i might have a couple bucks to send you tomorrow, k? I love ya too\r\nham\tOh great. I.ll disturb him more so that we can talk.\r\nham\tReverse is cheating. That is not mathematics.\r\nham\tU're welcome... Caught u using broken english again...\r\nham\tNo problem baby. Is this is a good time to talk? I called and left a message.\r\nham\tSorry, I'll call later\r\nham\tOh is it! Which brand?\r\nham\tSorry i cant take your call right now. It so happens that there r 2waxsto do wat you want. She can come and ill get her medical insurance. And she'll be able to deliver and have basic care. I'm currently shopping for the right medical insurance for her. So just give me til friday morning. Thats when i.ll see the major person that can guide me to the right insurance.\r\nham\tAt what time are you coming.\r\nham\tCall him and say you not coming today ok and tell them not to fool me like this ok\r\nham\tI emailed yifeng my part oredi.. Can ü get it fr him..\r\nham\tR u sure they'll understand that! Wine * good idea just had a slurp!\r\nham\tMinimum walk is 3miles a day.\r\nham\tOk not a problem will get them a taxi. C ing tomorrow and tuesday. On tuesday think we r all going to the cinema. \r\nham\tBrainless Baby Doll..:-D;-), vehicle sariyag drive madoke barolla..\r\nham\tI don't run away frm u... I walk slowly & it kills me that u don't care enough to stop me...\r\nspam\tSorry I missed your call let's talk when you have the time. I'm on 07090201529\r\nham\tPlease attend the phone:)\r\nham\tYou only hate me. You can call any but you didnt accept even a single call of mine. Or even you messaged\r\nham\tNo messages on her phone. I'm holding it now\r\nham\tCan... I'm free...\r\nham\tYo my trip got postponed, you still stocked up?\r\nham\tSorry, I'll call later\r\nham\tI am waiting for your call sir.\r\nham\tHey what are you doing. Y no reply pa..\r\nham\tHey elaine, is today's meeting still on?\r\nham\tSorry i've not gone to that place. I.ll do so tomorrow. Really sorry.\r\nham\tMost of the tiime when i don't let you hug me it's so i don't break into tears.\r\nham\tTomorrow i am not going to theatre. . . So i can come wherever u call me. . . Tell me where and when to come tomorrow\r\nham\tAnd now electricity just went out fml.\r\nham\tLooks like you found something to do other than smoke, great job!\r\nham\tAlso andros ice etc etc\r\nham\t:) \r\nham\tGood afternon, my love. How are today? I hope your good and maybe have some interviews. I wake and miss you babe. A passionate kiss from across the sea\r\nham\tYup. Wun believe wat? U really neva c e msg i sent shuhui?\r\nham\tHows that watch resizing\r\nham\tDear umma she called me now :-)\r\nham\tJust finished. Missing you plenty\r\nspam\tcomplimentary 4 STAR Ibiza Holiday or £10,000 cash needs your URGENT collection. 09066364349 NOW from Landline not to lose out! Box434SK38WP150PPM18+\r\nham\tWell, I meant as opposed to my drunken night of before\r\nham\tK... Must book a not huh? 
so going for yoga basic on sunday?\r\nspam\tFREE MSG:We billed your mobile number by mistake from shortcode 83332.Please call 08081263000 to have charges refunded.This call will be free from a BT landline\r\nham\tOk can...\r\nham\tOops - am at my mum's in somerset... Bit far! Back tomo, see you soon x\r\nham\tSo u workin overtime nigpun?\r\nham\tSame as kallis dismissial in 2nd test:-).\r\nham\tO. Guess they both got screwd\r\nspam\tPlease CALL 08712402972 immediately as there is an urgent message waiting for you\r\nham\tI'm in a meeting, call me later at\r\nham\tWhat r u cooking me for dinner?\r\nham\tOk thanx...\r\nham\tBull. Your plan was to go floating off to IKEA with me without a care in the world. So i have to live with your mess another day.\r\nham\tThen i buy.\r\nspam\tURGENT! Your Mobile number has been awarded with a £2000 Bonus Caller Prize. Call 09058095201 from land line. Valid 12hrs only\r\nham\tHeehee that was so funny tho\r\nham\tIt only does simple arithmetic not percentages.\r\nham\tYeah we wouldn't leave for an hour at least, how's 4 sound?\r\nspam\tAs a valued customer, I am pleased to advise you that following recent review of your Mob No. you are awarded with a £1500 Bonus Prize, call 09066364589\r\nham\tThanks honey. Have a great day.\r\nham\t'An Amazing Quote'' - \"Sometimes in life its difficult to decide whats wrong!! a lie that brings a smile or the truth that brings a tear....\"\r\nham\tGood night my dear.. Sleepwell&Take care\r\nham\tThen ü ask dad to pick ü up lar... Ü wan 2 stay until 6 meh...\r\nham\tJus chillaxin, what up\r\nham\t\"HEY DAS COOL... IKNOW ALL 2 WELLDA PERIL OF STUDENTFINANCIAL CRISIS!SPK 2 U L8R.\"\r\nham\tBeautiful Truth against Gravity.. Read carefully: \"Our heart feels light when someone is in it.. But it feels very heavy when someone leaves it..\" GOODMORNING\r\nspam\tDo you want a New Nokia 3510i colour phone DeliveredTomorrow? With 300 free minutes to any mobile + 100 free texts + Free Camcorder reply or call 08000930705\r\nham\tWhats that coming over the hill..... Is it a monster! Hope you have a great day. Things r going fine here, busy though! \r\nham\tJoy's father is John. Then John is the ____ of Joy's father. If u ans ths you hav <#> IQ. Tis s IAS question try to answer.\r\nham\tOnly once then after ill obey all yours.\r\nham\tNo she didnt. I will search online and let you know.\r\nham\tWhere do you need to go to get it?\r\nham\tNo pic. Please re-send.\r\nham\tHe remains a bro amongst bros\r\nham\tUhhhhrmm isnt having tb test bad when youre sick\r\nham\tBut i haf enuff space got like 4 mb...\r\nspam\tLIFE has never been this much fun and great until you came in. You made it truly special for me. I won't forget you! enjoy @ one gbp/sms\r\nspam\tDo you want a new Video phone? 600 anytime any network mins 400 Inclusive Video calls AND downloads 5 per week Free delTOMORROW call 08002888812 or reply NOW\r\nspam\tAs a valued customer, I am pleased to advise you that following recent review of your Mob No. you are awarded with a £1500 Bonus Prize, call 09066368470\r\nspam\tWelcome! Please reply with your AGE and GENDER to begin. e.g 24M\r\nspam\tFreemsg: 1-month unlimited free calls! Activate SmartCall Txt: CALL to No: 68866. Subscriptn3gbp/wk unlimited calls Help: 08448714184 Stop?txt stop landlineonly\r\nspam\tHad your mobile 10 mths? Update to latest Orange camera/video phones for FREE. Save £s with Free texts/weekend calls. 
Text YES for a callback orno to opt out\r\nspam\tAm new 2 club & dont fink we met yet Will B gr8 2 C U Please leave msg 2day wiv ur area 09099726553 reply promised CARLIE x Calls£1/minMobsmore LKPOBOX177HP51FL\r\nham\tTrue. Its easier with her here.\r\nham\tSure but since my parents will be working on Tuesday I don't really need a cover story\r\nham\tHaha okay... Today weekend leh... \r\nham\t\"Hi darlin did youPhone me? Im atHome if youwanna chat.\"\r\nham\tI don't know jack shit about anything or i'd say/ask something helpful but if you want you can pretend that I did and just text me whatever in response to the hypotheticalhuagauahahuagahyuhagga\r\nham\tYou've always been the brainy one.\r\nham\tYeah if we do have to get a random dude we need to change our info sheets to PARTY <#> /7 NEVER STUDY just to be safe\r\nspam\tCamera - You are awarded a SiPix Digital Camera! call 09061221066 fromm landline. Delivery within 28 days.\r\nham\tChristmas is An occasion that is Celebrated as a Reflection of UR... Values..., Desires..., Affections...& Traditions.... Have an ideal Christmas...\r\nham\tSending you greetings of joy and happiness. Do have a gr8 evening\r\nham\t\"Hi darlin i cantdo anythingtomorrow as myparents aretaking me outfor a meal. when are u free? Katexxx\"\r\nham\tIf india win or level series means this is record:)\r\nham\tThen what about further plan?\r\nham\tIts good to hear from you\r\nham\tawesome, how do I deal with the gate? Charles told me last night but, uh, yeah\r\nham\tWhat time you thinkin of goin?\r\nspam\tGet a FREE mobile video player FREE movie. To collect text GO to 89105. Its free! Extra films can be ordered t's and c's apply. 18 yrs only\r\nspam\tSave money on wedding lingerie at www.bridal.petticoatdreams.co.uk Choose from a superb selection with national delivery. Brought to you by WeddingFriend\r\nham\tYour board is working fine. The issue of overheating is also reslove. But still software inst is pending. I will come around 8'o clock.\r\nham\tYes but I don't care cause I know its there!\r\nham\twiskey Brandy Rum Gin Beer Vodka Scotch Shampain Wine \"KUDI\"yarasu dhina vaazhthukkal. ..\r\nham\tMon okie lor... Haha, best is cheap n gd food la, ex oso okie... Depends on whether wana eat western or chinese food... Den which u prefer... \r\nham\tSitting ard nothing to do lor. U leh busy w work?\r\nham\tIts <#> k here oh. Should i send home for sale.\r\nham\tSorry. || mail? || \r\nham\tYa just telling abt tht incident..\r\nham\tYes we were outside for like 2 hours. And I called my whole family to wake them up cause it started at 1 am\r\nham\tUgh just got outta class\r\nham\tNowadays people are notixiquating the laxinorficated opportunity for bambling of entropication.... Have you ever oblisingately opted ur books for the masteriastering amplikater of fidalfication? It is very champlaxigating, i think it is atrocious.. Wotz Ur Opinion???? Junna\r\nham\tI dont have any of your file in my bag..i was in work when you called me.i 'll tell you if i find anything in my room.\r\nham\tNo need lar. Jus testing e phone card. Dunno network not gd i thk. Me waiting 4 my sis 2 finish bathing so i can bathe. Dun disturb u liao u cleaning ur room.\r\nham\tOk. I.ll do you right later.\r\nham\tFriendship poem: Dear O Dear U R Not Near But I Can Hear Dont Get Fear Live With Cheer No More Tear U R Always my Dear. Gud ni8\r\nham\tHave your lunch and come quickly and open the door:)\r\nspam\tNot heard from U4 a while. Call me now am here all night with just my knickers on. 
Make me beg for it like U did last time 01223585236 XX Luv Nikiyu4.net\r\nham\tI am back. Bit long cos of accident on a30. Had to divert via wadebridge.I had a brilliant weekend thanks. Speak soon. Lots of love\r\nham\tK.. I yan jiu liao... Sat we can go 4 bugis vill one frm 10 to 3 den hop to parco 4 nb. Sun can go cine frm 1030 to 2, den hop to orc mrt 4 hip hop at 4...\r\nspam\tBloomberg -Message center +447797706009 Why wait? Apply for your future http://careers. bloomberg.com\r\nham\ti am seeking a lady in the street and a freak in the sheets. Is that you?\r\nham\tMy phone\r\nham\tHaha figures, well I found the piece and priscilla's bowl\r\nham\tActually fuck that, just do whatever, do find an excuse to be in tampa at some point before january though\r\nspam\tURGENT! We are trying to contact U. Todays draw shows that you have won a £800 prize GUARANTEED. Call 09050001808 from land line. Claim M95. Valid12hrs only\r\nham\tyay! finally lol. i missed our cinema trip last week :-(\r\nham\tAll day working day:)except saturday and sunday..\r\nham\taathi..where are you dear..\r\nham\tHeart is empty without love.. Mind is empty without wisdom.. Eyes r empty without dreams & Life is empty without frnds.. So Alwys Be In Touch. Good night & sweet dreams\r\nham\tI think I‘m waiting for the same bus! Inform me when you get there, if you ever get there.\r\nham\tYou getting back any time soon?\r\nham\t, how's things? Just a quick question.\r\nham\tNight has ended for another day, morning has come in a special way. May you smile like the sunny rays and leaves your worries at the blue blue bay. Gud mrng\r\nham\tI can probably come by, everybody's done around <#> right?\r\nham\tI got it before the new year cos yetunde said she wanted to surprise you with it but when i didnt see money i returned it mid january before the <#> day return period ended.\r\nham\tI can ask around but there's not a lot in terms of mids up here\r\nham\tBe sure to check your yahoo email. We sent photos yesterday\r\nham\tWhat was she looking for?\r\nham\tWherre's my boytoy ? :-(\r\nspam\tDo you want a NEW video phone750 anytime any network mins 150 text for only five pounds per week call 08000776320 now or reply for delivery tomorrow\r\nham\tHello, my love! How goes that day ? I wish your well and fine babe and hope that you find some job prospects. I miss you, boytoy ... *a teasing kiss*\r\nham\tTell my bad character which u Dnt lik in me. I'll try to change in <#> . I ll add tat 2 my new year resolution. Waiting for ur reply.Be frank...good morning.\r\nham\tNo:-)i got rumour that you going to buy apartment in chennai:-)\r\nham\tYeah, probably earlier than that\r\nham\tChange windows logoff sound..\r\nham\tStill i have not checked it da. . .\r\nham\tI'm also came to room.\r\nham\tHuh but i got lesson at 4 lei n i was thinkin of going to sch earlier n i tot of parkin at kent vale... \r\nham\tOk.\r\nham\tI will reach office around <DECIMAL> . & my mobile have problem. You cann't get my voice. So call you asa i'll free\r\nham\tCool, text me when you head out\r\nspam\tYou are being contacted by our dating service by someone you know! To find out who it is, call from a land line 09050000878. PoBox45W2TG150P\r\nspam\tWan2 win a Meet+Greet with Westlife 4 U or a m8? They are currently on what tour? 1)Unbreakable, 2)Untamed, 3)Unkempt. Text 1,2 or 3 to 83049. Cost 50p +std text\r\nham\tHappy birthday... 
May u find ur prince charming soon n dun work too hard...\r\nham\tOh, the grand is having a bit of a party but it doesn't mention any cover charge so it's probably first come first served\r\nham\tYou said to me before i went back to bed that you can't sleep for anything.\r\nham\tI hope you arnt pissed off but id would really like to see you tomorrow. Love me xxxxxxxxxxxxxX\r\nspam\[email protected] (Bank of Granite issues Strong-Buy) EXPLOSIVE PICK FOR OUR MEMBERS *****UP OVER 300% *********** Nasdaq Symbol CDGT That is a $5.00 per..\r\nham\tsays the <#> year old with a man and money. I'm down to my last <#> . Still waiting for that check.\r\nham\tI will come to ur home now\r\nham\tFree any day but i finish at 6 on mon n thurs...\r\nham\tWill you be here for food\r\nham\tlife alle mone,eppolum oru pole allalo\r\nham\tNite...\r\nham\tTwo fundamentals of cool life: \"Walk, like you are the KING\"...! OR \"Walk like you Dont care,whoever is the KING\"!... Gud nyt\r\nham\tCamera quite good, 10.1mega pixels, 3optical and 5digital dooms. Have a lovely holiday, be safe and i hope you hav a good journey! Happy new year to you both! See you in a couple of weeks! \r\nham\tHi Petey!noim ok just wanted 2 chat coz avent spoken 2 u 4 a long time-hope ur doin alrite.have good nit at js love ya am.x\r\nham\tI just saw ron burgundy captaining a party boat so yeah\r\nham\tI'm serious. You are in the money base\r\nham\tAlready one guy loving you:-.\r\nham\tStaff of placement training in Amrita college.\r\nham\tI always chat with you. In fact i need money can you raise me?\r\nham\tI'm job profile seems like bpo..\r\nham\tWell, I was about to give up cos they all said no they didn‘t do one nighters. I persevered and found one but it is very cheap so i apologise in advance. It is just somewhere to sleep isnt it?\r\nham\tSo you think i should actually talk to him? Not call his boss in the morning? I went to this place last year and he told me where i could go and get my car fixed cheaper. He kept telling me today how much he hoped i would come back in, how he always regretted not getting my number, etc.\r\nham\tAre you willing to go for apps class.\r\nham\tHanging out with my brother and his family\r\nham\tNo it will reach by 9 only. She telling she will be there. I dont know\r\nham\tHey... are you going to quit soon? Xuhui and i working till end of the month \r\nham\tIm sorry bout last nite it wasnt ur fault it was me, spouse it was pmt or sumthin! U 4give me? I think u shldxxxx\r\nham\tTry neva mate!!\r\nham\tYeah that'd pretty much be the best case scenario\r\nham\tI not free today i haf 2 pick my parents up tonite...\r\nham\t\"HEY BABE! FAR 2 SPUN-OUT 2 SPK AT DA MO... DEAD 2 DA WRLD. BEEN SLEEPING ON DA SOFA ALL DAY, HAD A COOL NYTHO, TX 4 FONIN HON, CALL 2MWEN IM BK FRMCLOUD 9! J X\"\r\nham\tShould i send you naughty pix? :)\r\nspam\tYou are a £1000 winner or Guaranteed Caller Prize, this is our Final attempt to contact you! To Claim Call 09071517866 Now! 150ppmPOBox10183BhamB64XE\r\nspam\tXmas & New Years Eve tickets are now on sale from the club, during the day from 10am till 8pm, and on Thurs, Fri & Sat night this week. They're selling fast!\r\nham\tTyler (getting an 8th) has to leave not long after 9, can you get here in like an hour?\r\nham\tPrepare to be pounded every night...\r\nham\tActually, my mobile is full of msg. And i m doing a work online, where i need to send them <#> sent msg i wil explain u later.\r\nham\tSorry, I'll call later\r\nham\tGood evening! How are you?\r\nham\tI'm at home. 
Please call\r\nham\tOic cos me n my sis got no lunch today my dad went out... So dunno whether 2 eat in sch or wat...\r\nham\tMmmmm ... It was sooooo good to wake to your words this morning, my Love!! Mmmm fuck ... I love you too, my Lion ... *devouring kiss from across the sea*\r\nham\tWe are pleased to inform that your application for Airtel Broadband is processed successfully. Your installation will happen within 3 days.\r\nham\tWhat happen dear. Why you silent. I am tensed\r\nham\tI'll get there at 3, unless you guys want me to come some time sooner\r\nham\tIf you are not coughing then its nothing\r\nham\tÜ come lt 25 n pass to me lar\r\nham\tI'm e person who's doing e sms survey...\r\nham\tLol ok ill try to send. Be warned Sprint is dead slow. You'll prolly get it tomorrow\r\nham\tThank You meet you monday\r\nham\tSO IS TH GOWER MATE WHICH IS WHERE I AM!?! HOW R U MAN? ALL IS GOOD IN WALES ILL B BACK MORROW. C U THIS WK? WHO WAS THE MSG 4? RANDOM!\r\nspam\tRock yr chik. Get 100's of filthy films &XXX pics on yr phone now. rply FILTH to 69669. Saristar Ltd, E14 9YT 08701752560. 450p per 5 days. Stop2 cancel\r\nham\tSingle line with a big meaning::::: \"Miss anything 4 ur \"Best Life\" but, don't miss ur best life for anything... Gud nyt...\r\nham\tI got like $ <#> , I can get some more later though. Get whatever you feel like\r\nham\tDad wanted to talk about the apartment so I got a late start, omw now\r\nham\tI love you both too :-)\r\nham\tLol u still feeling sick?\r\nham\tDin i tell u jus now 420\r\nham\tam up to my eyes in philosophy\r\nspam\tFrom next month get upto 50% More Calls 4 Ur standard network charge 2 activate Call 9061100010 C Wire3.net 1st4Terms PoBox84 M26 3UZ Cost £1.50 min MobcudB more\r\nham\tOk lor. I'm in town now lei.\r\nham\tI had it already..sabarish asked me to go..\r\nham\tNo da. . Vijay going to talk in jaya tv\r\nspam\tURGENT! We are trying to contact U Todays draw shows that you have won a £800 prize GUARANTEED. Call 09050000460 from land line. Claim J89. po box245c2150pm\r\nham\tLol I know! Hey someone did a great inpersonation of flea on the forums. I love it!\r\nspam\tText BANNEDUK to 89555 to see! cost 150p textoperator g696ga 18+ XXX\r\nham\tStill chance there. If you search hard you will get it..let have a try :)\r\nspam\tAuction round 4. The highest bid is now £54. Next maximum bid is £71. To bid, send BIDS e. g. 10 (to bid £10) to 83383. Good luck.\r\nham\tDo you always celebrate NY's with your family ?\r\nham\tWe know TAJ MAHAL as symbol of love. But the other lesser known facts 1. Mumtaz was Shahjahan's 4th wife, out of his 7 wifes. 2. Shahjahan killed Mumtaz's husband to marry her. 3. Mumtaz died in her <#> th delivery. 4. He then married Mumtaz's sister. Question arises where the Hell is the LOVE?:-| -The Great Hari-\r\nham\tIts ok..come to my home it vl nice to meet and v can chat..\r\nspam\tCollect your VALENTINE'S weekend to PARIS inc Flight & Hotel + £200 Prize guaranteed! Text: PARIS to No: 69101. www.rtf.sphosting.com\r\nham\tSent me de webadres for geting salary slip\r\nham\tShe's fine. Sends her greetings\r\nspam\tCustomer Loyalty Offer:The NEW Nokia6650 Mobile from ONLY £10 at TXTAUCTION! Txt word: START to No: 81151 & get yours Now! 4T&Ctxt TC 150p/MTmsg\r\nham\tBut you dint in touch with me.\r\nham\tYup, leaving right now, be back soon\r\nspam\tYou won't believe it but it's true. It's Incredible Txts! Reply G now to learn truly amazing things that will blow your mind. 
From O2FWD only 18p/txt\r\nham\tYeah sure I'll leave in a min\r\nham\tAnd do you have any one that can teach me how to ship cars.\r\nham\tThe sign of maturity is not when we start saying big things.. But actually it is, when we start understanding small things... *HAVE A NICE EVENING* BSLVYL\r\nham\tYeah confirmed for you staying at that weekend\r\nham\tThey said ü dun haf passport or smth like dat.. Or ü juz send to my email account.. \r\nham\tMultiply the numbers independently and count decimal points then, for the division, push the decimal places like i showed you.\r\nham\tHave a lovely night and when you wake up to see this message, i hope you smile knowing all is as should be. Have a great morning\r\nham\tArd 4 lor...\r\nham\tYou are right. Meanwhile how's project twins comin up\r\nham\tI sent your maga that money yesterday oh.\r\nspam\tHi 07734396839 IBH Customer Loyalty Offer: The NEW NOKIA6600 Mobile from ONLY £10 at TXTAUCTION!Txt word:START to No:81151 & get Yours Now!4T&\r\nham\tHeart is empty without love.. Mind is empty without wisdom.. Eyes r empty without dreams & Life is empty without frnds.. So Alwys Be In Touch. Good night & sweet dreams\r\nspam\tI am hot n horny and willing I live local to you - text a reply to hear strt back from me 150p per msg Netcollex LtdHelpDesk: 02085076972 reply Stop to end\r\nham\tOur ride equally uneventful - not too many of those pesky cyclists around at that time of night ;).\r\nham\tIf you were/are free i can give. Otherwise nalla adi entey nattil kittum\r\nham\tI've sent my wife your text. After we buy them she'll tell you what to do. So just relax. We should go get them this wkend.\r\nham\tI am in escape theatre now. . Going to watch KAVALAN in a few minutes\r\nham\tHow much would it cost to hire a hitman\r\nham\tI anything lor...\r\nham\tSorry, I'll call later\r\nspam\tDo you want a New Nokia 3510i Colour Phone Delivered Tomorrow? With 200 FREE minutes to any mobile + 100 FREE text + FREE camcorder Reply or Call 08000930705\r\nham\tHuh but i cant go 2 ur house empty handed right?\r\nham\tGood morning princess! Happy New Year!\r\nspam\tCongratulations YOU'VE Won. You're a Winner in our August £1000 Prize Draw. Call 09066660100 NOW. Prize Code 2309.\r\nham\tAight, we'll head out in a few\r\nham\tThen wat r u doing now? Busy wif work?\r\nham\tI know you mood off today\r\nham\tJay told me already, will do\r\nham\tCps is causing the outages to conserve energy.\r\nham\tI'm not sure, I was just checking out what was happening around the area\r\nham\tHey morning what you come to ask:-) pa...\r\nham\tJordan got voted out last nite!\r\nham\tThat means you got an A in epi, she.s fine. She.s here now.\r\nham\tI have no idea where you are\r\nham\tPls come quick cant bare this.\r\nham\tJoy's father is John. Then John is the ____ of Joy's father. If u ans ths you hav <#> IQ. Tis s IAS question try to answer.\r\nham\tCall me. I m unable to cal. Lets meet bhaskar, and deep\r\nham\tNo. I.ll meet you in the library\r\nham\tK, my roommate also wants a dubsack and another friend may also want some so plan on bringing extra, I'll tell you when they know for sure\r\nham\tDepends on individual lor e hair dresser say pretty but my parents say look gong. U kaypoh.. I also dunno wat she collecting.\r\nham\tOk c ü then.\r\nham\tI enjoy watching and playing football and basketball. Anything outdoors. 
And you?\r\nham\tCan you please ask macho what his price range is, does he want something new or used plus it he only interfued in the blackberry bold <#> or any bb\r\nham\tSorry sent blank msg again. Yup but trying 2 do some serious studying now.\r\nham\tHey check it da. I have listed da.\r\nspam\t8007 25p 4 Alfie Moon's Children in Need song on ur mob. Tell ur m8s. Txt TONE CHARITY to 8007 for nokias or POLY CHARITY for polys :zed 08701417012 profit 2 charity \r\nham\tI meant as an apology from me for texting you to get me drugs at <#> at night\r\nham\tThat means from february to april i'll be getting a place to stay down there so i don't have to hustle back and forth during audition season as i have since my sister moved away from harlem.\r\nham\tGoin to workout lor... Muz lose e fats... \r\nham\tDamn, poor zac doesn't stand a chance\r\nham\tNo message..no responce..what happend?\r\nham\tI want to tel u one thing u should not mistake me k THIS IS THE MESSAGE THAT YOU SENT:)\r\nham\tYeah right! I'll bring my tape measure fri!\r\nham\tStill chance there. If you search hard you will get it..let have a try :)\r\nham\tMeeting u is my work. . . Tel me when shall i do my work tomorrow\r\nham\tShould I head straight there or what\r\nspam\tGet the official ENGLAND poly ringtone or colour flag on yer mobile for tonights game! Text TONE or FLAG to 84199. Optout txt ENG STOP Box39822 W111WX £1.50\r\nham\tThank you princess! You are so sexy...\r\nham\tOooh I got plenty of those!\r\nham\tHui xin is in da lib.\r\nham\tIts a big difference. <#> versus <#> every <#> hrs\r\nham\tIt's not that you make me cry. It's just that when all our stuff happens on top of everything else, it pushes me over the edge. You don't underdtand how often i cry over my sorry, sorry life.\r\nham\t\"ME 2 BABE I FEEL THE SAME LETS JUST 4GET ABOUT IT+BOTH TRY +CHEER UP+NOT FIT SOO MUCHXXLOVE U LOCAXX\"\r\nham\tYou know what hook up means right?\r\nspam\tCustomer service announcement. We recently tried to make a delivery to you but were unable to do so, please call 07090298926 to re-schedule. Ref:9307622\r\nham\tWat's da model num of ur phone?\r\nham\tHe's really into skateboarding now despite the fact that he gets thrown off of it and winds up with bandages and shit all over his arms every five minutes\r\nspam\tYou can stop further club tones by replying \"STOP MIX\" See my-tone.com/enjoy. html for terms. Club tones cost GBP4.50/week. MFL, PO Box 1146 MK45 2WT (2/3)\r\nham\tMy house here e sky quite dark liao... If raining then got excuse not 2 run already rite... Hee...\r\nham\tSorry, left phone upstairs. OK, might be hectic but would be all my birds with one fell swoop. It's a date.\r\nham\t* Thought I didn't see you.\r\nspam\twamma get laid?want real doggin locations sent direct to your mobile? join the UKs largest dogging network. txt dogs to 69696 now!nyt. ec2a. 3lp £1.50/msg.\r\nham\tCarlos says we can pick up from him later so yeah we're set\r\nham\tHey babe, my friend had to cancel, still up for a visit ?\r\nham\tAs per your request 'Maangalyam (Alaipayuthe)' has been set as your callertune for all Callers. Press *9 to copy your friends Callertune\r\nham\tHmm ill have to think about it... ok you're forgiven! =D\r\nham\tWe are hoping to get away by 7, from Langport. You still up for town tonight?\r\nham\tWant to send me a virtual hug?... I need one\r\nham\tProbably not, still going over some stuff here\r\nham\tIt has issues right now. 
Ill fix for her by tomorrow.\r\nham\tWhy i come in between you people\r\nham\tSenthil group company Apnt 5pm.\r\nham\tOh really?? Did you make it on air? What's your talent?\r\nham\tStudying. But i.ll be free next weekend.\r\nham\tR u here yet? I'm wearing blue shirt n black pants.\r\nham\tWait.i will come out.. <#> min:)\r\nham\tI will reach ur home in <#> minutes\r\nham\tWell then you have a great weekend!\r\nham\tWhat are you doing in langport? Sorry, but I'll probably be in bed by 9pm. It sucks being ill at xmas! When do you and go2sri lanka? \r\nham\tFrnd s not juz a word.....not merely a relationship.....its a silent promise which says ... \" I will be with YOU \" Wherevr.. Whenevr.. Forevr... Gudnyt dear..\r\nham\tHuh? 6 also cannot? Then only how many mistakes?\r\nham\tHa... U jus ate honey ar? So sweet...\r\nham\tI'm turning off my phone. My moms telling everyone I have cancer. And my sister won't stop calling. It hurts to talk. Can't put up with it. See u when u get home. Love u\r\nham\tHoney ? Sweetheart ? Darling ? Sexy buns ? Sugar plum ? Loverboy ? I miss you, boytoy ... *smacks your ass* Did you go to the gym too ?\r\nham\tThanks for loving me so. You rock\r\nham\tYeah imma come over cause jay wants to do some drugs\r\nham\tOk thanx... Take care then...\r\nham\tYup. Thk of u oso boring wat.\r\nham\t came to look at the flat, seems ok, in his 50s? * Is away alot wiv work. Got woman coming at 6.30 too.\r\nham\tMoji just informed me that you saved our lives. Thanks.\r\nspam\tYou have won a Nokia 7250i. This is what you get when you win our FREE auction. To take part send Nokia to 86021 now. HG/Suite342/2Lands Row/W1JHL 16+\r\nham\tWhos this am in class:-)\r\nham\tHey r ü still online? I've finished the formatting...\r\nham\tGreat! So what attracts you to the brothas?\r\nspam\tPromotion Number: 8714714 - UR awarded a City Break and could WIN a £200 Summer Shopping spree every WK. Txt STORE to 88039 . SkilGme. TsCs087147403231Winawk!Age16 £1.50perWKsub\r\nham\tStupid.its not possible\r\nham\tI cant pick the phone right now. Pls send a message\r\nham\tLOL what happens in Vegas stays in vegas\r\nham\tHello, hello, hi lou sorry it took so long 2 reply- I left mobile at friends in Lancaster, just got it bak Neway im sorry I couldnt make ur bday 2 hun!\r\nham\tWhen did i use soc... I use it only at home... Ü dunno how 2 type it in word ar...\r\nham\tDad says hurry the hell up\r\nham\tWake me up at <#> am morning:)\r\nham\tI get out of class in bsn in like <#> minutes, you know where advising is?\r\nham\tGreat! I shoot big loads so get ready!\r\nham\tI'll meet you in the lobby\r\nham\tYou still coming tonight?\r\nham\tWhat happen dear tell me\r\nham\tSir, i am waiting for your call, once free please call me.\r\nham\tNo i am not having not any movies in my laptop\r\nham\tI was about to do it when i texted. I finished a long time ago and showered and er'ything!\r\nham\tOk im not sure what time i finish tomorrow but i wanna spend the evening with you cos that would be vewy vewy lubly! Love me xxx\r\nham\tHello, As per request from <#> Rs.5 has been transfered to you\r\nham\tI am in tirupur. call you da.\r\nspam\tYou are a winner you have been specially selected to receive £1000 cash or a £2000 award. Speak to a live operator to claim call 087147123779am-7pm. Cost 10p\r\nham\tS:)but he had some luck.2 catches put down:)\r\nham\tHow i noe... Did ü specify da domain as nusstu... Ü still in sch...\r\nham\tOh...i asked for fun. Haha...take care. 
ü\r\nham\tShall i get my pouch?\r\nham\tHey loverboy! I love you !! I had to tell ... I look at your picture and ache to feel you between my legs ... Fuck I want you ... I need you ... I crave you .\r\nham\tHow is my boy? No sweet words left for me this morning ... *sighs* ... How goes you day, my love ? Did you start your studying?\r\nham\tKent vale lor... Ü wait 4 me there ar?\r\nham\tOk. Very good. Its all about making that money.\r\nham\tReading gud habit.. Nan bari hudgi yorge pataistha ertini kano:-)\r\nham\tAight do you still want to get money\r\nspam\tFree Top ringtone -sub to weekly ringtone-get 1st week free-send SUBPOLY to 81618-?3 per week-stop sms-08718727870\r\nham\tOk.ok ok..then..whats ur todays plan\r\nham\tARE YOU IN TOWN? THIS IS V. IMPORTANT\r\nham\tSorry pa, i dont knw who ru pa?\r\nham\tWat u doing there?\r\nham\tIf i not meeting ü all rite then i'll go home lor. If ü dun feel like comin it's ok.\r\nham\tOh, i will get paid. The most outstanding one is for a commercial i did for Hasbro...in AUGUST! They made us jump through so many hoops to get paid. Still not.\r\nham\tI am late,so call you tomorrow morning.take care sweet dreams....u and me...ummifying...bye.\r\nham\tNetworking technical support associate.\r\nham\tI'm gonna rip out my uterus.\r\nham\tCool. Do you like swimming? I have a pool and jacuzzi at my house.\r\nspam\tThanks for your ringtone order, reference number X49. Your mobile will be charged 4.50. Should your tone not arrive please call customer services 09065989182. From: [colour=red]text[/colour]TXTstar\r\nham\tYeah why not, is the gang all ready\r\nham\tBlank is Blank. But wat is blank? Lol\r\nham\tI'm in a movie... Collect car oredi...\r\nham\tWe left already we at orchard now.\r\nspam\tHi there, 2nights ur lucky night! Uve been invited 2 XCHAT, the Uks wildest chat! Txt CHAT to 86688 now! 150p/MsgrcvdHG/Suite342/2Lands/Row/W1J6HL LDN 18yrs\r\nham\tNothing spl..wat abt u and whr ru?\r\nham\tNo chikku nt yet.. Ya i'm free\r\nham\tAldrine, rakhesh ex RTM here.pls call.urgent.\r\nham\tThe search 4 happiness is 1 of d main sources of unhappiness! Accept life the way it comes! U will find happiness in every moment u live.\r\nham\tI'm at home. Please call\r\nham\tI guess you could be as good an excuse as any, lol.\r\nham\tIsn't frnd a necesity in life? imagine urself witout a frnd.. hw'd u feel at ur colleg? wat'll u do wth ur cell? wat abt functions? thnk abt events espe'll cared, missed & irritated u? 4wrd it to all those dear-loving frnds wthout whom u cant live.. I jst did it.. Takecare..:) GOODMORNING\r\nham\tGud mrng dear hav a nice day\r\nham\tOld Orchard near univ. How about you?\r\nham\t4 tacos + 1 rajas burrito, right?\r\nham\tIt‘s £6 to get in, is that ok?\r\nham\tHows the street where the end of library walk is?\r\nham\tPlz note: if anyone calling from a mobile Co. & asks u to type # <#> or # <#> . Do not do so. Disconnect the call,coz it iz an attempt of 'terrorist' to make use of the sim card no. Itz confirmd by nokia n motorola n has been verified by CNN IBN.\r\nham\tWe stopped to get ice cream and will go back after\r\nham\tDid you stitch his trouser\r\nham\tNo da. . Vijay going to talk in jaya tv\r\nspam\t2/2 146tf150p\r\nham\tHey i'm bored... So i'm thinking of u... So wat r u doing?\r\nham\tNah, Wednesday. When should I bring the mini cheetos bag over?\r\nham\tNobody names their penis a girls name this story doesn't add up at all\r\nham\tAight, let me know when you're gonna be around usf\r\nham\tI'm not. 
She lip synced with shangela.\r\nham\tÜ neva tell me how i noe... I'm not at home in da aft wat... \r\nham\tA bit of Ur smile is my hppnss, a drop of Ur tear is my sorrow, a part of Ur heart is my life, a heart like mine wil care for U, forevr as my GOODFRIEND\r\nspam\tDear Voucher Holder 2 claim your 1st class airport lounge passes when using Your holiday voucher call 08704439680. When booking quote 1st class x 2\r\nham\tBuzz! Hey, my Love ! I think of you and hope your day goes well. Did you sleep in ? I miss you babe. I long for the moment we are together again*loving smile*\r\nham\tHaha... Sounds crazy, dunno can tahan anot...\r\nham\tWhy are u up so early?\r\nham\tYa that one is slow as poo\r\nspam\tBloomberg -Message center +447797706009 Why wait? Apply for your future http://careers. bloomberg.com\r\nham\t\"Im on gloucesterroad what are uup to later?\"\r\nham\tYes:)here tv is always available in work place..\r\nspam\tYES! The only place in town to meet exciting adult singles is now in the UK. Txt CHAT to 86688 now! 150p/Msg.\r\nham\tLol no ouch but wish i'd stayed out a bit longer\r\nham\tGOD ASKED, \"What is forgiveness?\" A little child gave lovely reply, \"It is d wonderful fruit that a tree gives when it is being hurt by a stone.. Good night......\r\nham\tWe'll join the <#> bus\r\nham\tWas just about to ask. Will keep this one. Maybe that's why you didn't get all the messages we sent you on glo\r\nspam\tFREE for 1st week! No1 Nokia tone 4 ur mob every week just txt NOKIA to 8007 Get txting and tell ur mates www.getzed.co.uk POBox 36504 W45WQ norm150p/tone 16+\r\nham\tK.i will send in <#> min:)\r\nham\tWould me smoking you out help us work through this difficult time\r\nspam\tSomeone U know has asked our dating service 2 contact you! Cant guess who? CALL 09058095107 NOW all will be revealed. POBox 7, S3XY 150p \r\nham\tYes.mum lookin strong:)\r\nham\tSir Goodmorning, Once free call me.\r\nham\tWhere are you call me.\r\nham\tWas gr8 to see that message. So when r u leaving? Congrats dear. What school and wat r ur plans.\r\nham\tLove it! The girls at the office may wonder why you are smiling but sore...\r\nham\tHi, wlcome back, did wonder if you got eaten by a lion or something, nothing much\r\nham\tDoes uncle timi help in clearing cars\r\nham\tI came hostel. I m going to sleep. Plz call me up before class. Hrishi.\r\nham\tOk... But bag again..\r\nham\tHi! You just spoke to MANEESHA V. We'd like to know if you were satisfied with the experience. Reply Toll Free with Yes or No.\r\nham\tOk lor. Msg me b4 u call.\r\nspam\tMila, age23, blonde, new in UK. I look sex with UK guys. if u like fun with me. Text MTALK to 69866.18 . 30pp/txt 1st 5free. £1.50 increments. Help08718728876\r\nham\tOnce a fishrman woke early in d mrng. It was very dark. He waited a while & found a sack ful of stones. He strtd throwin thm in2 d sea 2 pass time. Atlast he had jus 1stone, sun rose up & he found out tht those r nt stones, those were diamonds. Moral:\"Dont wake up early in d mrng'' GOOD night\r\nspam\tClaim a 200 shopping spree, just call 08717895698 now! Have you won! MobStoreQuiz10ppm\r\nham\tThen ur physics get a-?\r\nham\tDear friends, sorry for the late information. Today is the birthday of our loving Ar.Praveesh. for more details log on to face book and see. Its his number + <#> . Dont miss a delicious treat.\r\nham\tHow r ü going to send it to me?\r\nham\tCan you do online transaction?\r\nham\tDear got train and seat mine lower seat\r\nham\tLet me know if you need anything else. 
Salad or desert or something... How many beers shall i get?\r\nham\tWat r u doing?\r\nham\tWHORE YOU ARE UNBELIEVABLE.\r\nspam\tWant to funk up ur fone with a weekly new tone reply TONES2U 2 this text. www.ringtones.co.uk, the original n best. Tones 3GBP network operator rates apply\r\nham\tAre you sure you don't mean \"get here, we made you hold all the weed\"\r\nham\tI love you !!! You know? Can you feel it? Does it make your belly warm? I wish it does, my love ... I shall meet you in your dreams, Ahmad ... *adoring kiss*\r\nspam\tTwinks, bears, scallies, skins and jocks are calling now. Don't miss the weekend's fun. Call 08712466669 at 10p/min. 2 stop texts call 08712460324(nat rate)\r\nham\tLove it! I want to flood that pretty pussy with cum...\r\nham\tHey are you angry with me. Reply me dr.\r\nham\tShort But Cute: \"Be a good person, but dont try to prove it..\" .Gud noon....\r\nham\tAlso remember the beads don't come off. Ever.\r\nham\tThey have a thread on the wishlist section of the forums where ppl post nitro requests. Start from the last page and collect from the bottom up.\r\nham\tFor The First Time In The History 'Need' 'Comfort' And 'Luxury' Are Sold At Same Price In India..!! Onion-Rs. <#> Petrol-Rs. <#> Beer-Rs. <#> SHESIL <#>\r\nham\tFeb <#> is \"I LOVE U\" day. Send dis to all ur \"VALUED FRNDS\" evn me. If 3 comes back u'll gt married d person u luv! If u ignore dis u will lose ur luv 4 Evr\r\nham\tActually nvm, got hella cash, we still on for <#> ish?\r\nspam\tWe tried to contact you re your reply to our offer of a Video Handset? 750 anytime any networks mins? UNLIMITED TEXT? Camcorder? Reply or call 08000930705 NOW\r\nham\tIt's ok, at least armand's still around\r\nham\tNo da. I am happy that we sit together na\r\nham\tYup song bro. No creative. Neva test quality. He said check review online.\r\nham\tNo dude, its not fake..my frnds got money, thts y i'm reffering u..if u member wit my mail link, u vl be credited <#> rs and il be getiing <#> rs..i can draw my acc wen it is <#> rs..\r\nham\tDude while were makin those weirdy brownies my sister made awesome cookies. I took pics.\r\nspam\tURGENT! We are trying to contact you. Last weekends draw shows that you have won a £900 prize GUARANTEED. Call 09061701851. Claim code K61. Valid 12hours only\r\nham\tPls dont restrict her from eating anythin she likes for the next two days.\r\nham\tMm you ask him to come its enough :-)\r\nham\tAt the funeral home with Audrey and dad\r\nham\tAight, can you text me the address?\r\nham\tExcellent! Wish we were together right now!\r\nham\tYep then is fine 7.30 or 8.30 for ice age.\r\nham\tPls i wont belive god.not only jesus.\r\nham\tCan. Dunno wat to get 4 her...\r\nham\tNot yet chikku..k, then wat abt tht guy did he stopped irritating or msging to u..\r\nham\tHow long does it take to get it.\r\nham\tThis is my number by vivek..\r\nspam\t74355 XMAS iscoming & ur awarded either £500 CD gift vouchers & free entry 2 r £100 weekly draw txt MUSIC to 87066 TnC\r\nham\tsorry brah, just finished the last of my exams, what up\r\nham\tI got arrested for possession at, I shit you not, <TIME> pm\r\nham\tYou are right though. I can't give you the space you want and need. This is really starting to become an issue. I was going to suggest setting a definite move out--if i'm still there-- after greece. But maybe you are ready and should do it now.\r\nham\tJust normal only here :)\r\nham\tPlease protect yourself from e-threats. SIB never asks for sensitive information like Passwords,ATM/SMS PIN thru email. 
Never share your password with anybody.\r\nham\tI miss you so much I'm so desparate I have recorded the message you left for me the other day and listen to it just to hear the sound of your voice. I love you\r\nham\tHi. I'm always online on yahoo and would like to chat with you someday\r\nham\tGoodmorning,my grandfather expired..so am on leave today.\r\nspam\tCongratulations U can claim 2 VIP row A Tickets 2 C Blu in concert in November or Blu gift guaranteed Call 09061104276 to claim TS&Cs www.smsco.net cost£3.75max \r\nham\tWhere are you ? What are you doing ? Are yuou working on getting the pc to your mom's ? Did you find a spot that it would work ? I need you\r\nham\tSure, I'll see if I can come by in a bit\r\nham\tI agree. So i can stop thinkin about ipad. Can you please ask macho the same question.\r\nham\tLet's pool our money together and buy a bunch of lotto tickets. If we win I get <#> % u get <#> %. Deal?\r\nham\tOk.\r\nham\tI had askd u a question some hours before. Its answer\r\nham\tWatching tv lor. Nice one then i like lor.\r\nham\tI'm thinking that chennai forgot to come for auction..\r\nham\tThen ü come n pick me at 530 ar?\r\nham\tEarly bird! Any purchases yet?\r\nham\tWent to pay rent. So i had to go to the bank to authorise the payment.\r\nham\tErm … ill pick you up at about 6.45pm. That'll give enough time to get there, park and that.\r\nham\tHEY MATE! HOWS U HONEY?DID U AVE GOOD HOLIDAY? GIMMI DE GOSS!x\r\nham\tHowz pain.it will come down today.do as i said ystrday.ice and medicine.\r\nham\tchile, please! It's only a <DECIMAL> hour drive for me. I come down all the time and will be subletting feb-april for audition season.\r\nham\tYes ammae....life takes lot of turns you can only sit and try to hold the steering...\r\nham\tYeah that's what I thought, lemme know if anything's goin on later\r\nham\tMmmm.... I cant wait to lick it!\r\nham\tPls go there today <#> . I dont want any excuses\r\nspam\tFantasy Football is back on your TV. Go to Sky Gamestar on Sky Active and play £250k Dream Team. Scoring starts on Saturday, so register now!SKY OPT OUT to 88088\r\nham\tCan you plz tell me the ans. BSLVYL sent via fullonsms.com\r\nham\tU in town alone?\r\nham\tI to am looking forward to all the sex cuddling.. Only two more sleeps \r\nham\tWe have all rounder:)so not required:)\r\nham\tNo, its true..k,Do u knw dis no. <#> ?\r\nham\tDont worry, 1 day very big lambu ji vl come..til then enjoy batchlor party:-)\r\nham\toh ya... Got hip hop open. Haha i was thinking can go for jazz then zoom to cine... Actually tonight i'm free leh... And there's a kb lesson tonight\r\nspam\tFree msg: Single? Find a partner in your area! 1000s of real people are waiting to chat now!Send CHAT to 62220Cncl send STOPCS 08717890890£1.50 per msg\r\nham\tI'm ok. Will do my part tomorrow\r\nham\tNo! But we found a diff farm shop to buy some cheese. On way back now, can i call in?\r\nham\tR u still working now?\r\nspam\tWin the newest Harry Potter and the Order of the Phoenix (Book 5) reply HARRY, answer 5 questions - chance to be the first among readers!\r\nham\tYep. I do like the pink furniture tho.\r\nspam\tFree Msg: Ringtone!From: http://tms. widelive.com/index. wml?id=1b6a5ecef91ff9*37819&first=true18:0430-JUL-05\r\nham\tCustomer place, i wil cal u sir.\r\nspam\tOh my god! I've found your number again! I'm so glad, text me back xafter this msgs cst std ntwk chg £1.50\r\nham\tA pure hearted person can have a wonderful smile that makes even his/her enemies to feel guilty for being an enemy.. 
So catch the world with your smile..:) GOODMORNING & HAVE A SMILEY SUNDAY..:)\r\nham\tTHATS ALRITE GIRL, U KNOW GAIL IS NEVA WRONG!!TAKE CARE SWEET AND DONT WORRY.C U L8TR HUN!LOVE Yaxxx\r\nham\tTheoretically yeah, he could be able to come\r\nham\tAlright we're hooked up, where you guys at\r\nham\tnot that I know of, most people up here are still out of town\r\nham\tNo let me do the math. Your not good at it.\r\nham\tOh ok wait 4 me there... My lect havent finish\r\nham\tYeah my usual guy's out of town but there're definitely people around I know\r\nham\tI am joining today formally.Pls keep praying.will talk later.\r\nham\tHappy or sad , one thing about past is- \"Its no more\" GOOD MORNING :-):-).\r\nham\tNo. Did you multimedia message them or e-mail?\r\nham\tOkie but i scared u say i fat... Then u dun wan me already...\r\nham\tdid u get that message\r\nham\tSorry sir, i will call you tomorrow. senthil.hsbc\r\nham\tWhat you need. You have a person to give na.\r\nham\tShe left it very vague. She just said she would inform the person in accounting about the delayed rent and that i should discuss with the housing agency about my renting another place. But checking online now and all places around usc are <#> and up\r\nham\tHi juan. Im coming home on fri hey. Of course i expect a welcome party and lots of presents. Ill phone u when i get back. Loads of love nicky x x x x x x x x x\r\nham\tCan you plz tell me the ans. BSLVYL sent via fullonsms.com\r\nham\tShort But Cute: \"Be a good person, but dont try to prove it..\" .Gud noon....\r\nham\tGumby's has a special where a <#> \" cheese pizza is $2 so I know what we're doin tonight\r\nspam\tA link to your picture has been sent. You can also use http://alto18.co.uk/wave/wave.asp?o=44345\r\nham\tLike a personal sized or what\r\nham\tSame, I'm at my great aunts anniversary party in tarpon springs\r\nham\tCab is available.they pick up and drop at door steps.\r\nham\tok....take care.umma to you too...\r\nham\tUnlimited texts. Limited minutes.\r\nspam\tDouble Mins & 1000 txts on Orange tariffs. Latest Motorola, SonyEricsson & Nokia with Bluetooth FREE! Call MobileUpd8 on 08000839402 or call2optout/HF8\r\nham\tNo problem. We will be spending a lot of quality time together...\r\nspam\tURGENT This is our 2nd attempt to contact U. Your £900 prize from YESTERDAY is still awaiting collection. To claim CALL NOW 09061702893. ACL03530150PM\r\nham\tHave you heard from this week?\r\nspam\tDear Dave this is your final notice to collect your 4* Tenerife Holiday or #5000 CASH award! Call 09061743806 from landline. TCs SAE Box326 CW25WX 150ppm\r\nham\tYes. Last practice\r\nspam\ttells u 2 call 09066358152 to claim £5000 prize. U have 2 enter all ur mobile & personal details @ the prompts. Careful!\r\nham\tNo. Thank you. You've been wonderful\r\nham\tOtherwise had part time job na-tuition..\r\nham\tÜ mean it's confirmed... I tot they juz say oni... Ok then... \r\nham\tOkie\r\nham\tThat depends. How would you like to be treated? :)\r\nham\tRight on brah, see you later\r\nham\tWaiting in e car 4 my mum lor. U leh? Reach home already?\r\nspam\tYour 2004 account for 07XXXXXXXXX shows 786 unredeemed points. To claim call 08719181259 Identifier code: XXXXX Expires 26.03.05\r\nspam\tDo you want a new video handset? 750 anytime any network mins? Half Price Line Rental? Camcorder? Reply or call 08000930705 for delivery tomorrow\r\nham\tWent fast asleep dear.take care.\r\nham\tNo that just means you have a fat head\r\nham\tSounds like a plan! 
Cardiff is still here and still cold! I'm sitting on the radiator!\r\nham\tSerious? What like proper tongued her\r\nham\tShe.s good. She was wondering if you wont say hi but she.s smiling now. So how are you coping with the long distance\r\nham\tHow i noe... She's in da car now... Later then c lar... I'm wearing shorts...\r\nspam\tYou have an important customer service announcement. Call FREEPHONE 0800 542 0825 now!\r\nham\tYeah whatever lol\r\nham\tToday is ACCEPT DAY..U Accept me as? Brother Sister Lover Dear1 Best1 Clos1 Lvblefrnd Jstfrnd Cutefrnd Lifpartnr Belovd Swtheart Bstfrnd No rply means enemy\r\nham\tArd 530 lor. I ok then message ü lor.\r\nham\tOk. C u then.\r\nham\tEh ur laptop got no stock lei... He say mon muz come again to take a look c got a not...\r\nham\tNo need to ke qi... Ü too bored izzit y suddenly thk of this...\r\nham\tI wish! I don't think its gonna snow that much. But it will be more than those flurries we usually get that melt before they hit the ground. Eek! We haven't had snow since <#> before I was even born!\r\nspam\tFREE>Ringtone! Reply REAL or POLY eg REAL1 1. PushButton 2. DontCha 3. BabyGoodbye 4. GoldDigger 5. WeBeBurnin 1st tone FREE and 6 more when u join for £3/wk\r\nham\tDo 1 thing! Change that sentence into: \"Because i want 2 concentrate in my educational career im leaving here..\"\r\nham\tOh really? perform, write a paper, go to a movie AND be home by midnight, huh?\r\nham\tOkay lor... Will they still let us go a not ah? Coz they will not know until later. We drop our cards into the box right?\r\nham\tHow? Izzit still raining?\r\nham\tAs if i wasn't having enough trouble sleeping.\r\nham\tI havent add ü yet right.. \r\nham\tLol ... I really need to remember to eat when I'm drinking but I do appreciate you keeping me company that night babe *smiles*\r\nham\tBabe ? I lost you ... Will you try rebooting ?\r\nham\tYes. Nigh you cant aha.\r\nham\tI thk ü gotta go home by urself. Cos i'll b going out shopping 4 my frens present. \r\nham\tNooooooo I'm gonna be bored to death all day. Cable and internet outage.\r\nham\tSos! Any amount i can get pls.\r\nham\tPlayin space poker, u?\r\nham\tHow come guoyang go n tell her? Then u told her?\r\nham\tYou need to get up. Now.\r\nham\tThey r giving a second chance to rahul dengra.\r\nham\tYeah, in fact he just asked if we needed anything like an hour ago. When and how much?\r\nham\tWHEN THE FIRST STRIKE IS A RED ONE. THE BIRD + ANTELOPE BEGIN TOPLAY IN THE FIELDOF SELFINDEPENDENCE BELIEVE THIS + THE FLOWER OF CONTENTION WILL GROW.RANDOM!\r\nham\tY ü wan to go there? C doctor?\r\nham\tDoes daddy have a bb now.\r\nspam\tFree Msg: get Gnarls Barkleys \"Crazy\" ringtone TOTALLY FREE just reply GO to this message right now!\r\nham\tShe's borderline but yeah whatever.\r\nham\tI got a call from a landline number. . . I am asked to come to anna nagar . . . I will go in the afternoon\r\nham\tUntil 545 lor... Ya, can go 4 dinner together...\r\nham\tI will be gentle princess! We will make sweet gentle love...\r\nham\tHow u doin baby girl ?? hope u are okay every time I call ure phone is off! I miss u get in touch\r\nham\tSorry, went to bed early, nightnight\r\nham\tI like to think there's always the possibility of being in a pub later.\r\nham\tHMM yeah if your not too grooved out! And im looking forward to my pound special :)\r\nham\tI got to video tape pple type in message lor. U so free wan 2 help me? Hee... Cos i noe u wan 2 watch infernal affairs so ask u along. 
Asking shuhui oso.\r\nham\tHi dude hw r u da realy mising u today\r\nham\tMe hungry buy some food good lei... But mum n yun dun wan juz buy a little bit... \r\nspam\tRefused a loan? Secured or Unsecured? Can't get credit? Call free now 0800 195 6669 or text back 'help' & we will!\r\nham\tI probably won't eat at all today. I think I'm gonna pop. How was your weekend? Did u miss me?\r\nham\tI knew it... U slept v late yest? Wake up so late...\r\nham\tHaha... dont be angry with yourself... Take it as a practice for the real thing. =) \r\nham\tWhere is that one day training:-)\r\nham\tSo i could kiss and feel you next to me...\r\nham\tHave a nice day my dear.\r\nham\tI sent lanre fakeye's Eckankar details to the mail box\r\nham\tYour dad is back in ph?\r\nspam\tYou have been specially selected to receive a \"3000 award! Call 08712402050 BEFORE the lines close. Cost 10ppm. 16+. T&Cs apply. AG Promo\r\nham\tIf you ask her or she say any please message.\r\nham\tIf e timing can, then i go w u lor...\r\nham\tLove you aathi..love u lot..\r\nham\tI was just callin to say hi. Take care bruv!\r\nspam\tYOU HAVE WON! As a valued Vodafone customer our computer has picked YOU to win a £150 prize. To collect is easy. Just call 09061743386\r\nham\tDid u turn on the heater? The heater was on and set to <#> degrees.\r\nham\tThanks for your message. I really appreciate your sacrifice. I'm not sure of the process of direct pay but will find out on my way back from the test tomorrow. I'm in class now. Do have a wonderful day.\r\nham\tThat's the trouble with classes that go well - you're due a dodgey one … Expecting mine tomo! See you for recovery, same time, same place \r\nspam\tFree video camera phones with Half Price line rental for 12 mths and 500 cross ntwk mins 100 txts. Call MobileUpd8 08001950382 or Call2OptOut/674&\r\nham\tWOT U UP 2 J?\r\nham\tNight night, see you tomorrow\r\nham\tRoger that. We‘re probably going to rem in about 20\r\nham\tdo u think that any girl will propose u today by seing ur bloody funky shit fucking face...............asssssholeeee................\r\nham\tI wish u were here. I feel so alone\r\nspam\tGreat NEW Offer - DOUBLE Mins & DOUBLE Txt on best Orange tariffs AND get latest camera phones 4 FREE! Call MobileUpd8 free on 08000839402 NOW! or 2stoptxt T&Cs\r\nham\tReason is if the team budget is available at last they buy the unsold players for at base rate..\r\nham\tCERI U REBEL! SWEET DREAMZ ME LITTLE BUDDY!! C YA 2MORO! WHO NEEDS BLOKES\r\nspam\tringtoneking 84484\r\nham\tHuh i cant thk of more oredi how many pages do we have?\r\nham\tHis frens go then he in lor. Not alone wif my mum n sis lor.\r\nham\tNationwide auto centre (or something like that) on Newport road. I liked them there\r\nham\tHey, I missed you tm of last night as my phone was on the charge ... *smiles* ... I am meeting a friend shortly\r\nham\tWhatever, juliana. Do whatever you want.\r\nham\tFriendship is not a game to play, It is not a word to say, It doesn\\'t start on March and ends on May, It is tomorrow, yesterday, today and e\r\nspam\tRingtone Club: Gr8 new polys direct to your mobile every week !\r\nham\tHello. Sort of out in town already. That . So dont rush home, I am eating nachos. Will let you know eta.\r\nham\tOk lor. Anyway i thk we cant get tickets now cos like quite late already. U wan 2 go look 4 ur frens a not? 
Darren is wif them now...\r\nspam\t(Bank of Granite issues Strong-Buy) EXPLOSIVE PICK FOR OUR MEMBERS *****UP OVER 300% *********** Nasdaq Symbol CDGT That is a $5.00 per..\r\nham\tI am on the way to ur home\r\nham\tDizzamn, aight I'll ask my suitemates when I get back\r\nham\tNimbomsons. Yep phone knows that one. Obviously, cos thats a real word\r\nham\tI love to cuddle! I want to hold you in my strong arms right now...\r\nham\tR u in this continent?\r\nham\tWe'll you pay over like <#> yrs so its not too difficult\r\nspam\tBored housewives! Chat n date now! 0871750.77.11! BT-national rate 10p/min only from landlines!\r\nspam\tWe tried to call you re your reply to our sms for a video mobile 750 mins UNLIMITED TEXT free camcorder Reply or call now 08000930705 Del Thurs\r\nham\tK...k...when will you give treat?\r\nspam\tThis is the 2nd time we have tried to contact u. U have won the £400 prize. 2 claim is easy, just call 087104711148 NOW! Only 10p per minute. BT-national-rate\r\nham\tHe's just gonna worry for nothing. And he won't give you money its no use.\r\nham\tDid you get any gift? This year i didnt get anything. So bad\r\nham\tsomewhere out there beneath the pale moon light someone think in of u some where out there where dreams come true... goodnite & sweet dreams\r\nham\tWell there's a pattern emerging of my friends telling me to drive up and come smoke with them and then telling me that I'm a weed fiend/make them smoke too much/impede their doing other things so you see how I'm hesitant\r\nham\t, ow u dey.i paid 60,400thousad.i told u would call . \r\nham\tIM FINE BABES AINT BEEN UP 2 MUCH THO! SAW SCARY MOVIE YEST ITS QUITE FUNNY! WANT 2MRW AFTERNOON? AT TOWN OR MALL OR SUMTHIN?xx\r\nham\tI'm reaching home in 5 min.\r\nham\tForgot you were working today! Wanna chat, but things are ok so drop me a text when you're free / bored etc and i'll ring. Hope all is well, nose essay and all xx\r\nham\tHa... Then we must walk to everywhere... Cannot take tram. My cousin said can walk to vic market from our hotel \r\nspam\tWan2 win a Meet+Greet with Westlife 4 U or a m8? They are currently on what tour? 1)Unbreakable, 2)Untamed, 3)Unkempt. Text 1,2 or 3 to 83049. Cost 50p +std text\r\nspam\tPlease call our customer service representative on FREEPHONE 0808 145 4742 between 9am-11pm as you have WON a guaranteed £1000 cash or £5000 prize!\r\nham\tDiscussed with your mother ah?\r\nham\tOk.\r\nham\tSorry, I can't text & drive coherently, see you in twenty\r\nspam\tYou will be receiving this week's Triple Echo ringtone shortly. Enjoy it!\r\nham\tIn which place i can get rooms cheap:-)\r\nham\tEek that's a lot of time especially since American Pie is like 8 minutes long. I can't stop singing it.\r\nham\t\"GRAN ONLYFOUND OUT AFEW DAYS AGO.CUSOON HONI\"\r\nspam\tU've been selected to stay in 1 of 250 top British hotels - FOR NOTHING! Holiday valued at £350! Dial 08712300220 to claim - National Rate Call. Bx526, SW73SS\r\nham\tUniversity of southern california.\r\nham\tWe have to pick rayan macleran there.\r\nham\tU gd lor go shopping i got stuff to do. U wan 2 watch infernal affairs a not? Come lar...\r\nham\tWell. Balls. Time to make calls\r\nham\tWat time ü wan today?\r\nham\t <#> in mca. But not conform.\r\nham\tOh ok.. Wat's ur email?\r\nham\tYes, princess. Are you going to make me moan?\r\nham\tLol its ok I didn't remember til last nite\r\nham\t[…] anyway, many good evenings to u! 
s\r\nham\tCool, I'll text you in a few\r\nham\tSorry vikky, i'm Watching olave mandara movie kano in trishul theatre wit my frnds..\r\nham\tI'm very happy for you babe ! Woo hoo party on dude!\r\nham\tI am taking you for italian food. How about a pretty dress with no panties? :)\r\nham\tWot u up 2? Thout u were gonna call me!! Txt bak luv K\r\nspam\tYOU ARE CHOSEN TO RECEIVE A £350 AWARD! Pls call claim number 09066364311 to collect your award which you are selected to receive as a valued mobile customer.\r\nham\tHow are you holding up?\r\nham\tDont flatter yourself... Tell that man of mine two pints of carlin in ten minutes please.... \r\nham\tHope you are not scared!\r\nham\tI cant pick the phone right now. Pls send a message\r\nham\tI'm at home n ready...\r\nspam\tPlease call our customer service representative on FREEPHONE 0808 145 4742 between 9am-11pm as you have WON a guaranteed £1000 cash or £5000 prize!\r\nham\tWhat time do u get out?\r\nham\tI am literally in bed and have been up for like <#> hours\r\nham\tYes, my reg is Ciao!\r\nham\tIf You mean the website. Yes.\r\nspam\tWin a £1000 cash prize or a prize worth £5000\r\nspam\tThanks for your ringtone order, reference number X49.Your mobile will be charged 4.50. Should your tone not arrive please call customer services 09065989182\r\nham\tLol or I could just starve and lose a pound by the end of the day.\r\nham\tYeah that's the impression I got\r\nham\tOk ok take care. I can understand.\r\nham\tMotivate Behind every darkness, there is a shining light waiting for you to find it... Behind every best friend, there is always trust and love... BSLVYL\r\nham\tYa ok, then had dinner?\r\nham\tI was slept that time.you there?\r\nham\tdont make ne plans for nxt wknd coz she wants us to come down then ok\r\nham\tWhen is school starting. Where will you stay. What's the weather like. And the food. Do you have a social support system like friends in the school. All these things are important.\r\nham\tHa ha nan yalrigu heltini..Iyo kothi chikku, u shared many things wit me..so far i didn't told any body and even uttered a word abt u.. If ur trusting me so much how can i tell these to others.. Plz nxt time dont use those words to me..ok, chikku:-);-)B-)\r\nham\tNoice. Text me when you're here\r\nham\tHi di is yijue we're meeting at 7 pm at esaplanade tonight.\r\nspam\tMoby Pub Quiz.Win a £100 High Street prize if u know who the new Duchess of Cornwall will be? Txt her first name to 82277.unsub STOP £1.50 008704050406 SP\r\nspam\tThis weeks SavaMob member offers are now accessible. Just call 08709501522 for details! SavaMob, POBOX 139, LA3 2WU. Only £1.50/week. SavaMob - offers mobile!\r\nham\tAight I've been set free, think you could text me blake's address? It occurs to me I'm not quite as sure what I'm doing as I thought I was\r\nham\tHi dear we saw dear. We both are happy. Where you my battery is low\r\nham\tHow are you. Its been ages. How's abj\r\nham\tProf: you have passed in all the papers in this sem congrats . . . . Student: Enna kalaachutaarama..!! Prof:???? Gud mrng!\r\nham\tDont kick coco when he's down\r\nham\tFyi I'm gonna call you sporadically starting at like <#> bc we are not not doin this shit\r\nspam\tYou are being contacted by our Dating Service by someone you know! To find out who it is, call from your mobile or landline 09064017305 PoBox75LDNS7 \r\nspam\tTBS/PERSOLVO. been chasing us since Sept for£38 definitely not paying now thanks to your information. We will ignore them. Kath. 
Manchester.\r\nham\tHope youre not having too much fun without me!! see u tomorrow love jess x\r\nham\tOk i wont call or disturb any one. I know all are avoiding me. I am a burden for all\r\nham\tI've reached home n i bathe liao... U can call me now...\r\nspam\tLoans for any purpose even if you have Bad Credit! Tenants Welcome. Call NoWorriesLoans.com on 08717111821\r\nham\tWas the actual exam harder than NBME\r\nham\tA lot of this sickness thing going round. Take it easy. Hope u feel better soon. Lol\r\nham\tGod picked up a flower and dippeditinaDEW, lovingly touched itwhichturnedinto u, and the he gifted tomeandsaid,THIS FRIEND IS 4U\r\nspam\t87077: Kick off a new season with 2wks FREE goals & news to ur mobile! Txt ur club name to 87077 eg VILLA to 87077\r\nham\tHey sathya till now we dint meet not even a single time then how can i saw the situation sathya.\r\nham\tGam gone after outstanding innings.\r\nham\tO i played smash bros <#> religiously.\r\nham\tSir, good morning. Hope you had a good weekend. I called to let you know that i was able to raise <#> from my dad. He however said he would make the rest available by mid feb. This amount is still quite short and i was hoping you would help. Do have a good day. Abiola\r\nham\tHurry home. Soup is DONE!\r\nham\tNo no. I will check all rooms befor activities\r\nham\tGood afternoon, my love. It was good to see your words on YM and get your tm. Very smart move, my slave ... *smiles* ... I drink my coffee and await you.\r\nham\tQuite ok but a bit ex... U better go eat smth now else i'll feel guilty...\r\nspam\tOrange brings you ringtones from all time Chart Heroes, with a free hit each week! Go to Ringtones & Pics on wap. To stop receiving these tips reply STOP.\r\nham\tLemme know when you're here\r\nspam\tPRIVATE! Your 2003 Account Statement for 07973788240 shows 800 un-redeemed S. I. M. points. Call 08715203649 Identifier Code: 40533 Expires 31/10/04\r\nham\tHe needs to stop going to bed and make with the fucking dealing\r\nham\tHow are you, my Love ? Are you with your brother ? Time to talk english with him ? *grins* Say : Hey Muhommad, Penny says hello from across the sea\r\nspam\tWe tried to call you re your reply to our sms for a video mobile 750 mins UNLIMITED TEXT + free camcorder Reply of call 08000930705 Now\r\nham\tHey doc pls I want to get nice t shirt for my hubby nice fiting ones my budget is <#> k help pls I will load d card abi hw,keep me posted luv. 2 mj\r\nham\tI remain unconvinced that this isn't an elaborate test of my willpower\r\nham\t\"Life is nothing wen v get everything\". But \"life is everything wen v miss something \". Real value of people wil be realized only in their absence.... gud mrng\r\nham\thow are you? I miss you!\r\nham\tI ain't answerin no phone at what is actually a pretty reasonable hour but I'm sleepy\r\nham\tHey , is * rite u put »10 evey mnth is that all?\r\nham\ti am going to bed now prin\r\nham\tI think just yourself …Thanks and see you tomo\r\nham\tIf u dun drive then how i go 2 sch.\r\nham\tI not at home now lei...\r\nspam\tGSOH? Good with SPAM the ladies?U could b a male gigolo? 2 join the uk's fastest growing mens club reply ONCALL. mjzgroup. 08714342399.2stop reply STOP. msg@£1.50rcvd\r\nham\tOk then i will come to ur home after half an hour\r\nspam\tU have a secret admirer who is looking 2 make contact with U-find out who they R*reveal who thinks UR so special-call on 09058094599\r\nham\tDo u hav any frnd by name ashwini in ur college?\r\nham\tJus finish my lunch on my way home lor... 
I tot u dun wan 2 stay in sch today...\r\nham\tK then 2marrow are you coming to class.\r\nspam\tHOT LIVE FANTASIES call now 08707500020 Just 20p per min NTT Ltd, PO Box 1327 Croydon CR9 5WB 0870 is a national rate call\r\nham\tPls send me your address sir.\r\nham\tI want to lick your pussy now...\r\nham\tYo, you gonna still be in stock tomorrow/today? I'm trying to get a dubsack\r\nspam\tURGENT! Your Mobile number has been awarded a <UKP>2000 prize GUARANTEED. Call 09061790125 from landline. Claim 3030. Valid 12hrs only 150ppm\r\nham\tI'll see, but prolly yeah\r\nham\tThought we could go out for dinner. I'll treat you! Seem ok?\r\nham\tWhere are you ? What do you do ? How can you stand to be away from me ? Doesn't your heart ache without me ? Don't you wonder of me ? Don't you crave me ?\r\nham\tSorry. You never hear unless you book it. One was kinda a joke--thet were really looking for skinny white girls. The other was one line--you can only do so much on camera with that. Something like that they're casting on the look.\r\nham\tWhat you doing?how are you?\r\nham\tSure thing big man. i have hockey elections at 6, shouldn‘t go on longer than an hour though\r\nham\tWatch lor. I saw a few swatch one i thk quite ok. Ard 116 but i need 2nd opinion leh...\r\nham\tHiya do u like the hlday pics looked horrible in them so took mo out! Hows the camp Amrca thing? Speak soon Serena:)\r\nham\tBabe! How goes that day ? What are you up to ? I miss you already, my Love ... * loving kiss* ... I hope everything goes well.\r\nham\tYunny... I'm goin to be late\r\nham\tDoc prescribed me morphine cause the other pain meds aren't enough. Waiting for my mom to bring it. That med should kick in fast so I'm gonna try to be on later\r\nham\tCool, want me to go to kappa or should I meet you outside mu\r\nham\tHey sexy buns ! Have I told you ? I adore you, loverboy. I hope you remember to thank your sister in law for those meatballs *grins* ... i love you, babe\r\nham\tMay b approve panalam...but it should have more posts..\r\nspam\tSPJanuary Male Sale! Hot Gay chat now cheaper, call 08709222922. National rate from 1.5p/min cheap to 7.8p/min peak! To stop texts call 08712460324 (10p/min)\r\nham\tSorry, I'll call later\r\nham\tI dont thnk its a wrong calling between us\r\nham\tMe i'm not workin. Once i get job...\r\nham\tAnd by when you're done I mean now\r\nham\t\"Its Ur luck to Love someone. Its Ur fortune to Love the one who Loves U. But, its a miracle to Love a person who can't Love anyone except U...\" Gud nyt...\r\nham\tHi baby ive just got back from work and i was wanting to see u allday! I hope i didnt piss u off on the phone today. If u are up give me a call xxx\r\nspam\tFreeMsg Today's the day if you are ready! I'm horny & live in your town. I love sex fun & games! Netcollex Ltd 08700621170150p per msg reply Stop to end\r\nham\tIs it your yahoo boys that bring in the perf? Or legal.\r\nham\tNo need to say anything to me. I know i am an outsider\r\nham\thave you ever had one foot before?\r\nham\tJust got to <#>\r\nham\tGood! No, don‘t need any receipts—well done! (…) Yes, please tell . What‘s her number, i could ring her\r\nham\tEver green quote ever told by Jerry in cartoon \"A Person Who Irritates u Always Is the one Who Loves u Vry Much But Fails to Express It...!..!! :-) :-) gud nyt\r\nham\tLeave it wif me lar... Ü wan to carry meh so heavy... Is da num 98321561 familiar to ü?\r\nham\tBeautiful truth : Expression of the face could Be seen by everyone... 
But the depression of heart Could be understood only By the Loved ones.. Gud Ni8;-)\r\nham\tInfact happy new year. How are you where are you when are we seeing\r\nspam\tIn The Simpsons Movie released in July 2007 name the band that died at the start of the film? A-Green Day, B-Blue Day, C-Red Day. (Send A, B or C)\r\nham\tThat's a shame! Maybe cld meet for few hrs tomo?\r\nham\tLol I would but despite these cramps I like being a girl.\r\nham\tI cant wait for cornwall. Hope tonight isnt too bad as well but its rock night shite. Anyway im going for a kip now have a good night. Speak to you soon.\r\nham\tPls help me tell sura that i'm expecting a battery from hont. And that if should pls send me a message about how to download movies. Thanks\r\nspam\tPlease call Amanda with regard to renewing or upgrading your current T-Mobile handset free of charge. Offer ends today. Tel 0845 021 3680 subject to T's and C's\r\nham\tHaven't found a way to get another app for your phone, eh ? Will you go to the net cafe ? Did you take that job? Geeee I need you babe. I crave to see you ...\r\nham\tI only work from mon to thurs but Sat i cant leh... Booked liao... Which other day u free?\r\nham\tÜ comin to fetch us oredi...\r\nham\tWhat's nannys address?\r\nspam\tURGENT!! Your 4* Costa Del Sol Holiday or £5000 await collection. Call 09050090044 Now toClaim. SAE, TC s, POBox334, Stockport, SK38xh, Cost£1.50/pm, Max10mins\r\nham\tHaf u eaten? Wat time u wan me 2 come?\r\nspam\tWant a new Video Phone? 750 anytime any network mins? Half price line rental free text for 3 months? Reply or call 08000930705 for free delivery\r\nham\tYo, call me when you get the chance, a friend of mine wanted me to ask you about a big order\r\nham\tThis single single answers are we fighting? Plus i said am broke and you didnt reply\r\nham\tIt certainly puts things into perspective when something like this happens\r\nham\tNow got tv 2 watch meh? U no work today?\r\nham\ti felt so...not any conveying reason.. Ese he... What about me?\r\nspam\tHad your mobile 11 months or more? U R entitled to Update to the latest colour mobiles with camera for Free! Call The Mobile Update Co FREE on 08002986030\r\nham\tHow's it going? Got any exciting karaoke type activities planned? I'm debating whether to play football this eve. Feeling lazy though.\r\nham\tI told that am coming on wednesday.\r\nham\tIts ok, called mom instead have fun\r\nspam\tDear Voucher Holder, To claim this weeks offer, at your PC please go to http://www.wtlp.co.uk/text. Ts&Cs apply.\r\nham\tWell if I'm that desperate I'll just call armand again\r\nham\tAre you at work right now ?\r\nspam\tCongrats! Nokia 3650 video camera phone is your Call 09066382422 Calls cost 150ppm Ave call 3mins vary from mobiles 16+ Close 300603 post BCM4284 Ldn WC1N3XX\r\nham\tHaven't heard anything and he's not answering my texts so I'm guessing he flaked. That said the jb is fantastic\r\nham\tMmmmmm ... I love you,so much, Ahmad ... I can't wait for this year to begin as every second takes me closer to being at your side. Happy New Year, my love!!\r\nham\tPls what's the full name of joke's school cos fees in university of florida seem to actually be <#> k. Pls holla back\r\nham\tSorry, I'll call later\r\nham\tOk... But they said i've got wisdom teeth hidden inside n mayb need 2 remove.\r\nham\tAnd pls pls drink plenty plenty water\r\nham\tHow are you doing. How's the queen. Are you going for the royal wedding\r\nham\tHe's in lag. 
That's just the sad part but we keep in touch thanks to skype\r\nham\tOk lor then we go tog lor...\r\nham\tTwo teams waiting for some players\r\nham\tCan ü send me a copy of da report?\r\nham\tswhrt how u dey,hope ur ok, tot about u 2day.love n miss.take care.\r\nham\tOk da, i already planned. I wil pick you.\r\nspam\tUrgent! Please call 0906346330. Your ABTA complimentary 4* Spanish Holiday or £10,000 cash await collection SAE T&Cs BOX 47 PO19 2EZ 150ppm 18+\r\nham\tSorry, I'll call later in meeting\r\nham\tI just really need shit before tomorrow and I know you won't be awake before like 6\r\nham\tI'm good. Have you registered to vote?\r\nham\tHmm ok, i'll stay for like an hour cos my eye is really sore!\r\nham\tDear got bus directly to calicut\r\nham\tMm umma ask vava also to come tell him can play later together\r\nham\tWell the general price is <#> /oz, let me know if/when/how much you want\r\nham\tSorry, I'll call later\r\nham\tEach Moment in a day,has its own value-Morning brings hope,afternoon brings faith,Evening brings luv,Night brings rest,Wish u find them all today.Good Morning\r\nham\t<#> w jetton ave if you forgot\r\nham\tOk i'm coming home now.\r\nham\tCan not use foreign stamps in this country.\r\nspam\tDouble mins and txts 4 6months FREE Bluetooth on Orange. Available on Sony, Nokia Motorola phones. Call MobileUpd8 on 08000839402 or call2optout/N9DX\r\nham\tSorry, it's a lot of friend-of-a-friend stuff, I'm just now about to talk to the actual guy who wants to buy\r\nspam\tFREE for 1st week! No1 Nokia tone 4 ur mob every week just txt NOKIA to 8007 Get txting and tell ur mates www.getzed.co.uk POBox 36504 W45WQ norm150p/tone 16+\r\nspam\tWant to funk up ur fone with a weekly new tone reply TONES2U 2 this text. www.ringtones.co.uk, the original n best. Tones 3GBP network operator rates apply\r\nspam\tcmon babe, make me horny, *turn* me on! Txt me your fantasy now babe -) Im hot, sticky and need you now. All replies cost £1.50. 2 cancel send STOP\r\nham\tI will come tomorrow di\r\nham\tWylie update: my weed dealer carlos went to freedom and had a class with lunsford\r\nham\tAre you happy baby ? Are you alright ? Did you take that job ? I hope your fine. I send you a kiss to make you smile from across the sea ... *kiss* *kiss*\r\nham\tC movie is juz last minute decision mah. Juz watch 2 lar but i tot ü not interested.\r\nham\tHow are you enjoying this semester? Take care brother.\r\nspam\tIMPORTANT INFORMATION 4 ORANGE USER 0796XXXXXX. TODAY IS UR LUCKY DAY!2 FIND OUT WHY LOG ONTO http://www.urawinner.com THERE'S A FANTASTIC PRIZEAWAITING YOU!\r\nham\tGet the door, I'm here\r\nham\tLets use it next week, princess :)\r\nham\tOr i go home first lar ü wait 4 me lor.. I put down my stuff first..\r\nham\tI want kfc its Tuesday. Only buy 2 meals ONLY 2. No gravy. Only 2 Mark. 2!\r\nham\tNo da:)he is stupid da..always sending like this:)don believe any of those message.pandy is a mental:)\r\nham\tOi when you gonna ring\r\nspam\tMissed call alert. These numbers called but left no message. 07008009200\r\nham\tI attended but nothing is there.\r\nham\tArd 530 like dat lor. We juz meet in mrt station then ü dun haf to come out.\r\nham\tNo dear i was sleeping :-P\r\nham\tEr mw im filled tuth is aight\r\nham\tWill be office around 4 pm. Now i am going hospital.\r\nham\tActually i'm waiting for 2 weeks when they start putting ad.\r\nham\tAnything lor if they all go then i go lor...\r\nham\tU free on sat rite? 
U wan 2 watch infernal affairs wif me n darren n mayb xy?\r\nham\tPlz note: if anyone calling from a mobile Co. & asks u to type # <#> or # <#> . Do not do so. Disconnect the call,coz it iz an attempt of 'terrorist' to make use of the sim card no. Itz confirmd by nokia n motorola n has been verified by CNN IBN.\r\nham\tYo you around? A friend of mine's lookin to pick up later tonight\r\nham\tStupid auto correct on my phone\r\nham\tDouble eviction this week - Spiral and Michael and good riddance to them!\r\nham\t\"The world suffers a lot... Not because of the violence of bad people. But because of the silence of good people!\", Gud night....\r\nham\tOk thats cool. Its , just off either raglan rd or edward rd. Behind the cricket ground. Gimme ring when ur closeby see you tuesday.\r\nham\tBuy one egg for me da..please:)\r\nham\tHave you started in skye\r\nham\tHave you bookedthe hut? And also your time off? How are you by the way?\r\nham\tAnd several to you sir.\r\nham\tU really pig leh sleep so much. My dad wake me up at 10 smth 2 eat lunch today.\r\nham\tI'm at home. Please call\r\nham\tMy love ... I hope your not doing anything drastic. Don't you dare sell your pc or your phone ...\r\nham\tNow only i reached home. . . I am very tired now. . I will come tomorro\r\nspam\tFREEMSG: Our records indicate you may be entitled to 3750 pounds for the Accident you had. To claim for free reply with YES to this msg. To opt out text STOP\r\nspam\tU can WIN £100 of Music Gift Vouchers every week starting NOW Txt the word DRAW to 87066 TsCs www.Idew.com SkillGame, 1Winaweek, age16. 150ppermessSubscription\r\nham\tLife style garments account no please.\r\nham\tLol wtf random. Btw is that your lunch break\r\nham\tSez, hows u & de arab boy? Hope u r all good give my love 2 evry1 love ya eshxxxxxxxxxxx\r\nham\tThe LAY MAN! Just to let you know you are missed and thought off. Do have a great day. And if you can send me bimbo and ugo's numbers, ill appreciate. Safe\r\nham\tDetroit. The home of snow. Enjoy it.\r\nspam\tShow ur colours! Euro 2004 2-4-1 Offer! Get an England Flag & 3Lions tone on ur phone! Click on the following service message for info!\r\nham\tOkie...\r\nham\tAight, I'm chillin in a friend's room so text me when you're on the way\r\nham\tIs toshiba portege m100 gd?\r\nham\tWell welp is sort of a semiobscure internet thing\r\nspam\tText PASS to 69669 to collect your polyphonic ringtones. Normal gprs charges apply only. Enjoy your tones\r\nspam\taccordingly. I repeat, just text the word ok on your mobile phone and send\r\nham\tLoosu go to hospital. De dont let it careless.\r\nham\tHow much for an eighth?\r\nham\tOmg Joanna is freaking me out. She's looked thru all my friends to find photos of me. And then she's asking about stuff on my MySpace which I haven't even logged on in like a year. :/\r\nham\tSend ur birthdate with month and year, I will tel u ur LIFE PARTNER'S name. and the method of calculation. Reply must.\r\nham\tJuz now havent woke up so a bit blur blur... Can? Dad went out liao... I cant cum now oso...\r\nham\tHow about clothes, jewelry, and trips?\r\nspam\tBlock Breaker now comes in deluxe format with new features and great graphics from T-Mobile. Buy for just £5 by replying GET BBDELUXE and take the challenge\r\nham\tAah! A cuddle would be lush! I'd need lots of tea and soup before any kind of fumbling!\r\nspam\timportant information 4 orange user . today is your lucky day!2find out why log onto http://www.urawinner.com THERE'S A FANTASTIC SURPRISE AWAITING YOU!\r\nham\tI am late. 
I will be there at\r\nham\tSad story of a Man - Last week was my b'day. My Wife did'nt wish me. My Parents forgot n so did my Kids . I went to work. Even my Colleagues did not wish.\r\nham\tAre you plans with your family set in stone ?\r\nham\tPls dont forget to study\r\nham\tYou'll never believe this but i have actually got off at taunton. Wow\r\nham\tDen only weekdays got special price... Haiz... Cant eat liao... Cut nails oso muz wait until i finish drivin wat, lunch still muz eat wat... \r\nham\tShe just broke down a list of reasons why nobody's in town and I can't tell if she's being sarcastic or just faggy\r\nham\t <DECIMAL> m but its not a common car here so its better to buy from china or asia. Or if i find it less expensive. I.ll holla\r\nham\tThe greatest test of courage on earth is to bear defeat without losing heart....gn tc\r\nham\tSORRY IM STIL FUCKED AFTER LAST NITE WENT TOBED AT 430 GOT UP 4 WORK AT 630\r\nham\tHey so whats the plan this sat? \r\nham\tBeauty sleep can help ur pimples too.\r\nham\tGreat. Hope you are using your connections from mode men also cos you can never know why old friends can lead you to today\r\nspam\tNatalja (25/F) is inviting you to be her friend. Reply YES-440 or NO-440 See her: www.SMS.ac/u/nat27081980 STOP? Send STOP FRND to 62468\r\nham\tWhere to get those?\r\nham\tKind of. Just missed train cos of asthma attack, nxt one in half hr so driving in. not sure where to park.\r\nham\tBall is moving a lot.will spin in last :)so very difficult to bat:)\r\nham\tHaiyoh... Maybe your hamster was jealous of million\r\nham\tCan you please send me my aunty's number\r\nham\tI'm glad. You are following your dreams.\r\nham\tI've reached home finally...\r\nspam\tURGENT. Important information for 02 user. Today is your lucky day! 2 find out why , log onto http://www.urawinner.com there is a fantastic surprise awaiting you !\r\nspam\tWINNER!! As a valued network customer you have been selected to receivea £900 prize reward! To claim call 09061701461. Claim code KL341. Valid 12 hours only.\r\nham\tWn u r hurt by d prsn who s close 2 u, do fight wit dem. Coz somtimes dis fight saves a relation bt being quiet leaves nothin in a relation.. Gud eveB-)\r\nham\tU can call now...\r\nham\tScience tells that chocolate will melt under the sunlight. Please don't walk under the sunlight. BCoz,I don't want to loss a sweet friend.\r\nham\tYes. I come to nyc for audiitions and am trying to relocate.\r\nham\tI pocked you up there before\r\nham\tCongrats. That's great. I wanted to tell you not to tell me your score cos it might make me relax. But its motivating me so thanks for sharing\r\nham\tI wud never mind if u dont miss me or if u dont need me.. But u wil really hurt me wen u need me & u dont tell me......... Take care:-)\r\nham\tHey mr whats the name of that bill brison book the one about language and words \r\nham\tOkay, good, no problem, and thanx!\r\nham\tFor you information, IKEA is spelled with all caps. That is not yelling. when you thought i had left you, you were sitting on the bed among the mess when i came in. i said we were going after you got home from class. please don't try and bullshit me. It makes me want to listen to you less.\r\nham\tCall me when u're done...\r\nham\tG.W.R\r\nham\tYou best watch what you say cause I get drunk as a motherfucker\r\nspam\tKit Strip - you have been billed 150p. Netcollex Ltd. PO Box 1013 IG11 OJA\r\nspam\tHMV BONUS SPECIAL 500 pounds of genuine HMV vouchers to be won. Just answer 4 easy questions. Play Now! 
Send HMV to 86688 More info:www.100percent-real.com\r\nspam\tPlease CALL 08712402578 immediately as there is an urgent message waiting for you\r\nspam\tthesmszone.com lets you send free anonymous and masked messages..im sending this message from there..do you see the potential for abuse???\r\nspam\tWELL DONE! Your 4* Costa Del Sol Holiday or £5000 await collection. Call 09050090044 Now toClaim. SAE, TCs, POBox334, Stockport, SK38xh, Cost£1.50/pm, Max10mins\r\nham\tHurt me... Tease me... Make me cry... But in the end of my life when i die plz keep one rose on my grave and say STUPID I MISS U.. HAVE A NICE DAY BSLVYL\r\nham\tErm... Woodland avenue somewhere. Do you get the parish magazine, his telephone number will be in there.\r\nham\tAre there TA jobs available? Let me know please cos i really need to start working\r\nham\tAiyar hard 2 type. U later free then tell me then i call n scold n tell u.\r\nham\tYup i'm free...\r\nham\tGood good, billy mates all gone. Just been jogging, again! Did enjoy concert?\r\nham\tYo come over carlos will be here soon\r\nham\tAwww dat is sweet! We can think of something to do he he! Have a nice time tonight ill probably txt u later cos im lonely :( xxx.\r\nham\tI guess it is useless calling u 4 something important.\r\nham\tHa ha - had popped down to the loo when you hello-ed me. Hello!\r\nham\tHe dint tell anything. He is angry on me that why you told to abi.\r\nspam\tSomeone U know has asked our dating service 2 contact you! Cant Guess who? CALL 09058091854 NOW all will be revealed. PO BOX385 M6 6WU\r\nham\tIt so happens that there r 2waxsto do wat you want. She can come and ill get her medical insurance. And she'll be able to deliver and have basic care. I'm currently shopping for the right medical insurance for her. So just give me til friday morning. Thats when i.ll see the major person that can guide me to the right insurance.\r\nham\tI keep ten rs in my shelf:) buy two egg.\r\nham\tI wasn't well babe, i have swollen glands at my throat ... What did you end up doing ?\r\nham\tIs ur changes 2 da report big? Cos i've already made changes 2 da previous report.\r\nham\tCaptain is in our room:)\r\nham\tI can't speak, bcaz mobile have problem. I can listen you but you cann't listen my voice. So i calls you later.\r\nham\tHIYA STU WOT U UP 2.IM IN SO MUCH TRUBLE AT HOME AT MOMENT EVONE HATES ME EVEN U! WOT THE HELL AV I DONE NOW? Y WONT U JUST TELL ME TEXT BCK PLEASE LUV DAN \r\nham\tS...i will take mokka players only:)\r\nham\tAre you still playing with gautham?\r\nham\tHey mr and I are going to the sea view and having a couple of gays I mean games! Give me a bell when ya finish \r\nham\tK, jason says he's gonna be around so I'll be up there around <#>\r\nham\tSorry . I will be able to get to you. See you in the morning.\r\nham\tAight well keep me informed\r\nham\tI am not having her number sir\r\nham\tAm only searching for good dual sim mobile pa.\r\nham\tThat seems unnecessarily hostile\r\nham\tDude got a haircut. Now its breezy up there\r\nspam\tCongrats! 2 mobile 3G Videophones R yours. call 09061744553 now! videochat wid ur mates, play java games, Dload polyH music, noline rentl. bx420. ip4. 5we. 150pm\r\nham\t1Apple/Day=No Doctor. 1Tulsi Leaf/Day=No Cancer. 1Lemon/Day=No Fat. 1Cup Milk/day=No Bone Problms 3 Litres Watr/Day=No Diseases Snd ths 2 Whom U Care..:-)\r\nham\ti thought we were doing a king of the hill thing there.\r\nham\tNope i'll come online now..\r\nham\tALSO TELL HIM I SAID HAPPY BIRTHDAY\r\nham\tY bishan lei... I tot ü say lavender? 
\r\nham\tBoo what time u get out? U were supposed to take me shopping today. :(\r\nham\tNow u sound like manky scouse boy steve,like! I is travelling on da bus home.wot has u inmind 4 recreation dis eve?\r\nham\tFyi I'm taking a quick shower, be at epsilon in like <#> min\r\nham\ton a Tuesday night r u 4 real\r\nham\tYes when is the appt again?\r\nham\tJust got outta class gonna go gym.\r\nham\tI want to sent <#> mesages today. Thats y. Sorry if i hurts\r\nham\tÜ all write or wat..\r\nham\tHa! I wouldn't say that I just didn't read anything into way u seemed. I don't like 2 be judgemental....i save that for fridays in the pub!\r\nham\tIts a valentine game. . . send dis msg to all ur friends. . If 5 answers r d same then someone really loves u. . Ques- which colour suits me the best?\r\nham\tHi:)did you asked to waheeda fathima about leave?\r\nham\tEnjoy urself tmr...\r\nham\tYou still around? I could use a half-8th\r\nspam\tU 447801259231 have a secret admirer who is looking 2 make contact with U-find out who they R*reveal who thinks UR so special-call on 09058094597\r\nham\tYou give us back my id proof and <#> rs. We wont allow you to work. We will come to your home within days\r\nham\tÜ bot notes oredi... Cos i juz rem i got...\r\nham\tYes. Rent is very expensive so its the way we save.\r\nham\tNight has ended for another day, morning has come in a special way. May you smile like the sunny rays and leaves your worries at the blue blue bay. Gud mrng\r\nham\tHows the pain dear?y r u smiling?\r\nham\tFun fact: although you would think armand would eventually build up a tolerance or some shit considering how much he smokes, he gets fucked up in like 2 hits\r\nspam\timportant information 4 orange user 0789xxxxxxx. today is your lucky day!2find out why log onto http://www.urawinner.com THERE'S A FANTASTIC SURPRISE AWAITING YOU!\r\nham\tSorry, I can't help you on this.\r\nham\tGreat. So should i send you my account number.\r\nham\tHELLOGORGEOUS, HOWS U? MY FONE WAS ON CHARGE LST NITW WEN U TEXD ME. HOPEU AD A NICE WKEND AS IM SURE U DID LOOKIN 4WARD 2 C-IN U 2MRW LUV JAZ\r\nspam\tOur dating service has been asked 2 contact U by someone shy! CALL 09058091870 NOW all will be revealed. POBox84, M26 3UZ 150p\r\nham\tÜ only send me the contents page...\r\nham\tNight sweet, sleep well! I've just been to see The Exorcism of Emily Rose and may never sleep again! Hugs and snogs! \r\nham\tDon't Think About \"What u Have Got\" Think About \"How to Use It That You Have Got\" gooD ni8\r\nham\tI can't right this second, gotta hit people up first\r\nham\tEvry Emotion dsn't hav Words.Evry Wish dsn't hav Prayrs.. If u Smile,D World is wit u.Othrwise even d Drop of Tear dsn't lik 2 Stay wit u.So b happy.. Good morning, keep smiling:-)\r\nham\tSo what about you. What do you remember\r\nham\tUjhhhhhhh computer shipped out with address to sandiago and parantella lane. Wtf. Poop.\r\nham\tMm yes dear look how i am hugging you both. :-P\r\nham\tI like dis sweater fr mango but no more my size already so irritating.\r\nham\t1 I don't have her number and 2 its gonna be a massive pain in the ass and i'd rather not get involved if that's possible\r\nham\tAnytime lor...\r\nspam\tDo you want a new Video handset? 750 any time any network mins? UNLIMITED TEXT? Camcorder? Reply or Call now 08000930705 for del Sat AM\r\nham\tPurity of friendship between two is not about smiling after reading the forwarded message..Its about smiling just by seeing the name. Gud evng\r\nspam\tUr balance is now £600. 
Next question: Complete the landmark, Big, A. Bob, B. Barry or C. Ben ?. Text A, B or C to 83738. Good luck!\r\nham\tMe fine..absolutly fine\r\nham\tK and you're sure I don't have to have consent forms to do it :V\r\nspam\tUr TONEXS subscription has been renewed and you have been charged £4.50. You can choose 10 more polys this month. www.clubzed.co.uk *BILLING MSG*\r\nspam\tIf you don't, your prize will go to another customer. T&C at www.t-c.biz 18+ 150p/min Polo Ltd Suite 373 London W1J 6HL Please call back if busy\r\nham\tHow much is torch in 9ja.\r\nham\tDoing nothing, then u not having dinner w us?\r\nham\tHow are you. Just checking up on you\r\nham\tDone it but internet connection v slow and can‘t send it. Will try again later or first thing tomo.\r\nham\tMathews or tait or edwards or anderson\r\nham\tyeah sure thing mate haunt got all my stuff sorted but im going sound anyway promoting hex for .by the way who is this? dont know number. Joke\r\nham\tNo need lar i go engin? Cos my sis at arts today...\r\nham\tThanks honey but still haven't heard anything I will leave it a bit longer so not 2 crowd him and will try later - great advice thanks hope cardiff is still there!\r\nspam\tDo you want a New Nokia 3510i Colour Phone Delivered Tomorrow? With 200 FREE minutes to any mobile + 100 FREE text + FREE camcorder Reply or Call 8000930705\r\nham\t, im .. On the snowboarding trip. I was wondering if your planning to get everyone together befor we go..a meet and greet kind of affair? Cheers, \r\nham\tS.i'm watching it in live..\r\nham\tsee you then, we're all christmassy here!\r\nham\tK I'm ready, <#> ?\r\nham\tDo you know why god created gap between your fingers..? So that, One who is made for you comes & fills those gaps by holding your hand with LOVE..!\r\nham\tThe greatest test of courage on earth is to bear defeat without losing heart....gn tc\r\nham\twhat are your new years plans?\r\nspam\tRECPT 1/3. You have ordered a Ringtone. Your order is being processed...\r\nham\tBaaaaaaaabe! Wake up ! I miss you ! I crave you! I need you!\r\nham\tOnly just got this message, not ignoring you. Yes, i was. Shopping that is\r\nham\tDear :-/ why you mood off. I cant drive so i brother to drive\r\nham\tWhen did dad get back.\r\nham\tCan you tell Shola to please go to college of medicine and visit the academic department, tell the academic secretary what the current situation is and ask if she can transfer there. She should ask someone to check Sagamu for the same thing and lautech. Its vital she completes her medical education in Nigeria. Its less expensive much less expensive. Unless she will be getting citizen rates in new zealand.\r\nham\tYes just finished watching days of our lives. I love it.\r\nham\tJuz go google n search 4 qet...\r\nham\tMany times we lose our best ones bcoz we are \r\nham\tGood FRIENDS CaRE for each Other.. CLoSE Friends UNDERSTaND each Other... and TRUE Friends STaY forever beyond words, beyond time. Gud ni8\r\nham\tJust getting back home\r\nham\tSorry, I'll call later <#> mins\r\nham\tDun need to use dial up juz open da browser n surf...\r\nspam\tAs one of our registered subscribers u can enter the draw 4 a 100 G.B. gift voucher by replying with ENTER. 
To unsubscribe text STOP\r\nham\tAwesome, plan to get here any time after like <#> , I'll text you details in a wee bit\r\nham\tTake care and sleep well.you need to learn to change in life.you only need to get CONVINCED on that.i will wait but no more conversations between us.GET CONVINCED by that time.Your family is over for you in many senses.respect them but not overemphasise.or u have no role in my life.\r\nspam\tFor your chance to WIN a FREE Bluetooth Headset then simply reply back with \"ADP\"\r\nham\tYou also didnt get na hi hi hi hi hi\r\nham\tYa but it cant display internal subs so i gotta extract them\r\nham\tIf i said anything wrong sorry de:-)\r\nham\tSad story of a Man - Last week was my b'day. My Wife did'nt wish me. My Parents forgot n so did my Kids . I went to work. Even my Colleagues did not wish.\r\nham\tHow stupid to say that i challenge god.You dont think at all on what i write instead you respond immed.\r\nham\tYeah I should be able to, I'll text you when I'm ready to meet up\r\nham\tV skint too but fancied few bevies.waz gona go meet &othrs in spoon but jst bin watchng planet earth&sofa is v comfey; If i dont make it hav gd night\r\nham\t says that he's quitting at least5times a day so i wudn't take much notice of that. Nah, she didn't mind. Are you gonna see him again? Do you want to come to taunton tonight? U can tell me all about !\r\nham\tWhen you get free, call me\r\nham\tHow have your little darlings been so far this week? Need a coffee run tomo?Can't believe it's that time of week already …\r\nham\tOk i msg u b4 i leave my house.\r\nham\tStill at west coast... Haiz... Ü'll take forever to come back...\r\nham\tMMM ... Fuck .... Merry Christmas to me\r\nham\talright. Thanks for the advice. Enjoy your night out. I'ma try to get some sleep...\r\nham\tUpdate your face book status frequently :)\r\nham\tJust now saw your message.it k da:)\r\nham\tWas it something u ate?\r\nham\tSo what did the bank say about the money?\r\nham\tAiyar dun disturb u liao... Thk u have lots 2 do aft ur cupboard come...\r\nham\tHey they r not watching movie tonight so i'll prob b home early...\r\nham\tYar lor... How u noe? U used dat route too?\r\nham\t2mro i am not coming to gym machan. Goodnight.\r\nham\tDont think you need yellow card for uk travel. Ask someone that has gone before. If you do its just <#> bucks\r\nham\tCan u look 4 me in da lib i got stuff havent finish yet.\r\nham\tSounds great! Im going to sleep now. Have a good night!\r\nspam\tDon't b floppy... b snappy & happy! Only gay chat service with photo upload call 08718730666 (10p/min). 2 stop our texts call 08712460324\r\nham\tHouse-Maid is the murderer, coz the man was murdered on <#> th January.. As public holiday all govt.instituitions are closed,including post office..understand?\r\nham\tHow come u got nothing to do?\r\nham\tNothing will ever be easy. But don't be looking for a reason not to take a risk on life and love\r\nham\ti want to grasp your pretty booty :)\r\nham\tI've got it down to a tea. not sure which flavour\r\nham\tI'm going 2 orchard now laready me reaching soon. U reaching?\r\nham\tDear i am not denying your words please\r\nham\tYou know my old Dom I told you about yesterday ? His name is Roger? He got in touch with me last night and wants me to meet him today at 2 pm\r\nham\tCOME BACK TO TAMPA FFFFUUUUUUU\r\nham\t2 celebrate my bday, y else?\r\nham\tMerry christmas to u too annie!\r\nham\tPlease tell me you have some of that special stock you were talking about\r\nham\tI sent them. 
Do you like?\r\nspam\tUrgent UR awarded a complimentary trip to EuroDisinc Trav, Aco&Entry41 Or £1000. To claim txt DIS to 87121 18+6*£1.50(moreFrmMob. ShrAcomOrSglSuplt)10, LS1 3AJ\r\nham\tAwesome, be there in a minute\r\nham\tAnd that is the problem. You walk around in \"julianaland\" oblivious to what is going on around you. I say the same things constantly and they go in one ear and out the other while you go off doing whatever you want to do. It's not that you don't know why I'm upset--it's that you don't listen when i tell you WHAT is going to upset me. Then you want to be surprised when I'm mad.\r\nham\tI've told you everything will stop. Just dont let her get dehydrated.\r\nham\tOr I guess <#> min\r\nham\tI'm home. Ard wat time will u reach?\r\nham\tStorming msg: Wen u lift d phne, u say \"HELLO\" Do u knw wt is d real meaning of HELLO?? . . . It's d name of a girl..! . . . Yes.. And u knw who is dat girl?? \"Margaret Hello\" She is d girlfrnd f Grahmbell who invnted telphone... . . . . Moral:One can 4get d name of a person, bt not his girlfrnd... G o o d n i g h t . . .@\r\nham\tIf you want to mapquest it or something look up \"usf dogwood drive\", that's the tiny street where the parking lot is\r\nham\tAight should I just plan to come up later tonight?\r\nham\tDie... I accidentally deleted e msg i suppose 2 put in e sim archive. Haiz... I so sad...\r\nspam\tWelcome to UK-mobile-date this msg is FREE giving you free calling to 08719839835. Future mgs billed at 150p daily. To cancel send \"go stop\" to 89123\r\nham\tThis is wishing you a great day. Moji told me about your offer and as always i was speechless. You offer so easily to go to great lengths on my behalf and its stunning. My exam is next friday. After that i will keep in touch more. Sorry.\r\nham\tThanks again for your reply today. When is ur visa coming in. And r u still buying the gucci and bags. My sister things are not easy, uncle john also has his own bills so i really need to think about how to make my own money. Later sha.\r\nham\tSorry I flaked last night, shit's seriously goin down with my roommate, what you up to tonight?\r\nham\tHe said i look pretty wif long hair wat. But i thk he's cutting quite short 4 me leh.\r\nham\tRanjith cal drpd Deeraj and deepak 5min hold\r\nham\t\"CHEERS FOR CALLIN BABE.SOZI CULDNT TALKBUT I WANNATELL U DETAILS LATER WENWECAN CHAT PROPERLY X\"\r\nham\tHey u still at the gym?\r\nham\tShe said,'' do u mind if I go into the bedroom for a minute ? '' ''OK'', I sed in a sexy mood. She came out 5 minuts latr wid a cake...n My Wife,\r\nham\tMuch better now thanks lol\r\nham\tNothing, smsing u n xy lor. Sorry lor da guys neva c u in person but they sort of know u lor. So u wan 2 meet them xy ask me 2 bring u along 4 our next meeting.\r\nham\tLemme know when I can swing by and pick up, I'm free basically any time after 1 all this semester\r\nham\tWa... U so efficient... Gee... Thanx...\r\nspam\t3. You have received your mobile content. Enjoy\r\nham\tS but not able to sleep.\r\nspam\tWant explicit SEX in 30 secs? Ring 02073162414 now! Costs 20p/min\r\nham\tWe will meet soon princess! 
[SMS Spam Collection data: thousands of verbatim tab-separated records, one per line, of the form `<label>\t<message>`, where `<label>` is either `ham` or `spam` and `<message>` is the raw SMS text. In the original span the line breaks between records appeared as literal `\r\n` escape sequences.]
If you cant no need to study.\r\nham\tPlease tell me not all of my car keys are in your purse\r\nham\tI didnt get anything da\r\nham\tOk... Sweet dreams...\r\nham\tWell she's in for a big surprise!\r\nham\tAs usual..iam fine, happy & doing well..:)\r\nham\t1 in cbe. 2 in chennai.\r\nham\tCan help u swoop by picking u up from wherever ur other birds r meeting if u want.\r\nham\tIf anyone calls for a treadmill say you'll buy it. Make sure its working. I found an ad on Craigslist selling for $ <#> .\r\nham\tI absolutely LOVE South Park! I only recently started watching the office.\r\nham\tDid you see that film:)\r\nham\tPls speak with me. I wont ask anything other then you friendship.\r\nham\tStorming msg: Wen u lift d phne, u say \"HELLO\" Do u knw wt is d real meaning of HELLO?? . . . It's d name of a girl..! . . . Yes.. And u knw who is dat girl?? \"Margaret Hello\" She is d girlfrnd f Grahmbell who invnted telphone... . . . . Moral:One can 4get d name of a person, bt not his girlfrnd... G o o d n i g h t . . .@\r\nham\tGud ni8.swt drms.take care\r\nham\tHI DARLIN ITS KATE ARE U UP FOR DOIN SOMETHIN TONIGHT? IM GOING TO A PUB CALLED THE SWAN OR SOMETHING WITH MY PARENTS FOR ONE DRINK SO PHONE ME IF U CAN\r\nham\tAnything lar then ü not going home 4 dinner?\r\nham\t\"ER, ENJOYIN INDIANS AT THE MO..yeP. SaLL gOoD HehE ;> hows bout u shexy? Pete Xx\"\r\nspam\tIf you don't, your prize will go to another customer. T&C at www.t-c.biz 18+ 150p/min Polo Ltd Suite 373 London W1J 6HL Please call back if busy \r\nham\tDid u fix the teeth?if not do it asap.ok take care.\r\nham\tSo u wan 2 come for our dinner tonight a not?\r\nham\tHello.How u doing?What u been up 2?When will u b moving out of the flat, cos I will need to arrange to pick up the lamp, etc. Take care. Hello caroline!\r\nham\tIts too late:)but its k.wish you the same.\r\nham\tHi. Hope ur day * good! Back from walk, table booked for half eight. Let me know when ur coming over.\r\nham\tOh yeah clearly it's my fault\r\nham\tDunno leh cant remember mayb lor. So wat time r we meeting tmr?\r\nham\tBest msg: It's hard to be with a person, when u know that one more step foward will make u fall in love.. & One step back can ruin ur friendship.. good night:-) ...\r\nspam\tURGENT! Your Mobile number has been awarded with a £2000 prize GUARANTEED. Call 09061790126 from land line. Claim 3030. Valid 12hrs only 150ppm\r\nham\tHelloooo... Wake up..! \"Sweet\" \"morning\" \"welcomes\" \"You\" \"Enjoy\" \"This Day\" \"with full of joy\".. \"GUD MRNG\".\r\nham\tVikky, come around <TIME> ..\r\nham\tAnd how you will do that, princess? :)\r\nham\tI have gone into get info bt dont know what to do\r\nham\tYeah, probably here for a while\r\nham\tSent me ur email id soon\r\nspam\tURGENT! You have won a 1 week FREE membership in our £100,000 Prize Jackpot! Txt the word: CLAIM to No: 81010 T&C www.dbuk.net LCCLTD POBOX 4403LDNW1A7RW18\r\nham\tI'm still pretty weak today .. Bad day ?\r\nham\tHey ! Don't forget ... You are MINE ... For ME ... My possession ... MY property ... MMM ... *childish smile* ...\r\nham\tAn excellent thought by a misundrstud frnd: I knw u hate me bt the day wen u'll knw the truth u'll hate urself:-( Gn:-)\r\nham\tHey! Congrats 2u2. id luv 2 but ive had 2 go home!\r\nham\tDear where you. Call me\r\nham\tXy trying smth now. U eat already? We havent...\r\nspam\tUrgent! Please call 09061213237 from landline. £5000 cash or a luxury 4* Canary Islands Holiday await collection. T&Cs SAE PO Box 177. M227XY. 150ppm. 
16+\r\nham\tI donno its in your genes or something\r\nspam\tXMAS iscoming & ur awarded either £500 CD gift vouchers & free entry 2 r £100 weekly draw txt MUSIC to 87066 TnC www.Ldew.com1win150ppmx3age16subscription \r\nham\tAlex says he's not ok with you not being ok with it\r\nham\tAre u coming to the funeral home\r\nham\tMy darling sister. How are you doing. When's school resuming. Is there a minimum wait period before you reapply? Do take care\r\nham\tI.ll hand her my phone to chat wit u\r\nham\tWell good morning mr . Hows london treatin' ya treacle?\r\nham\tI can't make it tonight\r\nham\tAt WHAT TIME should i come tomorrow\r\nham\tAbout <#> bucks. The banks fees are fixed. Better to call the bank and find out.\r\nham\tI can. But it will tell quite long, cos i haven't finish my film yet...\r\nham\tPls ask macho how much is budget for bb bold 2 is cos i saw a new one for <#> dollars.\r\nham\t\"Hi missed your Call and my mumHas beendropping red wine all over theplace! what is your adress?\"\r\nham\tIll be at yours in about 3 mins but look out for me\r\nham\tWhat you did in leave.\r\nham\tI'm coming back on Thursday. Yay. Is it gonna be ok to get the money. Cheers. Oh yeah and how are you. Everything alright. Hows school. Or do you call it work now\r\nham\tJolly good! By the way, will give u tickets for sat eve 7.30. Speak before then x\r\nham\tyeah, that's what I was thinking\r\nham\tK.k:)i'm going to tirunelvali this week to see my uncle ..i already spend the amount by taking dress .so only i want money.i will give it on feb 1\r\nham\tHere got ur favorite oyster... N got my favorite sashimi... Ok lar i dun say already... Wait ur stomach start rumbling...\r\nham\tMy sister going to earn more than me da.\r\nspam\tGet the official ENGLAND poly ringtone or colour flag on yer mobile for tonights game! Text TONE or FLAG to 84199. Optout txt ENG STOP Box39822 W111WX £1.50\r\nham\tHahaha..use your brain dear\r\nham\tJus finish watching tv... U?\r\nham\tK, fyi I'm back in my parents' place in south tampa so I might need to do the deal somewhere else\r\nham\tGood morning, my Love ... I go to sleep now and wish you a great day full of feeling better and opportunity ... You are my last thought babe, I LOVE YOU *kiss*\r\nham\tKothi print out marandratha.\r\nham\tBut we havent got da topic yet rite?\r\nham\tOk no problem... Yup i'm going to sch at 4 if i rem correctly...\r\nham\tThanks, I'll keep that in mind\r\nham\tAah bless! How's your arm?\r\nham\tDear Sir,Salam Alaikkum.Pride and Pleasure meeting you today at the Tea Shop.We are pleased to send you our contact number at Qatar.Rakhesh an Indian.Pls save our Number.Respectful Regards.\r\nham\tGal n boy walking in d park. gal-can i hold ur hand? boy-y? do u think i would run away? gal-no, jst wana c how it feels walking in heaven with an prince..GN:-)\r\nham\tWhat makes you most happy?\r\nham\tWishing you a wonderful week.\r\nham\tSweet heart how are you?\r\nham\tSir, waiting for your letter.\r\nham\tDude im no longer a pisces. Im an aquarius now.\r\nham\tX course it 2yrs. Just so her messages on messenger lik you r sending me\r\nham\tI think steyn surely get one wicket:)\r\nham\tNeither [in sterm voice] - i'm studying. All fine with me! Not sure the thing will be resolved, tho. Anyway. Have a fab hols\r\nham\tGarbage bags, eggs, jam, bread, hannaford wheat chex\r\nham\tNo. It's not pride. I'm almost <#> years old and shouldn't be takin money from my kid. You're not supposed to have to deal with this stuff. 
This is grownup stuff--why i don't tell you.\r\nham\tSounds better than my evening im just doing my costume. Im not sure what time i finish tomorrow but i will txt you at the end.\r\nham\tMy birthday is on feb <#> da. .\r\nham\tSo when do you wanna gym?\r\nham\tYou'd like that wouldn't you? Jerk!\r\nham\tAre u awake? Is there snow there?\r\nham\tAnd of course you should make a stink!\r\nspam\tu r subscribed 2 TEXTCOMP 250 wkly comp. 1st wk?s free question follows, subsequent wks charged@150p/msg.2 unsubscribe txt STOP 2 84128,custcare 08712405020\r\nham\tNo go. No openings for that room 'til after thanksgiving without an upcharge.\r\nham\tWhen you guys planning on coming over?\r\nham\tWat ü doing now?\r\nham\tMy Parents, My Kidz, My Friends n My Colleagues. All screaming.. SURPRISE !! and I was waiting on the sofa.. ... ..... ' NAKED...!\r\nham\tNo sir. That's why i had an 8-hr trip on the bus last week. Have another audition next wednesday but i think i might drive this time.\r\nham\tDo I? I thought I put it back in the box\r\nham\tI'm home...\r\nham\tNo one interested. May be some business plan.\r\nham\tYup it's at paragon... I havent decided whether 2 cut yet... Hee...\r\nham\tGood morning princess! Have a great day!\r\nham\tGuai... Ü shd haf seen him when he's naughty... Ü so free today? Can go jogging...\r\nham\tAiyo cos i sms ü then ü neva reply so i wait 4 ü to reply lar. I tot ü havent finish ur lab wat.\r\nham\tLiving is very simple.. Loving is also simple.. Laughing is too simple.. Winning is tooo simple.. But, Being 'SIMPLE' is very difficult...;-) :-)\r\nham\tTell me something. Thats okay.\r\nham\tOk\r\nham\tHmm. Shall i bring a bottle of wine to keep us amused? Just joking! I'll still bring a bottle. Red or white? See you tomorrow\r\nham\tThis is ur face test ( 1 2 3 4 5 6 7 8 9 <#> ) select any number i will tell ur face astrology.... am waiting. quick reply...\r\nham\tHey, iouri gave me your number, I'm wylie, ryan's friend\r\nham\tYep get with the program. You're slacking.\r\nham\tI'm in inside office..still filling forms.don know when they leave me.\r\nham\tI think your mentor is , but not 100 percent sure.\r\nspam\tCall 09095350301 and send our girls into erotic ecstacy. Just 60p/min. To stop texts call 08712460324 (nat rate)\r\nspam\tCamera - You are awarded a SiPix Digital Camera! call 09061221066 fromm landline. Delivery within 28 days.\r\nspam\tA £400 XMAS REWARD IS WAITING FOR YOU! Our computer has randomly picked you from our loyal mobile customers to receive a £400 reward. Just call 09066380611\r\nham\tJust trying to figure out when I'm suppose to see a couple different people this week. We said we'd get together but I didn't set dates\r\nspam\tIMPORTANT MESSAGE. This is a final contact attempt. You have important messages waiting out our customer claims dept. Expires 13/4/04. Call 08717507382 NOW!\r\nham\tHi mom we might be back later than <#> \r\nspam\tdating:i have had two of these. Only started after i sent a text to talk sport radio last week. Any connection do you think or coincidence?\r\nham\tLol, oh you got a friend for the dog ?\r\nham\tOk., is any problem to u frm him? Wats matter?\r\nham\tK I'll head out in a few mins, see you there\r\nham\tDo u konw waht is rael FRIENDSHIP Im gving yuo an exmpel: Jsut ese tihs msg.. Evrey splleing of tihs msg is wrnog.. Bt sitll yuo can raed it wihtuot ayn mitsake.. GOODNIGHT & HAVE A NICE SLEEP..SWEET DREAMS..\r\nham\tI cant pick the phone right now. Pls send a message\r\nham\tI don't want you to leave. 
But i'm barely doing what i can to stay sane. fighting with you constantly isn't helping.\r\nspam\tThe current leading bid is 151. To pause this auction send OUT. Customer Care: 08718726270\r\nspam\tFree entry to the gr8prizes wkly comp 4 a chance to win the latest Nokia 8800, PSP or £250 cash every wk.TXT GREAT to 80878 http//www.gr8prizes.com 08715705022\r\nham\tSomebody set up a website where you can play hold em using eve online spacebucks\r\nham\tIts sunny in california. The weather's just cool\r\nspam\tYou have 1 new message. Call 0207-083-6089\r\nham\tI can make it up there, squeezed <#> bucks out of my dad\r\nham\tGood day to You too.Pray for me.Remove the teeth as its painful maintaining other stuff.\r\nham\tHow are you babes. Hope your doing ok. I had a shit nights sleep. I fell asleep at 5.Im knackered and im dreading work tonight. What are thou upto tonight. X\r\nham\tHow do friends help us in problems? They give the most stupid suggestion that Lands us into another problem and helps us forgt the previous problem\r\nham\tI'm at work. Please call\r\nham\tI will be gentle baby! Soon you will be taking all <#> inches deep inside your tight pussy...\r\nham\tNOT MUCH NO FIGHTS. IT WAS A GOOD NITE!!\r\nham\tOk.ok ok..then..whats ur todays plan\r\nham\tNt joking seriously i told\r\nham\tWatching ajith film ah?\r\nham\tOoooooh I forgot to tell u I can get on yoville on my phone\r\nham\tAll done, all handed in. Don't know if mega shop in asda counts as celebration but thats what i'm doing!\r\nham\tI dont know exactly could you ask chechi.\r\nham\tDunno lei shd b driving lor cos i go sch 1 hr oni.\r\nham\tAs in i want custom officer discount oh.\r\nham\tThat's necessarily respectful\r\nham\tHi. Hope you had a good day. Have a better night.\r\nham\tAnd he's apparently bffs with carly quick now\r\nham\tHARD BUT TRUE: How much you show & express your love to someone....that much it will hurt when they leave you or you get seperated...!鈥┾??〨ud evening...\r\nham\tBabes I think I got ur brolly I left it in English wil bring it in 2mrw 4 u luv Franxx\r\nham\tHi babe its me thanks for coming even though it didnt go that well!i just wanted my bed! Hope to see you soon love and kisses xxx\r\nham\tSo gd got free ice cream... I oso wan...\r\nham\tPls give her prometazine syrup. 5mls then <#> mins later feed.\r\nham\tSo how many days since then?\r\nham\tDear are you angry i was busy dear\r\nham\tYup he msg me: is tat yijue? Then i tot it's my group mate cos we meeting today mah... I'm askin if ü leaving earlier or wat mah cos mayb ü haf to walk v far...\r\nham\t... Are you in the pub?\r\nham\tThere is a first time for everything :)\r\nham\tDaddy, shu shu is looking 4 u... U wan me 2 tell him u're not in singapore or wat?\r\nham\tI ask if u meeting da ge tmr nite...\r\nham\tGr8. So how do you handle the victoria island traffic. Plus when's the album due\r\nham\tNite nite pocay wocay luv u more than n e thing 4eva I promise ring u 2morrowxxxx\r\nham\tEast coast\r\nham\tYou should get more chicken broth if you want ramen unless there's some I don't know about\r\nham\tMy slave! I want you to take 2 or 3 pictures of yourself today in bright light on your cell phone! Bright light!\r\nham\tNope. I just forgot. Will show next week\r\nham\tSo how are you really. What are you up to. How's the masters. And so on.\r\nham\tI'm at bruce & fowler now but I'm in my mom's car so I can't park (long story)\r\nham\tI dont know oh. 
Hopefully this month.\r\nham\tHi elaine, is today's meeting confirmed?\r\nham\tOk k..sry i knw 2 siva..tats y i askd..\r\nham\tSorry, I'll call later\r\nham\tU horrible gal... U knew dat i was going out wif him yest n u still come n ask me...\r\nham\tOtherwise had part time job na-tuition..\r\nham\tOh yeah! And my diet just flew out the window\r\nspam\tSanta Calling! Would your little ones like a call from Santa Xmas eve? Call 09058094583 to book your time.\r\nham\tYou didnt complete your gist oh.\r\nham\tEr yeah, i will b there at 15:26, sorry! Just tell me which pub/cafe to sit in and come wen u can\r\nham\tIf you can make it any time tonight or whenever you can it's cool, just text me whenever you're around\r\nham\tIf I was I wasn't paying attention\r\nham\tThanx a lot 4 ur help!\r\nham\tYou're gonna have to be way more specific than that\r\nham\tJesus armand really is trying to tell everybody he can find\r\nham\tI'm wif him now buying tix lar...\r\nham\tMode men or have you left.\r\nham\tAm slow in using biola's fne\r\nham\t\"What are youdoing later? Sar xxx\"\r\nham\tHey i've booked the 2 lessons on sun liao...\r\nham\tThank you. do you generally date the brothas?\r\nham\tBy the way, make sure u get train to worc foregate street not shrub hill. Have fun night x\r\nham\tI thought i'd get him a watch, just cos thats the kind of thing u get4an18th. And he loves so much!\r\nspam\tYou have won a guaranteed 32000 award or maybe even £1000 cash to claim ur award call free on 0800 ..... (18+). Its a legitimat efreefone number wat do u think???\r\nham\tGood morning. At the repair shop--the ONLY reason i'm up at this hour.\r\nham\tAnd that's fine, I got enough bud to last most of the night at least\r\nham\tI am back. Good journey! Let me know if you need any of the receipts. Shall i tell you like the pendent?\r\nham\tSo that takes away some money worries\r\nham\taight we can pick some up, you open before tonight?\r\nspam\tLatest News! Police station toilet stolen, cops have nothing to go on!\r\nham\tSac needs to carry on:)\r\nham\tJust sing HU. I think its also important to find someone female that know the place well preferably a citizen that is also smart to help you navigate through. Even things like choosing a phone plan require guidance. When in doubt ask especially girls.\r\nham\tWhat???? Hello wats talks email address?\r\nham\tExcept theres a chick with huge boobs.\r\nham\tIm just wondering what your doing right now?\r\nham\tWishing you a beautiful day. Each moment revealing even more things to keep you smiling. Do enjoy it.\r\nspam\t\"For the most sparkling shopping breaks from 45 per person; call 0121 2025050 or visit www.shortbreaks.org.uk\"\r\nham\tArun can u transfr me d amt\r\nham\tSorry, I'll call later\r\nham\tIf you hear a loud scream in about <#> minutes its cause my Gyno will be shoving things up me that don't belong :/\r\nspam\tDecember only! Had your mobile 11mths+? You are entitled to update to the latest colour camera mobile for Free! Call The Mobile Update Co FREE on 08002986906\r\nham\tOk i thk i got it. Then u wan me 2 come now or wat?\r\nspam\tTxt: CALL to No: 86888 & claim your reward of 3 hours talk time to use from your phone now! Subscribe6GBP/mnth inc 3hrs 16 stop?txtStop www.gamb.tv\r\nham\tU GOIN OUT 2NITE?\r\nham\tI will treasure every moment we spend together...\r\nham\tShall I bring us a bottle of wine to keep us amused? Only joking! I‘ll bring one anyway\r\nspam\thttp//tms. widelive.com/index. 
wml?id=820554ad0a1705572711&first=true¡C C Ringtone¡\r\nspam\tGet your garden ready for summer with a FREE selection of summer bulbs and seeds worth £33:50 only with The Scotsman this Saturday. To stop go2 notxt.co.uk\r\nspam\tURGENT! Last weekend's draw shows that you have won £1000 cash or a Spanish holiday! CALL NOW 09050000332 to claim. T&C: RSTM, SW7 3SS. 150ppm\r\nham\tOk lor.\r\nham\tI thought slide is enough.\r\nham\tYup\r\nham\tWell obviously not because all the people in my cool college life went home ;_;\r\nham\tOk lor ü reaching then message me.\r\nham\tWhere's mummy's boy ? Is he being good or bad ? Is he being positive or negative ? Why is mummy being made to wait? Hmmmm?\r\nham\tDhoni have luck to win some big title.so we will win:)\r\nham\tYes princess! I want to please you every night. Your wish is my command...\r\nham\tWhat Today-sunday..sunday is holiday..so no work..\r\nham\tNo probably <#> %.\r\nham\tReally do hope the work doesnt get stressful. Have a gr8 day.\r\nham\tHave you seen who's back at Holby?!\r\nham\tShall call now dear having food\r\nspam\tURGENT We are trying to contact you Last weekends draw shows u have won a £1000 prize GUARANTEED Call 09064017295 Claim code K52 Valid 12hrs 150p pm\r\nham\tSo li hai... Me bored now da lecturer repeating last weeks stuff waste time... \r\nham\t, , and picking them up from various points | going 2 yeovil | and they will do the motor project 4 3 hours | and then u take them home. || 12 2 5.30 max. || Very easy\r\nham\tAlso fuck you and your family for going to rhode island or wherever the fuck and leaving me all alone the week I have a new bong >:(\r\nham\tOfcourse I also upload some songs\r\nspam\t2p per min to call Germany 08448350055 from your BT line. Just 2p per min. Check PlanetTalkInstant.com for info & T's & C's. Text stop to opt out\r\nham\tK. I will sent it again\r\nham\tOh thanks a lot..i already bought 2 eggs ..\r\nham\tK. I will sent it again\r\nham\tU studying in sch or going home? Anyway i'll b going 2 sch later.\r\nspam\tMarvel Mobile Play the official Ultimate Spider-man game (£4.50) on ur mobile right now. Text SPIDER to 83338 for the game & we ll send u a FREE 8Ball wallpaper\r\nham\tI think if he rule tamilnadu..then its very tough for our people.\r\nham\tCool, we shall go and see, have to go to tip anyway. Are you at home, got something to drop in later? So lets go to town tonight! Maybe mum can take us in.\r\nham\tGood afternoon, my love ... How goes your day ? How did you sleep ? I hope your well, my boytoy ... I think of you ...\r\nham\tYes... I trust u to buy new stuff ASAP so I can try it out\r\nspam\tSMS SERVICES. for your inclusive text credits, pls goto www.comuk.net login= 3qxj9 unsubscribe with STOP, no extra charge. help 08702840625.COMUK. 220-CM2 9AE\r\nham\tWhy did I wake up on my own >:(\r\nham\tNow get step 2 outta the way. Congrats again.\r\nham\tLove has one law; Make happy the person you love. In the same way friendship has one law; Never make ur friend feel alone until you are alive.... Gud night\r\nspam\tPRIVATE! Your 2003 Account Statement for 07808247860 shows 800 un-redeemed S. I. M. points. Call 08719899229 Identifier Code: 40411 Expires 06/11/04\r\nham\tApo all other are mokka players only\r\nham\tPerhaps * is much easy give your account identification, so i will tomorrow at UNI\r\nham\tWait . I will msg after <#> min.\r\nham\tWhat i told before i tell. Stupid hear after i wont tell anything to you. You dad called to my brother and spoken. 
Not with me.\r\nham\tGod's love has no limit. God's grace has no measure. God's power has no boundaries. May u have God's endless blessings always in ur life...!! Gud ni8\r\nham\tI want to be inside you every night...\r\nham\tMachan you go to gym tomorrow, i wil come late goodnight.\r\nham\tLol they were mad at first but then they woke up and gave in.\r\nham\tI went to project centre\r\nham\tIt‘s reassuring, in this crazy world.\r\nham\tJust making dinner, you ?\r\nham\tYes. Please leave at <#> . So that at <#> we can leave\r\nham\tOh... Okie lor...We go on sat... \r\nham\tYou are a great role model. You are giving so much and i really wish each day for a miracle but God as a reason for everything and i must say i wish i knew why but i dont. I've looked up to you since i was young and i still do. Have a great day.\r\nham\tYa, i'm referin to mei's ex wat... No ah, waitin 4 u to treat, somebody shld b rich liao...So gd, den u dun have to work frm tmr onwards...\r\nham\tMiles and smiles r made frm same letters but do u know d difference..? smile on ur face keeps me happy even though I am miles away from u.. :-)keep smiling.. Good nyt\r\nham\tBy the way, i've put a skip right outside the front of the house so you can see which house it is. Just pull up before it.\r\nham\tCan you pls send me that company name. In saibaba colany\r\nham\tNo. I dont want to hear anything\r\nham\tYou are a big chic. Common. Declare\r\nham\tThats cool. I want to please you...\r\nham\tGoing to join tomorrow.\r\nspam\tYou are awarded a SiPix Digital Camera! call 09061221061 from landline. Delivery within 28days. T Cs Box177. M221BP. 2yr warranty. 150ppm. 16 . p p£3.99\r\nham\tI want to tell you how bad I feel that basically the only times I text you lately are when I need drugs\r\nspam\tPRIVATE! Your 2003 Account Statement for shows 800 un-redeemed S.I.M. points. Call 08718738001 Identifier Code: 49557 Expires 26/11/04\r\nham\tTotal disappointment, when I texted you was the craziest shit got :(\r\nham\tIts just the effect of irritation. Just ignore it\r\nham\tWhat about this one then.\r\nham\tI think that tantrum's finished so yeah I'll be by at some point\r\nham\tCompliments to you. Was away from the system. How your side.\r\nham\thappened here while you were adventuring\r\nham\tHey chief, can you give me a bell when you get this. Need to talk to you about this royal visit on the 1st june. \r\nham\tOk which your another number\r\nham\tI know you are thinkin malaria. But relax, children cant handle malaria. She would have been worse and its gastroenteritis. If she takes enough to replace her loss her temp will reduce. And if you give her malaria meds now she will just vomit. Its a self limiting illness she has which means in a few days it will completely stop\r\nham\tAiyah ok wat as long as got improve can already wat...\r\nspam\tWant explicit SEX in 30 secs? Ring 02073162414 now! Costs 20p/min Gsex POBOX 2667 WC1N 3XX\r\nham\tI can't believe how attached I am to seeing you every day. I know you will do the best you can to get to me babe. I will go to teach my class at your midnight\r\nham\tJust sleeping..and surfing\r\nspam\tASKED 3MOBILE IF 0870 CHATLINES INCLU IN FREE MINS. INDIA CUST SERVs SED YES. L8ER GOT MEGA BILL. 3 DONT GIV A SHIT. BAILIFF DUE IN DAYS. 
I O £250 3 WANT £800\r\nham\tYeah it's jus rite...\r\nham\tArmand says get your ass over to epsilon\r\nham\tU still havent got urself a jacket ah?\r\nham\tI'm taking derek & taylor to walmart, if I'm not back by the time you're done just leave the mouse on my desk and I'll text you when priscilla's ready\r\nham\tHi its in durban are you still on this number\r\nham\tIc. There are a lotta childporn cars then.\r\nspam\tHad your contract mobile 11 Mnths? Latest Motorola, Nokia etc. all FREE! Double Mins & Text on Orange tariffs. TEXT YES for callback, no to remove from records.\r\nham\tNo, I was trying it all weekend ;V\r\nham\tYou know, wot people wear. T shirts, jumpers, hat, belt, is all we know. We r at Cribbs\r\nham\tCool, what time you think you can get here?\r\nham\tWen did you get so spiritual and deep. That's great\r\nham\tHave a safe trip to Nigeria. Wish you happiness and very soon company to share moments with\r\nham\tHahaha..use your brain dear\r\nham\tWell keep in mind I've only got enough gas for one more round trip barring a sudden influx of cash\r\nham\tYeh. Indians was nice. Tho it did kane me off a bit he he. We shud go out 4 a drink sometime soon. Mite hav 2 go 2 da works 4 a laugh soon. Love Pete x x\r\nham\tYes i have. So that's why u texted. Pshew...missing you so much\r\nham\tNo. I meant the calculation is the same. That <#> units at <#> . This school is really expensive. Have you started practicing your accent. Because its important. And have you decided if you are doing 4years of dental school or if you'll just do the nmde exam.\r\nham\tSorry, I'll call later\r\nham\tif you aren't here in the next <#> hours imma flip my shit\r\nham\tAnything lor. Juz both of us lor.\r\nham\tGet me out of this dump heap. My mom decided to come to lowes. BORING.\r\nham\tOk lor... Sony ericsson salesman... I ask shuhui then she say quite gd 2 use so i considering...\r\nham\tArd 6 like dat lor.\r\nham\tWhy don't you wait 'til at least wednesday to see if you get your .\r\nham\tHuh y lei...\r\nspam\tREMINDER FROM O2: To get 2.50 pounds free call credit and details of great offers pls reply 2 this text with your valid name, house no and postcode\r\nspam\tThis is the 2nd time we have tried 2 contact u. U have won the £750 Pound prize. 2 claim is easy, call 087187272008 NOW1! Only 10p per minute. BT-national-rate.\r\nham\tWill ü b going to esplanade fr home?\r\nham\tPity, * was in mood for that. So...any other suggestions?\r\nham\tThe guy did some bitching but I acted like i'd be interested in buying something else next week and he gave it to us for free\r\nham\tRofl. Its true to its name\r\n"
],
[
"import pandas as pd\n# Dataset available using filepath 'smsspamcollection/SMSSpamCollection'\ndf = pd.read_table(\"smsspamcollection/SMSSpamCollection\", names=['label', 'sms_message'] )\n\n# Output printing out first 5 rows\ndf[:5]",
"_____no_output_____"
]
],
[
[
"### Step 1.2: Data Preprocessing ###\n\nNow that we have a basic understanding of what our dataset looks like, let's convert our labels to binary variables, 0 to represent 'ham'(i.e. not spam) and 1 to represent 'spam' for ease of computation. \n\nYou might be wondering why do we need to do this step? The answer to this lies in how scikit-learn handles inputs. Scikit-learn only deals with numerical values and hence if we were to leave our label values as strings, scikit-learn would do the conversion internally(more specifically, the string labels will be cast to unknown float values). \n\nOur model would still be able to make predictions if we left our labels as strings but we could have issues later when calculating performance metrics, for example when calculating our precision and recall scores. Hence, to avoid unexpected 'gotchas' later, it is good practice to have our categorical values be fed into our model as integers. ",
"_____no_output_____"
],
[
">**Instructions:**\n* Convert the values in the 'label' column to numerical values using map method as follows:\n{'ham':0, 'spam':1} This maps the 'ham' value to 0 and the 'spam' value to 1.\n* Also, to get an idea of the size of the dataset we are dealing with, print out number of rows and columns using \n'shape'.",
"_____no_output_____"
]
],
[
[
"'''\nSolution\n'''\ndf['names'] = df.label.map(lambda x: 1 if x == \"spam\" else 0)\n",
"_____no_output_____"
]
],
[
[
"### Step 2.1: Bag of Words ###\n\nWhat we have here in our data set is a large collection of text data (5,572 rows of data). Most ML algorithms rely on numerical data to be fed into them as input, and email/sms messages are usually text heavy. \n\nHere we'd like to introduce the Bag of Words (BoW) concept which is a term used to specify the problems that have a 'bag of words' or a collection of text data that needs to be worked with. The basic idea of BoW is to take a piece of text and count the frequency of the words in that text. It is important to note that the BoW concept treats each word individually and the order in which the words occur does not matter. \n\nUsing a process which we will go through now, we can convert a collection of documents to a matrix, with each document being a row and each word (token) being the column, and the corresponding (row, column) values being the frequency of occurrence of each word or token in that document.\n\nFor example: \n\nLet's say we have 4 documents, which are text messages\nin our case, as follows:\n\n`['Hello, how are you!',\n'Win money, win from home.',\n'Call me now',\n'Hello, Call you tomorrow?']`\n\nOur objective here is to convert this set of texts to a frequency distribution matrix, as follows:\n\n<img src=\"images/countvectorizer.png\" height=\"542\" width=\"542\">\n\nHere as we can see, the documents are numbered in the rows, and each word is a column name, with the corresponding value being the frequency of that word in the document.\n\nLet's break this down and see how we can do this conversion using a small set of documents.\n\nTo handle this, we will be using sklearn's \n[count vectorizer](http://scikit-learn.org/stable/modules/generated/sklearn.feature_extraction.text.CountVectorizer.html#sklearn.feature_extraction.text.CountVectorizer) method which does the following:\n\n* It tokenizes the string (separates the string into individual words) and gives an integer ID to each token.\n* It counts the occurrence of each of those tokens.\n\n**Please Note:** \n\n* The CountVectorizer method automatically converts all tokenized words to their lower case form so that it does not treat words like 'He' and 'he' differently. It does this using the `lowercase` parameter which is by default set to `True`.\n\n* It also ignores all punctuation so that words followed by a punctuation mark (for example: 'hello!') are not treated differently than the same words not prefixed or suffixed by a punctuation mark (for example: 'hello'). It does this using the `token_pattern` parameter which has a default regular expression which selects tokens of 2 or more alphanumeric characters.\n\n* The third parameter to take note of is the `stop_words` parameter. Stop words refer to the most commonly used words in a language. They include words like 'am', 'an', 'and', 'the', etc. By setting this parameter value to `english`, CountVectorizer will automatically ignore all words (from our input text) that are found in the built in list of English stop words in scikit-learn. This is extremely helpful as stop words can skew our calculations when we are trying to find certain key words that are indicative of spam.\n\nWe will dive into the application of each of these into our model in a later step, but for now it is important to be aware of such preprocessing techniques available to us when dealing with textual data.",
"_____no_output_____"
],
[
"### Step 2.2: Implementing Bag of Words from scratch ###\n\nBefore we dive into scikit-learn's Bag of Words (BoW) library to do the dirty work for us, let's implement it ourselves first so that we can understand what's happening behind the scenes. \n\n**Step 1: Convert all strings to their lower case form.**\n\nLet's say we have a document set:\n\n```\ndocuments = ['Hello, how are you!',\n 'Win money, win from home.',\n 'Call me now.',\n 'Hello, Call hello you tomorrow?']\n```\n>>**Instructions:**\n* Convert all the strings in the documents set to their lower case. Save them into a list called 'lower_case_documents'. You can convert strings to their lower case in python by using the lower() method.\n",
"_____no_output_____"
]
],
[
[
"'''\nSolution:\n'''\ndocuments = ['Hello, how are you!',\n 'Win money, win from home.',\n 'Call me now.',\n 'Hello, Call hello you tomorrow?']\n\nlower_case_documents = [w.lower() for w in documents]\n\n \nprint(lower_case_documents)",
"['hello, how are you!', 'win money, win from home.', 'call me now.', 'hello, call hello you tomorrow?']\n"
]
],
[
[
"**Step 2: Removing all punctuation**\n\n>>**Instructions:**\nRemove all punctuation from the strings in the document set. Save the strings into a list called \n'sans_punctuation_documents'. ",
"_____no_output_____"
]
],
[
[
"'''\nSolution:\n'''\npunctuation = \",.?!\"\nimport string\n\nsans_punctuation_documents = [w.translate({ord(c): None for c in \".,_!?\"})for w in lower_case_documents]\n\n\n\n \nprint(sans_punctuation_documents)",
"['hello how are you', 'win money win from home', 'call me now', 'hello call hello you tomorrow']\n"
]
],
[
[
"**Step 3: Tokenization**\n\nTokenizing a sentence in a document set means splitting up the sentence into individual words using a delimiter. The delimiter specifies what character we will use to identify the beginning and end of a word. Most commonly, we use a single space as the delimiter character for identifying words, and this is true in our documents in this case also.",
"_____no_output_____"
],
[
">>**Instructions:**\nTokenize the strings stored in 'sans_punctuation_documents' using the split() method. Store the final document set \nin a list called 'preprocessed_documents'.\n",
"_____no_output_____"
]
],
[
[
"'''\nSolution:\n'''\nimport itertools\npreprocessed_documents = [w.split() for w in sans_punctuation_documents]\npreprocessed_documents = list(itertools.chain(*preprocessed_documents))\n\n\nprint(preprocessed_documents)",
"['hello', 'how', 'are', 'you', 'win', 'money', 'win', 'from', 'home', 'call', 'me', 'now', 'hello', 'call', 'hello', 'you', 'tomorrow']\n"
]
],
[
[
"**Step 4: Count frequencies**\n\nNow that we have our document set in the required format, we can proceed to counting the occurrence of each word in each document of the document set. We will use the `Counter` method from the Python `collections` library for this purpose. \n\n`Counter` counts the occurrence of each item in the list and returns a dictionary with the key as the item being counted and the corresponding value being the count of that item in the list. ",
"_____no_output_____"
],
[
">>**Instructions:**\nUsing the Counter() method and preprocessed_documents as the input, create a dictionary with the keys being each word in each document and the corresponding values being the frequency of occurrence of that word. Save each Counter dictionary as an item in a list called 'frequency_list'.\n",
"_____no_output_____"
]
],
[
[
"'''\nSolution\n'''\nfrequency_list = []\nimport pprint\nfrom collections import Counter\n\nfrequency_list = Counter(preprocessed_documents)\n\n \npprint.pprint(frequency_list)",
"Counter({'hello': 3,\n 'you': 2,\n 'win': 2,\n 'call': 2,\n 'how': 1,\n 'are': 1,\n 'money': 1,\n 'from': 1,\n 'home': 1,\n 'me': 1,\n 'now': 1,\n 'tomorrow': 1})\n"
]
],
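[
[
"'''\nOptional side note (not part of the graded solution): the instructions above describe keeping one Counter per document rather than a single Counter for the whole corpus. A minimal sketch of that per-document variant, reusing 'sans_punctuation_documents' from the earlier step, is shown below for comparison.\n'''\nfrom collections import Counter\nimport pprint\n\n# One token list per document (no flattening), then one Counter per document.\nper_document_tokens = [doc.split() for doc in sans_punctuation_documents]\nper_document_counts = [Counter(tokens) for tokens in per_document_tokens]\n\npprint.pprint(per_document_counts)",
"_____no_output_____"
]
],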
[
[
"Congratulations! You have implemented the Bag of Words process from scratch! As we can see in our previous output, we have a frequency distribution dictionary which gives a clear view of the text that we are dealing with.\n\nWe should now have a solid understanding of what is happening behind the scenes in the `sklearn.feature_extraction.text.CountVectorizer` method of scikit-learn. \n\nWe will now implement `sklearn.feature_extraction.text.CountVectorizer` method in the next step.",
"_____no_output_____"
],
[
"### Step 2.3: Implementing Bag of Words in scikit-learn ###\n\nNow that we have implemented the BoW concept from scratch, let's go ahead and use scikit-learn to do this process in a clean and succinct way. We will use the same document set as we used in the previous step. ",
"_____no_output_____"
]
],
[
[
"'''\nHere we will look to create a frequency matrix on a smaller document set to make sure we understand how the \ndocument-term matrix generation happens. We have created a sample document set 'documents'.\n'''\ndocuments = ['Hello, how are you!',\n 'Win money, win from home.',\n 'Call me now.',\n 'Hello, Call hello you tomorrow?']",
"_____no_output_____"
]
],
[
[
">>**Instructions:**\nImport the sklearn.feature_extraction.text.CountVectorizer method and create an instance of it called 'count_vector'. ",
"_____no_output_____"
]
],
[
[
"'''\nSolution\n'''\nfrom sklearn.feature_extraction.text import CountVectorizer\ncount_vector = CountVectorizer(documents)\ncount_vector",
"_____no_output_____"
]
],
[
[
"**Data preprocessing with CountVectorizer()**\n\nIn Step 2.2, we implemented a version of the CountVectorizer() method from scratch that entailed cleaning our data first. This cleaning involved converting all of our data to lower case and removing all punctuation marks. CountVectorizer() has certain parameters which take care of these steps for us. They are:\n\n* `lowercase = True`\n \n The `lowercase` parameter has a default value of `True` which converts all of our text to its lower case form.\n\n\n* `token_pattern = (?u)\\\\b\\\\w\\\\w+\\\\b`\n \n The `token_pattern` parameter has a default regular expression value of `(?u)\\\\b\\\\w\\\\w+\\\\b` which ignores all punctuation marks and treats them as delimiters, while accepting alphanumeric strings of length greater than or equal to 2, as individual tokens or words.\n\n\n* `stop_words`\n\n The `stop_words` parameter, if set to `english` will remove all words from our document set that match a list of English stop words defined in scikit-learn. Considering the small size of our dataset and the fact that we are dealing with SMS messages and not larger text sources like e-mail, we will not use stop words, and we won't be setting this parameter value.\n\nYou can take a look at all the parameter values of your `count_vector` object by simply printing out the object as follows:",
"_____no_output_____"
]
],
[
[
"'''\nPractice node:\nPrint the 'count_vector' object which is an instance of 'CountVectorizer()'\n'''\n# No need to revise this code\nprint(count_vector)",
"CountVectorizer(analyzer='word', binary=False, decode_error='strict',\n dtype=<class 'numpy.int64'>, encoding='utf-8',\n input=['Hello, how are you!', 'Win money, win from home.', 'Call me now.', 'Hello, Call hello you tomorrow?'],\n lowercase=True, max_df=1.0, max_features=None, min_df=1,\n ngram_range=(1, 1), preprocessor=None, stop_words=None,\n strip_accents=None, token_pattern='(?u)\\\\b\\\\w\\\\w+\\\\b',\n tokenizer=None, vocabulary=None)\n"
]
],
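[
[
"'''\nOptional illustration (not required for the exercise): a quick look at how the stop_words parameter changes the learned vocabulary on our small documents set. This is only a sketch to make the parameter's effect concrete; the rest of the notebook keeps the default (no stop word removal).\n'''\nfrom sklearn.feature_extraction.text import CountVectorizer\n\ndefault_vectorizer = CountVectorizer()\nstopword_vectorizer = CountVectorizer(stop_words='english')\n\ndefault_vectorizer.fit(documents)\nstopword_vectorizer.fit(documents)\n\n# Common English words such as 'how', 'are', 'you', 'from', 'me' and 'now' drop out of the\n# vocabulary when stop_words='english' is used.\nprint('Default vocabulary: ', default_vectorizer.get_feature_names())\nprint('With English stop words:', stopword_vectorizer.get_feature_names())",
"_____no_output_____"
]
],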
[
[
">>**Instructions:**\nFit your document dataset to the CountVectorizer object you have created using fit(), and get the list of words \nwhich have been categorized as features using the get_feature_names() method.",
"_____no_output_____"
]
],
[
[
"'''\nSolution:\n'''\n# No need to revise this code\ncount_vector.fit(documents)\ncount_vector.get_feature_names()",
"_____no_output_____"
]
],
[
[
"The `get_feature_names()` method returns our feature names for this dataset, which is the set of words that make up our vocabulary for 'documents'.",
"_____no_output_____"
],
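[
"'''\nOptional illustration: besides get_feature_names(), the fitted CountVectorizer also exposes a vocabulary_ attribute that maps each word to its column index in the document-term matrix. This is only a quick peek to connect the feature names to the matrix columns built in the next step.\n'''\n# Each key is a word from the vocabulary; each value is the column index it occupies.\nprint(count_vector.vocabulary_)",
"_____no_output_____"
],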
[
">>**Instructions:**\nCreate a matrix with each row representing one of the 4 documents, and each column representing a word (feature name). \nEach value in the matrix will represent the frequency of the word in that column occurring in the particular document in that row. \nYou can do this using the transform() method of CountVectorizer, passing in the document data set as the argument. The transform() method returns a matrix of NumPy integers, which you can convert to an array using\ntoarray(). Call the array 'doc_array'.\n",
"_____no_output_____"
]
],
[
[
"'''\nSolution\n'''\ndoc_array = [d for d in documents]\ndoc_array",
"_____no_output_____"
]
],
[
[
"Now we have a clean representation of the documents in terms of the frequency distribution of the words in them. To make it easier to understand our next step is to convert this array into a dataframe and name the columns appropriately.",
"_____no_output_____"
],
[
">>**Instructions:**\nConvert the 'doc_array' we created into a dataframe, with the column names as the words (feature names). Call the dataframe 'frequency_matrix'.\n",
"_____no_output_____"
]
],
[
[
"'''\nSolution\n'''\nimport pandas as pd\nfrequency_matrix = pd.DataFrame(doc_array)\nfrequency_matrix",
"_____no_output_____"
]
],
[
[
"Congratulations! You have successfully implemented a Bag of Words problem for a document dataset that we created. \n\nOne potential issue that can arise from using this method is that if our dataset of text is extremely large (say if we have a large collection of news articles or email data), there will be certain values that are more common than others simply due to the structure of the language itself. For example, words like 'is', 'the', 'an', pronouns, grammatical constructs, etc., could skew our matrix and affect our analyis. \n\nThere are a couple of ways to mitigate this. One way is to use the `stop_words` parameter and set its value to `english`. This will automatically ignore all the words in our input text that are found in a built-in list of English stop words in scikit-learn.\n\nAnother way of mitigating this is by using the [tfidf](http://scikit-learn.org/stable/modules/generated/sklearn.feature_extraction.text.TfidfVectorizer.html#sklearn.feature_extraction.text.TfidfVectorizer) method. This method is out of scope for the context of this lesson.",
"_____no_output_____"
],
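[
"'''\nPurely for reference (tf-idf itself stays out of scope for this lesson): a minimal sketch of how TfidfVectorizer could be swapped in for CountVectorizer on the same toy documents set. Instead of raw counts it produces weights that down-weight words which appear in many documents.\n'''\nfrom sklearn.feature_extraction.text import TfidfVectorizer\n\ntfidf_vectorizer = TfidfVectorizer()\ntfidf_matrix = tfidf_vectorizer.fit_transform(documents)\n\nprint(tfidf_vectorizer.get_feature_names())\nprint(tfidf_matrix.toarray().round(2))",
"_____no_output_____"
],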
[
"### Step 3.1: Training and testing sets ###\n\nNow that we understand how to use the Bag of Words approach, we can return to our original, larger UCI dataset and proceed with our analysis. Our first step is to split our dataset into a training set and a testing set so we can first train, and then test our model. ",
"_____no_output_____"
],
[
"\n>>**Instructions:**\nSplit the dataset into a training and testing set using the train_test_split method in sklearn, and print out the number of rows we have in each of our training and testing data. Split the data\nusing the following variables:\n* `X_train` is our training data for the 'sms_message' column.\n* `y_train` is our training data for the 'label' column\n* `X_test` is our testing data for the 'sms_message' column.\n* `y_test` is our testing data for the 'label' column. \n",
"_____no_output_____"
]
],
[
[
"'''\nSolution \n'''\n# split into training and testing sets\n\nfrom sklearn.model_selection import train_test_split\n\nX_train, X_test, y_train, y_test = train_test_split(df['sms_message'], \n df['label'], \n random_state=1)\n\nprint('Number of rows in the total set: {}'.format(df.shape[0]))\nprint('Number of rows in the training set: {}'.format(X_train.shape[0]))\nprint('Number of rows in the test set: {}'.format(X_test.shape[0]))",
"_____no_output_____"
]
],
[
[
"### Step 3.2: Applying Bag of Words processing to our dataset. ###\n\nNow that we have split the data, our next objective is to follow the steps from \"Step 2: Bag of Words,\" and convert our data into the desired matrix format. To do this we will be using CountVectorizer() as we did before. There are two steps to consider here:\n\n* First, we have to fit our training data (`X_train`) into `CountVectorizer()` and return the matrix.\n* Secondly, we have to transform our testing data (`X_test`) to return the matrix. \n\nNote that `X_train` is our training data for the 'sms_message' column in our dataset and we will be using this to train our model. \n\n`X_test` is our testing data for the 'sms_message' column and this is the data we will be using (after transformation to a matrix) to make predictions on. We will then compare those predictions with `y_test` in a later step. \n\nFor now, we have provided the code that does the matrix transformations for you!",
"_____no_output_____"
]
],
[
[
"'''\n[Practice Node]\n\nThe code for this segment is in 2 parts. First, we are learning a vocabulary dictionary for the training data \nand then transforming the data into a document-term matrix; secondly, for the testing data we are only \ntransforming the data into a document-term matrix.\n\nThis is similar to the process we followed in Step 2.3.\n\nWe will provide the transformed data to students in the variables 'training_data' and 'testing_data'.\n'''",
"_____no_output_____"
],
[
"'''\nSolution\n'''\n# Instantiate the CountVectorizer method\ncount_vector = CountVectorizer()\n\n# Fit the training data and then return the matrix\ntraining_data = count_vector.fit_transform(X_train)\n\n# Transform testing data and return the matrix. Note we are not fitting the testing data into the CountVectorizer()\ntesting_data = count_vector.transform(X_test)",
"_____no_output_____"
]
],
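[
[
"'''\nOptional sanity check (not part of the provided code): a quick look at the shapes of the document-term matrices we just built. The row counts should match the number of messages in each split, and both matrices should share the same vocabulary size (number of columns).\n'''\nprint('training_data shape:', training_data.shape)\nprint('testing_data shape: ', testing_data.shape)\nprint('vocabulary size:    ', len(count_vector.get_feature_names()))",
"_____no_output_____"
]
],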
[
[
"### Step 4.1: Bayes Theorem implementation from scratch ###\n\nNow that we have our dataset in the format that we need, we can move onto the next portion of our mission which is the algorithm we will use to make our predictions to classify a message as spam or not spam. Remember that at the start of the mission we briefly discussed the Bayes theorem but now we shall go into a little more detail. In layman's terms, the Bayes theorem calculates the probability of an event occurring, based on certain other probabilities that are related to the event in question. It is composed of \"prior probabilities\" - or just \"priors.\" These \"priors\" are the probabilities that we are aware of, or that are given to us. And Bayes theorem is also composed of the \"posterior probabilities,\" or just \"posteriors,\" which are the probabilities we are looking to compute using the \"priors\". \n\nLet us implement the Bayes Theorem from scratch using a simple example. Let's say we are trying to find the odds of an individual having diabetes, given that he or she was tested for it and got a positive result. \nIn the medical field, such probabilities play a very important role as they often deal with life and death situations. \n\nWe assume the following:\n\n`P(D)` is the probability of a person having Diabetes. Its value is `0.01`, or in other words, 1% of the general population has diabetes (disclaimer: these values are assumptions and are not reflective of any actual medical study).\n\n`P(Pos)` is the probability of getting a positive test result.\n\n`P(Neg)` is the probability of getting a negative test result.\n\n`P(Pos|D)` is the probability of getting a positive result on a test done for detecting diabetes, given that you have diabetes. This has a value `0.9`. In other words the test is correct 90% of the time. This is also called the Sensitivity or True Positive Rate.\n\n`P(Neg|~D)` is the probability of getting a negative result on a test done for detecting diabetes, given that you do not have diabetes. This also has a value of `0.9` and is therefore correct, 90% of the time. This is also called the Specificity or True Negative Rate.\n\nThe Bayes formula is as follows:\n\n<img src=\"images/bayes_formula.png\" height=\"242\" width=\"242\">\n\n* `P(A)` is the prior probability of A occurring independently. In our example this is `P(D)`. This value is given to us.\n\n* `P(B)` is the prior probability of B occurring independently. In our example this is `P(Pos)`.\n\n* `P(A|B)` is the posterior probability that A occurs given B. In our example this is `P(D|Pos)`. That is, **the probability of an individual having diabetes, given that this individual got a positive test result. This is the value that we are looking to calculate.**\n\n* `P(B|A)` is the prior probability of B occurring, given A. In our example this is `P(Pos|D)`. This value is given to us.",
"_____no_output_____"
],
[
"Putting our values into the formula for Bayes theorem we get:\n\n`P(D|Pos) = P(D) * P(Pos|D) / P(Pos)`\n\nThe probability of getting a positive test result `P(Pos)` can be calculated using the Sensitivity and Specificity as follows:\n\n`P(Pos) = [P(D) * Sensitivity] + [P(~D) * (1-Specificity))]`",
"_____no_output_____"
]
],
[
[
"'''\nInstructions:\nCalculate probability of getting a positive test result, P(Pos)\n'''",
"_____no_output_____"
],
[
"'''\nSolution (skeleton code will be provided)\n'''\n# P(D)\np_diabetes = 0.01\n\n# P(~D)\np_no_diabetes = 0.99\n\n# Sensitivity or P(Pos|D)\np_pos_diabetes = 0.9\n\n# Specificity or P(Neg|~D)\np_neg_no_diabetes = 0.9\n\n# P(Pos)\np_pos = # TODO\nprint('The probability of getting a positive test result P(Pos) is: {}',format(p_pos))",
"_____no_output_____"
]
],
[
[
"**Using all of this information we can calculate our posteriors as follows:**\n \nThe probability of an individual having diabetes, given that, that individual got a positive test result:\n\n`P(D|Pos) = (P(D) * Sensitivity)) / P(Pos)`\n\nThe probability of an individual not having diabetes, given that, that individual got a positive test result:\n\n`P(~D|Pos) = (P(~D) * (1-Specificity)) / P(Pos)`\n\nThe sum of our posteriors will always equal `1`. ",
"_____no_output_____"
]
],
[
[
"'''\nInstructions:\nCompute the probability of an individual having diabetes, given that, that individual got a positive test result.\nIn other words, compute P(D|Pos).\n\nThe formula is: P(D|Pos) = (P(D) * P(Pos|D) / P(Pos)\n'''",
"_____no_output_____"
],
[
"'''\nSolution\n'''\n# P(D|Pos)\np_diabetes_pos = # TODO\nprint('Probability of an individual having diabetes, given that that individual got a positive test result is:\\\n',format(p_diabetes_pos)) ",
"_____no_output_____"
],
[
"'''\nInstructions:\nCompute the probability of an individual not having diabetes, given that, that individual got a positive test result.\nIn other words, compute P(~D|Pos).\n\nThe formula is: P(~D|Pos) = P(~D) * P(Pos|~D) / P(Pos)\n\nNote that P(Pos|~D) can be computed as 1 - P(Neg|~D). \n\nTherefore:\nP(Pos|~D) = p_pos_no_diabetes = 1 - 0.9 = 0.1\n'''",
"_____no_output_____"
],
[
"'''\nSolution\n'''\n# P(Pos|~D)\np_pos_no_diabetes = 0.1\n\n# P(~D|Pos)\np_no_diabetes_pos = # TODO\nprint 'Probability of an individual not having diabetes, given that that individual got a positive test result is:'\\\n,p_no_diabetes_pos",
"_____no_output_____"
]
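,
[
"'''\nSanity check (added illustration, not part of the original exercise): as noted\nabove, the two posteriors should sum to 1. Assumes p_diabetes_pos and\np_no_diabetes_pos were computed in the cells above.\n'''\nprint('Sum of posteriors: {}'.format(p_diabetes_pos + p_no_diabetes_pos))",
"_____no_output_____"
]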
],
[
[
"Congratulations! You have implemented Bayes Theorem from scratch. Your analysis shows that even if you get a positive test result, there is only an 8.3% chance that you actually have diabetes and a 91.67% chance that you do not have diabetes. This is of course assuming that only 1% of the entire population has diabetes which is only an assumption.",
"_____no_output_____"
],
[
"**What does the term 'Naive' in 'Naive Bayes' mean ?** \n\nThe term 'Naive' in Naive Bayes comes from the fact that the algorithm considers the features that it is using to make the predictions to be independent of each other, which may not always be the case. So in our Diabetes example, we are considering only one feature, that is the test result. Say we added another feature, 'exercise'. Let's say this feature has a binary value of `0` and `1`, where the former signifies that the individual exercises less than or equal to 2 days a week and the latter signifies that the individual exercises greater than or equal to 3 days a week. If we had to use both of these features, namely the test result and the value of the 'exercise' feature, to compute our final probabilities, Bayes' theorem would fail. Naive Bayes' is an extension of Bayes' theorem that assumes that all the features are independent of each other. ",
"_____no_output_____"
],
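[
"To make that assumption concrete (an added illustration, using a hypothetical quantity for the exercise feature): if we also knew `P(Ex|D)`, the probability that a person with diabetes exercises 3 or more days a week, the naive independence assumption would let us write the joint likelihood as a simple product,\n\n`P(Pos, Ex|D) = P(Pos|D) * P(Ex|D)`\n\nso the posterior becomes proportional to `P(D) * P(Pos|D) * P(Ex|D)`. Without the assumption we would need the joint probability `P(Pos, Ex|D)` itself, which is usually much harder to estimate.",
"_____no_output_____"
],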
[
"### Step 4.2: Naive Bayes implementation from scratch ###\n\n",
"_____no_output_____"
],
[
"Now that you have understood the ins and outs of Bayes Theorem, we will extend it to consider cases where we have more than one feature. \n\nLet's say that we have two political parties' candidates, 'Jill Stein' of the Green Party and 'Gary Johnson' of the Libertarian Party and we have the probabilities of each of these candidates saying the words 'freedom', 'immigration' and 'environment' when they give a speech:\n\n* Probability that Jill Stein says 'freedom': 0.1 ---------> `P(F|J)`\n* Probability that Jill Stein says 'immigration': 0.1 -----> `P(I|J)`\n* Probability that Jill Stein says 'environment': 0.8 -----> `P(E|J)`\n\n\n* Probability that Gary Johnson says 'freedom': 0.7 -------> `P(F|G)`\n* Probability that Gary Johnson says 'immigration': 0.2 ---> `P(I|G)`\n* Probability that Gary Johnson says 'environment': 0.1 ---> `P(E|G)`\n\n\nAnd let us also assume that the probability of Jill Stein giving a speech, `P(J)` is `0.5` and the same for Gary Johnson, `P(G) = 0.5`. \n\n\nGiven this, what if we had to find the probabilities of Jill Stein saying the words 'freedom' and 'immigration'? This is where the Naive Bayes' theorem comes into play as we are considering two features, 'freedom' and 'immigration'.\n\nNow we are at a place where we can define the formula for the Naive Bayes' theorem:\n\n<img src=\"images/naivebayes.png\" height=\"342\" width=\"342\">\n\nHere, `y` is the class variable (in our case the name of the candidate) and `x1` through `xn` are the feature vectors (in our case the individual words). The theorem makes the assumption that each of the feature vectors or words (`xi`) are independent of each other.",
"_____no_output_____"
],
[
"To break this down, we have to compute the following posterior probabilities:\n\n* `P(J|F,I)`: Given the words 'freedom' and 'immigration' were said, what's the probability they were said by Jill?\n\n Using the formula and our knowledge of Bayes' theorem, we can compute this as follows: `P(J|F,I)` = `(P(J) * P(F|J) * P(I|J)) / P(F,I)`. Here `P(F,I)` is the probability of the words 'freedom' and 'immigration' being said in a speech.\n \n\n* `P(G|F,I)`: Given the words 'freedom' and 'immigration' were said, what's the probability they were said by Gary?\n \n Using the formula, we can compute this as follows: `P(G|F,I)` = `(P(G) * P(F|G) * P(I|G)) / P(F,I)`",
"_____no_output_____"
]
],
[
[
"'''\nInstructions: Compute the probability of the words 'freedom' and 'immigration' being said in a speech, or\nP(F,I).\n\nThe first step is multiplying the probabilities of Jill Stein giving a speech with her individual \nprobabilities of saying the words 'freedom' and 'immigration'. Store this in a variable called p_j_text.\n\nThe second step is multiplying the probabilities of Gary Johnson giving a speech with his individual \nprobabilities of saying the words 'freedom' and 'immigration'. Store this in a variable called p_g_text.\n\nThe third step is to add both of these probabilities and you will get P(F,I).\n'''",
"_____no_output_____"
],
[
"'''\nSolution: Step 1\n'''\n# P(J)\np_j = 0.5\n\n# P(F/J)\np_j_f = 0.1\n\n# P(I/J)\np_j_i = 0.1\n\np_j_text = # TODO\nprint(p_j_text)",
"_____no_output_____"
],
[
"'''\nSolution: Step 2\n'''\n# P(G)\np_g = 0.5\n\n# P(F/G)\np_g_f = 0.7\n\n# P(I/G)\np_g_i = 0.2\n\np_g_text = # TODO\nprint(p_g_text)",
"_____no_output_____"
],
[
"'''\nSolution: Step 3: Compute P(F,I) and store in p_f_i\n'''\np_f_i = # TODO\nprint('Probability of words freedom and immigration being said are: ', format(p_f_i))",
"_____no_output_____"
]
],
[
[
"Now we can compute the probability of `P(J|F,I)`, the probability of Jill Stein saying the words 'freedom' and 'immigration' and `P(G|F,I)`, the probability of Gary Johnson saying the words 'freedom' and 'immigration'.",
"_____no_output_____"
]
],
[
[
"'''\nInstructions:\nCompute P(J|F,I) using the formula P(J|F,I) = (P(J) * P(F|J) * P(I|J)) / P(F,I) and store it in a variable p_j_fi\n'''",
"_____no_output_____"
],
[
"'''\nSolution\n'''\np_j_fi = # TODO\nprint('The probability of Jill Stein saying the words Freedom and Immigration: ', format(p_j_fi))",
"_____no_output_____"
],
[
"'''\nInstructions:\nCompute P(G|F,I) using the formula P(G|F,I) = (P(G) * P(F|G) * P(I|G)) / P(F,I) and store it in a variable p_g_fi\n'''",
"_____no_output_____"
],
[
"'''\nSolution\n'''\np_g_fi = # TODO\nprint('The probability of Gary Johnson saying the words Freedom and Immigration: ', format(p_g_fi))",
"_____no_output_____"
]
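,
[
"'''\nSanity check (added illustration, not part of the original exercise): the two\nposteriors P(J|F,I) and P(G|F,I) should sum to 1. Assumes p_j_fi and p_g_fi\nwere computed in the cells above.\n'''\nprint('Sum of posteriors: {}'.format(p_j_fi + p_g_fi))",
"_____no_output_____"
]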
],
[
[
"And as we can see, just like in the Bayes' theorem case, the sum of our posteriors is equal to 1. \n\nCongratulations! You have implemented the Naive Bayes' theorem from scratch. Our analysis shows that there is only a 6.6% chance that Jill Stein of the Green Party uses the words 'freedom' and 'immigration' in her speech as compared with the 93.3% chance for Gary Johnson of the Libertarian party.",
"_____no_output_____"
],
[
"For another example of Naive Bayes, let's consider searching for images using the term 'Sacramento Kings' in a search engine. In order for us to get the results pertaining to the Scramento Kings NBA basketball team, the search engine needs to be able to associate the two words together and not treat them individually. If the search engine only searched for the words individually, we would get results of images tagged with 'Sacramento,' like pictures of city landscapes, and images of 'Kings,' which might be pictures of crowns or kings from history. But associating the two terms together would produce images of the basketball team. In the first approach we would treat the words as independent entities, so it would be considered 'naive.' We don't usually want this approach from a search engine, but it can be extremely useful in other cases. \n\n\nApplying this to our problem of classifying messages as spam, the Naive Bayes algorithm *looks at each word individually and not as associated entities* with any kind of link between them. In the case of spam detectors, this usually works, as there are certain red flag words in an email which are highly reliable in classifying it as spam. For example, emails with words like 'viagra' are usually classified as spam.",
"_____no_output_____"
],
[
"### Step 5: Naive Bayes implementation using scikit-learn ###\n\nNow let's return to our spam classification context. Thankfully, sklearn has several Naive Bayes implementations that we can use, so we do not have to do the math from scratch. We will be using sklearn's `sklearn.naive_bayes` method to make predictions on our SMS messages dataset. \n\nSpecifically, we will be using the multinomial Naive Bayes algorithm. This particular classifier is suitable for classification with discrete features (such as in our case, word counts for text classification). It takes in integer word counts as its input. On the other hand, Gaussian Naive Bayes is better suited for continuous data as it assumes that the input data has a Gaussian (normal) distribution.",
"_____no_output_____"
]
],
[
[
"'''\nInstructions:\n\nWe have loaded the training data into the variable 'training_data' and the testing data into the \nvariable 'testing_data'.\n\nImport the MultinomialNB classifier and fit the training data into the classifier using fit(). Name your classifier\n'naive_bayes'. You will be training the classifier using 'training_data' and 'y_train' from our split earlier. \n'''",
"_____no_output_____"
],
[
"'''\nSolution\n'''\nfrom sklearn.naive_bayes import MultinomialNB\nnaive_bayes = # TODO\nnaive_bayes.fit(# TODO)",
"_____no_output_____"
],
[
"'''\nInstructions:\nNow that our algorithm has been trained using the training data set we can now make some predictions on the test data\nstored in 'testing_data' using predict(). Save your predictions into the 'predictions' variable.\n'''",
"_____no_output_____"
],
[
"'''\nSolution\n'''\npredictions = naive_bayes.predict(# TODO)",
"_____no_output_____"
]
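,
[
"'''\nStandalone toy example (added illustration, independent of the SMS data):\nMultinomialNB works directly on integer count features, which is why it pairs\nwell with bag-of-words counts. The tiny corpus and labels below are made up\npurely to show the expected input and output.\n'''\nfrom sklearn.feature_extraction.text import CountVectorizer\nfrom sklearn.naive_bayes import MultinomialNB\n\ntoy_docs = ['win money now', 'call me now', 'free money', 'see you tomorrow']\ntoy_labels = [1, 0, 1, 0]  # 1 = spam, 0 = not spam (made-up labels)\n\ntoy_counts = CountVectorizer().fit_transform(toy_docs)  # integer word counts\ntoy_clf = MultinomialNB().fit(toy_counts, toy_labels)\nprint(toy_clf.predict(toy_counts))  # predictions on the same toy documents",
"_____no_output_____"
]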
],
[
[
"Now that predictions have been made on our test set, we need to check the accuracy of our predictions.",
"_____no_output_____"
],
[
"### Step 6: Evaluating our model ###\n\nNow that we have made predictions on our test set, our next goal is to evaluate how well our model is doing. There are various mechanisms for doing so, so first let's review them.\n\n**Accuracy** measures how often the classifier makes the correct prediction. It’s the ratio of the number of correct predictions to the total number of predictions (the number of test data points).\n\n**Precision** tells us what proportion of messages we classified as spam, actually were spam.\nIt is a ratio of true positives (words classified as spam, and which actually are spam) to all positives (all words classified as spam, regardless of whether that was the correct classification). In other words, precision is the ratio of\n\n`[True Positives/(True Positives + False Positives)]`\n\n**Recall (sensitivity)** tells us what proportion of messages that actually were spam were classified by us as spam.\nIt is a ratio of true positives (words classified as spam, and which actually are spam) to all the words that were actually spam. In other words, recall is the ratio of\n\n`[True Positives/(True Positives + False Negatives)]`\n\nFor classification problems that are skewed in their classification distributions like in our case - for example if we had 100 text messages and only 2 were spam and the other 98 weren't - accuracy by itself is not a very good metric. We could classify 90 messages as not spam (including the 2 that were spam but we classify them as not spam, hence they would be false negatives) and 10 as spam (all 10 false positives) and still get a reasonably good accuracy score. For such cases, precision and recall come in very handy. These two metrics can be combined to get the **F1 score**, which is the weighted average of the precision and recall scores. This score can range from 0 to 1, with 1 being the best possible F1 score.",
"_____no_output_____"
],
[
"We will be using all 4 of these metrics to make sure our model does well. For all 4 metrics whose values can range from 0 to 1, having a score as close to 1 as possible is a good indicator of how well our model is doing.",
"_____no_output_____"
]
],
[
[
"'''\nInstructions:\nCompute the accuracy, precision, recall and F1 scores of your model using your test data 'y_test' and the predictions\nyou made earlier stored in the 'predictions' variable.\n'''",
"_____no_output_____"
],
[
"'''\nSolution\n'''\nfrom sklearn.metrics import accuracy_score, precision_score, recall_score, f1_score\nprint('Accuracy score: ', format(accuracy_score(# TODO)))\nprint('Precision score: ', format(precision_score(# TODO)))\nprint('Recall score: ', format(recall_score(# TODO)))\nprint('F1 score: ', format(f1_score(# TODO)))",
"_____no_output_____"
]
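,
[
"'''\nWorked illustration (added, with made-up counts): how precision, recall and F1\nare computed from raw counts. Suppose a classifier produced 40 true positives,\n10 false positives and 5 false negatives.\n'''\ntp, fp, fn = 40, 10, 5  # hypothetical counts\nprecision = tp / (tp + fp)\nrecall = tp / (tp + fn)\nf1 = 2 * precision * recall / (precision + recall)\nprint('Precision: {}'.format(precision))  # 0.8\nprint('Recall: {}'.format(recall))        # about 0.889\nprint('F1 score: {}'.format(f1))          # harmonic mean of the two",
"_____no_output_____"
]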
],
[
[
"### Step 7: Conclusion ###\n\nOne of the major advantages that Naive Bayes has over other classification algorithms is its ability to handle an extremely large number of features. In our case, each word is treated as a feature and there are thousands of different words. Also, it performs well even with the presence of irrelevant features and is relatively unaffected by them. The other major advantage it has is its relative simplicity. Naive Bayes' works well right out of the box and tuning its parameters is rarely ever necessary, except usually in cases where the distribution of the data is known. \nIt rarely ever overfits the data. Another important advantage is that its model training and prediction times are very fast for the amount of data it can handle. All in all, Naive Bayes' really is a gem of an algorithm!\n\nCongratulations! You have successfully designed a model that can efficiently predict if an SMS message is spam or not!\n\nThank you for learning with us!",
"_____no_output_____"
]
]
] | [
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown"
] | [
[
"markdown",
"markdown",
"markdown",
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code"
],
[
"markdown",
"markdown",
"markdown",
"markdown",
"markdown"
],
[
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code"
],
[
"markdown",
"markdown",
"markdown"
],
[
"code",
"code",
"code",
"code"
],
[
"markdown",
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown"
]
] |
d09a66e1e2f7cbdd0009e0bb18331d0368ee290e | 1,978 | ipynb | Jupyter Notebook | examples/Datashader_Example.ipynb | lnijhawan/ipyradiant | d804e9031ef39c1ea75fedd52d110302c065ad84 | [
"BSD-3-Clause"
] | null | null | null | examples/Datashader_Example.ipynb | lnijhawan/ipyradiant | d804e9031ef39c1ea75fedd52d110302c065ad84 | [
"BSD-3-Clause"
] | null | null | null | examples/Datashader_Example.ipynb | lnijhawan/ipyradiant | d804e9031ef39c1ea75fedd52d110302c065ad84 | [
"BSD-3-Clause"
] | null | null | null | 25.358974 | 97 | 0.569262 | [
[
[
"# Visualizing Large Graphs with Datashader\n\n`DatashaderVisualizer` is capable of responsively showing very large graphs, but is less\ninteractive than the [CytoscapeVisualizer](./Cytoscape_Example.ipynb).",
"_____no_output_____"
]
],
[
[
"import ipywidgets as W\nimport traitlets as T\nfrom rdflib import Graph\n\nfrom ipyradiant import DatashaderVisualizer, LayoutSelector",
"_____no_output_____"
]
],
[
[
"Here a `DatashaderVisualizer` is linked to a to show largest dataset from the\n[example data](./data/README.md). The `LayoutSelector` changes the layout algorithm\nused. A small `HTML` widget also reports which nodes are selected. Try exploring the\nvarious tools offered in the toolbar.",
"_____no_output_____"
]
],
[
[
"g = Graph().parse(\"data/tree.jsonld\", format=\"json-ld\")\nds = DatashaderVisualizer(graph=g)\nls = LayoutSelector(vis=ds)\nsn = W.HTML()\nT.dlink(\n (ds, \"selected_nodes\"),\n (sn, \"value\"),\n lambda n: \"Selected Nodes: <pre>{}</pre>\".format(\"\\n\".join(sorted(n))),\n)\nds_ex = W.HBox([ds, W.VBox([ls, sn])])\nds_ex",
"_____no_output_____"
]
]
] | [
"markdown",
"code",
"markdown",
"code"
] | [
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
]
] |
d09a833df1a2df9f82b40bac11662a1a424add50 | 175,441 | ipynb | Jupyter Notebook | notebooks/AutoCompose.ipynb | prajwalcr/AutoCompose | 8a3744494ba18b6bc6f63d06abff0a64c29c1d5b | [
"MIT"
] | 3 | 2021-06-16T05:12:47.000Z | 2022-02-16T08:57:28.000Z | notebooks/AutoCompose.ipynb | prajwalcr/AutoCompose | 8a3744494ba18b6bc6f63d06abff0a64c29c1d5b | [
"MIT"
] | null | null | null | notebooks/AutoCompose.ipynb | prajwalcr/AutoCompose | 8a3744494ba18b6bc6f63d06abff0a64c29c1d5b | [
"MIT"
] | null | null | null | 54.825313 | 39,962 | 0.618704 | [
[
[
"# Setup",
"_____no_output_____"
],
[
"### Installing Dependencies and Mounting",
"_____no_output_____"
]
],
[
[
"%%capture\n!pip install transformers",
"_____no_output_____"
],
[
"# Mount Google Drive\nfrom google.colab import drive # import drive from google colab\n\nROOT = \"/content/drive\" \ndrive.mount(ROOT, force_remount=True) ",
"Mounted at /content/drive\n"
]
],
[
[
"### Imports",
"_____no_output_____"
]
],
[
[
"import pandas as pd\nimport numpy as np\nimport seaborn as sns\nimport matplotlib.pyplot as plt\n% matplotlib inline\n\nimport random\nimport json\nimport time\nimport datetime\nimport os\n\nfrom transformers import GPT2Tokenizer, GPT2LMHeadModel, GPT2Config, AdamW, get_linear_schedule_with_warmup",
"_____no_output_____"
],
[
"import torch\ntorch.manual_seed(64)\nfrom torch.utils.data import Dataset, random_split, DataLoader, RandomSampler, SequentialSampler",
"_____no_output_____"
],
[
"!pip show torch",
"Name: torch\nVersion: 1.8.1+cu101\nSummary: Tensors and Dynamic neural networks in Python with strong GPU acceleration\nHome-page: https://pytorch.org/\nAuthor: PyTorch Team\nAuthor-email: [email protected]\nLicense: BSD-3\nLocation: /usr/local/lib/python3.7/dist-packages\nRequires: numpy, typing-extensions\nRequired-by: torchvision, torchtext, fastai\n"
]
],
[
[
"### Setting Device",
"_____no_output_____"
]
],
[
[
"%cd /content/drive/MyDrive/AutoCompose/",
"/content/drive/MyDrive/AutoCompose\n"
],
[
"!nvidia-smi",
"Fri May 28 04:53:31 2021 \n+-----------------------------------------------------------------------------+\n| NVIDIA-SMI 465.19.01 Driver Version: 460.32.03 CUDA Version: 11.2 |\n|-------------------------------+----------------------+----------------------+\n| GPU Name Persistence-M| Bus-Id Disp.A | Volatile Uncorr. ECC |\n| Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. |\n| | | MIG M. |\n|===============================+======================+======================|\n| 0 Tesla T4 Off | 00000000:00:04.0 Off | 0 |\n| N/A 41C P0 27W / 70W | 15086MiB / 15109MiB | 0% Default |\n| | | N/A |\n+-------------------------------+----------------------+----------------------+\n \n+-----------------------------------------------------------------------------+\n| Processes: |\n| GPU GI CI PID Type Process name GPU Memory |\n| ID ID Usage |\n|=============================================================================|\n+-----------------------------------------------------------------------------+\n"
],
[
"device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")\ndevice",
"_____no_output_____"
]
],
[
[
"# Data Preparation",
"_____no_output_____"
],
[
"### Data Collection",
"_____no_output_____"
]
],
[
[
"with open(\"data/anticipation.json\", \"r\") as f:\n data = json.load(f)\ndata = [poem for poem in data if len(poem[\"poem\"].split()) < 100]\nprint(len(data))\ndata[:5]",
"25070\n"
]
],
[
[
"### Data Model",
"_____no_output_____"
]
],
[
[
"class PoemDataset(Dataset):\n def __init__(self, poems, tokenizer, max_length=768, gpt2_type=\"gpt2\"):\n self.tokenizer = tokenizer\n self.input_ids = []\n self.attn_masks = []\n\n for poem in poems:\n\n encodings_dict = tokenizer(\"<|startoftext|>\"+poem[\"poem\"]+\"<|endoftext|>\",\n truncation=True,\n max_length=max_length,\n padding=\"max_length\")\n \n self.input_ids.append(torch.tensor(encodings_dict[\"input_ids\"]))\n self.attn_masks.append(torch.tensor(encodings_dict[\"attention_mask\"]))\n\n def __len__(self):\n return len(self.input_ids)\n\n def __getitem__(self, idx):\n return self.input_ids[idx], self.attn_masks[idx]",
"_____no_output_____"
],
[
"# Loading GPT2 Tokenizer\ntokenizer = GPT2Tokenizer.from_pretrained('gpt2', \n bos_token='<|startoftext|>', \n eos_token='<|endoftext|>', \n pad_token='<|pad|>')",
"_____no_output_____"
]
],
[
[
"### Rough",
"_____no_output_____"
]
],
[
[
"print(tokenizer.encode(\"<|startoftext|> Hello World <|endoftext|>\", padding=\"max_length\", max_length=10))\nprint(len(tokenizer))",
"[50257, 18435, 2159, 220, 50256, 50258, 50258, 50258, 50258, 50258]\n50259\n"
],
[
"# Finding length of maximum token in dataset\nmax_length = max([len(tokenizer.encode(poem[\"poem\"])) for poem in data])\nprint(max_length)\nmax_length = 100",
"345\n"
],
[
"x = [len(tokenizer.encode(poem[\"poem\"])) for poem in data if len(tokenizer.encode(poem[\"poem\"])) < 100]",
"_____no_output_____"
],
[
"y = [len(tokenizer.encode(poem[\"poem\"])) - len(poem[\"poem\"].split()) for poem in data]",
"_____no_output_____"
],
[
"print(sum(y)/len(y))",
"_____no_output_____"
],
[
"print(max(x), len(x))\nplt.hist(x, bins = 5)\nplt.show",
"1967 382741\n"
]
],
[
[
"### Dataset Creation",
"_____no_output_____"
]
],
[
[
"batch_size = 32\nmax_length = 100",
"_____no_output_____"
],
[
"dataset = PoemDataset(data, tokenizer, max_length=max_length)\n\n# Split data into train and validation sets\ntrain_size = int(0.9*len(dataset))\nval_size = len(dataset) - train_size\n\ntrain_dataset, val_dataset = random_split(dataset, [train_size, val_size])\nprint(\"Number of samples for training =\", train_size)\nprint(\"Number of samples for validation =\", val_size)",
"Number of samples for training = 22563\nNumber of samples for validation = 2507\n"
],
[
"train_dataset[0]",
"_____no_output_____"
],
[
"train_dataloader = DataLoader(train_dataset,\n sampler=RandomSampler(train_dataset),\n batch_size=batch_size)\n\nval_dataloader = DataLoader(val_dataset,\n sampler=SequentialSampler(val_dataset),\n batch_size=batch_size)",
"_____no_output_____"
]
],
[
[
"# Finetune GPT2 Language Model",
"_____no_output_____"
],
[
"### Importing Pre-Trained GPT2 Model",
"_____no_output_____"
]
],
[
[
"# Load model configuration\nconfig = GPT2Config.from_pretrained(\"gpt2\")\n\n# Create model instance and set embedding length\nmodel = GPT2LMHeadModel.from_pretrained(\"gpt2\", config=config)\nmodel.resize_token_embeddings(len(tokenizer))\n\n# Running the model on GPU\nmodel = model.to(device)",
"_____no_output_____"
],
[
"# <<< Optional >>>\n# Setting seeds to enable reproducible runs\nseed_val = 42\n\nrandom.seed(seed_val)\nnp.random.seed(seed_val)\ntorch.manual_seed(seed_val)\ntorch.cuda.manual_seed_all(seed_val)",
"_____no_output_____"
]
],
[
[
"### Scheduling Optimizer",
"_____no_output_____"
]
],
[
[
"epochs = 4\nwarmup_steps = 1e2\nsample_every = 100",
"_____no_output_____"
],
[
"print(len(train_dataloader))\nprint(len(train_dataset))",
"706\n22563\n"
],
[
"# Using AdamW optimizer with default parameters\noptimizer = AdamW(model.parameters(), lr=5e-4, eps=1e-8)\n\n# Toatl training steps is the number of data points times the number of epochs\ntotal_training_steps = len(train_dataloader)*epochs\n\n# Setting a variable learning rate using scheduler\nscheduler = get_linear_schedule_with_warmup(optimizer,\n num_warmup_steps=warmup_steps,\n num_training_steps=total_training_steps)",
"_____no_output_____"
]
],
[
[
"### Training ",
"_____no_output_____"
]
],
[
[
"def format_time(elapsed):\n return str(datetime.timedelta(seconds=int(round(elapsed))))",
"_____no_output_____"
],
[
"total_t0 = time.time()\n\ntraining_stats = []\n\nmodel = model.to(device)\n\nfor epoch_i in range(epochs):\n print(f'Beginning epoch {epoch_i+1} of {epochs}')\n\n t0 = time.time()\n total_train_loss = 0\n model.train()\n\n # Labels are shifted by 1 timestep\n for step, batch in enumerate(train_dataloader):\n b_input_ids = batch[0].to(device)\n b_labels = batch[0].to(device)\n b_masks = batch[1].to(device)\n\n model.zero_grad()\n\n outputs = model(b_input_ids,\n labels=b_labels,\n attention_mask=b_masks)\n \n loss = outputs[0]\n\n batch_loss = loss.item()\n total_train_loss += batch_loss\n\n # Sampling every x steps\n if step != 0 and step % sample_every == 0:\n\n elapsed = format_time(time.time()-t0)\n print(f'Batch {step} of {len(train_dataloader)}. Loss: {batch_loss}. Time: {elapsed}')\n\n model.eval()\n\n sample_outputs = model.generate(\n bos_token_id=random.randint(1,30000),\n do_sample=True, \n top_k=50, \n max_length = 200,\n top_p=0.95, \n num_return_sequences=1\n )\n for i, sample_output in enumerate(sample_outputs):\n print(f'Example ouput: {tokenizer.decode(sample_output, skip_special_tokens=True)}')\n print()\n\n model.train()\n\n loss.backward()\n optimizer.step()\n scheduler.step()\n\n avg_train_loss = total_train_loss / len(train_dataloader)\n training_time = format_time(time.time()-t0)\n print(f'Average Training Loss: {avg_train_loss}. Epoch time: {training_time}')\n print()\n\n t0 = time.time()\n model.eval()\n\n total_eval_loss = 0\n nb_eval_steps = 0\n\n for batch in val_dataloader:\n b_input_ids = batch[0].to(device)\n b_labels = batch[0].to(device)\n b_masks = batch[1].to(device)\n \n with torch.no_grad(): \n\n outputs = model(b_input_ids, \n attention_mask = b_masks,\n labels=b_labels)\n \n loss = outputs[0] \n \n batch_loss = loss.item()\n total_eval_loss += batch_loss \n\n avg_val_loss = total_eval_loss / len(val_dataloader) \n val_time = format_time(time.time() - t0) \n print(f'Validation loss: {avg_val_loss}. Validation Time: {val_time}')\n print()\n\n # Record all statistics from this epoch.\n training_stats.append(\n {\n 'epoch': epoch_i + 1,\n 'Training Loss': avg_train_loss,\n 'Valid. Loss': avg_val_loss,\n 'Training Time': training_time,\n 'Validation Time': val_time\n }\n )\n print(\"------------------------------\")\n\nprint(f'Total training took {format_time(time.time()-total_t0)}')",
"Beginning epoch 1 of 4\n"
]
],
[
[
"### Visualizations",
"_____no_output_____"
]
],
[
[
"pd.set_option('precision', 2)\ndf_stats = pd.DataFrame(data=training_stats)\ndf_stats = df_stats.set_index('epoch')\n\n# Use plot styling from seaborn.\nsns.set(style='darkgrid')\n\n# Increase the plot size and font size.\nsns.set(font_scale=1.5)\nplt.rcParams[\"figure.figsize\"] = (12,6)\n\n# Plot the learning curve.\nplt.plot(df_stats['Training Loss'], 'b-o', label=\"Training\")\nplt.plot(df_stats['Valid. Loss'], 'g-o', label=\"Validation\")\n\n# Label the plot.\nplt.title(\"Training & Validation Loss\")\nplt.xlabel(\"Epoch\")\nplt.ylabel(\"Loss\")\nplt.legend()\nplt.xticks([1, 2, 3, 4])\n\nplt.show()",
"_____no_output_____"
]
],
[
[
"### Generate Poems",
"_____no_output_____"
]
],
[
[
"model.eval()\n\nprompt = \"<|startoftext|>\"\n\ngenerated = torch.tensor(tokenizer.encode(prompt)).unsqueeze(0)\ngenerated = generated.to(device)\n\nsample_outputs = model.generate(\n generated, \n do_sample=True, \n top_k=50, \n max_length = 300,\n top_p=0.95, \n num_return_sequences=3\n )\n\nfor i, sample_output in enumerate(sample_outputs):\n print(\"{}: {}\\n\\n\".format(i, tokenizer.decode(sample_output, skip_special_tokens=True)))",
"Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n"
]
],
[
[
"### Saving and Loading Finetuned Model",
"_____no_output_____"
]
],
[
[
"output_dir = \"/content/drive/My Drive/AutoCompose/models/anticipation2\"\n\n# Save generated poems\n# sample_outputs = model.generate(\n# generated, \n# do_sample=True, \n# top_k=50, \n# max_length = 300,\n# top_p=0.95, \n# num_return_sequences=25\n# )\n\n# with open(os.path.join(output_dir, 'generated_poems.txt'), \"w\") as outfile:\n# for i, sample_output in enumerate(sample_outputs):\n# outfile.write(tokenizer.decode(sample_output, skip_special_tokens=True)+\"\\n\\n\")\n\n# Save a trained model, configuration and tokenizer using `save_pretrained()`.\n# They can then be reloaded using `from_pretrained()`\nmodel_to_save = model.module if hasattr(model, 'module') else model\nmodel_to_save.save_pretrained(output_dir)\ntokenizer.save_pretrained(output_dir)\n\n# Good practice: save your training arguments together with the trained model\n# torch.save(training_stats, os.path.join(output_dir, 'training_args.bin'))",
"_____no_output_____"
],
[
"# Save generated poems\nsample_outputs = model.generate(\n generated, \n do_sample=True, \n top_k=50, \n max_length = 300,\n top_p=0.95, \n num_return_sequences=25\n )\n\nwith open(os.path.join(output_dir, 'generated_poems.txt'), \"w\") as outfile:\n for i, sample_output in enumerate(sample_outputs):\n outfile.write(tokenizer.decode(sample_output, skip_special_tokens=True)+\"\\n\\n\")",
"Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n"
],
[
"# Loading saved model\nmodel_dir = \"/content/drive/My Drive/AutoCompose/models/neutral\"\n\nmodel = GPT2LMHeadModel.from_pretrained(model_dir)\ntokenizer = GPT2Tokenizer.from_pretrained(model_dir)\nmodel.to(device)",
"_____no_output_____"
]
],
[
[
"# Version Control",
"_____no_output_____"
]
],
[
[
"!git config --global user.email \"[email protected]\"\n!git config --global user.name \"prajwal\"",
"_____no_output_____"
],
[
"import json\nf = open(\"AutoComposeCreds.json\")\ndata = json.load(f)\nf.close()\nprint(data)\nusername=\"prajwalcr\"\nrepository=\"AutoCompose\"\ngit_token = data[\"git-token\"]",
"{'git-token': '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'}\n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\n"
],
[
"!git clone https://{git_token}@github.com/{username}/{repository}",
"Cloning into 'AutoCompose'...\nremote: Enumerating objects: 7, done.\u001b[K\nremote: Counting objects: 100% (7/7), done.\u001b[K\nremote: Compressing objects: 100% (6/6), done.\u001b[K\nremote: Total 7 (delta 1), reused 3 (delta 0), pack-reused 0\u001b[K\nUnpacking objects: 100% (7/7), done.\n"
],
[
"%cd /content/drive/MyDrive/AutoCompose/",
"/content/drive/MyDrive/AutoCompose\n"
],
[
"!git pull",
"remote: Enumerating objects: 7, done.\u001b[K\nremote: Counting objects: 14% (1/7)\u001b[K\rremote: Counting objects: 28% (2/7)\u001b[K\rremote: Counting objects: 42% (3/7)\u001b[K\rremote: Counting objects: 57% (4/7)\u001b[K\rremote: Counting objects: 71% (5/7)\u001b[K\rremote: Counting objects: 85% (6/7)\u001b[K\rremote: Counting objects: 100% (7/7)\u001b[K\rremote: Counting objects: 100% (7/7), done.\u001b[K\nremote: Compressing objects: 25% (1/4)\u001b[K\rremote: Compressing objects: 50% (2/4)\u001b[K\rremote: Compressing objects: 75% (3/4)\u001b[K\rremote: Compressing objects: 100% (4/4)\u001b[K\rremote: Compressing objects: 100% (4/4), done.\u001b[K\nremote: Total 4 (delta 2), reused 0 (delta 0), pack-reused 0\u001b[K\nUnpacking objects: 25% (1/4) \rUnpacking objects: 50% (2/4) \rUnpacking objects: 75% (3/4) \rUnpacking objects: 100% (4/4) \rUnpacking objects: 100% (4/4), done.\nFrom https://github.com/prajwalcr/AutoCompose\n 0c2f765..1c1e5d5 main -> origin/main\nUpdating 0c2f765..1c1e5d5\nFast-forward\n models/neutral/training_args.bin | Bin \u001b[31m751\u001b[m -> \u001b[32m0\u001b[m bytes\n 1 file changed, 0 insertions(+), 0 deletions(-)\n delete mode 100644 models/neutral/training_args.bin\n"
],
[
"!git push",
"Counting objects: 1 \rCounting objects: 9, done.\nDelta compression using up to 2 threads.\nCompressing objects: 11% (1/9) \rCompressing objects: 22% (2/9) \rCompressing objects: 33% (3/9) \rCompressing objects: 44% (4/9) \rCompressing objects: 55% (5/9) \rCompressing objects: 66% (6/9) \rCompressing objects: 77% (7/9) \rCompressing objects: 88% (8/9) \rCompressing objects: 100% (9/9) \rCompressing objects: 100% (9/9), done.\nWriting objects: 11% (1/9) \rWriting objects: 22% (2/9) \rWriting objects: 33% (3/9) \rWriting objects: 55% (5/9) \rWriting objects: 66% (6/9) \rWriting objects: 77% (7/9) \rWriting objects: 88% (8/9) \rWriting objects: 100% (9/9) \rWriting objects: 100% (9/9), 7.89 KiB | 1.97 MiB/s, done.\nTotal 9 (delta 4), reused 0 (delta 0)\nremote: Resolving deltas: 0% (0/4)\u001b[K\rremote: Resolving deltas: 25% (1/4)\u001b[K\rremote: Resolving deltas: 50% (2/4)\u001b[K\rremote: Resolving deltas: 75% (3/4)\u001b[K\rremote: Resolving deltas: 100% (4/4)\u001b[K\rremote: Resolving deltas: 100% (4/4), completed with 2 local objects.\u001b[K\nTo https://github.com/prajwalcr/AutoCompose\n 20659d9..1dd1ced main -> main\n"
],
[
"!git add .\n!git commit -m \"anger model trained on uni-m dataset added\"",
"[main 400edf4] anger model trained on uni-m dataset added\n 7 files changed, 50042 insertions(+)\n create mode 100644 models/added_tokens.json\n create mode 100644 models/config.json\n create mode 100644 models/merges.txt\n create mode 100644 models/pytorch_model.bin\n create mode 100644 models/special_tokens_map.json\n create mode 100644 models/tokenizer_config.json\n create mode 100644 models/vocab.json\n"
],
[
"!git filter-branch --tree-filter 'rm -rf models/' HEAD",
"Rewrite 400edf470dbe08bda7da9d66626990063bc9d770 (7/7) (24 seconds passed, remaining 0 predicted) \nRef 'refs/heads/main' was rewritten\n"
],
[
"!git add .",
"_____no_output_____"
],
[
"!git status",
"On branch main\nYour branch is up to date with 'origin/main'.\n\nChanges to be committed:\n (use \"git reset HEAD <file>...\" to unstage)\n\n\t\u001b[32mnew file: models/anticipation2/added_tokens.json\u001b[m\n\t\u001b[32mnew file: models/anticipation2/config.json\u001b[m\n\t\u001b[32mnew file: models/anticipation2/generated_poems.txt\u001b[m\n\t\u001b[32mnew file: models/anticipation2/merges.txt\u001b[m\n\t\u001b[32mnew file: models/anticipation2/special_tokens_map.json\u001b[m\n\t\u001b[32mnew file: models/anticipation2/tokenizer_config.json\u001b[m\n\t\u001b[32mnew file: models/anticipation2/vocab.json\u001b[m\n\t\u001b[32mnew file: models/disgust2/added_tokens.json\u001b[m\n\t\u001b[32mnew file: models/disgust2/config.json\u001b[m\n\t\u001b[32mnew file: models/disgust2/generated_poems.txt\u001b[m\n\t\u001b[32mnew file: models/disgust2/merges.txt\u001b[m\n\t\u001b[32mnew file: models/disgust2/special_tokens_map.json\u001b[m\n\t\u001b[32mnew file: models/disgust2/tokenizer_config.json\u001b[m\n\t\u001b[32mnew file: models/disgust2/vocab.json\u001b[m\n\t\u001b[32mnew file: models/surprise2/added_tokens.json\u001b[m\n\t\u001b[32mnew file: models/surprise2/config.json\u001b[m\n\t\u001b[32mnew file: models/surprise2/generated_poems.txt\u001b[m\n\t\u001b[32mnew file: models/surprise2/merges.txt\u001b[m\n\t\u001b[32mnew file: models/surprise2/special_tokens_map.json\u001b[m\n\t\u001b[32mnew file: models/surprise2/tokenizer_config.json\u001b[m\n\t\u001b[32mnew file: models/surprise2/vocab.json\u001b[m\n\n"
],
[
"!git commit -m \"new models added\"",
"[main 1dd1ced] new models added\n 21 files changed, 150599 insertions(+)\n create mode 100644 models/anticipation2/added_tokens.json\n create mode 100644 models/anticipation2/config.json\n create mode 100644 models/anticipation2/generated_poems.txt\n create mode 100644 models/anticipation2/merges.txt\n create mode 100644 models/anticipation2/special_tokens_map.json\n create mode 100644 models/anticipation2/tokenizer_config.json\n create mode 100644 models/anticipation2/vocab.json\n create mode 100644 models/disgust2/added_tokens.json\n create mode 100644 models/disgust2/config.json\n create mode 100644 models/disgust2/generated_poems.txt\n create mode 100644 models/disgust2/merges.txt\n create mode 100644 models/disgust2/special_tokens_map.json\n create mode 100644 models/disgust2/tokenizer_config.json\n create mode 100644 models/disgust2/vocab.json\n create mode 100644 models/surprise2/added_tokens.json\n create mode 100644 models/surprise2/config.json\n create mode 100644 models/surprise2/generated_poems.txt\n create mode 100644 models/surprise2/merges.txt\n create mode 100644 models/surprise2/special_tokens_map.json\n create mode 100644 models/surprise2/tokenizer_config.json\n create mode 100644 models/surprise2/vocab.json\n"
],
[
"",
"_____no_output_____"
]
]
] | [
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] | [
[
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
d09a8cda1a83a7a80451324b6d771b7753f474ac | 754,140 | ipynb | Jupyter Notebook | 2. Define the Network Architecture.ipynb | SyedaZainabAkhtar/Facial-Keypoint-Detection | 361b40fca78e85f9edcaabfcc3de4cb4de9eeab8 | [
"MIT"
] | 1 | 2020-05-07T23:29:49.000Z | 2020-05-07T23:29:49.000Z | 2. Define the Network Architecture.ipynb | SyedaZainabAkhtar/Facial-Keypoint-Detection | 361b40fca78e85f9edcaabfcc3de4cb4de9eeab8 | [
"MIT"
] | null | null | null | 2. Define the Network Architecture.ipynb | SyedaZainabAkhtar/Facial-Keypoint-Detection | 361b40fca78e85f9edcaabfcc3de4cb4de9eeab8 | [
"MIT"
] | null | null | null | 502.090546 | 160,704 | 0.93627 | [
[
[
"## Define the Convolutional Neural Network\n\nIn this notebook and in `models.py`:\n1. Define a CNN with images as input and keypoints as output\n2. Construct the transformed FaceKeypointsDataset, just as before\n3. Train the CNN on the training data, tracking loss\n4. See how the trained model performs on test data\n5. If necessary, modify the CNN structure and model hyperparameters, so that it performs *well* **\\***\n\n**\\*** What does *well* mean?\n\n\"Well\" means that the model's loss decreases during training **and**, when applied to test image data, the model produces keypoints that closely match the true keypoints of each face. And you'll see examples of this later in the notebook.\n\n---\n",
"_____no_output_____"
],
[
"## CNN Architecture\n\nRecall that CNN's are defined by a few types of layers:\n* Convolutional layers\n* Maxpooling layers\n* Fully-connected layers\n\n### Define model in the provided file `models.py` file\n",
"_____no_output_____"
],
[
"## PyTorch Neural Nets\n\nTo define a neural network in PyTorch, we have defined the layers of a model in the function `__init__` and defined the feedforward behavior of a network that employs those initialized layers in the function `forward`, which takes in an input image tensor, `x`. The structure of this Net class is shown below and left for you to fill in.\n\nNote: During training, PyTorch will be able to perform backpropagation by keeping track of the network's feedforward behavior and using autograd to calculate the update to the weights in the network.\n\n#### Define the Layers in ` __init__`\nAs a reminder, a conv/pool layer may be defined like this (in `__init__`):\n```\n# 1 input image channel (for grayscale images), 32 output channels/feature maps, 3x3 square convolution kernel\nself.conv1 = nn.Conv2d(1, 32, 3)\n\n# maxpool that uses a square window of kernel_size=2, stride=2\nself.pool = nn.MaxPool2d(2, 2) \n```\n\n#### Refer to Layers in `forward`\nThen referred to in the `forward` function like this, in which the conv1 layer has a ReLu activation applied to it before maxpooling is applied:\n```\nx = self.pool(F.relu(self.conv1(x)))\n```\n\nBest practice is to place any layers whose weights will change during the training process in `__init__` and refer to them in the `forward` function; any layers or functions that always behave in the same way, such as a pre-defined activation function, should appear *only* in the `forward` function.",
"_____no_output_____"
],
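[
"For reference, here is a minimal *hypothetical* skeleton showing how the `__init__` and `forward` pieces above fit together (an added illustration only; it is not the architecture defined in `models.py`):\n```\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nclass SmallNet(nn.Module):\n    def __init__(self):\n        super(SmallNet, self).__init__()\n        self.conv1 = nn.Conv2d(1, 32, 5)           # grayscale input, 32 feature maps\n        self.pool = nn.MaxPool2d(2, 2)             # halves the spatial size\n        self.fc1 = nn.Linear(32 * 110 * 110, 136)  # 68 keypoints * (x, y) = 136 outputs\n\n    def forward(self, x):\n        x = self.pool(F.relu(self.conv1(x)))       # conv -> relu -> maxpool\n        x = x.view(x.size(0), -1)                  # flatten\n        return self.fc1(x)\n```\nWith 224x224 inputs, the 5x5 convolution produces 220x220 feature maps and the pooling halves that to 110x110, which is where the `32 * 110 * 110` flattened size comes from.",
"_____no_output_____"
],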
[
"#### Why models.py\n\nWe are tasked with defining the network in the `models.py` file so that any models we define can be saved and loaded by name in different notebooks in this project directory. For example, by defining a CNN class called `Net` in `models.py`, we can then create that same architecture in this and other notebooks by simply importing the class and instantiating a model:\n```\n from models import Net\n net = Net()\n```",
"_____no_output_____"
]
],
[
[
"# load the data if you need to; if you have already loaded the data, you may comment this cell out\n# -- DO NOT CHANGE THIS CELL -- #\n!mkdir /data\n!wget -P /data/ https://s3.amazonaws.com/video.udacity-data.com/topher/2018/May/5aea1b91_train-test-data/train-test-data.zip\n!unzip -n /data/train-test-data.zip -d /data",
"mkdir: cannot create directory ‘/data’: File exists\n--2019-02-24 07:21:17-- https://s3.amazonaws.com/video.udacity-data.com/topher/2018/May/5aea1b91_train-test-data/train-test-data.zip\nResolving s3.amazonaws.com (s3.amazonaws.com)... 52.216.176.157\nConnecting to s3.amazonaws.com (s3.amazonaws.com)|52.216.176.157|:443... connected.\nHTTP request sent, awaiting response... 200 OK\nLength: 338613624 (323M) [application/zip]\nSaving to: ‘/data/train-test-data.zip.1’\n\ntrain-test-data.zip 100%[===================>] 322.93M 93.8MB/s in 3.6s \n\n2019-02-24 07:21:21 (89.3 MB/s) - ‘/data/train-test-data.zip.1’ saved [338613624/338613624]\n\nArchive: /data/train-test-data.zip\n"
]
],
[
[
"<div class=\"alert alert-info\">**Note:** Workspaces automatically close connections after 30 minutes of inactivity (including inactivity while training!). Use the code snippet below to keep your workspace alive during training. (The active_session context manager is imported below.)\n</div>\n```\nfrom workspace_utils import active_session\n\nwith active_session():\n train_model(num_epochs)\n```\n",
"_____no_output_____"
]
],
[
[
"# import the usual resources\nimport matplotlib.pyplot as plt\nimport numpy as np\n\n# import utilities to keep workspaces alive during model training\nfrom workspace_utils import active_session\n\n# watch for any changes in model.py, if it changes, re-load it automatically\n%load_ext autoreload\n%autoreload 2",
"_____no_output_____"
],
[
"## Define the Net in models.py\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\n## Once you've define the network, you can instantiate it\n# one example conv layer has been provided for you\nfrom models import Net\n\nnet = Net()\nprint(net)",
"Net(\n (conv1): Conv2d(1, 32, kernel_size=(5, 5), stride=(1, 1))\n (pool): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)\n (conv2): Conv2d(32, 64, kernel_size=(3, 3), stride=(1, 1))\n (conv3): Conv2d(64, 128, kernel_size=(3, 3), stride=(1, 1))\n (conv3_bn): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n (conv4): Conv2d(128, 256, kernel_size=(3, 3), stride=(1, 1))\n (conv5): Conv2d(256, 512, kernel_size=(1, 1), stride=(1, 1))\n (fc1): Linear(in_features=18432, out_features=1024, bias=True)\n (fc2): Linear(in_features=1024, out_features=512, bias=True)\n (fc3): Linear(in_features=512, out_features=136, bias=True)\n (dropout): Dropout(p=0.25)\n)\n"
]
],
[
[
"## Transform the dataset \n\nTo prepare for training, we have created a transformed dataset of images and keypoints.\n\n### Define a data transform\n\nIn PyTorch, a convolutional neural network expects a torch image of a consistent size as input. For efficient training, and so our model's loss does not blow up during training, it is also suggested that we normalize the input images and keypoints. The necessary transforms have been defined in `data_load.py` and we **do not** need to modify these.\n\nTo define the data transform below, we have used a [composition](http://pytorch.org/tutorials/beginner/data_loading_tutorial.html#compose-transforms) of:\n1. Rescaling and/or cropping the data, such that we are left with a square image (the suggested size is 224x224px)\n2. Normalizing the images and keypoints; turning each RGB image into a grayscale image with a color range of [0, 1] and transforming the given keypoints into a range of [-1, 1]\n3. Turning these images and keypoints into Tensors\n\n**This transform will be applied to the training data and, later, the test data**. It will change how we go about displaying these images and keypoints, but these steps are essential for efficient training.\n",
"_____no_output_____"
]
],
[
[
"from torch.utils.data import Dataset, DataLoader\nfrom torchvision import transforms, utils\n\n# the dataset we created in Notebook 1 is copied in the helper file `data_load.py`\nfrom data_load import FacialKeypointsDataset\n# the transforms we defined in Notebook 1 are in the helper file `data_load.py`\nfrom data_load import Rescale, RandomCrop, Normalize, ToTensor\n\n\n## define the data_transform using transforms.Compose([all tx's, . , .])\n# order matters! i.e. rescaling should come before a smaller crop\ndata_transform = transforms.Compose([Rescale(250),\n RandomCrop(224),\n Normalize(),\n ToTensor()])\n\n# testing that you've defined a transform\nassert(data_transform is not None), 'Define a data_transform'",
"_____no_output_____"
],
[
"# create the transformed dataset\ntransformed_dataset = FacialKeypointsDataset(csv_file='/data/training_frames_keypoints.csv',\n root_dir='/data/training/',\n transform=data_transform)\n\n\nprint('Number of images: ', len(transformed_dataset))\n\n# iterate through the transformed dataset and print some stats about the first few samples\nfor i in range(4):\n sample = transformed_dataset[i]\n print(i, sample['image'].size(), sample['keypoints'].size())",
"Number of images: 3462\n0 torch.Size([1, 224, 224]) torch.Size([68, 2])\n1 torch.Size([1, 224, 224]) torch.Size([68, 2])\n2 torch.Size([1, 224, 224]) torch.Size([68, 2])\n3 torch.Size([1, 224, 224]) torch.Size([68, 2])\n"
]
],
[
[
"## Batching and loading data\n\nNext, having defined the transformed dataset, we can use PyTorch's DataLoader class to load the training data in batches of whatever size as well as to shuffle the data for training the model. You can read more about the parameters of the DataLoader in [this documentation](http://pytorch.org/docs/master/data.html).\n\n#### Batch size\nDecide on a good batch size for training your model. Try both small and large batch sizes and note how the loss decreases as the model trains. Too large a batch size may cause your model to crash and/or run out of memory while training.\n\n**Note for Windows users**: Please change the `num_workers` to 0 or you may face some issues with your DataLoader failing.",
"_____no_output_____"
]
],
[
[
"# load training data in batches\nbatch_size = 10\n\ntrain_loader = DataLoader(transformed_dataset, \n batch_size=batch_size,\n shuffle=True, \n num_workers=4)\n",
"_____no_output_____"
]
],
[
[
"## Before training\n\nTake a look at how this model performs before it trains. You should see that the keypoints it predicts start off in one spot and don't match the keypoints on a face at all! It's interesting to visualize this behavior so that you can compare it to the model after training and see how the model has improved.\n\n#### Load in the test dataset\n\nThe test dataset is one that this model has *not* seen before, meaning it has not trained with these images. We'll load in this test data and before and after training, see how our model performs on this set!\n\nTo visualize this test data, we have to go through some un-transformation steps to turn our images into python images from tensors and to turn our keypoints back into a recognizable range. ",
"_____no_output_____"
]
],
[
[
"# load in the test data, using the dataset class\n# AND apply the data_transform you defined above\n\n# create the test dataset\ntest_dataset = FacialKeypointsDataset(csv_file='/data/test_frames_keypoints.csv',\n root_dir='/data/test/',\n transform=data_transform)\n\n",
"_____no_output_____"
],
[
"# load test data in batches\nbatch_size = 10\n\ntest_loader = DataLoader(test_dataset, \n batch_size=batch_size,\n shuffle=True, \n num_workers=4)",
"_____no_output_____"
]
],
[
[
"## Apply the model on a test sample\n\nTo test the model on a test sample of data, we have to follow these steps:\n1. Extract the image and ground truth keypoints from a sample\n2. Wrap the image in a Variable, so that the net can process it as input and track how it changes as the image moves through the network.\n3. Make sure the image is a FloatTensor, which the model expects.\n4. Forward pass the image through the net to get the predicted, output keypoints.\n\nThis function test how the network performs on the first batch of test data. It returns the images, the transformed images, the predicted keypoints (produced by the model), and the ground truth keypoints.",
"_____no_output_____"
]
],
[
[
"# test the model on a batch of test images\n\ndef net_sample_output():\n \n # iterate through the test dataset\n for i, sample in enumerate(test_loader):\n \n # get sample data: images and ground truth keypoints\n images = sample['image']\n key_pts = sample['keypoints']\n\n # convert images to FloatTensors\n images = images.type(torch.FloatTensor)\n\n # forward pass to get net output\n output_pts = net(images)\n \n # reshape to batch_size x 68 x 2 pts\n output_pts = output_pts.view(output_pts.size()[0], 68, -1)\n \n # break after first image is tested\n if i == 0:\n return images, output_pts, key_pts\n ",
"_____no_output_____"
]
],
[
[
"#### Debugging tips\n\nIf you get a size or dimension error here, make sure that your network outputs the expected number of keypoints! Or if you get a Tensor type error, look into changing the above code that casts the data into float types: `images = images.type(torch.FloatTensor)`.",
"_____no_output_____"
]
],
[
[
"# call the above function\n# returns: test images, test predicted keypoints, test ground truth keypoints\ntest_images, test_outputs, gt_pts = net_sample_output()\n\n# print out the dimensions of the data to see if they make sense\nprint(test_images.data.size())\nprint(test_outputs.data.size())\nprint(gt_pts.size())",
"torch.Size([10, 1, 224, 224])\ntorch.Size([10, 68, 2])\ntorch.Size([10, 68, 2])\n"
]
],
[
[
"## Visualize the predicted keypoints\n\nOnce we've had the model produce some predicted output keypoints, we can visualize these points in a way that's similar to how we've displayed this data before, only this time, we have to \"un-transform\" the image/keypoint data to display it.\n\nThe *new* function, `show_all_keypoints` displays a grayscale image, its predicted keypoints and its ground truth keypoints (if provided).",
"_____no_output_____"
]
],
[
[
"def show_all_keypoints(image, predicted_key_pts, gt_pts=None):\n \"\"\"Show image with predicted keypoints\"\"\"\n \n # image is grayscale\n plt.imshow(image, cmap='gray')\n plt.scatter(predicted_key_pts[:, 0], predicted_key_pts[:, 1], s=20, marker='.', c='m')\n # plot ground truth points as green pts\n if gt_pts is not None:\n plt.scatter(gt_pts[:, 0], gt_pts[:, 1], s=20, marker='.', c='g')\n",
"_____no_output_____"
]
],
[
[
"#### Un-transformation\n\nNext, you'll see a helper function. `visualize_output` that takes in a batch of images, predicted keypoints, and ground truth keypoints and displays a set of those images and their true/predicted keypoints.\n\nThis function's main role is to take batches of image and keypoint data (the input and output of your CNN), and transform them into numpy images and un-normalized keypoints (x, y) for normal display. The un-transformation process turns keypoints and images into numpy arrays from Tensors *and* it undoes the keypoint normalization done in the Normalize() transform; it's assumed that you applied these transformations when you loaded your test data.",
"_____no_output_____"
]
],
[
[
"# visualize the output\n# by default this shows a batch of 10 images\ndef visualize_output(test_images, test_outputs, gt_pts=None, batch_size=10):\n\n for i in range(batch_size):\n plt.figure(figsize=(20,10))\n ax = plt.subplot(1, batch_size, i+1)\n\n # un-transform the image data\n image = test_images[i].data # get the image from it's Variable wrapper\n image = image.numpy() # convert to numpy array from a Tensor\n image = np.transpose(image, (1, 2, 0)) # transpose to go from torch to numpy image\n\n # un-transform the predicted key_pts data\n predicted_key_pts = test_outputs[i].data\n predicted_key_pts = predicted_key_pts.numpy()\n # undo normalization of keypoints \n predicted_key_pts = predicted_key_pts*50.0+100\n \n # plot ground truth points for comparison, if they exist\n ground_truth_pts = None\n if gt_pts is not None:\n ground_truth_pts = gt_pts[i] \n ground_truth_pts = ground_truth_pts*50.0+100\n \n # call show_all_keypoints\n show_all_keypoints(np.squeeze(image), predicted_key_pts, ground_truth_pts)\n \n plt.axis('off')\n\n plt.show()\n \n# call it\nvisualize_output(test_images, test_outputs, gt_pts)",
"_____no_output_____"
]
],
[
[
"## Training\n\n#### Loss function\nTraining a network to predict keypoints is different than training a network to predict a class; instead of outputting a distribution of classes and using cross entropy loss, we have to choose a loss function that is suited for regression, which directly compares a predicted value and target value. Read about the various kinds of loss functions (like MSE or L1/SmoothL1 loss) in [this documentation](http://pytorch.org/docs/master/_modules/torch/nn/modules/loss.html).\n\n### Define the loss and optimization\n\nNext, we will define how the model will train by deciding on the loss function and optimizer.\n\n---",
"_____no_output_____"
]
],
[
[
"## Define the loss and optimization\nimport torch.optim as optim\n\ncriterion = nn.MSELoss()\noptimizer = optim.Adam(net.parameters(), lr = 0.001)\n",
"_____no_output_____"
]
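,
[
"'''\nAdded illustration (not part of the original exercise): comparing two of the\nregression losses mentioned above on a dummy batch of predicted vs. target\nkeypoint values, just to see how they behave on the same error.\n'''\ndummy_pred = torch.zeros(1, 136)    # pretend predictions for 68 (x, y) keypoints\ndummy_target = torch.ones(1, 136)   # pretend targets\n\nmse = nn.MSELoss()(dummy_pred, dummy_target)\nsmooth_l1 = nn.SmoothL1Loss()(dummy_pred, dummy_target)\nprint('MSE loss: {}, SmoothL1 loss: {}'.format(mse.item(), smooth_l1.item()))",
"_____no_output_____"
]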
],
[
[
"## Training and Initial Observation\n\nNow, we will train on our batched training data from `train_loader` for a number of epochs. ",
"_____no_output_____"
]
],
[
[
"def train_net(n_epochs):\n\n # prepare the net for training\n net.train()\n training_loss = []\n\n for epoch in range(n_epochs): # loop over the dataset multiple times\n \n running_loss = 0.0\n\n # train on batches of data, assumes you already have train_loader\n for batch_i, data in enumerate(train_loader):\n # get the input images and their corresponding labels\n images = data['image']\n key_pts = data['keypoints']\n\n # flatten pts\n key_pts = key_pts.view(key_pts.size(0), -1)\n\n # convert variables to floats for regression loss\n key_pts = key_pts.type(torch.FloatTensor)\n images = images.type(torch.FloatTensor)\n\n # forward pass to get outputs\n output_pts = net(images)\n\n # calculate the loss between predicted and target keypoints\n loss = criterion(output_pts, key_pts)\n\n # zero the parameter (weight) gradients\n optimizer.zero_grad()\n \n # backward pass to calculate the weight gradients\n loss.backward()\n\n # update the weights\n optimizer.step()\n\n # print loss statistics\n running_loss += loss.item()\n if batch_i % 10 == 9: # print every 10 batches\n print('Epoch: {}, Batch: {}, Avg. Loss: {}'.format(epoch + 1, batch_i+1, running_loss/10))\n running_loss = 0.0\n training_loss.append(running_loss)\n\n print('Finished Training')\n return training_loss\n",
"_____no_output_____"
],
[
"# train your network\nn_epochs = 10 # start small, and increase when you've decided on your model structure and hyperparams\n\n# this is a Workspaces-specific context manager to keep the connection\n# alive while training your model, not part of pytorch\nwith active_session():\n training_loss = train_net(n_epochs)\n",
"Epoch: 1, Batch: 10, Avg. Loss: 0.44416433572769165\nEpoch: 1, Batch: 20, Avg. Loss: 0.2175885707139969\nEpoch: 1, Batch: 30, Avg. Loss: 0.21557604521512985\nEpoch: 1, Batch: 40, Avg. Loss: 0.22412515580654144\nEpoch: 1, Batch: 50, Avg. Loss: 0.22654051929712296\nEpoch: 1, Batch: 60, Avg. Loss: 0.30102653875947\nEpoch: 1, Batch: 70, Avg. Loss: 0.27754531651735304\nEpoch: 1, Batch: 80, Avg. Loss: 0.18360766917467117\nEpoch: 1, Batch: 90, Avg. Loss: 0.19977265149354934\nEpoch: 1, Batch: 100, Avg. Loss: 0.1754001908004284\nEpoch: 1, Batch: 110, Avg. Loss: 0.2066241219639778\nEpoch: 1, Batch: 120, Avg. Loss: 0.2339693248271942\nEpoch: 1, Batch: 130, Avg. Loss: 0.2992959037423134\nEpoch: 1, Batch: 140, Avg. Loss: 0.1668638050556183\nEpoch: 1, Batch: 150, Avg. Loss: 0.204581156373024\nEpoch: 1, Batch: 160, Avg. Loss: 0.1761942133307457\nEpoch: 1, Batch: 170, Avg. Loss: 0.2436549559235573\nEpoch: 1, Batch: 180, Avg. Loss: 0.22114874571561813\nEpoch: 1, Batch: 190, Avg. Loss: 0.21025069504976274\nEpoch: 1, Batch: 200, Avg. Loss: 0.19692750722169877\nEpoch: 1, Batch: 210, Avg. Loss: 0.17479709014296532\nEpoch: 1, Batch: 220, Avg. Loss: 0.16560218408703803\nEpoch: 1, Batch: 230, Avg. Loss: 0.22620025351643563\nEpoch: 1, Batch: 240, Avg. Loss: 0.2600567892193794\nEpoch: 1, Batch: 250, Avg. Loss: 0.19481791257858277\nEpoch: 1, Batch: 260, Avg. Loss: 0.20341289937496185\nEpoch: 1, Batch: 270, Avg. Loss: 0.2502629436552525\nEpoch: 1, Batch: 280, Avg. Loss: 0.31687320619821546\nEpoch: 1, Batch: 290, Avg. Loss: 0.24206747263669967\nEpoch: 1, Batch: 300, Avg. Loss: 0.181122512370348\nEpoch: 1, Batch: 310, Avg. Loss: 0.18619744032621383\nEpoch: 1, Batch: 320, Avg. Loss: 0.19950464963912964\nEpoch: 1, Batch: 330, Avg. Loss: 0.18625058680772782\nEpoch: 1, Batch: 340, Avg. Loss: 0.1919226199388504\nEpoch: 2, Batch: 10, Avg. Loss: 0.17994826808571815\nEpoch: 2, Batch: 20, Avg. Loss: 0.17717353999614716\nEpoch: 2, Batch: 30, Avg. Loss: 0.33826584964990614\nEpoch: 2, Batch: 40, Avg. Loss: 0.18035859763622283\nEpoch: 2, Batch: 50, Avg. Loss: 0.18490905538201333\nEpoch: 2, Batch: 60, Avg. Loss: 0.17429713830351828\nEpoch: 2, Batch: 70, Avg. Loss: 0.16824107691645623\nEpoch: 2, Batch: 80, Avg. Loss: 0.20174753591418265\nEpoch: 2, Batch: 90, Avg. Loss: 0.17901046574115753\nEpoch: 2, Batch: 100, Avg. Loss: 0.2804532080888748\nEpoch: 2, Batch: 110, Avg. Loss: 0.18819820284843444\nEpoch: 2, Batch: 120, Avg. Loss: 0.2395803727209568\nEpoch: 2, Batch: 130, Avg. Loss: 0.1707100823521614\nEpoch: 2, Batch: 140, Avg. Loss: 0.16374161392450332\nEpoch: 2, Batch: 150, Avg. Loss: 0.23562793731689452\nEpoch: 2, Batch: 160, Avg. Loss: 0.1692538984119892\nEpoch: 2, Batch: 170, Avg. Loss: 0.18574669808149338\nEpoch: 2, Batch: 180, Avg. Loss: 0.2092479333281517\nEpoch: 2, Batch: 190, Avg. Loss: 0.20595604106783866\nEpoch: 2, Batch: 200, Avg. Loss: 0.1704283781349659\nEpoch: 2, Batch: 210, Avg. Loss: 0.18333087041974067\nEpoch: 2, Batch: 220, Avg. Loss: 0.2201328806579113\nEpoch: 2, Batch: 230, Avg. Loss: 0.16914209797978402\nEpoch: 2, Batch: 240, Avg. Loss: 0.18277649506926535\nEpoch: 2, Batch: 250, Avg. Loss: 0.14628473967313765\nEpoch: 2, Batch: 260, Avg. Loss: 0.1851978249847889\nEpoch: 2, Batch: 270, Avg. Loss: 0.15777018591761588\nEpoch: 2, Batch: 280, Avg. Loss: 0.18683725073933602\nEpoch: 2, Batch: 290, Avg. Loss: 0.26570322662591933\nEpoch: 2, Batch: 300, Avg. Loss: 0.1489314191043377\nEpoch: 2, Batch: 310, Avg. Loss: 0.16021296083927156\nEpoch: 2, Batch: 320, Avg. Loss: 0.20100241377949715\nEpoch: 2, Batch: 330, Avg. 
Loss: 0.1482290431857109\nEpoch: 2, Batch: 340, Avg. Loss: 0.15948158055543898\nEpoch: 3, Batch: 10, Avg. Loss: 0.16632870882749556\nEpoch: 3, Batch: 20, Avg. Loss: 0.16252302303910254\nEpoch: 3, Batch: 30, Avg. Loss: 0.1941835440695286\nEpoch: 3, Batch: 40, Avg. Loss: 0.17453592792153358\nEpoch: 3, Batch: 50, Avg. Loss: 0.15712366104125977\nEpoch: 3, Batch: 60, Avg. Loss: 0.19639338552951813\nEpoch: 3, Batch: 70, Avg. Loss: 0.14165712669491767\nEpoch: 3, Batch: 80, Avg. Loss: 0.1641607791185379\nEpoch: 3, Batch: 90, Avg. Loss: 0.22535918951034545\nEpoch: 3, Batch: 100, Avg. Loss: 0.1496770717203617\nEpoch: 3, Batch: 110, Avg. Loss: 0.17060958817601204\nEpoch: 3, Batch: 120, Avg. Loss: 0.15815025568008423\nEpoch: 3, Batch: 130, Avg. Loss: 0.24258503317832947\nEpoch: 3, Batch: 140, Avg. Loss: 0.13050371333956717\nEpoch: 3, Batch: 150, Avg. Loss: 0.15643921196460725\nEpoch: 3, Batch: 160, Avg. Loss: 0.2554766945540905\nEpoch: 3, Batch: 170, Avg. Loss: 0.16950705721974374\nEpoch: 3, Batch: 180, Avg. Loss: 0.18972161114215852\nEpoch: 3, Batch: 190, Avg. Loss: 0.19966743439435958\nEpoch: 3, Batch: 200, Avg. Loss: 0.1554355204105377\nEpoch: 3, Batch: 210, Avg. Loss: 0.2077905237674713\nEpoch: 3, Batch: 220, Avg. Loss: 0.22310657650232316\nEpoch: 3, Batch: 230, Avg. Loss: 0.16497120931744574\nEpoch: 3, Batch: 240, Avg. Loss: 0.12373218983411789\nEpoch: 3, Batch: 250, Avg. Loss: 0.1253780096769333\nEpoch: 3, Batch: 260, Avg. Loss: 0.1158442385494709\nEpoch: 3, Batch: 270, Avg. Loss: 0.17302169278264046\nEpoch: 3, Batch: 280, Avg. Loss: 0.14890095293521882\nEpoch: 3, Batch: 290, Avg. Loss: 0.1600910909473896\nEpoch: 3, Batch: 300, Avg. Loss: 0.17859926261007786\nEpoch: 3, Batch: 310, Avg. Loss: 0.1675376258790493\nEpoch: 3, Batch: 320, Avg. Loss: 0.12146469689905644\nEpoch: 3, Batch: 330, Avg. Loss: 0.17346090376377105\nEpoch: 3, Batch: 340, Avg. Loss: 0.11471948400139809\nEpoch: 4, Batch: 10, Avg. Loss: 0.12982016652822495\nEpoch: 4, Batch: 20, Avg. Loss: 0.1547395683825016\nEpoch: 4, Batch: 30, Avg. Loss: 0.13711554780602456\nEpoch: 4, Batch: 40, Avg. Loss: 0.10902057327330113\nEpoch: 4, Batch: 50, Avg. Loss: 0.1522909879684448\nEpoch: 4, Batch: 60, Avg. Loss: 0.15426650159060956\nEpoch: 4, Batch: 70, Avg. Loss: 0.20043503269553184\nEpoch: 4, Batch: 80, Avg. Loss: 0.15553959980607032\nEpoch: 4, Batch: 90, Avg. Loss: 0.15711386799812316\nEpoch: 4, Batch: 100, Avg. Loss: 0.11917371302843094\nEpoch: 4, Batch: 110, Avg. Loss: 0.16310055255889894\nEpoch: 4, Batch: 120, Avg. Loss: 0.18770732805132867\nEpoch: 4, Batch: 130, Avg. Loss: 0.17742627635598182\nEpoch: 4, Batch: 140, Avg. Loss: 0.13330501392483712\nEpoch: 4, Batch: 150, Avg. Loss: 0.16039124578237535\nEpoch: 4, Batch: 160, Avg. Loss: 0.16864079087972642\nEpoch: 4, Batch: 170, Avg. Loss: 0.17521585449576377\nEpoch: 4, Batch: 180, Avg. Loss: 0.18260546922683715\nEpoch: 4, Batch: 190, Avg. Loss: 0.12056737542152404\nEpoch: 4, Batch: 200, Avg. Loss: 0.15565713196992875\nEpoch: 4, Batch: 210, Avg. Loss: 0.13360178768634795\nEpoch: 4, Batch: 220, Avg. Loss: 0.11777338460087776\nEpoch: 4, Batch: 230, Avg. Loss: 0.1321411430835724\nEpoch: 4, Batch: 240, Avg. Loss: 0.13344260156154633\nEpoch: 4, Batch: 250, Avg. Loss: 0.17048801556229592\nEpoch: 4, Batch: 260, Avg. Loss: 0.13590663447976112\nEpoch: 4, Batch: 270, Avg. Loss: 0.1297525607049465\nEpoch: 4, Batch: 280, Avg. Loss: 0.12346211075782776\nEpoch: 4, Batch: 290, Avg. Loss: 0.11441868990659713\nEpoch: 4, Batch: 300, Avg. Loss: 0.09622283503413201\nEpoch: 4, Batch: 310, Avg. 
Loss: 0.10409215800464153\nEpoch: 4, Batch: 320, Avg. Loss: 0.1077029250562191\nEpoch: 4, Batch: 330, Avg. Loss: 0.09445305243134498\nEpoch: 4, Batch: 340, Avg. Loss: 0.14615374356508254\nEpoch: 5, Batch: 10, Avg. Loss: 0.08935939706861973\nEpoch: 5, Batch: 20, Avg. Loss: 0.13090906366705896\nEpoch: 5, Batch: 30, Avg. Loss: 0.12208336554467678\nEpoch: 5, Batch: 40, Avg. Loss: 0.1021390851587057\nEpoch: 5, Batch: 50, Avg. Loss: 0.09983213432133198\nEpoch: 5, Batch: 60, Avg. Loss: 0.09231629967689514\nEpoch: 5, Batch: 70, Avg. Loss: 0.08448658213019371\nEpoch: 5, Batch: 80, Avg. Loss: 0.22084250263869762\nEpoch: 5, Batch: 90, Avg. Loss: 0.08608474209904671\nEpoch: 5, Batch: 100, Avg. Loss: 0.08027125261723995\nEpoch: 5, Batch: 110, Avg. Loss: 0.10804186388850212\nEpoch: 5, Batch: 120, Avg. Loss: 0.08836470693349838\nEpoch: 5, Batch: 130, Avg. Loss: 0.0829862378537655\nEpoch: 5, Batch: 140, Avg. Loss: 0.19752345271408558\nEpoch: 5, Batch: 150, Avg. Loss: 0.08963002376258374\nEpoch: 5, Batch: 160, Avg. Loss: 0.06960084065794944\nEpoch: 5, Batch: 170, Avg. Loss: 0.08151629604399205\nEpoch: 5, Batch: 180, Avg. Loss: 0.07939450107514859\nEpoch: 5, Batch: 190, Avg. Loss: 0.0987637486308813\nEpoch: 5, Batch: 200, Avg. Loss: 0.13194181062281132\nEpoch: 5, Batch: 210, Avg. Loss: 0.07572487816214561\nEpoch: 5, Batch: 220, Avg. Loss: 0.07940045520663261\nEpoch: 5, Batch: 230, Avg. Loss: 0.073162255063653\nEpoch: 5, Batch: 240, Avg. Loss: 0.10245590042322875\nEpoch: 5, Batch: 250, Avg. Loss: 0.07136505246162414\nEpoch: 5, Batch: 260, Avg. Loss: 0.07238047756254673\nEpoch: 5, Batch: 270, Avg. Loss: 0.09554972760379314\nEpoch: 5, Batch: 280, Avg. Loss: 0.0861436627805233\nEpoch: 5, Batch: 290, Avg. Loss: 0.07735931761562824\nEpoch: 5, Batch: 300, Avg. Loss: 0.07247286401689053\nEpoch: 5, Batch: 310, Avg. Loss: 0.08516214117407798\nEpoch: 5, Batch: 320, Avg. Loss: 0.061742888763546944\nEpoch: 5, Batch: 330, Avg. Loss: 0.20301748774945735\nEpoch: 5, Batch: 340, Avg. Loss: 0.08581218272447586\nEpoch: 6, Batch: 10, Avg. Loss: 0.08547550290822983\nEpoch: 6, Batch: 20, Avg. Loss: 0.06933667249977589\nEpoch: 6, Batch: 30, Avg. Loss: 0.0979898788034916\nEpoch: 6, Batch: 40, Avg. Loss: 0.06936869733035564\nEpoch: 6, Batch: 50, Avg. Loss: 0.07842021211981773\nEpoch: 6, Batch: 60, Avg. Loss: 0.13185384795069693\nEpoch: 6, Batch: 70, Avg. Loss: 0.2263725183904171\nEpoch: 6, Batch: 80, Avg. Loss: 0.10198132544755936\nEpoch: 6, Batch: 90, Avg. Loss: 0.07260950207710266\nEpoch: 6, Batch: 100, Avg. Loss: 0.12453909926116466\nEpoch: 6, Batch: 110, Avg. Loss: 0.07971074897795916\nEpoch: 6, Batch: 120, Avg. Loss: 0.06932028494775296\nEpoch: 6, Batch: 130, Avg. Loss: 0.06447334736585617\nEpoch: 6, Batch: 140, Avg. Loss: 0.07284157425165176\nEpoch: 6, Batch: 150, Avg. Loss: 0.07156029269099236\nEpoch: 6, Batch: 160, Avg. Loss: 0.0627998985350132\nEpoch: 6, Batch: 170, Avg. Loss: 0.07472128756344318\nEpoch: 6, Batch: 180, Avg. Loss: 0.05850897897034883\nEpoch: 6, Batch: 190, Avg. Loss: 0.06482606492936611\nEpoch: 6, Batch: 200, Avg. Loss: 0.08174276128411292\nEpoch: 6, Batch: 210, Avg. Loss: 0.05840686410665512\nEpoch: 6, Batch: 220, Avg. Loss: 0.05247593075037003\nEpoch: 6, Batch: 230, Avg. Loss: 0.060184385254979135\nEpoch: 6, Batch: 240, Avg. Loss: 0.05962736383080482\nEpoch: 6, Batch: 250, Avg. Loss: 0.06160111855715513\nEpoch: 6, Batch: 260, Avg. Loss: 0.07339813001453876\nEpoch: 6, Batch: 270, Avg. Loss: 0.0595314035192132\nEpoch: 6, Batch: 280, Avg. Loss: 0.04898449704051018\nEpoch: 6, Batch: 290, Avg. 
Loss: 0.06392868589609861\nEpoch: 6, Batch: 300, Avg. Loss: 0.049040567316114905\nEpoch: 6, Batch: 310, Avg. Loss: 0.047439711168408394\nEpoch: 6, Batch: 320, Avg. Loss: 0.059214136376976964\nEpoch: 6, Batch: 330, Avg. Loss: 0.06720991246402264\nEpoch: 6, Batch: 340, Avg. Loss: 0.07145421244204045\nEpoch: 7, Batch: 10, Avg. Loss: 0.05788564998656511\nEpoch: 7, Batch: 20, Avg. Loss: 0.20631268993020058\nEpoch: 7, Batch: 30, Avg. Loss: 0.07600948326289654\nEpoch: 7, Batch: 40, Avg. Loss: 0.12280024103820324\nEpoch: 7, Batch: 50, Avg. Loss: 0.06742164418101311\nEpoch: 7, Batch: 60, Avg. Loss: 0.05840242840349674\nEpoch: 7, Batch: 70, Avg. Loss: 0.07713458314538002\nEpoch: 7, Batch: 80, Avg. Loss: 0.055093659088015556\nEpoch: 7, Batch: 90, Avg. Loss: 0.1322836246341467\nEpoch: 7, Batch: 100, Avg. Loss: 0.06406111344695091\nEpoch: 7, Batch: 110, Avg. Loss: 0.06306700613349676\nEpoch: 7, Batch: 120, Avg. Loss: 0.0490744523704052\nEpoch: 7, Batch: 130, Avg. Loss: 0.08874573763459921\nEpoch: 7, Batch: 140, Avg. Loss: 0.058002734929323195\nEpoch: 7, Batch: 150, Avg. Loss: 0.11608156077563762\nEpoch: 7, Batch: 160, Avg. Loss: 0.04425553195178509\nEpoch: 7, Batch: 170, Avg. Loss: 0.07028151974081993\nEpoch: 7, Batch: 180, Avg. Loss: 0.06388998478651046\nEpoch: 7, Batch: 190, Avg. Loss: 0.08098162673413753\nEpoch: 7, Batch: 200, Avg. Loss: 0.055412362515926364\nEpoch: 7, Batch: 210, Avg. Loss: 0.0635084319859743\nEpoch: 7, Batch: 220, Avg. Loss: 0.046326776035130024\nEpoch: 7, Batch: 230, Avg. Loss: 0.04527541007846594\nEpoch: 7, Batch: 240, Avg. Loss: 0.08100639954209328\nEpoch: 7, Batch: 250, Avg. Loss: 0.0491887180134654\nEpoch: 7, Batch: 260, Avg. Loss: 0.05493013616651297\nEpoch: 7, Batch: 270, Avg. Loss: 0.05660474617034197\nEpoch: 7, Batch: 280, Avg. Loss: 0.054648135975003245\nEpoch: 7, Batch: 290, Avg. Loss: 0.06419778056442738\nEpoch: 7, Batch: 300, Avg. Loss: 0.04939074739813805\nEpoch: 7, Batch: 310, Avg. Loss: 0.05146754141896963\nEpoch: 7, Batch: 320, Avg. Loss: 0.05029594376683235\nEpoch: 7, Batch: 330, Avg. Loss: 0.13098315447568892\nEpoch: 7, Batch: 340, Avg. Loss: 0.053756604343652724\nEpoch: 8, Batch: 10, Avg. Loss: 0.06256329752504826\nEpoch: 8, Batch: 20, Avg. Loss: 0.06301752589643002\nEpoch: 8, Batch: 30, Avg. Loss: 0.06326640360057353\nEpoch: 8, Batch: 40, Avg. Loss: 0.053764683380723\nEpoch: 8, Batch: 50, Avg. Loss: 0.07191067859530449\nEpoch: 8, Batch: 60, Avg. Loss: 0.06096766926348209\nEpoch: 8, Batch: 70, Avg. Loss: 0.054040583223104476\nEpoch: 8, Batch: 80, Avg. Loss: 0.04787160027772188\nEpoch: 8, Batch: 90, Avg. Loss: 0.04983269535005093\nEpoch: 8, Batch: 100, Avg. Loss: 0.047104967571794984\nEpoch: 8, Batch: 110, Avg. Loss: 0.05764889270067215\nEpoch: 8, Batch: 120, Avg. Loss: 0.04960756227374077\nEpoch: 8, Batch: 130, Avg. Loss: 0.05822401233017445\nEpoch: 8, Batch: 140, Avg. Loss: 0.053056230582296846\nEpoch: 8, Batch: 150, Avg. Loss: 0.03711046688258648\nEpoch: 8, Batch: 160, Avg. Loss: 0.14100506529211998\nEpoch: 8, Batch: 170, Avg. Loss: 0.05520296767354012\nEpoch: 8, Batch: 180, Avg. Loss: 0.04987628515809774\nEpoch: 8, Batch: 190, Avg. Loss: 0.06969722341746092\nEpoch: 8, Batch: 200, Avg. Loss: 0.046427170559763906\nEpoch: 8, Batch: 210, Avg. Loss: 0.042093202099204065\nEpoch: 8, Batch: 220, Avg. Loss: 0.04276003763079643\nEpoch: 8, Batch: 230, Avg. Loss: 0.04827338252216577\nEpoch: 8, Batch: 240, Avg. Loss: 0.04494358506053686\nEpoch: 8, Batch: 250, Avg. Loss: 0.06720381304621696\nEpoch: 8, Batch: 260, Avg. Loss: 0.03808741588145494\nEpoch: 8, Batch: 270, Avg. 
Loss: 0.054081511497497556\nEpoch: 8, Batch: 280, Avg. Loss: 0.044025127217173575\nEpoch: 8, Batch: 290, Avg. Loss: 0.052297753281891346\nEpoch: 8, Batch: 300, Avg. Loss: 0.04692896045744419\nEpoch: 8, Batch: 310, Avg. Loss: 0.05117587186396122\nEpoch: 8, Batch: 320, Avg. Loss: 0.0525355851277709\nEpoch: 8, Batch: 330, Avg. Loss: 0.061768196150660516\nEpoch: 8, Batch: 340, Avg. Loss: 0.11629142723977566\nEpoch: 9, Batch: 10, Avg. Loss: 0.05406637601554394\nEpoch: 9, Batch: 20, Avg. Loss: 0.04990652110427618\nEpoch: 9, Batch: 30, Avg. Loss: 0.05556227192282677\nEpoch: 9, Batch: 40, Avg. Loss: 0.07050047051161527\nEpoch: 9, Batch: 50, Avg. Loss: 0.16893813833594323\nEpoch: 9, Batch: 60, Avg. Loss: 0.04805730860680342\nEpoch: 9, Batch: 70, Avg. Loss: 0.10069886967539787\nEpoch: 9, Batch: 80, Avg. Loss: 0.05574633963406086\nEpoch: 9, Batch: 90, Avg. Loss: 0.09138512425124645\nEpoch: 9, Batch: 100, Avg. Loss: 0.12057317439466715\nEpoch: 9, Batch: 110, Avg. Loss: 0.0892296290025115\nEpoch: 9, Batch: 120, Avg. Loss: 0.05153895430266857\nEpoch: 9, Batch: 130, Avg. Loss: 0.0659572659060359\nEpoch: 9, Batch: 140, Avg. Loss: 0.05675642117857933\nEpoch: 9, Batch: 150, Avg. Loss: 0.045543840155005455\nEpoch: 9, Batch: 160, Avg. Loss: 0.045289041846990584\nEpoch: 9, Batch: 170, Avg. Loss: 0.09967865571379661\nEpoch: 9, Batch: 180, Avg. Loss: 0.04962304662913084\nEpoch: 9, Batch: 190, Avg. Loss: 0.07373892869800329\nEpoch: 9, Batch: 200, Avg. Loss: 0.04314884599298239\nEpoch: 9, Batch: 210, Avg. Loss: 0.06076900400221348\nEpoch: 9, Batch: 220, Avg. Loss: 0.03614503592252731\nEpoch: 9, Batch: 230, Avg. Loss: 0.04103000555187464\nEpoch: 9, Batch: 240, Avg. Loss: 0.04597904328256845\nEpoch: 9, Batch: 250, Avg. Loss: 0.04467903506010771\nEpoch: 9, Batch: 260, Avg. Loss: 0.041640084236860275\nEpoch: 9, Batch: 270, Avg. Loss: 0.042829732224345204\nEpoch: 9, Batch: 280, Avg. Loss: 0.056007545441389084\nEpoch: 9, Batch: 290, Avg. Loss: 0.06265867780894041\nEpoch: 9, Batch: 300, Avg. Loss: 0.04278700426220894\nEpoch: 9, Batch: 310, Avg. Loss: 0.03935463558882475\nEpoch: 9, Batch: 320, Avg. Loss: 0.175035048276186\nEpoch: 9, Batch: 330, Avg. Loss: 0.04416582323610783\nEpoch: 9, Batch: 340, Avg. Loss: 0.0741931725293398\nEpoch: 10, Batch: 10, Avg. Loss: 0.06130882278084755\nEpoch: 10, Batch: 20, Avg. Loss: 0.03637017160654068\nEpoch: 10, Batch: 30, Avg. Loss: 0.03976167161017656\nEpoch: 10, Batch: 40, Avg. Loss: 0.03389114178717136\nEpoch: 10, Batch: 50, Avg. Loss: 0.07417517025023698\nEpoch: 10, Batch: 60, Avg. Loss: 0.04463060200214386\nEpoch: 10, Batch: 70, Avg. Loss: 0.04110721200704574\nEpoch: 10, Batch: 80, Avg. Loss: 0.12185230124741793\nEpoch: 10, Batch: 90, Avg. Loss: 0.044991275295615196\nEpoch: 10, Batch: 100, Avg. Loss: 0.04224203452467919\nEpoch: 10, Batch: 110, Avg. Loss: 0.06171862632036209\nEpoch: 10, Batch: 120, Avg. Loss: 0.03866221886128187\nEpoch: 10, Batch: 130, Avg. Loss: 0.046689947694540025\nEpoch: 10, Batch: 140, Avg. Loss: 0.0504083076491952\nEpoch: 10, Batch: 150, Avg. Loss: 0.035383103974163534\nEpoch: 10, Batch: 160, Avg. Loss: 0.04042493086308241\nEpoch: 10, Batch: 170, Avg. Loss: 0.05173234697431326\nEpoch: 10, Batch: 180, Avg. Loss: 0.03803642988204956\nEpoch: 10, Batch: 190, Avg. Loss: 0.0351900240406394\nEpoch: 10, Batch: 200, Avg. Loss: 0.09490111246705055\nEpoch: 10, Batch: 210, Avg. Loss: 0.03840796370059252\nEpoch: 10, Batch: 220, Avg. Loss: 0.0449936181306839\nEpoch: 10, Batch: 230, Avg. Loss: 0.039249077439308167\nEpoch: 10, Batch: 240, Avg. 
Loss: 0.040227429009974\nEpoch: 10, Batch: 250, Avg. Loss: 0.04323658086359501\nEpoch: 10, Batch: 260, Avg. Loss: 0.036140587739646436\nEpoch: 10, Batch: 270, Avg. Loss: 0.04543791189789772\nEpoch: 10, Batch: 280, Avg. Loss: 0.037392864003777504\nEpoch: 10, Batch: 290, Avg. Loss: 0.04381084088236094\nEpoch: 10, Batch: 300, Avg. Loss: 0.038170242309570314\nEpoch: 10, Batch: 310, Avg. Loss: 0.07751398384571076\nEpoch: 10, Batch: 320, Avg. Loss: 0.05669640563428402\nEpoch: 10, Batch: 330, Avg. Loss: 0.04008456040173769\nEpoch: 10, Batch: 340, Avg. Loss: 0.0449509609490633\nFinished Training\n"
],
[
"# visualize the loss as the network trained\nplt.figure()\nplt.semilogy(training_loss)\nplt.grid()\nplt.xlabel('Epoch')\nplt.ylabel('Loss');",
"_____no_output_____"
]
],
[
[
"## Test data\n\nSee how the model performs on previously unseen, test data. We've already loaded and transformed this data, similar to the training data. Next, run the trained model on these images to see what kind of keypoints are produced.",
"_____no_output_____"
]
],
[
[
"# get a sample of test data again\ntest_images, test_outputs, gt_pts = net_sample_output()\n\nprint(test_images.data.size())\nprint(test_outputs.data.size())\nprint(gt_pts.size())",
"torch.Size([10, 1, 224, 224])\ntorch.Size([10, 68, 2])\ntorch.Size([10, 68, 2])\n"
],
[
"## visualize test output\n# you can use the same function as before, by un-commenting the line below:\n\nvisualize_output(test_images, test_outputs, gt_pts)\n",
"_____no_output_____"
]
],
[
[
"Once we have found a good model (or two), we have to save the model so we can load it and use it later!",
"_____no_output_____"
]
],
[
[
"## change the name to something uniqe for each new model\nmodel_dir = 'saved_models/'\nmodel_name = 'facial_keypoints_model.pt'\n\n# after training, save your model parameters in the dir 'saved_models'\ntorch.save(net.state_dict(), model_dir+model_name)",
"_____no_output_____"
]
],
[
[
"## Feature Visualization\n\nSometimes, neural networks are thought of as a black box, given some input, they learn to produce some output. CNN's are actually learning to recognize a variety of spatial patterns and you can visualize what each convolutional layer has been trained to recognize by looking at the weights that make up each convolutional kernel and applying those one at a time to a sample image. This technique is called feature visualization and it's useful for understanding the inner workings of a CNN.",
"_____no_output_____"
],
[
"In the cell below, you can see how to extract a single filter (by index) from your first convolutional layer. The filter should appear as a grayscale grid.",
"_____no_output_____"
]
],
[
[
"# Get the weights in the first conv layer, \"conv1\"\n# if necessary, change this to reflect the name of your first conv layer\nweights1 = net.conv1.weight.data\n\nw = weights1.numpy()\n\nfilter_index = 0\n\nprint(w[filter_index][0])\nprint(w[filter_index][0].shape)\n\n# display the filter weights\nplt.imshow(w[filter_index][0], cmap='gray')\n",
"[[ 0.15470788 -0.03103321 0.14474995 -0.09415503 -0.17265566]\n [ 0.25098324 0.1987015 -0.0861486 -0.18626866 0.02080246]\n [ 0.21582745 0.20678866 0.02022225 -0.2662425 -0.10517941]\n [-0.0465669 -0.06400613 0.11120261 -0.18623494 0.01846401]\n [ 0.03995793 0.116187 -0.08362331 -0.1171196 -0.09572858]]\n(5, 5)\n"
]
],
[
[
"## Feature maps\n\nEach CNN has at least one convolutional layer that is composed of stacked filters (also known as convolutional kernels). As a CNN trains, it learns what weights to include in it's convolutional kernels and when these kernels are applied to some input image, they produce a set of **feature maps**. So, feature maps are just sets of filtered images; they are the images produced by applying a convolutional kernel to an input image. These maps show us the features that the different layers of the neural network learn to extract. For example, you might imagine a convolutional kernel that detects the vertical edges of a face or another one that detects the corners of eyes. You can see what kind of features each of these kernels detects by applying them to an image. One such example is shown below; from the way it brings out the lines in an the image, you might characterize this as an edge detection filter.\n\n<img src='images/feature_map_ex.png' width=50% height=50%/>\n\n\nNext, choose a test image and filter it with one of the convolutional kernels in your trained CNN; look at the filtered output to get an idea what that particular kernel detects.\n\n### Filter an image to see the effect of a convolutional kernel\n---",
"_____no_output_____"
]
],
[
[
"## load in and display any image from the transformed test dataset\nimport cv2\n\nimage = cv2.imread('images/mona_lisa.jpg')\n# convert image to grayscale\nimage = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY) / 255.0",
"_____no_output_____"
],
[
"## Using cv's filter2D function\nfilter_kernel = np.array([[ 0, 1, 1],\n [-1, 0, 1],\n [-1, -1, 0]])\n\nfiltered_image = cv2.filter2D(image, -1, filter_kernel)\n\nf, (ax1, ax2, ax3) = plt.subplots(ncols=3, nrows=1, figsize=(10, 5))\nax1.imshow(filter_kernel, cmap='gray')\nax2.imshow(image, cmap='gray')\nax3.imshow(filtered_image, cmap='gray')\n\nax1.set_title('Kernel')\nax2.set_title('Orginal Image')\nax3.set_title('Filtered image')\nplt.tight_layout();",
"_____no_output_____"
],
[
"## apply a specific set of filter weights (like the one displayed above) to the test image\nweights = net.conv1.weight.data.numpy()\n\nfilter_kernel = weights[filter_index][0]\nfiltered_image = cv2.filter2D(image, -1, filter_kernel)\n\nf, (ax1, ax2, ax3) = plt.subplots(ncols=3, nrows=1, figsize=(10, 5))\nax1.imshow(filter_kernel, cmap='gray')\nax2.imshow(image, cmap='gray')\nax3.imshow(filtered_image, cmap='gray')\n\nax1.set_title('Kernel')\nax2.set_title('Orginal Image')\nax3.set_title('Filtered image')\nplt.tight_layout();",
"_____no_output_____"
]
],
[
[
"---\n## Moving on!\n\nNow that we have defined and trained the model (and saved the best model), we are ready to move on to the last notebook, which combines a face detector with your saved model to create a facial keypoint detection system that can predict the keypoints on *any* face in an image!",
"_____no_output_____"
]
]
] | [
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown"
] | [
[
"markdown",
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
]
] |
d09a8db3491cc743bdcf0a22a5f00cc22df3af9d | 14,994 | ipynb | Jupyter Notebook | notebooks/RAC_least_squares.ipynb | jeremydavis-2/Jolanta-by-dvr | 025f7392ffc40c12ede2f07efefd1f2b0dcd8d35 | [
"Apache-2.0"
] | null | null | null | notebooks/RAC_least_squares.ipynb | jeremydavis-2/Jolanta-by-dvr | 025f7392ffc40c12ede2f07efefd1f2b0dcd8d35 | [
"Apache-2.0"
] | null | null | null | notebooks/RAC_least_squares.ipynb | jeremydavis-2/Jolanta-by-dvr | 025f7392ffc40c12ede2f07efefd1f2b0dcd8d35 | [
"Apache-2.0"
] | null | null | null | 29.691089 | 126 | 0.483127 | [
[
[
"import numpy as np\nfrom scipy.optimize import least_squares\n#from pandas import Series, DataFrame\nimport pandas as pd\nimport matplotlib\nimport matplotlib.pyplot as plt\nmatplotlib.use('Qt5Agg')\n%matplotlib qt5\n#\n# if pade.py is not in the current directory, set this path:\n#\n#import sys\n#sys.path.append('../Python_libs')\nfrom rac_aux import *",
"_____no_output_____"
],
[
"Angs2Bohr=1.8897259886\nau2eV=27.211386027\nau2cm=219474.63068\n#\n# files in the current directory do not need the path name\n#\n#df = pd.read_csv(\"/home/thomas/Python/StabPlots/Stab_data/1D_a0.2_b0_c0.14/crossing_1.dat\", delim_whitespace=True)\ndf = pd.read_csv(\"sb_rac.csv\")\n#df = pd.read_csv(\"crossing_1.dat\", delim_whitespace=True)\n\nplot_it=False\nif plot_it:\n plt.cla()\n plt.plot(df.l.values, df.E1.values, 'o-')\n plt.plot(df.l.values, df.E2.values, 'o-')\n plt.plot(df.l.values, df.E3.values, 'o-')\n plt.show()\ndf[:5]",
"_____no_output_____"
],
[
"#\n# put all negative E(lambda) points into the vectors: ls and Es\n#\ni_neg = np.argmin(abs(df.E1.values))\nif df.E1[i_neg] > 0:\n i_neg += 1\nls = df.l.values[i_neg:]\nprint('N=',len(ls))\nEs = df.E1.values[i_neg:]\nif plot_it:\n plt.cla()\n plt.plot(df.l.values, df.E1.values, 'b-')\n plt.plot(df.l.values, df.E2.values, 'b-')\n plt.plot(df.l.values, df.E3.values, 'b-')\n plt.plot(ls, Es, 'o', color=\"orange\")\n plt.show()",
"N= 89\n"
],
[
"#\n# So far, nm can be in [21, 31, 32, 42, 53]\n#\nnm=32\n\nfun=pade_32_lsq\njac=pade_32j_lsq",
"_____no_output_____"
],
[
"#\n# kappas, kappa**2, and sigmas (weights = sigma**2)\n# least_squares() passes parg to each pade_nm function\n#\nk2s = -Es\nks = np.sqrt(k2s)\nsigmas = weights(len(Es), 'ones')\n#sigmas = weights(len(Es), 'energy', E0=Es[11], Es=Es)\nparg=(ks,k2s,ls,sigmas)",
"_____no_output_____"
],
[
"# start params depend on nm\np31_opt = [2.4022, 0.2713, 1.2813, 0.4543]\np42_opt = [2.3919, 0.2964, 1.3187, 1.3736, 0.29655, 0.5078]\n\nE0 = linear_extra(ls,Es)\nG0 = 0.2*E0\nif nm == 21:\n p0s=[ls[0]] + guess(E0, G0)\nelif nm == 31:\n p0s=[ls[0]] + guess(E0, G0) + [10]\nelif nm == 32:\n p0s=[ls[0]] + guess(E0, G0) + [10] + [1]\nelif nm == 42:\n p0s=[ls[0]] + guess(E0, G0) + guess(5*E0,10*G0) + [10]\nelif nm == 53:\n p0s = p42_opt[0:5] + p31_opt[3:] + p42_opt[5:] + [1]\nelse:\n print(\"Warning\", nm, \"not implemented\")\nprint(p0s)\nprint(chi2_gen(p0s, ks, k2s, ls, sigmas, fun))",
"[2.4, 0.2704669890140844, 1.4666963639216455, 10, 1]\n1.0015809965763467\n"
],
[
"#\n# test the derivative of [n,m] \n#\n\nN=6\n\ndf1s = pade_gen_j_lsq(p0s, ks[-N:], k2s[-N:], ls[-N:], sigmas[-N:], fun)\nprint(\"num grad:\\n\", df1s)\n\ndf2s = jac(p0s, ks[-N:], k2s[-N:], ls[-N:], sigmas[-N:])\nprint(\"ana grad:\\n\", df2s)\n\nnp.sqrt(np.sum(np.square(df1s-df2s)))",
"num grad:\n [[ 0.77080867 0.39021477 -2.13292562 0.00635328 -0.06124711]\n [ 0.77901782 0.39225394 -2.15903672 0.00645951 -0.06228522]\n [ 0.78723047 0.39427653 -2.18515483 0.00656635 -0.06332949]\n [ 0.79544659 0.39628284 -2.21127985 0.00667381 -0.0643799 ]\n [ 0.80366612 0.39827313 -2.2374117 0.00678189 -0.06543642]\n [ 0.81188905 0.40024768 -2.26355027 0.00689057 -0.06649903]]\nana grad:\n [[ 0.77080867 0.39021477 -2.13292562 0.00635328 -0.06124711]\n [ 0.77901782 0.39225394 -2.15903672 0.00645951 -0.06228522]\n [ 0.78723047 0.39427653 -2.18515483 0.00656635 -0.06332949]\n [ 0.79544659 0.39628284 -2.21127985 0.00667381 -0.0643799 ]\n [ 0.80366612 0.39827313 -2.2374117 0.00678189 -0.06543642]\n [ 0.81188905 0.40024768 -2.26355027 0.00689057 -0.06649903]]\n"
],
[
"print('Least squares, trust-region-reflective (default) with 2-point jac')\nres = least_squares(fun, p0s, method='trf', jac='2-point', args=parg)\nprint(\"njev:\",res.njev)\nprint(\"cost:\",res.cost)\nprint(\"grad:\",res.grad)\nprint(\"message:\",res.message)\nprint(\"success:\",res.success)\nprint(\"x:\", res.x)\nprint('chi2 = %.3e' % (res.cost*2))\nprint(\"Er=%f, Gamma=%f\" % res_ene(res.x[1], res.x[2]))",
"Least squares, trust-region-reflective (default) with 2-point jac\nnjev: 98\ncost: 3.668166195430488e-08\ngrad: [-2.48509482e-11 -3.05390182e-12 5.03193369e-11 -1.27157436e-11\n 1.97579929e-11]\nmessage: `gtol` termination condition is satisfied.\nsuccess: True\nx: [2.39286302 0.31720262 1.31445718 1.03224749 0.18596322]\nchi2 = 7.336e-08\nEr=1.717674, Gamma=0.529030\n"
],
[
"print('Least squares, trust-region-reflective (default) with analytic jac')\nres = least_squares(fun, p0s, method='trf', jac=jac, args=parg)\nprint(\"njev:\",res.njev)\nprint(\"cost:\",res.cost)\nprint(\"grad:\",res.grad)\nprint(\"message:\",res.message)\nprint(\"success:\",res.success)\nprint(\"x:\", res.x)\nprint('chi2 = %.3e' % (res.cost*2))\nprint(\"Er=%f, Gamma=%f\" % res_ene(res.x[1], res.x[2]))",
"Least squares, trust-region-reflective (default) with analytic jac\nnjev: 98\ncost: 3.6681661954263234e-08\ngrad: [-2.63597878e-11 1.25826368e-11 7.03996035e-11 -1.03581419e-11\n 2.80148887e-11]\nmessage: `gtol` termination condition is satisfied.\nsuccess: True\nx: [2.39286302 0.31720262 1.31445718 1.03224742 0.1859632 ]\nchi2 = 7.336e-08\nEr=1.717674, Gamma=0.529030\n"
],
[
"print('Least squares, Levenberg-Marquardt with analytic jac')\nres = least_squares(fun, p0s, method='lm', jac=jac, args=parg)\nprint(\"njev:\",res.njev)\nprint(\"cost:\",res.cost)\nprint(\"grad:\",res.grad)\nprint(\"message:\",res.message)\nprint(\"success:\",res.success)\nprint(\"x:\", res.x)\nprint('chi2 = %.3e' % (res.cost*2))\nprint(\"Er=%f, Gamma=%f\" % res_ene(res.x[1], res.x[2]))",
"Least squares, Levenberg-Marquardt with analytic jac\nnjev: 92\ncost: 3.668166195416088e-08\ngrad: [-2.77802261e-11 1.32567909e-11 7.41961591e-11 -1.09292853e-11\n 2.95169864e-11]\nmessage: `ftol` termination condition is satisfied.\nsuccess: True\nx: [2.39286302 0.31720262 1.31445718 1.03224742 0.1859632 ]\nchi2 = 7.336e-08\nEr=1.717674, Gamma=0.529030\n"
],
[
"print('Least squares, TRF with bounds')\n\n#\n# bnds depend on the number of parameters\n#\nnpara=len(p0s)\nzs = np.zeros(npara)\ninfs = np.full(npara, np.inf)\nbnds=(zs, infs)\n\nres = least_squares(fun, p0s, jac=jac, bounds=bnds, args=parg)\nprint(\"njev:\",res.njev)\nprint(\"cost:\",res.cost)\nprint(\"grad:\",res.grad)\nprint(\"message:\",res.message)\nprint(\"success:\",res.success)\nprint(\"x:\", res.x)\nprint('chi2 = %.3e' % (res.cost*2))\nprint(\"Er=%f, Gamma=%f\" % res_ene(res.x[1], res.x[2]))",
"Least squares, TRF with bounds\nnjev: 31\ncost: 2.6744702809644768e-06\ngrad: [-8.12658327e-10 -4.76519391e-10 2.22819483e-09 -7.83010650e-12\n 1.58066482e-10]\nmessage: `gtol` termination condition is satisfied.\nsuccess: True\nx: [ 2.37126798 0.27344434 1.33872887 33.01593234 6.70979226]\nchi2 = 5.349e-06\nEr=1.786604, Gamma=0.400397\n"
],
[
"#\n# swipe energy filter \n#\nM=len(Es)\nsigmas = weights(M, 'ones')\nres = least_squares(fun, p0s, method='trf', jac=jac, args=(ks, k2s, ls, sigmas))\nEr, G = res_ene(res.x[1], res.x[2])\nprint('All weights equal: chi2 = %.3e Er=%f, Gamma=%f' % (res.cost*2, Er, G))\nM=len(Es)\nfor n in [0, M//4, M//2, 3*M//4, M-1]:\n sigmas = weights(M, 'energy', E0=Es[n], Es=Es)\n res = least_squares(fun, p0s, method='trf', jac=jac, args=(ks, k2s, ls, sigmas))\n Er, G = res_ene(res.x[1], res.x[2])\n print('Filter E = %6.2f: chi2 = %.3e Er=%f, Gamma=%f' % (Es[n], res.cost*2, Er, G))",
"All weights equal: chi2 = 7.336e-08 Er=1.717674, Gamma=0.529030\nFilter E = -0.00: chi2 = 4.824e-08 Er=1.727520, Gamma=0.521586\nFilter E = -3.30: chi2 = 2.607e-08 Er=1.703503, Gamma=0.510255\nFilter E = -6.81: chi2 = 2.007e-08 Er=1.693914, Gamma=0.501228\nFilter E = -10.43: chi2 = 1.762e-08 Er=1.687185, Gamma=0.492727\nFilter E = -14.11: chi2 = 1.899e-08 Er=1.678523, Gamma=0.477630\n"
]
]
] | [
"code"
] | [
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
d09a9a0f71abc188375082f9aff84820f4fe1285 | 3,582 | ipynb | Jupyter Notebook | final_rws.ipynb | andrewmum/Pose-Estimation-TF2 | e14a0133f298064fd9c6a6d17c4a6d47cbbd6302 | [
"Apache-2.0"
] | 1 | 2022-03-25T11:46:05.000Z | 2022-03-25T11:46:05.000Z | final_rws.ipynb | andrewmum/Pose-Estimation-TF2 | e14a0133f298064fd9c6a6d17c4a6d47cbbd6302 | [
"Apache-2.0"
] | null | null | null | final_rws.ipynb | andrewmum/Pose-Estimation-TF2 | e14a0133f298064fd9c6a6d17c4a6d47cbbd6302 | [
"Apache-2.0"
] | null | null | null | 24.875 | 106 | 0.54383 | [
[
[
"import cv2\nimport sys\nimport time\nimport logging\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport pickle\n\nfrom tf_pose import common\nfrom tf_pose.estimator import TfPoseEstimator\nfrom tf_pose.networks import get_graph_path, model_wh\n\n\n# cap equals video capture of this gif\ncap= cv2.VideoCapture('./images/squat.gif')\n# while there are frames\ni=0\nwhile(cap.isOpened()):\n# read frames\n ret, frame = cap.read()\n# if no more frames break\n if ret == False:\n break\n# write (this frame) into gif_spread/kang(i).jpg | frame\n cv2.imwrite('./gif_spread/kang'+str(i)+'.jpg',frame)\n i+=1\ncap.release()\ncv2.destroyAllWindows()",
"_____no_output_____"
],
[
"\n#parameters\nmodel='mobilenet_thin'\nresize='432x368'\nw, h = model_wh(resize)\n\n# creating estimator\ne = TfPoseEstimator(get_graph_path(model), target_size=(w, h))\n\nimage_path = './gif_spread/kang0.jpg'\nimage = cv2.imread(image_path)\nimage = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)\n# dont need to show\n# plt.imshow(image)\n# plt.grid();",
"_____no_output_____"
],
[
"image = common.read_imgfile(image_path, None, None)\nhumans = e.inference(image, resize_to_default=(w > 0 and h > 0), upsample_size=4.0)\nmax_prob = np.amax(e.heatMat[:, :, :-1], axis=2)\n# dont need to show image\n# plt.imshow(max_prob)\n# plt.grid();",
"_____no_output_____"
],
[
"plt.figure(figsize=(15,8))\nbgimg = cv2.cvtColor(image.astype(np.uint8), cv2.COLOR_BGR2RGB)\nbgimg = cv2.resize(bgimg, (e.heatMat.shape[1], e.heatMat.shape[0]), interpolation=cv2.INTER_AREA)\nplt.imshow(bgimg, alpha=0.5)\nplt.imshow(max_prob, alpha=0.5)\nplt.colorbar()\nplt.grid();\n# NEED THIS TO DRAW HUMAN\nimage = TfPoseEstimator.draw_humans(image, humans, imgcopy=False)\n\n# SAVE THE JPG OF THE SKELETON\ncv2.imwrite('./skelly/test.jpg',image)\n\n\n\n",
"_____no_output_____"
]
]
] | [
"code"
] | [
[
"code",
"code",
"code",
"code"
]
] |
d09a9e6b3ec9e0a399011c52089557b4f8359dea | 111,822 | ipynb | Jupyter Notebook | araghug_NN/Final_NN.ipynb | adithyarganesh/CSC591_004_Neural_Nets | 93df4fec09335215c4bbbb67449511daa032efd3 | [
"MIT"
] | null | null | null | araghug_NN/Final_NN.ipynb | adithyarganesh/CSC591_004_Neural_Nets | 93df4fec09335215c4bbbb67449511daa032efd3 | [
"MIT"
] | null | null | null | araghug_NN/Final_NN.ipynb | adithyarganesh/CSC591_004_Neural_Nets | 93df4fec09335215c4bbbb67449511daa032efd3 | [
"MIT"
] | null | null | null | 124.940782 | 36,724 | 0.815305 | [
[
[
"# Project 4: Neural Networks Project\nAll code was complied and run in Google Colab as Neural models take time to run and the university laptops donot have enough processing power to run the same.\n\n##### All comments and conclusions have been added right below each code block for easier analysis and understanding",
"_____no_output_____"
],
[
"<a href=\"https://colab.research.google.com/github/adithyarganesh/CSC591_004_Neural_Nets/blob/main/Final_NN.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>",
"_____no_output_____"
],
[
"# Task 1. Automatic grid search\n\n### Libraries\n\nKey libraries used are keras and scikit-learn",
"_____no_output_____"
]
],
[
[
"import pandas as pd\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport statsmodels.api as sm\nfrom sklearn.preprocessing import PolynomialFeatures\nfrom sklearn.model_selection import GridSearchCV, cross_val_score, KFold\nfrom sklearn.linear_model import LinearRegression\nfrom sklearn.metrics import mean_squared_error, r2_score \nfrom keras.wrappers.scikit_learn import KerasRegressor\nfrom keras.layers import Dense\nfrom keras.models import Sequential\nfrom keras.optimizers import Adam",
"_____no_output_____"
],
[
"data = pd.read_csv(\"20.csv\", header = None)",
"_____no_output_____"
],
[
"data.head()",
"_____no_output_____"
],
[
"data.corr()",
"_____no_output_____"
]
],
[
[
"From the correlation values determined for the dataset, we notice that there is a high correlation with the first column in comparison with the rest\n\nSplitting the data into train and test with a 2000 - 300 split",
"_____no_output_____"
]
],
[
[
"dataset = data.values\nX = dataset[:,0:5]\nY = dataset[:,5]\nX_test = X[-300:]\nX = X[:-300]\nY_test = Y[-300:]\nY = Y[:-300]",
"_____no_output_____"
]
],
[
[
"First, I decided to run a baseline model and see how the mse value for it is coming to be as this would give a perspective of how the values can increase with modification and hyperparameter tuning.",
"_____no_output_____"
]
],
[
[
"# define base model\ndef baseline():\n model = Sequential()\n model.add(Dense(5, input_dim=5, kernel_initializer='normal', activation='relu'))\n model.add(Dense(1, kernel_initializer='normal'))\n model.compile(loss='mean_squared_error', optimizer='adam')\n return model\n\nestimator = KerasRegressor(build_fn=baseline, epochs=100, batch_size=5, verbose=0)\nkfold = KFold(n_splits=10)\nresults = cross_val_score(estimator, X, Y, cv=kfold)\nprint(\"Baseline: %.2f (%.2f) MSE\" % (results.mean(), results.std()))",
"Baseline: -8204048.46 (24315010.89) MSE\n"
]
],
[
[
"As seen above, for a simple multilayer perceptron regressor, a very high mse value has been determined. This allows us to conclude that better hyperparameter tuning is required with modifications to other parameters such as learning rate, dropout, epochs etc.\n\nInitially, I decided to nail down which an ideal optimizer would be, then I decided to tweak the other major parameters as it takes hours to try every combination.\n\nFor a list of optimizers, epochs and batch sizes, I was able to conclude that Adam optimizer is the most ideal for the dataset given to me.\n\nThe mse values for each combination while run in gridsearch has been listed below.\n\nBest: -25979.201172 using {'batch_size': 20, 'epochs': 100, 'optimizer': 'adam'} \n-83848.133594 (31485.334665) with: {'batch_size': 10, 'epochs': 10, 'optimizer': 'adam'} \n-124149.147656 (106041.321994) with: {'batch_size': 10, 'epochs': 10, 'optimizer': 'RMSprop'} \n-17538629.000000 (6145950.034129) with: {'batch_size': 10, 'epochs': 10, 'optimizer': 'Adagrad'} \n-28976.654297 (6686.122457) with: {'batch_size': 10, 'epochs': 50, 'optimizer': 'adam'} \n-28985.950000 (4135.118675) with: {'batch_size': 10, 'epochs': 50, 'optimizer': 'RMSprop'} \n-1475655.350000 (144409.180757) with: {'batch_size': 10, 'epochs': 50, 'optimizer': 'Adagrad'} \n-31307.830078 (7195.229146) with: {'batch_size': 10, 'epochs': 100, 'optimizer': 'adam'} \n-35668.427344 (12147.983446) with: {'batch_size': 10, 'epochs': 100, 'optimizer': 'RMSprop'} \n-1435397.200000 (173003.982770) with: {'batch_size': 10, 'epochs': 100, 'optimizer': 'Adagrad'} \n-607021.156250 (225326.076199) with: {'batch_size': 20, 'epochs': 10, 'optimizer': 'adam'} \n-155434.096875 (67205.428782) with: {'batch_size': 20, 'epochs': 10, 'optimizer': 'RMSprop'} \n-39172515.600000 (7229904.980792) with: {'batch_size': 20, 'epochs': 10, 'optimizer': 'Adagrad'} \n-32730.587109 (9326.100937) with: {'batch_size': 20, 'epochs': 50, 'optimizer': 'adam'} \n-46073.637109 (17537.055165) with: {'batch_size': 20, 'epochs': 50, 'optimizer': 'RMSprop'} \n-1622539.675000 (233324.938891) with: {'batch_size': 20, 'epochs': 50, 'optimizer': 'Adagrad'} \n-25979.201172 (3285.793231) with: {'batch_size': 20, 'epochs': 100, 'optimizer': 'adam'} \n-44877.579688 (7302.797490) with: {'batch_size': 20, 'epochs': 100, 'optimizer': 'RMSprop'} \n-1489904.750000 (215725.142852) with: {'batch_size': 20, 'epochs': 100, 'optimizer': 'Adagrad'} \n-1350494.175000 (162489.428364) with: {'batch_size': 40, 'epochs': 10, 'optimizer': 'adam'} \n-742374.950000 (163049.310736) with: {'batch_size': 40, 'epochs': 10, 'optimizer': 'RMSprop'} \n-56523900.000000 (2665037.687018) with: {'batch_size': 40, 'epochs': 10, 'optimizer': 'Adagrad'} \n-56658.258203 (24003.537579) with: {'batch_size': 40, 'epochs': 50, 'optimizer': 'adam'} \n-64086.296094 (12042.358310) with: {'batch_size': 40, 'epochs': 50, 'optimizer': 'RMSprop'} \n-9372795.800000 (5108249.641949) with: {'batch_size': 40, 'epochs': 50, 'optimizer': 'Adagrad'} \n-30622.471875 (6322.287248) with: {'batch_size': 40, 'epochs': 100, 'optimizer': 'adam'} \n-36232.569531 (13259.656484) with: {'batch_size': 40, 'epochs': 100, 'optimizer': 'RMSprop'} \n-1600181.925000 (146014.239422) with: {'batch_size': 40, 'epochs': 100, 'optimizer': 'Adagrad'} \n-1390699.350000 (145640.273592) with: {'batch_size': 60, 'epochs': 10, 'optimizer': 'adam'} \n-1082542.925000 (144731.452078) with: {'batch_size': 60, 'epochs': 10, 'optimizer': 'RMSprop'} \n-62656396.800000 (559420.032519) with: {'batch_size': 60, 
'epochs': 10, 'optimizer': 'Adagrad'} \n-69710.080469 (40863.769851) with: {'batch_size': 60, 'epochs': 50, 'optimizer': 'adam'} \n-71970.824219 (24058.956433) with: {'batch_size': 60, 'epochs': 50, 'optimizer': 'RMSprop'} \n-16491987.400000 (3500092.027003) with: {'batch_size': 60, 'epochs': 50, 'optimizer': 'Adagrad'} \n-46966.215625 (15952.838801) with: {'batch_size': 60, 'epochs': 100, 'optimizer': 'adam'} \n-45104.332812 (10972.408712) with: {'batch_size': 60, 'epochs': 100, 'optimizer': 'RMSprop'} \n-2788073.200000 (698682.820182) with: {'batch_size': 60, 'epochs': 100, 'optimizer': 'Adagrad'} \n-1493044.875000 (155697.516601) with: {'batch_size': 80, 'epochs': 10, 'optimizer': 'adam'} \n-1351079.800000 (130707.791587) with: {'batch_size': 80, 'epochs': 10, 'optimizer': 'RMSprop'} \n-65509906.400000 (3248526.947553) with: {'batch_size': 80, 'epochs': 10, 'optimizer': 'Adagrad'} \n-263853.200000 (123436.623595) with: {'batch_size': 80, 'epochs': 50, 'optimizer': 'adam'} \n-92486.471875 (25669.353331) with: {'batch_size': 80, 'epochs': 50, 'optimizer': 'RMSprop'} \n-25053901.200000 (2766136.455614) with: {'batch_size': 80, 'epochs': 50, 'optimizer': 'Adagrad'} \n-41316.805469 (6963.559710) with: {'batch_size': 80, 'epochs': 100, 'optimizer': 'adam'} \n-47747.921094 (15393.723483) with: {'batch_size': 80, 'epochs': 100, 'optimizer': 'RMSprop'} \n-6449660.600000 (3418975.118244) with: {'batch_size': 80, 'epochs': 100, 'optimizer': 'Adagrad'} \n-1476760.825000 (167679.598081) with: {'batch_size': 100, 'epochs': 10, 'optimizer': 'adam'} \n-1404041.825000 (201396.916914) with: {'batch_size': 100, 'epochs': 10, 'optimizer': 'RMSprop'} \n-72146363.200000 (2264547.064452) with: {'batch_size': 100, 'epochs': 10, 'optimizer': 'Adagrad'} \n-352332.837500 (104710.011595) with: {'batch_size': 100, 'epochs': 50, 'optimizer': 'adam'} \n-90365.727344 (25890.780854) with: {'batch_size': 100, 'epochs': 50, 'optimizer': 'RMSprop'} \n-32726843.600000 (8646951.726295) with: {'batch_size': 100, 'epochs': 50, 'optimizer': 'Adagrad'} \n-42565.274219 (14731.363104) with: {'batch_size': 100, 'epochs': 100, 'optimizer': 'adam'} \n-65972.997656 (29357.657998) with: {'batch_size': 100, 'epochs': 100, 'optimizer': 'RMSprop'} \n-11417867.800000 (2452926.970178) with: {'batch_size': 100, 'epochs': 100, 'optimizer': 'Adagrad'} ",
"_____no_output_____"
]
],
[
[
"def custom_model( momentum=0, dropout_rate=0.0, learn_rate=0.01, epochs = 10, verbose=0):\n model = Sequential()\n model.add(Dense(128, input_dim=X.shape[1], activation='relu'))\n model.add(Dense(64, activation='relu'))\n model.add(Dense(1))\n adam = Adam(lr=0.001, beta_1=0.9, beta_2=0.999, epsilon=None, decay=0.0, amsgrad=False)\n model.compile(loss='mean_squared_error', optimizer=adam, metrics=['mse'])\n return model\n\nnp.random.seed(5)\n\nmodel = KerasRegressor(build_fn=custom_model, verbose=0)\n\n# Hyperparameter tuning\nlearn_rate = [0.0001, 0.001, 0.01]\ndropout_rate = [0.0, 0.2, 0.3]\nbatch_size = [10, 50, 100]\nepochs = [10, 50, 100]\n\nparam_grid = dict(batch_size=batch_size, epochs=epochs, learn_rate=learn_rate, dropout_rate=dropout_rate)\n\ngrid = GridSearchCV(estimator=model, param_grid=param_grid, n_jobs=-1)\ngrid_result = grid.fit(X, Y)",
"_____no_output_____"
]
],
[
[
"I then created a model with two dense layers and used the Adam optimizer to perform the remaining hyperparameter tuning. There were the outputs that were obtained",
"_____no_output_____"
]
],
[
[
"print(\"Best mse is %f with params --> %s\" % (grid_result.best_score_, grid_result.best_params_))\nmeans = grid_result.cv_results_['mean_test_score']\nstd_dev = grid_result.cv_results_['std_test_score']\ntuned_params = grid_result.cv_results_['params' ]\nfor mean, stdev, param in zip(means, std_dev, tuned_params):\n print(\"%f, %f ----> %r\" % (mean, stdev, param))",
"Best mse is -24887.330078 with params --> {'batch_size': 10, 'dropout_rate': 0.2, 'epochs': 100, 'learn_rate': 0.01}\n-83463.160156, 25271.865298 ----> {'batch_size': 10, 'dropout_rate': 0.0, 'epochs': 10, 'learn_rate': 0.0001}\n-88775.615625, 23038.250922 ----> {'batch_size': 10, 'dropout_rate': 0.0, 'epochs': 10, 'learn_rate': 0.001}\n-90300.914844, 32942.413488 ----> {'batch_size': 10, 'dropout_rate': 0.0, 'epochs': 10, 'learn_rate': 0.01}\n-36012.864453, 11941.555961 ----> {'batch_size': 10, 'dropout_rate': 0.0, 'epochs': 50, 'learn_rate': 0.0001}\n-31121.520313, 7992.348638 ----> {'batch_size': 10, 'dropout_rate': 0.0, 'epochs': 50, 'learn_rate': 0.001}\n-28983.807812, 5356.626577 ----> {'batch_size': 10, 'dropout_rate': 0.0, 'epochs': 50, 'learn_rate': 0.01}\n-35069.562109, 8180.334911 ----> {'batch_size': 10, 'dropout_rate': 0.0, 'epochs': 100, 'learn_rate': 0.0001}\n-33771.587500, 7284.982974 ----> {'batch_size': 10, 'dropout_rate': 0.0, 'epochs': 100, 'learn_rate': 0.001}\n-32479.938281, 8067.058798 ----> {'batch_size': 10, 'dropout_rate': 0.0, 'epochs': 100, 'learn_rate': 0.01}\n-65907.715625, 24826.923191 ----> {'batch_size': 10, 'dropout_rate': 0.2, 'epochs': 10, 'learn_rate': 0.0001}\n-77717.960156, 34020.840710 ----> {'batch_size': 10, 'dropout_rate': 0.2, 'epochs': 10, 'learn_rate': 0.001}\n-85224.619531, 29205.053937 ----> {'batch_size': 10, 'dropout_rate': 0.2, 'epochs': 10, 'learn_rate': 0.01}\n-33830.892578, 3986.515899 ----> {'batch_size': 10, 'dropout_rate': 0.2, 'epochs': 50, 'learn_rate': 0.0001}\n-31440.497656, 7374.794438 ----> {'batch_size': 10, 'dropout_rate': 0.2, 'epochs': 50, 'learn_rate': 0.001}\n-27606.241406, 4662.202180 ----> {'batch_size': 10, 'dropout_rate': 0.2, 'epochs': 50, 'learn_rate': 0.01}\n-29065.995703, 5938.747315 ----> {'batch_size': 10, 'dropout_rate': 0.2, 'epochs': 100, 'learn_rate': 0.0001}\n-26874.994922, 4138.167862 ----> {'batch_size': 10, 'dropout_rate': 0.2, 'epochs': 100, 'learn_rate': 0.001}\n-24887.330078, 3946.426631 ----> {'batch_size': 10, 'dropout_rate': 0.2, 'epochs': 100, 'learn_rate': 0.01}\n-70273.667188, 37229.902720 ----> {'batch_size': 10, 'dropout_rate': 0.3, 'epochs': 10, 'learn_rate': 0.0001}\n-107712.754687, 46457.342344 ----> {'batch_size': 10, 'dropout_rate': 0.3, 'epochs': 10, 'learn_rate': 0.001}\n-79865.365625, 20842.438363 ----> {'batch_size': 10, 'dropout_rate': 0.3, 'epochs': 10, 'learn_rate': 0.01}\n-29312.818750, 6711.976675 ----> {'batch_size': 10, 'dropout_rate': 0.3, 'epochs': 50, 'learn_rate': 0.0001}\n-28767.895313, 2799.946145 ----> {'batch_size': 10, 'dropout_rate': 0.3, 'epochs': 50, 'learn_rate': 0.001}\n-33539.787500, 7869.685157 ----> {'batch_size': 10, 'dropout_rate': 0.3, 'epochs': 50, 'learn_rate': 0.01}\n-29265.059375, 4962.019223 ----> {'batch_size': 10, 'dropout_rate': 0.3, 'epochs': 100, 'learn_rate': 0.0001}\n-38803.658594, 26620.278088 ----> {'batch_size': 10, 'dropout_rate': 0.3, 'epochs': 100, 'learn_rate': 0.001}\n-26067.693750, 2738.324925 ----> {'batch_size': 10, 'dropout_rate': 0.3, 'epochs': 100, 'learn_rate': 0.01}\n-1391231.450000, 131769.763490 ----> {'batch_size': 50, 'dropout_rate': 0.0, 'epochs': 10, 'learn_rate': 0.0001}\n-1394257.100000, 148163.606630 ----> {'batch_size': 50, 'dropout_rate': 0.0, 'epochs': 10, 'learn_rate': 0.001}\n-1334672.825000, 121353.652625 ----> {'batch_size': 50, 'dropout_rate': 0.0, 'epochs': 10, 'learn_rate': 0.01}\n-77671.030078, 72929.877871 ----> {'batch_size': 50, 'dropout_rate': 0.0, 'epochs': 50, 'learn_rate': 0.0001}\n-54267.003906, 
26992.621148 ----> {'batch_size': 50, 'dropout_rate': 0.0, 'epochs': 50, 'learn_rate': 0.001}\n-53134.264062, 25623.205015 ----> {'batch_size': 50, 'dropout_rate': 0.0, 'epochs': 50, 'learn_rate': 0.01}\n-31013.926172, 3174.209647 ----> {'batch_size': 50, 'dropout_rate': 0.0, 'epochs': 100, 'learn_rate': 0.0001}\n-48720.391016, 16649.325040 ----> {'batch_size': 50, 'dropout_rate': 0.0, 'epochs': 100, 'learn_rate': 0.001}\n-31847.444141, 13980.987319 ----> {'batch_size': 50, 'dropout_rate': 0.0, 'epochs': 100, 'learn_rate': 0.01}\n-1345242.825000, 160264.462418 ----> {'batch_size': 50, 'dropout_rate': 0.2, 'epochs': 10, 'learn_rate': 0.0001}\n-1343731.025000, 142865.047342 ----> {'batch_size': 50, 'dropout_rate': 0.2, 'epochs': 10, 'learn_rate': 0.001}\n-1354697.775000, 124586.394037 ----> {'batch_size': 50, 'dropout_rate': 0.2, 'epochs': 10, 'learn_rate': 0.01}\n-57313.091016, 18280.070121 ----> {'batch_size': 50, 'dropout_rate': 0.2, 'epochs': 50, 'learn_rate': 0.0001}\n-51404.878125, 26886.678269 ----> {'batch_size': 50, 'dropout_rate': 0.2, 'epochs': 50, 'learn_rate': 0.001}\n-53362.170312, 20306.481582 ----> {'batch_size': 50, 'dropout_rate': 0.2, 'epochs': 50, 'learn_rate': 0.01}\n-31464.076953, 6984.338670 ----> {'batch_size': 50, 'dropout_rate': 0.2, 'epochs': 100, 'learn_rate': 0.0001}\n-34847.044141, 15228.993071 ----> {'batch_size': 50, 'dropout_rate': 0.2, 'epochs': 100, 'learn_rate': 0.001}\n-32029.678125, 5482.817316 ----> {'batch_size': 50, 'dropout_rate': 0.2, 'epochs': 100, 'learn_rate': 0.01}\n-1379998.125000, 172412.173111 ----> {'batch_size': 50, 'dropout_rate': 0.3, 'epochs': 10, 'learn_rate': 0.0001}\n-1378416.150000, 94280.468042 ----> {'batch_size': 50, 'dropout_rate': 0.3, 'epochs': 10, 'learn_rate': 0.001}\n-1372643.625000, 120326.731641 ----> {'batch_size': 50, 'dropout_rate': 0.3, 'epochs': 10, 'learn_rate': 0.01}\n-56712.373437, 11561.815221 ----> {'batch_size': 50, 'dropout_rate': 0.3, 'epochs': 50, 'learn_rate': 0.0001}\n-64784.871875, 12282.235965 ----> {'batch_size': 50, 'dropout_rate': 0.3, 'epochs': 50, 'learn_rate': 0.001}\n-69941.793750, 38007.654682 ----> {'batch_size': 50, 'dropout_rate': 0.3, 'epochs': 50, 'learn_rate': 0.01}\n-37416.461328, 13574.281323 ----> {'batch_size': 50, 'dropout_rate': 0.3, 'epochs': 100, 'learn_rate': 0.0001}\n-45251.885156, 16110.994895 ----> {'batch_size': 50, 'dropout_rate': 0.3, 'epochs': 100, 'learn_rate': 0.001}\n-29959.032031, 6280.830413 ----> {'batch_size': 50, 'dropout_rate': 0.3, 'epochs': 100, 'learn_rate': 0.01}\n-1494333.325000, 171158.091437 ----> {'batch_size': 100, 'dropout_rate': 0.0, 'epochs': 10, 'learn_rate': 0.0001}\n-1492897.475000, 136893.803305 ----> {'batch_size': 100, 'dropout_rate': 0.0, 'epochs': 10, 'learn_rate': 0.001}\n-1481787.225000, 181406.314004 ----> {'batch_size': 100, 'dropout_rate': 0.0, 'epochs': 10, 'learn_rate': 0.01}\n-362559.275000, 144505.187410 ----> {'batch_size': 100, 'dropout_rate': 0.0, 'epochs': 50, 'learn_rate': 0.0001}\n-493743.925000, 175628.419863 ----> {'batch_size': 100, 'dropout_rate': 0.0, 'epochs': 50, 'learn_rate': 0.001}\n-425601.634375, 138994.073447 ----> {'batch_size': 100, 'dropout_rate': 0.0, 'epochs': 50, 'learn_rate': 0.01}\n-56394.031250, 29822.650123 ----> {'batch_size': 100, 'dropout_rate': 0.0, 'epochs': 100, 'learn_rate': 0.0001}\n-62118.402734, 33738.726838 ----> {'batch_size': 100, 'dropout_rate': 0.0, 'epochs': 100, 'learn_rate': 0.001}\n-45354.618750, 12476.379207 ----> {'batch_size': 100, 'dropout_rate': 0.0, 'epochs': 100, 'learn_rate': 
0.01}\n-1520627.550000, 153316.697647 ----> {'batch_size': 100, 'dropout_rate': 0.2, 'epochs': 10, 'learn_rate': 0.0001}\n-1525229.250000, 192898.677445 ----> {'batch_size': 100, 'dropout_rate': 0.2, 'epochs': 10, 'learn_rate': 0.001}\n-1576432.075000, 140458.977827 ----> {'batch_size': 100, 'dropout_rate': 0.2, 'epochs': 10, 'learn_rate': 0.01}\n-438015.534375, 169262.908999 ----> {'batch_size': 100, 'dropout_rate': 0.2, 'epochs': 50, 'learn_rate': 0.0001}\n-526000.006250, 172199.747528 ----> {'batch_size': 100, 'dropout_rate': 0.2, 'epochs': 50, 'learn_rate': 0.001}\n-302433.350000, 203837.546876 ----> {'batch_size': 100, 'dropout_rate': 0.2, 'epochs': 50, 'learn_rate': 0.01}\n-65191.571875, 16258.684909 ----> {'batch_size': 100, 'dropout_rate': 0.2, 'epochs': 100, 'learn_rate': 0.0001}\n-65105.631250, 21753.605495 ----> {'batch_size': 100, 'dropout_rate': 0.2, 'epochs': 100, 'learn_rate': 0.001}\n-42338.783203, 14474.060719 ----> {'batch_size': 100, 'dropout_rate': 0.2, 'epochs': 100, 'learn_rate': 0.01}\n-1580144.925000, 167657.703649 ----> {'batch_size': 100, 'dropout_rate': 0.3, 'epochs': 10, 'learn_rate': 0.0001}\n-1572937.625000, 189683.774624 ----> {'batch_size': 100, 'dropout_rate': 0.3, 'epochs': 10, 'learn_rate': 0.001}\n-1567378.325000, 161370.750398 ----> {'batch_size': 100, 'dropout_rate': 0.3, 'epochs': 10, 'learn_rate': 0.01}\n-461903.221875, 243098.024866 ----> {'batch_size': 100, 'dropout_rate': 0.3, 'epochs': 50, 'learn_rate': 0.0001}\n-329203.254688, 191314.508843 ----> {'batch_size': 100, 'dropout_rate': 0.3, 'epochs': 50, 'learn_rate': 0.001}\n-514557.693750, 125854.978822 ----> {'batch_size': 100, 'dropout_rate': 0.3, 'epochs': 50, 'learn_rate': 0.01}\n-68567.199219, 37323.214715 ----> {'batch_size': 100, 'dropout_rate': 0.3, 'epochs': 100, 'learn_rate': 0.0001}\n-46243.672266, 26302.250809 ----> {'batch_size': 100, 'dropout_rate': 0.3, 'epochs': 100, 'learn_rate': 0.001}\n-79122.269922, 52110.550128 ----> {'batch_size': 100, 'dropout_rate': 0.3, 'epochs': 100, 'learn_rate': 0.01}\n"
]
],
[
[
"From the above values, we notice that the most optimal set of attributes were found to be. \n#### 'batch_size': 10, 'dropout_rate': 0.2, 'epochs': 100, 'learn_rate': 0.01",
"_____no_output_____"
],
[
"## Task 2 - Compare the trained neural networkwith multivariable regression",
"_____no_output_____"
]
],
[
[
"X2 = sm.add_constant(X)\nest = sm.OLS(Y, X2)\nest2 = est.fit()",
"_____no_output_____"
],
[
"print(est2.summary())",
" OLS Regression Results \n==============================================================================\nDep. Variable: y R-squared: 0.960\nModel: OLS Adj. R-squared: 0.960\nMethod: Least Squares F-statistic: 9585.\nDate: Thu, 05 Nov 2020 Prob (F-statistic): 0.00\nTime: 01:55:01 Log-Likelihood: -14158.\nNo. Observations: 2000 AIC: 2.833e+04\nDf Residuals: 1994 BIC: 2.836e+04\nDf Model: 5 \nCovariance Type: nonrobust \n==============================================================================\n coef std err t P>|t| [0.025 0.975]\n------------------------------------------------------------------------------\nconst -2567.9162 142.217 -18.056 0.000 -2846.826 -2289.007\nx1 55.0369 0.259 212.402 0.000 54.529 55.545\nx2 2.2014 0.267 8.252 0.000 1.678 2.725\nx3 5.6969 0.266 21.387 0.000 5.175 6.219\nx4 6.9531 0.251 27.745 0.000 6.462 7.445\nx5 9.1432 0.263 34.767 0.000 8.627 9.659\n==============================================================================\nOmnibus: 1395.967 Durbin-Watson: 1.974\nProb(Omnibus): 0.000 Jarque-Bera (JB): 29299.309\nSkew: 3.018 Prob(JB): 0.00\nKurtosis: 20.753 Cond. No. 1.22e+04\n==============================================================================\n\nWarnings:\n[1] Standard Errors assume that the covariance matrix of the errors is correctly specified.\n[2] The condition number is large, 1.22e+04. This might indicate that there are\nstrong multicollinearity or other numerical problems.\n"
],
[
"reg2 = LinearRegression()\nreg2.fit(X, Y)\n\nprint(\"The linear model is: Y = {:.5} + {:.5}*X1 + {:.5}*X2 + {:.5}*X3 + {:.5}*X4 + {:.5}*X5\".format(reg2.intercept_, reg2.coef_[0], reg2.coef_[1], reg2.coef_[2], reg2.coef_[3], reg2.coef_[4]))\nprint(\"Y = a0 + a1X1 + a3X3 + a4X4 + a5X5\")",
"The linear model is: Y = -2567.9 + 55.037*X1 + 2.2014*X2 + 5.6969*X3 + 6.9531*X4 + 9.1432*X5\nY = a0 + a1X1 + a3X3 + a4X4 + a5X5\n"
]
],
[
[
"We now calculate the sum of squared errors (SSE) for each of the models and determine which is the better model",
"_____no_output_____"
]
],
[
[
"LR_sse = 0\nfor v in Y - reg2.predict(X):\n LR_sse += v**2",
"_____no_output_____"
],
[
"NN_sse = 0\nfor v in Y - grid_result.predict(X):\n NN_sse += v**2",
"_____no_output_____"
],
[
"print(\"SSE for Multivariate regression: \", LR_sse)\nprint(\"SSE for estimation with Neural Moedl: \", NN_sse)",
"SSE for Multivariate regression: 164973673.90797538\nSSE for estimation with Neural Moedl: 44258448.18429801\n"
]
],
[
[
"It can be seen that the SSE value for the custom neural model created with hyperparameter tuning seems to fare better in comparison to the Multivariable linear regression.\n\nBelow are two sample predictions made on untrained test data by both the models. To plain sight, the difference is minimal but on further analysis with hyper parammeter tuning, we see a much bigger difference in performance between the two models.",
"_____no_output_____"
]
],
[
[
"Y_test_pred = reg2.predict(X_test)\nplt.plot(Y_test_pred[:50])\nplt.plot(Y_test[:50])",
"_____no_output_____"
],
[
"Y_test_pred_NN = grid_result.predict(X_test)\nplt.plot(Y_test_pred_NN[:50])\nplt.plot(Y_test[:50])",
"_____no_output_____"
]
],
[
[
"## Conclusions\n\nWe notice that hyperparameter tuning is important and upon proper analysis choice of the parameters, a neural model can perform better than the previously run Multivariable regression model.\n\nRefs:\nhttps://machinelearningmastery.com/tutorial-first-neural-network-python-keras/\n\nhttps://machinelearningmastery.com/regression-tutorial-keras-deep-learning-library-python/\n\nhttps://www.kaggle.com/willkoehrsen/intro-to-model-tuning-grid-and-random-search",
"_____no_output_____"
]
]
] | [
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown"
] | [
[
"markdown",
"markdown",
"markdown"
],
[
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
]
] |
d09aa78ab938121d81cd7e3e77214655da2f6175 | 4,751 | ipynb | Jupyter Notebook | Arrays and Linked Lists/011_Pascal's-Triangle.ipynb | parikshitsaikia1619/DSA_Mastery | bfeab4780d4de372e305c7a517abd04186af0b87 | [
"MIT"
] | null | null | null | Arrays and Linked Lists/011_Pascal's-Triangle.ipynb | parikshitsaikia1619/DSA_Mastery | bfeab4780d4de372e305c7a517abd04186af0b87 | [
"MIT"
] | null | null | null | Arrays and Linked Lists/011_Pascal's-Triangle.ipynb | parikshitsaikia1619/DSA_Mastery | bfeab4780d4de372e305c7a517abd04186af0b87 | [
"MIT"
] | null | null | null | 21.400901 | 116 | 0.505367 | [
[
[
"### Problem Statement\n\nFind and return the `nth` row of Pascal's triangle in the form a list. `n` is 0-based.\n\nFor exmaple, if `n = 4`, then `output = [1, 4, 6, 4, 1]`.\n\nTo know more about Pascal's triangle: https://www.mathsisfun.com/pascals-triangle.html",
"_____no_output_____"
]
],
[
[
"#%% Imports and functions declarations \nfrom math import factorial\n\n\ndef combinations(total_num: int, choosen_num: int) -> int:\n \"\"\"\n Returns the number of available combinations given a number of elements and the subspace selected\n :param total_num: number of total elements\n :param choosen_num: number of elements of the subspace\n :return: number of total combinations\n \"\"\"\n return int(factorial(total_num)/(factorial(choosen_num)*factorial(total_num-choosen_num)))\n\n\ndef nth_row_pascal(num_row: int) -> list:\n \"\"\"\n Given the number of the row, generates the specifiy values present in this pascal triangle\n :param num_row: number of row to represent \n :return: pascal's triangle row \n \"\"\"\n row_result = []\n for i in range(num_row+1):\n row_result.append(combinations(num_row, i))\n return row_result",
"_____no_output_____"
]
],
[
[
"<span class=\"graffiti-highlight graffiti-id_wf20h2l-id_cuoppnd\"><i></i><button>Show Solution</button></span>",
"_____no_output_____"
]
],
[
[
"def test_function(test_case):\n n = test_case[0]\n solution = test_case[1]\n output = nth_row_pascal(n)\n if solution == output:\n print(\"Pass\")\n else:\n print(\"Fail\")",
"_____no_output_____"
],
[
"n = 0\nsolution = [1]\n\ntest_case = [n, solution]\ntest_function(test_case)",
"Pass\n"
],
[
"n = 1\nsolution = [1, 1]\n\ntest_case = [n, solution]\ntest_function(test_case)",
"Pass\n"
],
[
"n = 2\nsolution = [1, 2, 1]\n\ntest_case = [n, solution]\ntest_function(test_case)",
"Pass\n"
],
[
"n = 3\nsolution = [1, 3, 3, 1]\n\ntest_case = [n, solution]\ntest_function(test_case)",
"Pass\n"
],
[
"n = 4\nsolution = [1, 4, 6, 4, 1]\n\ntest_case = [n, solution]\ntest_function(test_case)",
"Pass\n"
]
]
] | [
"markdown",
"code",
"markdown",
"code"
] | [
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
d09ac337ce3e9072426ecabe0fb55a0f28352cfa | 236,011 | ipynb | Jupyter Notebook | Model backlog/Train/53-tweet-train-3fold-roberta-base-pb2.ipynb | dimitreOliveira/Tweet-Sentiment-Extraction | 0a775abe9a92c4bc2db957519c523be7655df8d8 | [
"MIT"
] | 11 | 2020-06-17T07:30:20.000Z | 2022-03-25T16:56:01.000Z | Model backlog/Train/53-tweet-train-3fold-roberta-base-pb2.ipynb | dimitreOliveira/Tweet-Sentiment-Extraction | 0a775abe9a92c4bc2db957519c523be7655df8d8 | [
"MIT"
] | null | null | null | Model backlog/Train/53-tweet-train-3fold-roberta-base-pb2.ipynb | dimitreOliveira/Tweet-Sentiment-Extraction | 0a775abe9a92c4bc2db957519c523be7655df8d8 | [
"MIT"
] | null | null | null | 242.809671 | 189,572 | 0.87082 | [
[
[
"## Dependencies",
"_____no_output_____"
]
],
[
[
"import json, warnings, shutil\nfrom tweet_utility_scripts import *\nfrom tweet_utility_preprocess_roberta_scripts import *\nfrom transformers import TFRobertaModel, RobertaConfig\nfrom tokenizers import ByteLevelBPETokenizer\nfrom tensorflow.keras.models import Model\nfrom tensorflow.keras import optimizers, metrics, losses, layers\nfrom tensorflow.keras.callbacks import EarlyStopping, TensorBoard, ModelCheckpoint\n\nSEED = 0\nseed_everything(SEED)\nwarnings.filterwarnings(\"ignore\")",
"_____no_output_____"
]
],
[
[
"# Load data",
"_____no_output_____"
]
],
[
[
"database_base_path = '/kaggle/input/tweet-dataset-split-roberta-base-96/'\nk_fold = pd.read_csv(database_base_path + '5-fold.csv')\ndisplay(k_fold.head())\n\n# Unzip files\n!tar -xvf /kaggle/input/tweet-dataset-split-roberta-base-96/fold_1.tar.gz\n!tar -xvf /kaggle/input/tweet-dataset-split-roberta-base-96/fold_2.tar.gz\n!tar -xvf /kaggle/input/tweet-dataset-split-roberta-base-96/fold_3.tar.gz\n# !tar -xvf /kaggle/input/tweet-dataset-split-roberta-base-96/fold_4.tar.gz\n# !tar -xvf /kaggle/input/tweet-dataset-split-roberta-base-96/fold_5.tar.gz",
"_____no_output_____"
]
],
[
[
"# Model parameters",
"_____no_output_____"
]
],
[
[
"vocab_path = database_base_path + 'vocab.json'\nmerges_path = database_base_path + 'merges.txt'\nbase_path = '/kaggle/input/qa-transformers/roberta/'\n\nconfig = {\n \"MAX_LEN\": 96,\n \"BATCH_SIZE\": 32,\n \"EPOCHS\": 5,\n \"LEARNING_RATE\": 3e-5,\n \"ES_PATIENCE\": 1,\n \"question_size\": 4,\n \"N_FOLDS\": 1,\n \"base_model_path\": base_path + 'roberta-base-tf_model.h5',\n \"config_path\": base_path + 'roberta-base-config.json'\n}\n\nwith open('config.json', 'w') as json_file:\n json.dump(json.loads(json.dumps(config)), json_file)",
"_____no_output_____"
]
],
[
[
"# Model",
"_____no_output_____"
]
],
[
[
"module_config = RobertaConfig.from_pretrained(config['config_path'], output_hidden_states=False)\n\ndef model_fn(MAX_LEN):\n input_ids = layers.Input(shape=(MAX_LEN,), dtype=tf.int32, name='input_ids')\n attention_mask = layers.Input(shape=(MAX_LEN,), dtype=tf.int32, name='attention_mask')\n \n base_model = TFRobertaModel.from_pretrained(config['base_model_path'], config=module_config, name=\"base_model\")\n sequence_output = base_model({'input_ids': input_ids, 'attention_mask': attention_mask})\n last_state = sequence_output[0]\n \n x_start = layers.Conv1D(1, 1)(last_state)\n x_start = layers.Flatten()(x_start)\n y_start = layers.Activation('softmax', name='y_start')(x_start)\n\n x_end = layers.Conv1D(1, 1)(last_state)\n x_end = layers.Flatten()(x_end)\n y_end = layers.Activation('softmax', name='y_end')(x_end)\n \n model = Model(inputs=[input_ids, attention_mask], outputs=[y_start, y_end])\n model.compile(optimizers.Adam(lr=config['LEARNING_RATE']), \n loss=losses.CategoricalCrossentropy(), \n metrics=[metrics.CategoricalAccuracy()])\n \n return model",
"_____no_output_____"
]
],
[
[
"# Tokenizer",
"_____no_output_____"
]
],
[
[
"tokenizer = ByteLevelBPETokenizer(vocab_file=vocab_path, merges_file=merges_path, lowercase=True, add_prefix_space=True)\ntokenizer.save('./')",
"_____no_output_____"
]
],
[
[
"# Train",
"_____no_output_____"
]
],
[
[
"history_list = []\nAUTO = tf.data.experimental.AUTOTUNE\n\nfor n_fold in range(config['N_FOLDS']):\n n_fold +=1\n print('\\nFOLD: %d' % (n_fold))\n # Load data\n base_data_path = 'fold_%d/' % (n_fold)\n x_train = np.load(base_data_path + 'x_train.npy')\n y_train = np.load(base_data_path + 'y_train.npy')\n x_valid = np.load(base_data_path + 'x_valid.npy')\n y_valid = np.load(base_data_path + 'y_valid.npy')\n \n ### Delete data dir\n shutil.rmtree(base_data_path)\n\n # Train model\n model_path = 'model_fold_%d.h5' % (n_fold)\n model = model_fn(config['MAX_LEN'])\n es = EarlyStopping(monitor='val_loss', mode='min', patience=config['ES_PATIENCE'], \n restore_best_weights=True, verbose=1)\n checkpoint = ModelCheckpoint(model_path, monitor='val_loss', mode='min', \n save_best_only=True, save_weights_only=True)\n\n history = model.fit(list(x_train), list(y_train),\n validation_data=(list(x_valid), list(y_valid)),\n batch_size=config['BATCH_SIZE'], \n callbacks=[checkpoint, es],\n epochs=config['EPOCHS'], \n verbose=2).history\n history_list.append(history)\n\n \n # Make predictions\n train_preds = model.predict(list(x_train))\n valid_preds = model.predict(list(x_valid))\n \n k_fold.loc[k_fold['fold_%d' % (n_fold)] == 'train', 'start_fold_%d' % (n_fold)] = train_preds[0].argmax(axis=-1)\n k_fold.loc[k_fold['fold_%d' % (n_fold)] == 'train', 'end_fold_%d' % (n_fold)] = train_preds[1].argmax(axis=-1)\n k_fold.loc[k_fold['fold_%d' % (n_fold)] == 'validation', 'start_fold_%d' % (n_fold)] = valid_preds[0].argmax(axis=-1)\n k_fold.loc[k_fold['fold_%d' % (n_fold)] == 'validation', 'end_fold_%d' % (n_fold)] = valid_preds[1].argmax(axis=-1)\n \n k_fold['end_fold_%d' % (n_fold)] = k_fold['end_fold_%d' % (n_fold)].astype(int)\n k_fold['start_fold_%d' % (n_fold)] = k_fold['start_fold_%d' % (n_fold)].astype(int)\n k_fold['end_fold_%d' % (n_fold)].clip(0, k_fold['text_len'], inplace=True)\n k_fold['start_fold_%d' % (n_fold)].clip(0, k_fold['end_fold_%d' % (n_fold)], inplace=True)\n k_fold['prediction_fold_%d' % (n_fold)] = k_fold.apply(lambda x: decode(x['start_fold_%d' % (n_fold)], x['end_fold_%d' % (n_fold)], x['text'], config['question_size'], tokenizer), axis=1)\n k_fold['prediction_fold_%d' % (n_fold)].fillna('', inplace=True)\n k_fold['jaccard_fold_%d' % (n_fold)] = k_fold.apply(lambda x: jaccard(x['text'], x['prediction_fold_%d' % (n_fold)]), axis=1)",
"\nFOLD: 1\nTrain on 21984 samples, validate on 5496 samples\nEpoch 1/5\n21984/21984 - 292s - loss: 2.1388 - y_start_loss: 1.0514 - y_end_loss: 1.0873 - y_start_categorical_accuracy: 0.6546 - y_end_categorical_accuracy: 0.6529 - val_loss: 1.6510 - val_y_start_loss: 0.8545 - val_y_end_loss: 0.7959 - val_y_start_categorical_accuracy: 0.6994 - val_y_end_categorical_accuracy: 0.7220\nEpoch 2/5\n21984/21984 - 274s - loss: 1.5846 - y_start_loss: 0.8206 - y_end_loss: 0.7640 - y_start_categorical_accuracy: 0.7031 - y_end_categorical_accuracy: 0.7300 - val_loss: 1.5583 - val_y_start_loss: 0.8072 - val_y_end_loss: 0.7505 - val_y_start_categorical_accuracy: 0.7091 - val_y_end_categorical_accuracy: 0.7285\nEpoch 3/5\nRestoring model weights from the end of the best epoch.\n21984/21984 - 272s - loss: 1.4377 - y_start_loss: 0.7527 - y_end_loss: 0.6850 - y_start_categorical_accuracy: 0.7213 - y_end_categorical_accuracy: 0.7471 - val_loss: 1.5799 - val_y_start_loss: 0.8255 - val_y_end_loss: 0.7538 - val_y_start_categorical_accuracy: 0.6943 - val_y_end_categorical_accuracy: 0.7240\nEpoch 00003: early stopping\n"
]
],
[
[
"# Model loss graph",
"_____no_output_____"
]
],
[
[
"sns.set(style=\"whitegrid\")\nfor n_fold in range(config['N_FOLDS']):\n print('Fold: %d' % (n_fold+1))\n plot_metrics(history_list[n_fold])",
"Fold: 1\n"
]
],
[
[
"# Model evaluation",
"_____no_output_____"
]
],
[
[
"display(evaluate_model_kfold(k_fold, config['N_FOLDS']).style.applymap(color_map))",
"_____no_output_____"
]
],
[
[
"# Visualize predictions",
"_____no_output_____"
]
],
[
[
"display(k_fold[[c for c in k_fold.columns if not (c.startswith('textID') or \n c.startswith('text_len') or \n c.startswith('selected_text_len') or \n c.startswith('text_wordCnt') or \n c.startswith('selected_text_wordCnt') or \n c.startswith('fold_') or \n c.startswith('start_fold_') or \n c.startswith('end_fold_'))]].head(15))",
"_____no_output_____"
]
]
] | [
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] | [
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
]
] |
d09ad25c52188338b7f2607a5c5880badf53b5b1 | 185,561 | ipynb | Jupyter Notebook | learnelixir.ipynb | kalz2q/myjupyternotebooks | daab7169bd6e515c94207371471044bd5992e009 | [
"MIT"
] | null | null | null | learnelixir.ipynb | kalz2q/myjupyternotebooks | daab7169bd6e515c94207371471044bd5992e009 | [
"MIT"
] | null | null | null | learnelixir.ipynb | kalz2q/myjupyternotebooks | daab7169bd6e515c94207371471044bd5992e009 | [
"MIT"
] | null | null | null | 25.840551 | 253 | 0.415141 | [
[
[
"<a href=\"https://colab.research.google.com/github/kalz2q/mycolabnotebooks/blob/master/learnelixir.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>",
"_____no_output_____"
],
[
"# メモ\n\nelixir を齧る。かじる。\n\n今のイメージ $\\quad$ erlang 上で、erlang は 並行処理のためのシステムで、その erlang 上で理想的な言語を作ろうとしたら、ruby + clojure みたいな言語になった。\n\nDave Thomas と まつもとゆきひろ が勧めているのだからいい言語なのだろう。\n",
"_____no_output_____"
],
[
"* https://elixirschool.com/ja/lessons/basics/control-structures/\n* https://magazine.rubyist.net/articles/0054/0054-ElixirBook.\n* https://dev.to/gumi/elixir-01--2585\n* https://elixir-lang.org/getting-started/introduction.html\n\n---\n本を買った。\n\nプログラミング elixir\n\ndave thomas, 笹田耕一・鳥居雪訳、 ohmsha\nprogramming elixir |> 1.6\n\nを読む。\n\n",
"_____no_output_____"
]
],
[
[
"%%capture\n!wget https://packages.erlang-solutions.com/erlang-solutions_2.0_all.deb && sudo dpkg -i erlang-solutions_2.0_all.deb\n!sudo apt update\n!sudo apt install elixir",
"_____no_output_____"
],
[
"!elixir -v\n!date",
"Erlang/OTP 24 [erts-12.2.1] [source] [64-bit] [smp:2:2] [ds:2:2:10] [async-threads:1] [jit]\n\nElixir 1.13.0 (compiled with Erlang/OTP 24)\nWed Mar 16 16:43:56 UTC 2022\n"
]
],
[
[
"---\nメモ\n\n`!elixir -h` (ヘルプ)としたらシェルワンライナー `elixir -e` が使えるらしいことがわかった。\n\n`iex` というのがインタラクティブ環境なのだが、colab では使いにくいので `elixir -e` で代用する。",
"_____no_output_____"
]
],
[
[
"!elixir -e 'IO.puts 3 + 3'\n!elixir -e 'IO.puts \"hello world!\"'",
"6\nhello world!\n"
],
[
"# 次のようにすればファイルが作れる\n%%writefile temp.exs\nIO.puts \"this is a pen.\"",
"Writing temp.exs\n"
],
[
"# cat してみる\n!cat temp.exs",
"IO.puts \"this is a pen.\""
],
[
"# ファイルを elixir で実行する\n!elixir temp.exs",
"this is a pen.\n"
]
],
[
[
"---\nネットで紹介されていた次のコードセルのコードはどうやって実行するのだろう。 今はわからなくていいと思うがとりあえず転記しておく。\n\n説明:\n\nこのプログラムでは、Parallel というモジュールに pmap という関数を定義しているmap は、与えられたコレクションに対して map(Ruby での Enumerable#map と同じようなものと考えて下さい)を行なうのですが、 各要素の処理を、要素数の分だけプロセスを生成し、各プロセスで並行に実行する、というものです。 ちょっと見ても、よくわからないような気がしますが、大丈夫、本書を読めば、わかるようになりる\n\nとのこと。",
"_____no_output_____"
]
],
[
[
"%%writefile temp.exs\ndefmodule Parallel do\n def pmap(collection, func) do\n collection\n |> Enum.map(&(Task.async(fn -> func.(&1) end)))\n |> Enum.map(&Task.await/1)\n end\nend\n\nresult = Parallel.pmap 1..1000, &(&1 * &1)\n\nIO.inspect result",
"Overwriting temp.exs\n"
],
[
"!elixir temp.exs",
"[1, 4, 9, 16, 25, 36, 49, 64, 81, 100, 121, 144, 169, 196, 225, 256, 289, 324,\n 361, 400, 441, 484, 529, 576, 625, 676, 729, 784, 841, 900, 961, 1024, 1089,\n 1156, 1225, 1296, 1369, 1444, 1521, 1600, 1681, 1764, 1849, 1936, 2025, 2116,\n 2209, 2304, 2401, 2500, ...]\n"
]
],
[
[
"上の例で colab 環境で非同期処理が問題なく動くことが確認できたみたい。",
"_____no_output_____"
],
[
"---\n次のもネットで紹介されていた例で、ハローワールド並行処理版\n",
"_____no_output_____"
]
],
[
[
"%%writefile temp.exs\nparent = self()\n \nspawn_link(fn ->\n send parent, {:msg, \"hello world\"}\nend)\n \nreceive do\n {:msg, contents} -> IO.puts contents\nend",
"Overwriting temp.exs\n"
],
[
"!elixir temp.exs",
"hello world\n"
]
],
[
[
"上の例でやっていることはつぎのような流れである。\n\n1. spawn_linkという関数に渡された関数が、関数の内容を実行する。\n2. 新しく作られたプロセス側では、メインプロセス側(parent)に “hello world” というメッセージを送る。\n3. メインプロセス側は、どこからかメッセージが来ないかを待ち受けて(receive)、メッセージが来たらそれをコンソールに表示する。",
"_____no_output_____"
]
],
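[
[
"# Added sketch (not from the referenced article): receive can also take an `after` clause\n# as a timeout, which is handy when a message might never arrive.\n!elixir -e 'parent = self(); spawn(fn -> send(parent, {:msg, \"hello again\"}) end); receive do {:msg, m} -> IO.puts(m) after 1000 -> IO.puts(\"timeout\") end'",
"_____no_output_____"
]
],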
[
[
"# 実験 とりあえず理解しない。 colab 環境でどうかだけ調べる。\n%%writefile chain.exs\ndefmodule Chain do\n def counter(next_pid) do\n receive do\n n -> send next_pid, n + 1\n end\n end\n \n def create_processes(n) do\n last = Enum.reduce 1..n, self(),\n fn (_, send_to) -> spawn(Chain, :counter, [send_to]) end\n send last, 0\n receive do\n final_answer when is_integer(final_answer) ->\n \"Result is #{inspect(final_answer)}\"\n end\n end\n \n def run(n) do\n IO.puts inspect :timer.tc(Chain, :create_processes, [n])\n end\nend",
"Writing chain.exs\n"
],
[
"!elixir --erl \"+P 1000000\" -r chain.exs -e \"Chain.run(1_000_000)\"",
"{4638957, \"Result is 1000000\"}\n"
]
],
[
[
"記事 https://ubiteku.oinker.me/2015/12/22/elixir試飲-2-カルチャーショックに戸惑う-並行指向プ/ のマシン Macbook Pro – 3 GHz Intel Core i7, 16GB RAM では 7 秒のところ、colab では 5 秒で終わってるね!!!!\n\n手元のwindowsマシン intel core i5-9400 8gb ram でやったら次のようになった。 \n{3492935, \"Result is 1000000\"}\n\nあれ、速いじゃん!!!!",
"_____no_output_____"
],
[
"---\nコメントは `#`",
"_____no_output_____"
]
],
[
[
"%%writefile temp.exs\n# コメント実験\nstr = \"helloworld!!!!\"\nIO.puts str",
"Overwriting temp.exs\n"
],
[
"!elixir temp.exs",
"helloworld!!!!\n"
]
],
[
[
"---\nn 進数、整数 integer",
"_____no_output_____"
]
],
[
[
"!elixir -e 'IO.puts 0b1111'\n!elixir -e 'IO.puts 0o7777'\n!elixir -e 'IO.puts 0xffff'\n!elixir -e 'IO.puts 1000_000_00_0'",
"15\n4095\n65535\n1000000000\n"
]
],
[
[
"整数型に上限下限 fixed limit はない。 factorial(10000) が計算できる。今はしない。\n",
"_____no_output_____"
],
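[
"# Added check: integers are arbitrary precision, so 100! can be computed exactly (a 158-digit number).\n!elixir -e 'IO.puts Enum.reduce(1..100, 1, &(&1 * &2))'",
"_____no_output_____"
],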
[
"---\n問題\n\n10進数を $n$ 進数にベースを変えるのはどうするか。 python では `int()`, `bin()`, `oct()`, `hex()` があった。",
"_____no_output_____"
]
],
[
[
"# python\nprint(0b1111)\nprint(0o7777)\nprint(0xffff)\nprint(int('7777',8))\nprint(bin(15))\nprint(oct(4095))\nprint(hex(65535))",
"15\n4095\n65535\n4095\n0b1111\n0o7777\n0xffff\n"
],
[
"!elixir -e 'IO.puts 0b1111'\n!elixir -e 'IO.puts 0o7777'\n!elixir -e 'IO.puts 0xffff'\n!echo\n# Integer.to_string() と言う関数を使う\n# <> はバイナリー連結\n!elixir -e 'IO.puts \"0b\" <> Integer.to_string(15,2)'\n!elixir -e 'IO.puts \"0o\" <> Integer.to_string(4095,8)'\n!elixir -e 'IO.puts \"0x\" <> Integer.to_string(65535,16)'",
"15\n4095\n65535\n\n0b1111\n0o7777\n0xFFFF\n"
]
],
[
[
"浮動小数点数 floating-point number",
"_____no_output_____"
]
],
[
[
"!elixir -e 'IO.puts 1.532e-4'\n# .0 とか 1. とかはエラーになる\n!elixir -e 'IO.puts 98099098.0809898888'\n!elixir -e 'IO.puts 0.00000000000000000000000001' #=> 1.0e-26\n!elixir -e 'IO.puts 90000000000000000000000000000000000000000000000000000000'",
"1.532e-4\n98099098.08098988\n1.0e-26\n999999999999999999999999999999999999999\n90000000000000000000000000000000000000000000000000000000\n"
]
],
[
[
"文字列 string\n\nstring という型はない、みたい。\n\n---\n質問 型を調べる関数はあるか。type() とか。\n",
"_____no_output_____"
]
],
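[
[
"# Added sketch for the question above: there is no single type() function, but Kernel provides\n# type-check predicates such as is_binary/1, is_integer/1, is_list/1 and is_atom/1.\n# Elixir strings are UTF-8 encoded binaries, so is_binary/1 returns true for them.\n!elixir -e 'IO.puts is_binary(\"elixir\")'\n!elixir -e 'IO.puts is_integer(\"elixir\")'\n!elixir -e 'IO.puts is_atom(:elixir)'",
"_____no_output_____"
]
],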
[
[
"!elixir -e 'IO.puts \"日本語が書けますか\"'\n!elixir -e 'IO.puts \"日本語が書けます\"'",
"日本語が書けますか\n日本語が書けます\n"
],
[
"# 関数に括弧をつけることができる\n# \\ で escape できる\n!elixir -e 'IO.puts (0b1111)'\n!elixir -e 'IO.puts (\"にほんご\\n日本語\")'\n!elixir -e \"IO.puts ('にほんご\\n\\\"日本語\\\"')\"",
"15\nにほんご\n日本語\nにほんご\n\"日本語\"\n"
],
[
"# 文字連結 `+` ではない!!!!\n!elixir -e 'IO.puts(\"ABCD\"<>\"EFGH\")'",
"ABCDEFGH\n"
]
],
[
[
"`<>` と言う記号はバイナリ連結ということらしい。",
"_____no_output_____"
],
[
"---\n値の埋め込み\n\n`#{変数名}` を記述することで、変数の値を埋め込むことができる。\n",
"_____no_output_____"
]
],
[
[
"!elixir -e 'val = 1000; IO.puts \"val = #{val}\"'",
"val = 1000\n"
]
],
[
[
"---\n真偽値\n\nelixir の 真偽値は true と false (小文字) で false と nil が false でそれ以外は true\n\n",
"_____no_output_____"
]
],
[
[
"!elixir -e 'if true do IO.puts \"true\" end'\n!elixir -e 'if True do IO.puts \"true\" end'\n!elixir -e 'if False do IO.puts \"true\" end' # False が大文字なので\n!elixir -e 'if false do IO.puts \"true\" else IO.puts \"false\" end'\n!elixir -e 'if nil do IO.puts \"true\" else IO.puts \"false\" end'\n!elixir -e 'if 0 do IO.puts \"true\" else IO.puts \"false\" end'\n!elixir -e 'if (-1) do IO.puts \"true\" else IO.puts \"false\" end'\n!elixir -e 'if [] do IO.puts \"true\" else IO.puts \"false\" end'\n!elixir -e 'if \"\" do IO.puts \"true\" else IO.puts \"false\" end'",
"true\ntrue\ntrue\nfalse\nfalse\ntrue\ntrue\ntrue\ntrue\n"
]
],
[
[
"`null` はない。",
"_____no_output_____"
],
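[
"# Added check: Elixir uses nil where other languages use null; is_nil/1 tests for it.\n!elixir -e 'IO.puts is_nil(nil)'\n!elixir -e 'IO.puts is_nil(0)'",
"_____no_output_____"
],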
[
"---\n**マッチ演算子 `=`**\n\nマッチ演算子 `=` はマッチ演算子である。 マッチ演算子を通して値を代入し、その後、マッチさせることができる。マッチすると、方程式の結果が返され、失敗すると、エラーになる。",
"_____no_output_____"
]
],
[
[
"!elixir -e 'IO.puts a = 1'\n!elixir -e 'a =1; IO.puts 1 = a'\n!elixir -e 'a =1; IO.puts 2 = a'",
"1\n1\n** (MatchError) no match of right hand side value: 1\n (stdlib 3.17) erl_eval.erl:450: :erl_eval.expr/5\n (stdlib 3.17) erl_eval.erl:893: :erl_eval.expr_list/6\n (stdlib 3.17) erl_eval.erl:408: :erl_eval.expr/5\n (elixir 1.13.0) lib/code.ex:404: Code.validated_eval_string/3\n"
],
[
"!elixir -e 'IO.inspect a = [1,2,3]' # リストは puts で表示できないので inspect を使う\n!elixir -e '[a,b,c] = [1,2,3]; IO.puts c; IO.puts b'\n",
"[1, 2, 3]\n3\n2\n"
]
],
[
[
"上の例は、elixir は マッチ演算子 `=` があると左右がマッチするように最善を尽くす。 そのため、`[a,b,c] = [1,2,3]` で a,b,c に値が代入される。",
"_____no_output_____"
]
],
[
[
"!elixir -e 'IO.inspect [1,2,[3,4,5]]'\n!elixir -e '[a,b,c] = [1,2,[3,4,5]]; IO.inspect c; IO.inspect b'",
"[1, 2, [3, 4, 5]]\n[3, 4, 5]\n2\n"
],
[
"# 実験 => エラー\n!elixir -e 'IO.insepct [a,b] = [1,2,3]'",
"** (MatchError) no match of right hand side value: [1, 2, 3]\n (stdlib 3.15) erl_eval.erl:450: :erl_eval.expr/5\n (stdlib 3.15) erl_eval.erl:893: :erl_eval.expr_list/6\n (stdlib 3.15) erl_eval.erl:408: :erl_eval.expr/5\n (elixir 1.12.0) lib/code.ex:656: Code.eval_string_with_error_handling/3\n"
],
[
"# 実験\n!elixir -e 'IO.inspect a = [[1,2,3]]'\n!elixir -e 'IO.inspect [a] = [[1,2,3]]'\n!elixir -e '[a] = [[1,2,3]]; IO.inspect a'",
"[[1, 2, 3]]\n[[1, 2, 3]]\n[1, 2, 3]\n"
],
[
"# 実験 => エラー\n!elixir -e 'IO.insepct [a,b] = [a,b]'",
"\u001b[33mwarning: \u001b[0mvariable \"a\" does not exist and is being expanded to \"a()\", please use parentheses to remove the ambiguity or change the variable name\n nofile:1\n\n** (CompileError) nofile:1: undefined function a/0\n (stdlib 3.15) lists.erl:1358: :lists.mapfoldl/3\n"
],
[
"# 実験 アトムについては後述\n!elixir -e 'IO.puts a = :a'\n!elixir -e 'a = :a; IO.inspect a = a'\n!elixir -e 'a = :a; IO.puts a = a'\n!elixir -e 'IO.puts :b'",
"a\n:a\na\nb\n"
]
],
[
[
"アンダースコア `_` で値を無視する。 ワルドカード。\n\nなんでも受け付ける。",
"_____no_output_____"
]
],
[
[
"!elixir -e 'IO.inspect [1,_,_]=[1,2,3]'",
"[1, 2, 3]\n"
],
[
"!elixir -e 'IO.inspect [1,_,_]=[1,\"cat\",\"dog\"]'",
"[1, \"cat\", \"dog\"]\n"
]
],
[
[
"変数は、バインド (束縛、紐付け) されると変更できない。\n\nかと思ったらできてしまう。\n",
"_____no_output_____"
]
],
[
[
"!elixir -e 'a = 1; IO.puts a = 2'",
"2\n"
]
],
[
[
"元の変数を指し示すピン演算子 (`^` カレット) がある。",
"_____no_output_____"
]
],
[
[
"!elixir -e 'a = 1; IO.puts ^a = 2'",
"** (MatchError) no match of right hand side value: 2\n (stdlib 3.15) erl_eval.erl:450: :erl_eval.expr/5\n (stdlib 3.15) erl_eval.erl:893: :erl_eval.expr_list/6\n (stdlib 3.15) erl_eval.erl:408: :erl_eval.expr/5\n (elixir 1.12.0) lib/code.ex:656: Code.eval_string_with_error_handling/3\n"
]
],
[
[
"メモ $\\quad$ 普通の関数型言語のように変数は変更できないルールにしてしまった方が簡単ではなかったか、と思わないでもない。 変数を不変にする、const 宣言みたいなのはないのか。\n\nリストは不変 immutable なので安心。\n\n",
"_____no_output_____"
]
],
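[
[
"# Added sketch: rebinding a variable does not mutate the data it pointed to;\n# `other` still refers to the original list after `list` is rebound.\n!elixir -e 'list = [1, 2, 3]; other = list; list = [0 | list]; IO.inspect list; IO.inspect other'",
"_____no_output_____"
]
],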
[
[
"# 大文字にする capitalize\n!elixir -e 'IO.puts name = String.capitalize \"elixir\"'",
"Elixir\n"
],
[
"# 大文字にする upcase\n!elixir -e 'IO.puts String.upcase \"elixir\"'",
"ELIXIR\n"
]
],
[
[
"# アトム\n\nアトムは名前がそのまま値となる定数である。\n\n**名前の前にコロン `:` をつけることでアトムになる。**\n\nアトムの名前は utf-8 文字列 (記号を含む)、数字、アンダースコア `_` 、`@` で、終端文字としてのみ「!」や「?」が使える。\n\n:fred $\\quad$ :is_binary? $\\quad$ :var@2 $\\quad$ :<> $\\quad$ :=== \n\n:\"func/3\" $\\quad$ :\"long john silver\" $\\quad$ :эликсир\n\n:mötley_crüe\n\nメモ",
"_____no_output_____"
]
],
[
[
"# 実験 アトムは宣言しないで突然使える\n!elixir -e 'IO.puts :fred'",
"fred\n"
],
[
"# 実験\n!elixir -e 'IO.puts true === :true'\n!elixir -e 'IO.puts :true'\n!elixir -e 'IO.puts false === :false'",
"true\ntrue\ntrue\n"
],
[
"# 実験 \n!elixir -e 'IO.puts :fred'\n!elixir -e 'IO.puts :is_binary?'\n!elixir -e 'IO.puts :var@2'\n!elixir -e 'IO.puts :<>'\n!elixir -e 'IO.puts :==='\n# セミコロンを含むアトムは iex 上では使えるが、シェルワンライナーでは使えない \n# unexpected token: \"\" と言うエラーになる\n# colab の環境だけでなく、通常のシェルでも同じ\n# ファイルにしたプログラムでは使えるので問題ない\n# !elixir -e 'IO.puts :\"func/3\"' \n# !elixir -e 'IO.puts :\"long john silver\"'\n!elixir -e 'IO.puts :эликсир'\n!elixir -e 'IO.puts :mötley_crüe'\n!elixir -e 'IO.puts :日本語はどうか'",
"fred\nis_binary?\nvar@2\n<>\n===\nэликсир\nmötley_crüe\n日本語はどうか\n"
]
],
[
[
"演算子",
"_____no_output_____"
]
],
[
[
"!elixir -e 'IO.puts 1 + 2'\n!elixir -e 'x = 10; IO.puts x + 1'\n!elixir -e 'IO.puts 1 - 2'\n!elixir -e 'x = 10; IO.puts x - 1'\n!elixir -e 'IO.puts 5 * 2'\n!elixir -e 'x = 10; IO.puts x * 4'\n!echo \n!elixir -e 'IO.puts 5 / 2'\n!elixir -e 'x = 10; IO.puts x / 3'",
"3\n11\n-1\n9\n10\n40\n\n2.5\n3.3333333333333335\n"
],
[
"# 浮動少数ではなく整数としての結果がほしい場合は div 関数を使用\n!elixir -e 'IO.puts div(10,5)'\n!elixir -e 'IO.puts div(10,4)'\n# 割り算の余り、剰余を求める場合は rem関数を使用\n!elixir -e 'IO.puts rem(10,4)'\n!elixir -e 'IO.puts rem(10,3)'\n!elixir -e 'IO.puts rem(10,2)'",
"2\n2\n2\n1\n0\n"
],
[
"# 比較演算子\n!elixir -e 'IO.puts 1 == 1'\n!elixir -e 'IO.puts 1 != 1'\n!elixir -e 'IO.puts ! (1 != 1)'\n!echo\n!elixir -e 'IO.puts 20.0 == 20'\n!elixir -e 'IO.puts 20.0 === 20'\n!elixir -e 'IO.puts 20.0 !== 20'",
"true\nfalse\ntrue\n\ntrue\nfalse\ntrue\n"
],
[
"# 論理演算子\n# 論理和\n!elixir -e 'IO.puts \"ABC\" == \"ABC\" || 20 == 30'\n!elixir -e 'IO.puts \"ABC\" == \"abc\" || 20 == 30'\n!echo\n# 論理積\n!elixir -e 'IO.puts \"ABC\" == \"ABC\" && 20 == 20'\n!elixir -e 'IO.puts \"ABC\" == \"ABC\" && 20 == 30'\n!elixir -e 'IO.puts \"ABC\" == \"def\" && 10 > 100'\n!echo\n# 否定\n!elixir -e 'IO.puts !(\"ABC\" == \"ABC\")'\n!elixir -e 'IO.puts !(\"ABC\" == \"DEF\")'",
"true\nfalse\n\ntrue\nfalse\nfalse\n\nfalse\ntrue\n"
]
],
[
[
"range\n\nメモ $\\quad$ range は型ではなく、struct である。 構造体?\n\n`start..end` で表現される、とあるが、1..10 と書けばそれで range なのか?\n",
"_____no_output_____"
]
],
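[
[
"# Added check for the question above: 1..10 by itself is already a %Range{} struct.\n# Inspecting with structs: false shows its fields (first, last, step).\n!elixir -e 'IO.puts is_struct(1..10)'\n!elixir -e 'IO.inspect 1..10, structs: false'",
"_____no_output_____"
]
],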
[
[
"!elixir -e 'IO.inspect Enum.to_list(1..3)'\n!elixir -e 'IO.inspect Enum.to_list(0..10//3)'\n!elixir -e 'IO.inspect Enum.to_list(0..10//-3)'\n!elixir -e 'IO.inspect Enum.to_list(10..0//-3)'\n!elixir -e 'IO.inspect Enum.to_list(1..1)'\n!elixir -e 'IO.inspect Enum.to_list(1..-1)'\n!elixir -e 'IO.inspect Enum.to_list(1..1//2)'\n!elixir -e 'IO.inspect Enum.to_list(1..-1//2)'\n!elixir -e 'IO.inspect Enum.to_list(1..-1//-2)'",
"[1, 2, 3]\n[0, 3, 6, 9]\n[]\n[10, 7, 4, 1]\n[1]\n[1, 0, -1]\n[1]\n[]\n[1, -1]\n"
],
[
"!elixir -e 'IO.inspect 1..9//2'",
"1..9//2\n"
]
],
[
[
"正規表現 regular expression\n\n正規表現も型ではなく、struct である。",
"_____no_output_____"
]
],
[
[
"!elixir -e 'IO.inspect Regex.run ~r{[aiueo]},\"catapillar\"'",
"[\"a\"]\n"
],
[
"!elixir -e 'IO.inspect Regex.scan ~r{[aiueo]},\"catapillar\"'",
"[[\"a\"], [\"a\"], [\"i\"], [\"a\"]]\n"
],
[
"!elixir -e 'IO.inspect Regex.split ~r{[aiueo]},\"catapillar\"'",
"[\"c\", \"t\", \"p\", \"ll\", \"r\"]\n"
],
[
"!elixir -e 'IO.inspect Regex.replace ~r{[aiueo]},\"catapillar\", \"*\"'",
"\"c*t*p*ll*r\"\n"
]
],
[
[
"# コレクション型",
"_____no_output_____"
],
[
"## タプル\n\nタプルは波括弧 brace を用いて定義する。\n\nタプルに限らず elixir のコレクションはすべて要素のタイプを限定しない。\n\n通常 2 から 4 の要素であり、それ以上の要素数の場合、map や struct の利用を考える。\n\nタプルは関数の返り値に便利に利用される。\n\nパターンマッチングと組み合わせて使われる。\n\n---\ncf. タプル以外の波括弧 brace の使用\n* 値の代入`#{変数名}` \n* 正規表現 Regex `r{}` \n* マップ `%{}` ",
"_____no_output_____"
]
],
[
[
"!elixir -e 'IO.inspect {3.14, :pie, \"Apple\"}'",
"{3.14, :pie, \"Apple\"}\n"
],
[
"!elixir -e '{status, count, action} = {3.14, :pie, \"next\"}; IO.puts action'",
"next\n"
],
[
"# 実験\n# タプルの使い方の例 \n!echo hello > temp.txt\n!elixir -e '{status, file} = File.open(\"temp.txt\"); IO.inspect {status, file}'\n!elixir -e '{status, file} = File.read(\"temp.txt\"); IO.inspect {status, file}'\n!elixir -e '{status, file} = File.read(\"temp02.txt\"); IO.inspect {status, file}'\n!elixir -e '{status, file} = File.write(\"temp.txt\", \"goodbye\"); IO.inspect {status, file}'\n!elixir -e '{status, file} = File.read(\"temp.txt\"); IO.inspect {status, file}'\n",
"_____no_output_____"
],
[
"# 実験 タプルに ++ は使えるか。 => 使えない <> も使えない\n# !elixir -e 'IO.inspect {3.14, :pie, \"Apple\"} ++ {3}'\n# 実験 タプルに head は使えるか。 => 使えない\n# !elixir -e 'IO.inspect hd {3.14, :pie, \"Apple\"}'\n# 実験 タプルにパターンマッチングは使えるか。 => 使える\n!elixir -e '{a,b,c} = {3.14, :pie, \"Apple\"}; IO.inspect [c,a,b]'",
"[\"Apple\", 3.14, :pie]\n"
],
[
"# 実験\n# 項目の入れ替え\n!elixir -e 'a=1; b=3; {b,a}={a,b}; IO.inspect {a,b}'\n!elixir -e 'a=1; b=3; c=5; d= 7; {d,c,b,a}={a,b,c,d}; IO.inspect {a,b,c,d}'",
"{3, 1}\n{7, 5, 3, 1}\n"
],
[
"# 実験\n# タプルの要素にタプルはあるか\n!elixir -e 'IO.inspect {3.14, :pie, \"Apple\", {3}}'",
"{3.14, :pie, \"Apple\", {3}}\n"
]
],
[
[
"## リスト\n\n他の言語の配列 array と elixir のリストは違うので注意。 lisp のリストと似たような概念である。\n\nカラのリストでなければ、head (hd) と tail (tl) がある。hd は頭の1つで tl はそれ以降全部。",
"_____no_output_____"
]
],
[
[
"# リスト\n!elixir -e 'IO.inspect [3.14, :pie, \"Apple\"]'\n!elixir -e 'IO.inspect hd [3.14]'\n!elixir -e 'IO.inspect tl [3.14]'",
"[3.14, :pie, \"Apple\"]\n3.14\n[]\n"
],
[
"# リスト先頭への追加(高速)\n!elixir -e 'IO.inspect [\"π\" | [3.14, :pie, \"Apple\"]]'\n# リスト末尾への追加(低速)\n!elixir -e 'IO.inspect [3.14, :pie, \"Apple\"] ++ [\"Cherry\"]'",
"[\"π\", 3.14, :pie, \"Apple\"]\n[3.14, :pie, \"Apple\", \"Cherry\"]\n"
]
],
[
[
"上と下のコードセルでリストの連結を行っているが、++/2 演算子を用いている。 この `++/2` という表記は `++` が演算子自体で `/2` がアリティ (引数の数) を表す。 \n\n",
"_____no_output_____"
],
[
"---\n質問 $\\quad$ アリティとはなにか。\n\n---\n質問 $\\quad$ リストの連結に `++` で文字列の連結 `<>` なのはなぜか。 オーバーライディングはあるのか。 文字列 string はリストではないのか。 長さを測る関数も別々なのか。",
"_____no_output_____"
]
],
[
[
"# リストの連結\n!elixir -e 'IO.inspect [1, 2] ++ [3, 4, 1]'",
"[1, 2, 3, 4, 1]\n"
],
[
"# リストの減算 \n# --/2 演算子は存在しない値を引いてしまってもオッケー\n!elixir -e 'IO.inspect [\"foo\", :bar, 42] -- [42, \"bar\"]'\n# 重複した値の場合、右辺の要素のそれぞれに対し、左辺の要素のうち初めて登場した同じ値が順次削除\n!elixir -e 'IO.inspect [1,2,2,3,2,3] -- [1,2,3,2]'\n# リストの減算の値のマッチには strict comparison が使われている\n!elixir -e 'IO.inspect [2] -- [2.0]'\n!elixir -e 'IO.inspect [2.0] -- [2.0]'",
"[\"foo\", :bar]\n[2, 3]\n[2]\n[]\n"
],
[
"# head /tail\n!elixir -e 'IO.inspect hd [3.14, :pie, \"Apple\"]'\n!elixir -e 'IO.inspect tl [3.14, :pie, \"Apple\"]'",
"3.14\n[:pie, \"Apple\"]\n"
]
],
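[
[
"# Added sketch for the questions above: strings are binaries, not lists, so the length\n# functions differ: String.length/1 counts characters, byte_size/1 counts bytes,\n# and length/1 only works on lists.\n!elixir -e 'IO.puts String.length(\"日本語\")'\n!elixir -e 'IO.puts byte_size(\"日本語\")'\n!elixir -e 'IO.puts length([1, 2, 3])'\n!elixir -e 'IO.puts is_list(\"abc\")'",
"_____no_output_____"
]
],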
[
[
"---\nリストを頭部と尾部に分けるのに\n\n* パターンマッチング\n* cons 演算子( `|` )\n\nを使うこともできる。",
"_____no_output_____"
]
],
[
[
"!elixir -e '[head | tail] = [3.14, :pie, \"Apple\"]; IO.inspect head; IO.inspect tail'",
"3.14\n[:pie, \"Apple\"]\n"
]
],
[
[
"## キーワードリスト\n\nキーワードリストとマップは elixir の連想配列である。\n\nキーワードリストは最初の要素がアトムのタプルからなる特別なリストで、リストと同様の性能になる。",
"_____no_output_____"
]
],
[
[
"# キーワードリスト\n!elixir -e 'IO.inspect [foo: \"bar\", hello: \"world\"]'\n# タプルのリストとしても同じ\n!elixir -e 'IO.inspect [{:foo, \"bar\"}, {:hello, \"world\"}]'\n!elixir -e 'IO.inspect [foo: \"bar\", hello: \"world\"] == [{:foo, \"bar\"}, {:hello, \"world\"}]'",
"[foo: \"bar\", hello: \"world\"]\n[foo: \"bar\", hello: \"world\"]\ntrue\n"
]
],
[
[
"キーワードリストの 3 つの特徴\n\n* キーはアトムである。\n* キーは順序付けされている。\n* キーの一意性は保証されない。\n\nこうした理由から、キーワードリストは関数にオプションを渡すためによく用いられる。\n",
"_____no_output_____"
]
],
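[
[
"# Added sketch: a typical use of a keyword list is passing options to a function,\n# e.g. the trim: true option of String.split/3.\n!elixir -e 'IO.inspect String.split(\"a,b,,c\", \",\")'\n!elixir -e 'IO.inspect String.split(\"a,b,,c\", \",\", trim: true)'",
"_____no_output_____"
]
],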
[
[
"# 実験 リストの角括弧は省略できる\n!elixir -e 'IO.inspect foo: \"bar\", hello: \"world\"'",
"[foo: \"bar\", hello: \"world\"]\n"
],
[
"# 実験\n!elixir -e 'IO.inspect [1, fred: 1, dave: 2]'\n!elixir -e 'IO.inspect {1, fred: 1, dave: 2}'\n!elixir -e 'IO.inspect {1, [{:fred,1},{:dave, 2}]}'",
"[1, {:fred, 1}, {:dave, 2}]\n{1, [fred: 1, dave: 2]}\n{1, [fred: 1, dave: 2]}\n"
]
],
[
[
"## マップ\n\n* キーワードリストとは違ってどんな型のキーも使える。\n* 順序付けされない。\n* キーの一意性が保証されている。重複したキーが追加された場合は、前の値が置き換えられる。\n* 変数をマップのキーにできる。\n* `%{}` 構文で定義する。\n\n",
"_____no_output_____"
]
],
[
[
"!elixir -e 'IO.inspect %{:foo => \"bar\", \"hello\" => :world}'\n!elixir -e 'map = %{:foo => \"bar\", \"hello\" => :world}; IO.inspect map[:foo]'\n!elixir -e 'map = %{:foo => \"bar\", \"hello\" => :world}; IO.inspect map[\"hello\"]'\n!echo\n!elixir -e 'key = \"hello\"; IO.inspect %{key => \"world\"}'\n!echo\n!elixir -e 'IO.inspect %{:foo => \"bar\", :foo => \"hello world\"}'",
"%{:foo => \"bar\", \"hello\" => :world}\n\"bar\"\n:world\n\n%{\"hello\" => \"world\"}\n\n\u001b[33mwarning: \u001b[0mkey :foo will be overridden in map\n nofile:1\n\n%{foo: \"hello world\"}\n"
]
],
[
[
"アトムのキーだけを含んだマップには特別な構文がある。",
"_____no_output_____"
]
],
[
[
"!elixir -e 'IO.inspect %{foo: \"bar\", hello: \"world\"} == %{:foo => \"bar\", :hello => \"world\"}'",
"true\n"
],
[
"# 加えて、アトムのキーにアクセスするための特別な構文がある。\n!elixir -e 'map = %{foo: \"bar\", hello: \"world\"}; IO.inspect map.hello'\n!elixir -e 'map = %{foo: \"bar\", hello: \"world\"}; IO.inspect map[:hello]'\n!elixir -e 'map = %{:foo => \"bar\", :hello => \"world\"}; IO.inspect map[:hello]'",
"\"world\"\n\"world\"\n\"world\"\n"
]
],
[
[
"---\n質問 map の特別な構文\n\n1. `=>` の代わりにコロン `:` を使う\n2. 要素を取り出すのに `[]` の代わりにピリオド `.` を使う\n\nは不要ではないか。不要だが見かけが良くなる、ということか。普通はどっちを使うのか。無駄に構文を複雑にするだけのような気がする。\n\n多分まず Python の dict でコロン `:` を使うこと、Ruby は `=>` を使うが糖衣構文としてコロン `:` が使えてその形が主流であることから、見かけ大切ということでこうなったのではないか。キーにアトムを使うことが前提ならば生産性が上がるかもしれない。キーであることを示すコロンが不要になる。fat arrow よりコロンの方が短い。map の定義が同時に行われる。要素の取り出しピリオドを使う点についても同様。ということは基本的にこの構文になる、と言う事だろう。",
"_____no_output_____"
]
],
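[
[
"# Added sketch: one practical difference between the two access syntaxes —\n# map[:missing] returns nil, while map.missing raises a KeyError.\n!elixir -e 'map = %{foo: \"bar\"}; IO.inspect map[:missing]'\n!elixir -e 'map = %{foo: \"bar\"}; IO.inspect map.missing'",
"_____no_output_____"
]
],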
[
[
"# マップの更新のための構文がある (新しい map が作成される)\n# この構文は、マップに既に存在するキーを更新する場合にのみ機能する\n!elixir -e 'map = %{foo: \"bar\", hello: \"world\"}; IO.inspect %{map | foo: \"baz\"}'",
"%{foo: \"baz\", hello: \"world\"}\n"
],
[
"# 新しいキーを作成するには、`Map.put/3` を使用\n!elixir -e 'map = %{hello: \"world\"}; IO.inspect Map.put(map, :foo, \"baz\")'",
"%{foo: \"baz\", hello: \"world\"}\n"
]
],
[
[
"---\n質問 binary については良くわからないので別途。",
"_____no_output_____"
],
[
"# バイナリ binary",
"_____no_output_____"
]
],
[
[
"# binaries \n!elixir -e 'IO.inspect <<1,2>>'\n!elixir -e 'IO.inspect <<1,10>>'\n!elixir -e 'bin = <<1,10>>; IO.inspect byte_size bin'\n!elixir -e 'bin = <<3::size(2),5::size(4),1::size(2)>>; IO.inspect bin'\n!elixir -e 'IO.puts Integer.to_string(213,2)'\n!elixir -e 'IO.puts 0b11'\n!elixir -e 'IO.puts 0b0101'\n!echo\n!elixir -e 'bin = <<3::size(2),5::size(4),1::size(2)>>; IO.inspect byte_size bin'\n!elixir -e 'bin = <<3::size(2),5::size(4),1::size(2)>>; IO.inspect :io.format(\"~-8.2b~n\",:binary.bin_to_list(bin))'",
"<<1, 2>>\n<<1, 10>>\n2\n<<213>>\n11010101\n3\n5\n\n1\n11010101\n:ok\n"
],
[
"!elixir -e 'IO.inspect <<1,2>> <> <<3>>'",
"<<1, 2, 3>>\n"
]
],
[
[
"----\n** Date and Time 日付 **",
"_____no_output_____"
]
],
[
[
"# Date and Time\n!elixir -e 'IO.inspect Date.new(2021,6,2)'",
"{:ok, ~D[2021-06-02]}\n"
],
[
"!elixir -e '{:ok, d1}=Date.new(2021,6,2); IO.inspect d1'\n!elixir -e '{:ok, d1}=Date.new(2021,6,2); IO.inspect Date.day_of_week(d1)'\n!elixir -e '{:ok, d1}=Date.new(2021,6,2); IO.inspect Date.add(d1,7)'\n!elixir -e '{:ok, d1}=Date.new(2021,6,2); IO.inspect d1, structs: false'",
"~D[2021-06-02]\n3\n~D[2021-06-09]\n%{__struct__: Date, calendar: Calendar.ISO, day: 2, month: 6, year: 2021}\n"
]
],
[
[
"`~D[...]` や `~T[...]` は elixir の シギル sigil である。 文字列とバイナリーのところで説明する。",
"_____no_output_____"
],
[
"# help について\nメモ $\\quad$ 関数の調べ方\n\nHelper の使い方。 help, type, info, information とか。\n\n下のコードセルにあるように、対象のモジュールの関数名を調べ、そのヘルプを見ればけっこうくわしくわかる。\n\nコメントアウトしてあるのは出力が大きいので、とりあえずコメントアウトして出力を抑制してある。\n\n具体的には、Enum にあたるところにモジュール名を入れて関数のリストを出す。 Ctrl+A Ctrl+C でコピーして vscode などでペーストして読む。 調べたい関数名をヘルプの、Enum.all?/1 のところに入れて出力をコピーして、vscode などでペーストして読む\n",
"_____no_output_____"
]
],
[
[
"# !elixir -e 'Enum.__info__(:functions) |> Enum.each(fn({function, arity}) -> IO.puts \"#{function}/#{arity}\" end)'",
"_____no_output_____"
],
[
"# !elixir -e 'require IEx.Helpers;IEx.Helpers.h Enum.all?/1'",
"_____no_output_____"
],
[
"# h 単独のドキュメントを見たい\n# !elixir -e 'require IEx.Helpers;IEx.Helpers.h'",
"_____no_output_____"
],
[
"# i というのもある\n# !elixir -e 'x = [3,2]; require IEx.Helpers;IEx.Helpers.i x'",
"_____no_output_____"
],
[
"# !elixir -e 'require IEx.Helpers;IEx.Helpers.h IO'",
"_____no_output_____"
]
],
[
[
"# Enum モジュール\n\nEnum はリストなどコレクションを列挙するための一連のアルゴリズム。\n\n* all?、any?\n* chunk_every、chunk_by、map_every\n* each\n* map、filter、reduce\n* min、max\n* sort、uniq、uniq_by\n* キャプチャ演算子 `(&)`\n",
"_____no_output_____"
]
],
[
[
"# all? 関数を引数で受け取り、リストの全体が true の時、true を返す\n!elixir -e 'IO.puts Enum.all?([\"foo\", \"bar\", \"hello\"], fn(s) -> String.length(s) == 3 end)'\n!elixir -e 'IO.puts Enum.all?([\"foo\", \"bar\", \"hello\"], fn(s) -> String.length(s) >1 end)'",
"false\ntrue\n"
],
[
"# any? 少なくとも1つの要素が true と評価された場合に true を返す\n!elixir -e 'IO.puts Enum.any?([\"foo\", \"bar\", \"hello\"], fn(s) -> String.length(s) == 5 end)'",
"true\n"
],
[
"# chunk_every リストを小さなグループに分割する\n!elixir -e 'IO.inspect Enum.chunk([1, 2, 3, 4, 5, 6], 2)'\n!elixir -e 'IO.inspect Enum.chunk([1, 2, 3, 4, 5, 6], 3)'\n!elixir -e 'IO.inspect Enum.chunk([1, 2, 3, 4, 5, 6], 4)'",
"[[1, 2], [3, 4], [5, 6]]\n[[1, 2, 3], [4, 5, 6]]\n[[1, 2, 3, 4]]\n"
],
[
"# chunk_by 関数の戻り値が変化することによって分割する\n!elixir -e 'IO.inspect Enum.chunk_by([\"one\", \"two\", \"three\", \"four\", \"five\"], fn(x) -> String.length(x) end)'\n!elixir -e 'IO.inspect Enum.chunk_by([\"one\", \"two\", \"three\", \"four\", \"five\", \"six\"], fn(x) -> String.length(x) end)'",
"[[\"one\", \"two\"], [\"three\"], [\"four\", \"five\"]]\n[[\"one\", \"two\"], [\"three\"], [\"four\", \"five\"], [\"six\"]]\n"
],
[
"# map_every nth ごとに map 処理する\n!elixir -e 'IO.inspect Enum.map_every(1..10, 3, fn x -> x + 1000 end)'\n!elixir -e 'IO.inspect Enum.map_every(1..10, 1, fn x -> x + 1000 end)'\n!elixir -e 'IO.inspect Enum.map_every(1..10, 0, fn x -> x + 1000 end)'",
"[1001, 2, 3, 1004, 5, 6, 1007, 8, 9, 1010]\n[1001, 1002, 1003, 1004, 1005, 1006, 1007, 1008, 1009, 1010]\n[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]\n"
],
[
"# each 新しい値を生成することなく反復する。返り値は:ok というアトム。\n!elixir -e 'IO.inspect Enum.each([\"one\", \"two\", \"three\"], fn(s) -> IO.puts(s) end)'\n!elixir -e 'IO.puts Enum.each([\"one\", \"two\", \"three\"], fn(s) -> IO.puts(s) end)'",
"one\ntwo\nthree\n:ok\n"
],
[
"# map 関数を各要素に適用して新しいリストを生み出す\n!elixir -e 'IO.inspect Enum.map([0, 1, 2, 3], fn(x) -> x - 1 end)'",
"[-1, 0, 1, 2]\n"
],
[
"# min 最小の値を探す。 リストが空の場合エラーになる\n# リストが空だったときのために予め最小値を生成する関数を渡すことができる\n!elixir -e 'IO.inspect Enum.min([5, 3, 0, -1])'\n!elixir -e 'IO.inspect Enum.min([], fn -> :foo end)'",
"-1\n:foo\n"
],
[
"# max 最大の(max/1)値を返す\n!elixir -e 'IO.inspect Enum.max([5, 3, 0, -1])'\n!elixir -e 'IO.inspect Enum.max([], fn -> :bar end)'",
"5\n:bar\n"
],
[
"# filter 与えられた関数によって true と評価された要素だけを得る\n!elixir -e 'IO.inspect Enum.filter([1, 2, 3, 4], fn(x) -> rem(x, 2) == 0 end)'\n!elixir -e 'IO.inspect Enum.filter([], fn(x) -> rem(x, 2) == 0 end)'",
"[2, 4]\n[]\n"
],
[
"# reduce リストを関数に従って単一の値へ抽出する。 accumulator を指定できる。\n# accumulator が与えられない場合は最初の要素が用いられる。\n!elixir -e 'IO.inspect Enum.reduce([1, 2, 3], 10, fn(x, acc) -> x + acc end)'\n!elixir -e 'IO.inspect Enum.reduce([1, 2, 3], fn(x, acc) -> x + acc end)'\n!elixir -e 'IO.inspect Enum.reduce([\"a\",\"b\",\"c\"], \"1\", fn(x,acc)-> x <> acc end)'",
"16\n6\n\"cba1\"\n"
],
[
"# sort `sort/1` はソートの順序に Erlangの Term 優先順位 を使う\n!elixir -e 'IO.inspect Enum.sort([5, 6, 1, 3, -1, 4])'\n!elixir -e 'IO.inspect Enum.sort([:foo, \"bar\", Enum, -1, 4])'\n\n# `sort/2` は、順序を決める為の関数を渡すことができる\n!elixir -e 'IO.inspect Enum.sort([%{:val => 4}, %{:val => 1}], fn(x, y) -> x[:val] > y[:val] end)'\n\n# なしの場合\n!elixir -e 'IO.inspect Enum.sort([%{:count => 4}, %{:count => 1}])'\n\n# sort/2 に :asc または :desc をソート関数として渡すことができる\n!elixir -e 'IO.inspect Enum.sort([2, 3, 1], :desc)'\n",
"[-1, 1, 3, 4, 5, 6]\n[-1, 4, Enum, :foo, \"bar\"]\n[%{val: 4}, %{val: 1}]\n[%{count: 1}, %{count: 4}]\n[3, 2, 1]\n"
],
[
"# uniq 重複した要素を取り除く\n!elixir -e 'IO.inspect Enum.uniq([1, 2, 3, 2, 1, 1, 1, 1, 1])'\n[1, 2, 3]\n\n# uniq_by 重複した要素を削除するが、ユニークかどうか比較を行う関数を渡せる\n!elixir -e 'IO.inspect Enum.uniq_by([%{x: 1, y: 1}, %{x: 2, y: 1}, %{x: 3, y: 3}], fn coord -> coord.y end)'",
"[1, 2, 3]\n[%{x: 1, y: 1}, %{x: 3, y: 3}]\n"
]
],
[
[
"# キャプチャ演算子 `&` を使用した Enum と無名関数 \n\nelixir の Enum モジュール内の多くの関数は、引数として無名関数を取る。\n\nこれらの無名関数は、多くの場合、キャプチャ演算子 `&` を使用して省略形で記述される。\n\n",
"_____no_output_____"
]
],
[
[
"# 無名関数でのキャプチャ演算子の使用 \n!elixir -e 'IO.inspect Enum.map([1,2,3], fn number -> number + 3 end)'\n!elixir -e 'IO.inspect Enum.map([1,2,3], &(&1 + 3))'\n!elixir -e 'plus_three = &(&1 + 3);IO.inspect Enum.map([1,2,3], plus_three)'",
"[4, 5, 6]\n[4, 5, 6]\n[4, 5, 6]\n"
],
[
"# Enum.all? でもキャプチャ演算子が使えるか\n# all? 関数を引数で受け取り、リストの全体が true の時、true を返す\n# !elixir -e 'IO.puts Enum.all?([\"foo\", \"bar\", \"hello\"], fn(s) -> String.length(s) == 3 end)'\n!elixir -e 'IO.puts Enum.all?([\"foo\", \"bar\", \"hello\"], &(String.length(&1)==3))'\n# !elixir -e 'IO.puts Enum.all?([\"foo\", \"bar\", \"hello\"], fn(s) -> String.length(s) >1 end)'\n!elixir -e 'IO.puts Enum.all?([\"foo\", \"bar\", \"hello\"], &(String.length(&1)>1))'",
"false\ntrue\n"
]
],
[
[
"---\n# パターンマッチング\n\nパターンマッチングでは、値、データ構造、関数をマッチすることができる。\n\n* マッチ演算子\n* ピン演算子",
"_____no_output_____"
]
],
[
[
"# マッチ演算子 `=` はマッチ演算子である。 マッチ演算子を通して値を代入し、\n# その後、マッチさせることができる。マッチすると、方程式の結果が返され、\n# 失敗すると、エラーになる\n!elixir -e 'IO.puts x = 1'\n!elixir -e 'x = 1;IO.puts 1 = x'\n# !elixir -e 'x = 1;IO.puts 2 = x'\n#=> (MatchError) no match of right hand side value: 1",
"1\n1\n"
],
[
"# リストでのマッチ演算子\n!elixir -e 'IO.inspect list = [1, 2, 3]'\n!elixir -e 'list = [1, 2, 3]; IO.inspect [1, 2, 3] = list'\n# !elixir -e 'list = [1, 2, 3]; IO.inspect [] = list'\n#=> (MatchError) no match of right hand side value: [1, 2, 3]",
"[1, 2, 3]\n[1, 2, 3]\n"
],
[
"!elixir -e 'list = [1, 2, 3]; IO.inspect [1 | tail] = list'\n!elixir -e 'list = [1, 2, 3]; [1 | tail] = list; IO.inspect tail'",
"[1, 2, 3]\n[2, 3]\n"
],
[
"# タプルとマッチ演算子\n!elixir -e 'IO.inspect {:ok, value} = {:ok, \"Successful!\"}'\n!elixir -e '{:ok, value} = {:ok, \"Successful!\"}; IO.inspect value'",
"{:ok, \"Successful!\"}\n\"Successful!\"\n"
]
],
[
[
"---\n**ピン演算子**\n\nマッチ演算子は左辺に変数が含まれている時に代入操作を行う。 \n\nこの変数を再び束縛するという挙動は望ましくない場合がある。 そうした状況のために、ピン演算子 `^` がある。\n\nピン演算子で変数を固定すると、新しく再束縛するのではなく既存の値とマッチする。\n",
"_____no_output_____"
]
],
[
[
"# ピン演算子\n!elixir -e 'IO.inspect x = 1'\n# !elixir -e 'x = 1; IO.inspect ^x = 2'\n#=> ** (MatchError) no match of right hand side value: 2\n!elixir -e 'x = 1; IO.inspect {x, ^x} = {2, 1}'\n!elixir -e 'x = 1;{x, ^x} = {2, 1}; IO.inspect x'\n!echo\n!elixir -e 'IO.inspect key = \"hello\"'\n!elixir -e 'key = \"hello\"; IO.inspect %{^key => value} = %{\"hello\" => \"world\"}'\n!elixir -e 'key = \"hello\"; %{^key => value} = %{\"hello\" => \"world\"}; IO.inspect value'\n!elixir -e 'key = \"hello\"; %{^key => value} = %{\"hello\" => \"world\"}; IO.inspect value'",
"1\n{2, 1}\n2\n\n\"hello\"\n%{\"hello\" => \"world\"}\n\"world\"\n\"world\"\n"
],
[
"# 関数の clause でのピン演算子\n!elixir -e 'IO.inspect greeting = \"Hello\"'\n!elixir -e 'greeting = \"Hello\"; IO.inspect greet = fn (^greeting, name) -> \"Hi #{name}\"; (greeting, name) -> \"#{greeting},#{name}\" end'\n!elixir -e 'greeting = \"Hello\"; greet = fn (^greeting, name) -> \"Hi #{name}\"; (greeting, name) -> \"#{greeting},#{name}\" end; IO.inspect greet.(\"Hello\",\"Sean\")'\n!elixir -e 'greeting = \"Hello\"; greet = fn (^greeting, name) -> \"Hi #{name}\"; (greeting, name) -> \"#{greeting},#{name}\" end; IO.inspect greet.(\"Mornin\",\"Sean\")'",
"\"Hello\"\n#Function<43.65746770/2 in :erl_eval.expr/5>\n\"Hi Sean\"\n\"Mornin,Sean\"\n"
]
],
[
[
"# 制御構造 control structure\n\n* if と unless\n* case\n* cond\n* with",
"_____no_output_____"
],
[
"if と unless \n\nelixir の if と unless は ruby と同じ。\n\nelixir は if と unless はマクロとして定義されている。\n\nこの実装は kernel module で知ることができる。\n\nelixir では偽とみなされる値は nil と真理値の false だけだということに留意。",
"_____no_output_____"
]
],
[
[
"%%writefile temp.exs\nIO.puts (\n if String.valid?(\"Hello\") do\n \"Valid string!\"\n else\n \"Invalid string.\"\n end)",
"Overwriting temp.exs\n"
],
[
"!elixir temp.exs",
"Valid string!\n"
],
[
"%%writefile temp.exs\nif \"a string value\" do\n IO.puts \"Truthy\"\nend",
"Overwriting temp.exs\n"
],
[
"!elixir temp.exs",
"Truthy\n"
],
[
"# unless/2 は if/2 の逆で、条件が否定される時だけ作用する\n%%writefile temp.exs\nunless is_integer(\"hello\") do\n IO.puts \"Not an Int\"\nend",
"Overwriting temp.exs\n"
],
[
"!elixir temp.exs",
"Not an Int\n"
],
[
"# 実験 シェルワンライナー版 do や end の前後にセミコロンは要らない\n!elixir -e 'unless is_integer(\"hello\") do IO.puts \"Not an Int\" end'",
"Not an Int\n"
],
[
"# 複数のパターンにマッチする場合、case/2 を使う\n%%writefile temp.exs\nIO.puts(\n case {:error, \"Hello World\"} do\n {:ok, result} -> result\n {:error, _} -> \"Uh oh!\"\n _ -> \"Catch all\"\n end \n)",
"_____no_output_____"
],
[
"!elixir temp.exs",
"Uh oh!\n"
],
[
"# アンダースコア _ 変数は case/2 命令文の中に含まれる重要な要素\n# これが無いと、マッチするものが見あたらない場合にエラーが発生する\n# エラーの例\n!elixir -e 'case :even do :odd -> IO.puts \"Odd\" end'",
"** (CaseClauseError) no case clause matching: :even\n (stdlib 3.17) erl_eval.erl:973: :erl_eval.case_clauses/6\n (elixir 1.13.0) lib/code.ex:404: Code.validated_eval_string/3\n"
],
[
"# アンダースコア _ を\"他の全て\"にマッチする else と考えること\n!elixir -e 'case :even do :odd -> IO.puts \"Odd\"; _ -> IO.puts \"Not odd\" end'",
"Not Odd\n"
],
[
"# case/2 はパターンマッチングに依存しているため、パターンマッチングと同じルールや制限が全て適用される\n# 既存の変数に対してマッチさせようという場合にはピン ^ 演算子を使う\n!elixir -e 'pie=3.14; IO.puts(case \"cherry pie\" do ^pie -> \"Not so tasty\"; pie -> \"I bet #{pie} is tasty\" end)'\n!elixir -e 'pie=3.14; IO.puts(case \"cherry pie\" do pie -> \"Not so tasty\"; pie -> \"I bet #{pie} is tasty\" end)'",
"I bet cherry pie is tasty\n\u001b[33mwarning: \u001b[0mvariable \"pie\" is unused (there is a variable with the same name in the context, use the pin operator (^) to match on it or prefix this variable with underscore if it is not meant to be used)\n nofile:1\n\nNot so tasty\n"
],
[
"# case/2 はガード節に対応している\n# 公式ドキュメントの Expressions allowed in guard clauses を参照\n!elixir -e 'IO.puts(case {1, 2, 3} do {1, x, 3} when x > 0 -> \"Will match\"; _ -> \"Wont match\" end)'",
"Will match\n"
]
],
[
[
"---\nガード節とは何か?\n\n公式ドキュメントの Expressions allowed in guard clauses を参照",
"_____no_output_____"
]
],
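[
[
"# Added sketch: a guard is a `when` condition attached to a function or case clause;\n# only a fixed set of expressions (type checks, comparisons, arithmetic, ...) is allowed in it.\n!elixir -e 'defmodule Check do def sign(x) when x > 0, do: :positive; def sign(x) when x < 0, do: :negative; def sign(_), do: :zero end; IO.inspect {Check.sign(5), Check.sign(-3), Check.sign(0)}'",
"_____no_output_____"
]
],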
[
[
"# cond\n!elixir -e 'IO.puts (cond do 2+2==5 -> \"This will not be true\"; 2*2==3 -> \"Nor this\"; 1+1 == 2 -> \"But this will\" end)'",
"But this will\n"
],
[
"# cond も case と同様マッチしない場合にエラーになるので、true になる条件を定義する\n!elixir -e 'IO.puts (cond do 7+1==0 -> \"Incorrect\"; true -> \"Catch all\" end)'",
"Catch all\n"
],
[
"# with\n# 特殊形式の with/1 はネストされた case/2 文やきれいにパイプできない状況に便利\n# with/1 式はキーワード, ジェネレータ, そして式から成り立っている\n# ジェネレータについてはリスト内包表記のところで詳しく述べる\n# `<-` の右側と左側を比べるのにパターンマッチングが使われる\n!elixir -e 'user=%{first: \"Sean\", last: \"Callan\"}; IO.inspect user'\n!elixir -e 'user=%{first: \"Sean\", last: \"Callan\"}; with {:ok, first} <- Map.fetch(user, :first), {:ok, last} <- Map.fetch(user, :last), do: IO.puts last <> \", \" <> first'",
"%{first: \"Sean\", last: \"Callan\"}\nCallan, Sean\n"
],
[
"# シェルワンライナーが長いのでファイルにする\n%%writefile temp.exs\nuser=%{first: \"Sean\", last: \"Callan\"}\nwith {:ok, first} <- Map.fetch(user, :first), \n {:ok, last} <- Map.fetch(user, :last), \n do: IO.puts last <> \", \" <> first",
"Overwriting temp.exs\n"
],
[
"!elixir temp.exs",
"_____no_output_____"
],
[
"# 式がマッチに失敗した場合\n# Map.fetch が失敗して :error を返し、first が設定されずプログラムが止まる\n%%writefile temp.exs\nuser = %{first: \"doomspork\"}\nwith {:ok, first} <- Map.fetch(user, :first), \n {:ok, last} <- Map.fetch(user, :last), \n do: IO.puts last <> \", \" <> first",
"Overwriting temp.exs\n"
],
[
"!elixir temp.exs",
"_____no_output_____"
],
[
"# with/1 で else が使える\n%%writefile temp.exs\nimport Integer\nm = %{a: 1, c: 3}\na =\n with {:ok, number} <- Map.fetch(m, :a),\n true <- is_even(number) do\n IO.puts \"#{number} divided by 2 is #{div(number, 2)}\"\n :even\n else\n :error ->\n IO.puts(\"We don't have this item in map\")\n :error\n _ ->\n IO.puts(\"It is odd\")\n :odd\n end\nIO.inspect a",
"Overwriting temp.exs\n"
],
[
"!elixir temp.exs",
"We don't have this item in map\n:error\n"
]
],
[
[
"# 関数 Function",
"_____no_output_____"
]
],
[
[
"# 関数型言語では、関数は第一級オブジェクト first class object である\n# ここでは無名関数、名前付き関数、アリティ、パターンマッチング、プライベート関数、ガード、デフォルト引数について学ぶ\n\n# 無名関数 anonymous function\n# fn end のキーワードを用い、 引数 `->` 関数定義 の形で定義する\n\n%%writefile temp.exs\nsum = fn (a, b) -> a + b end\nIO.puts sum.(2, 3)",
"Overwriting temp.exs\n"
],
[
"!elixir temp.exs",
"5\n"
],
[
"# シェルワンライナーで書いてみる\n!elixir -e 'sum=fn(a,b)->a+b end;IO.puts sum.(2,3)'",
"5\n"
],
[
"# elixir では通常関数定義に省略記号 & を使う (キャプチャ演算子)\n!elixir -e 'sum = &(&1 + &2); IO.puts sum.(2, 3)'",
"5\n"
]
],
[
[
"---\n質問 無名関数に引数を渡して結果を得るのはどうやるのか\n\n&(&1 + &2).(2, 3) として出来なかった。 => 出来た。\n\n!elixir -e 'IO.puts ((&(&1 + &2)).(2,3))'",
"_____no_output_____"
]
],
[
[
"!elixir -e 'IO.puts ((fn (a,b) -> a + b end).(2,3))'",
"5\n"
],
[
"!elixir -e 'IO.puts ((&(&1 + &2)).(2,3))'",
"5\n"
],
[
"# 関数定義にパターンマッチングが使える\n%%writefile temp.exs\nhandle_result = fn\n {:ok, _result} -> IO.puts \"Handling result...\"\n {:ok, _} -> IO.puts \"This would be never run as previous will be matched beforehand.\"\n {:error} -> IO.puts \"An error has occurred!\"\nend\n\nsome_result = 1\n\nhandle_result.({:ok, some_result}) #=> Handling result...\nhandle_result.({:error}) #=> An error has occured!",
"Overwriting temp.exs\n"
],
[
"!elixir temp.exs",
"Handling result...\nAn error has occurred!\n"
],
[
"# 名前付き関数\n# 名前付き関数はモジュール内部で def キーワードを用いて定義する\n%%writefile temp.exs\ndefmodule Greeter do\n def hello(name) do\n \"Hello, \" <> name\n end\nend\n\nIO.puts Greeter.hello(\"Sean\")",
"Overwriting temp.exs\n"
],
[
"!elixir temp.exs",
"Hello, Sean\n"
],
[
"# 次のような書き方もできる do: を使う\n%%writefile temp.exs\ndefmodule Greeter do\n def hello(name), do: \"Hello, \" <> name\nend\nIO.puts Greeter.hello(\"Sean\")",
"Writing temp.exs\n"
],
[
"!elixir temp.exs",
"Hello, Sean\n"
],
[
"# 実験 シェルワンライナーで出来るか\n!elixir -e 'defmodule Greeter do def hello(name) do \"Hello, \" <> name end end;IO.puts Greeter.hello(\"Sean\")'",
"Hello, Sean\n"
],
[
"# 実験 シェルワンライナーで `, do:` 構文が使えるか\n!elixir -e 'defmodule Greeter do def hello(name),do: \"Hello, \" <> name end;IO.puts Greeter.hello(\"Sean\")'",
"Hello, Sean\n"
],
[
"# 再帰\n%%writefile temp.exs\ndefmodule Length do\n def of([]), do: 0\n def of([_ | tail]), do: 1 + of(tail)\nend\n\nIO.puts Length.of []\nIO.puts Length.of [1, 2, 3]",
"Overwriting temp.exs\n"
],
[
"!elixir temp.exs",
"0\n3\n"
],
[
"# アリティとは関数の引数の数\n# 引数の数が違えば別の関数\n%%writefile temp.exs\ndefmodule Greeter2 do\n def hello(), do: \"Hello, anonymous person!\" # hello/0\n def hello(name), do: \"Hello, \" <> name # hello/1\n def hello(name1, name2), do: \"Hello, #{name1} and #{name2}\" # hello/2\nend\n\nIO.puts Greeter2.hello()\nIO.puts Greeter2.hello(\"Fred\")\nIO.puts Greeter2.hello(\"Fred\", \"Jane\")",
"Overwriting temp.exs\n"
],
[
"!elixir temp.exs",
"Hello, anonymous person!\nHello, Fred\nHello, Fred and Jane\n"
],
[
"# 関数とパターンマッチング\n%%writefile temp.exs\ndefmodule Greeter1 do\n def hello(%{name: person_name}) do\n IO.puts \"Hello, \" <> person_name\n end\nend\n\nfred = %{\nname: \"Fred\",\nage: \"95\",\nfavorite_color: \"Taupe\"\n}\n\nIO.puts Greeter1.hello(fred) #=> Hello, fred になる\n#IO.puts Greeter1.hello(%{age: \"95\", favorite_color: \"Taupe\"}) #=> (FunctionClauseError) no function clause matching in Greeter1.hello/1 ",
"Overwriting temp.exs\n"
],
[
"!elixir temp.exs",
"Hello, Fred\nok\n"
],
[
"# Fredの名前を person_name にアサインしたいが、人物マップ全体の値も保持したいという場合\n# マップを引数にすれば、別々の変数に格納することができる\n%%writefile temp.exs\ndefmodule Greeter2 do\n def hello(%{name: person_name} = person) do\n IO.puts \"Hello, \" <> person_name\n IO.inspect person\n end\nend\n\nfred = %{\n name: \"Fred\",\n age: \"95\",\n favorite_color: \"Taupe\"\n}\n\nGreeter2.hello(fred)\nIO.puts(\"\")\nGreeter2.hello(%{name: \"Fred\"})\nIO.puts(\"\")\n# Greeter2.hello(%{age: \"95\", favorite_color: \"Taupe\"}) #=> (FunctionClauseError) no function clause matching in Greeter2.hello/1 ",
"Overwriting temp.exs\n"
],
[
"!elixir temp.exs",
"Hello, Fred\n%{age: \"95\", favorite_color: \"Taupe\", name: \"Fred\"}\n\nHello, Fred\n%{name: \"Fred\"}\n\n"
]
],
[
[
"",
"_____no_output_____"
]
],
[
[
"# %{name: person_name} と person の順序を入れ替えても、それぞれがfredとマッチングするので同じ結果となる\n# 変数とマップを入れ替えてみる\n# それぞれがパターンマッチしているので結果は同じになる\n%%writefile temp.exs\ndefmodule Greeter3 do\n def hello(person = %{name: person_name}) do\n IO.puts \"Hello, \" <> person_name\n IO.inspect person\n end\nend\n\nfred = %{\n name: \"Fred\",\n age: \"95\",\n favorite_color: \"Taupe\"\n}\n\nGreeter3.hello(fred)\nIO.puts(\"\")\nGreeter3.hello(%{name: \"Fred\"})",
"Overwriting temp.exs\n"
],
[
"!elixir temp.exs",
"Hello, Fred\n%{age: \"95\", favorite_color: \"Taupe\", name: \"Fred\"}\n\nHello, Fred\n%{name: \"Fred\"}\n"
],
[
"# プライベート関数\n# プライベート関数は defp を用いて定義する\n# そのモジュール自身の内部からのみ呼び出すことが出来る\n%%writefile temp.exs\ndefmodule Greeter do\n def hello(name), do: phrase() <> name\n defp phrase, do: \"Hello, \"\nend\n\nIO.puts Greeter.hello(\"Sean\") #=> \"Hello, Sean\"\n\n# IO.puts Greeter.phrase #=> (UndefinedFunctionError) function Greeter.phrase/0 is undefined or private",
"Overwriting temp.exs\n"
],
[
"!elixir temp.exs",
"Hello, Sean\n"
],
[
"# ガード\n%%writefile temp.exs\ndefmodule Greeter do\n def hello(names) when is_list(names) do\n names\n |> Enum.join(\", \")\n |> hello\n end\n\n def hello(name) when is_binary(name) do\n phrase() <> name\n end\n\n defp phrase, do: \"Hello, \"\nend\n\nIO.puts Greeter.hello [\"Sean\", \"Steve\"]\nIO.puts Greeter.hello \"Bill\"",
"Overwriting temp.exs\n"
],
[
"!elixir temp.exs",
"Hello, Sean, Steve\nHello, Bill\n"
]
],
[
[
"---\n質問 Elixir のガードは Haskell のガードと同じか?",
"_____no_output_____"
]
],
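[
[
"# Added sketch (hedged): Elixir guards play a role similar to Haskell guards, but only\n# a limited set of expressions may appear in them. Custom guards can be written with defguard.\n!elixir -e 'defmodule MyGuards do defguard is_adult(age) when is_integer(age) and age >= 18 end; import MyGuards; IO.inspect (case 20 do n when is_adult(n) -> :adult; _ -> :minor end)'",
"_____no_output_____"
]
],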
[
[
"# デフォルト引数\n# デフォルト値が欲しい場合、引数 \\\\ デフォルト値の記法を用いる\n%%writefile temp.exs\ndefmodule Greeter do\n def hello(name, language_code \\\\ \"en\") do\n phrase(language_code) <> name\n end\n\n defp phrase(\"en\"), do: \"Hello, \"\n defp phrase(\"es\"), do: \"Hola, \"\nend\n\nIO.puts Greeter.hello(\"Sean\", \"en\")\nIO.puts Greeter.hello(\"Sean\")\nIO.puts Greeter.hello(\"Sean\", \"es\")",
"Overwriting temp.exs\n"
],
[
"!elixir temp.exs",
"Hello, Sean\nHello, Sean\nHola, Sean\n"
],
[
"# ガードとデフォルト引数を組み合わせる場合\n# 混乱を避けるためデフォルト引数を処理する定義を先に置く\n%%writefile temp.exs\ndefmodule Greeter do\n def hello(names, language_code \\\\ \"en\")\n\n def hello(names, language_code) when is_list(names) do\n names\n |> Enum.join(\", \")\n |> hello(language_code)\n end\n\n def hello(name, language_code) when is_binary(name) do\n phrase(language_code) <> name\n end\n\n defp phrase(\"en\"), do: \"Hello, \"\n defp phrase(\"es\"), do: \"Hola, \"\nend\n\nIO.puts Greeter.hello [\"Sean\", \"Steve\"] #=> \"Hello, Sean, Steve\"\n\nIO.puts Greeter.hello [\"Sean\", \"Steve\"], \"es\" #=> \"Hola, Sean, Steve\"\n\nIO.puts Greeter.hello \"Bob\", \"es\"",
"Overwriting temp.exs\n"
],
[
"!elixir temp.exs",
"Hello, Sean, Steve\nHola, Sean, Steve\nHola, Bob\n"
],
[
"# パイプライン演算子\n# パイプライン演算子 `|>` はある式の結果を別の式に渡す\n# 関数のネストを理解しやすくするためのもの\n\n# 文字列をトークン化する、単語に分ける\n!elixir -e 'IO.inspect \"Elixir rocks\" |> String.split()'",
"[\"Elixir\", \"rocks\"]\n"
],
[
"!elixir -e 'IO.inspect \"Elixir rocks\" |> String.upcase() |> String.split()'",
"[\"ELIXIR\", \"ROCKS\"]\n"
],
[
"# パイプラインを使う場合に関数の括弧は省略せずには入れた方がわかりやすい\n!elixir -e 'IO.inspect \"elixir\" |> String.ends_with?(\"ixir\")'",
"true\n"
]
],
[
[
"# モジュール",
"_____no_output_____"
],
[
"---\n質問 いままで IO.puts とか一々モジュール名を付けていたが、elixir ではこれが普通なのか?\n\n関数を作る際に一々モジュールを作成していたがあれで既存のモジュールに付け加えられているのか?",
"_____no_output_____"
]
],
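[
[
"# Added sketch for the question above: the Module.function form is the norm, but a module\n# can be imported so its functions are callable without the prefix (import is covered again below).\n!elixir -e 'import IO; puts \"no module prefix needed\"'",
"_____no_output_____"
]
],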
[
[
"# モジュールの基本的な例\n%%writefile temp.exs\ndefmodule Example do\n def greeting(name) do\n \"Hello #{name}.\"\n end\nend\n\nIO.puts Example.greeting \"Sean\"",
"Writing temp.exs\n"
],
[
"!elixir temp.exs",
"Hello Sean.\n"
],
[
"# モジュールはネストする事ができる\n%%writefile temp.exs\ndefmodule Example.Greetings do\n def morning(name) do\n \"Good morning #{name}.\"\n end\n\n def evening(name) do\n \"Good night #{name}.\"\n end\nend\n\nIO.puts Example.Greetings.morning \"Sean\"",
"Overwriting temp.exs\n"
],
[
"!elixir temp.exs",
"Good morning Sean.\n"
],
[
"# モジュールの属性\n# モジュール属性は Elixir では一般に定数として用いられる\n# Elixirには予約されている属性がある\n# moduledoc — 現在のモジュールにドキュメントを付ける\n# doc — 関数やマクロについてのドキュメント管理\n# behaviour — OTPまたはユーザが定義した振る舞い(ビヘイビア)に用いる\n%%writefile temp.exs\ndefmodule Example do\n @greeting \"Hello\"\n\n def greeting(name) do\n ~s(#{@greeting} #{name}.)\n end\nend\n\nIO.puts Example.greeting \"tak\"",
"Overwriting temp.exs\n"
],
[
"!elixir temp.exs",
"Hello tak.\n"
],
[
"# 構造体 struct \n# 構造体は定義済みのキーの一群とデフォルト値を持つマップである\n# 定義するには defstruct を用いる\n%%writefile temp.exs\ndefmodule Example.User do\n defstruct name: \"Sean\", roles: []\nend\n\ndefmodule Main do\n IO.inspect %Example.User{}\n IO.inspect %Example.User{name: \"Steve\"}\n IO.inspect %Example.User{name: \"Steve\", roles: [:manager]}\nend",
"Overwriting temp.exs\n"
],
[
"!elixir temp.exs",
"%Example.User{name: \"Sean\", roles: []}\n%Example.User{name: \"Steve\", roles: []}\n%Example.User{name: \"Steve\", roles: [:manager]}\n"
],
[
"# 構造体の更新\n%%writefile temp.exs\ndefmodule Example.User do\n defstruct name: \"Sean\", roles: []\nend\n\ndefmodule Main do\n steve = %Example.User{name: \"Steve\"}\n IO.inspect %{steve | name: \"Sean\"}\n IO.inspect steve\nend",
"_____no_output_____"
],
[
"!elixir temp.exs",
"%Example.User{name: \"Sean\", roles: []}\n%Example.User{name: \"Steve\", roles: []}\n"
],
[
"# 構造体の更新とマッチング\n%%writefile temp.exs\ndefmodule Example.User do\n defstruct name: \"Sean\", roles: []\nend\n\ndefmodule Main do\n steve = %Example.User{name: \"Steve\"}\n sean = %{steve | name: \"Sean\"}\n IO.inspect %{name: \"Sean\"} = sean\nend",
"Overwriting temp.exs\n"
],
[
"!elixir temp.exs",
"%Example.User{name: \"Sean\", roles: []}\n"
],
[
"# inspect の出力を変える\n%%writefile temp.exs\ndefmodule Example.User do\n # @derive {Inspect, only: [:name]}\n @derive {Inspect, except: [:roles]}\n defstruct name: \"Sean\", roles: []\nend\n\ndefmodule Main do\n steve = %Example.User{name: \"Steve\"}\n sean = %{steve | name: \"Sean\"}\n IO.inspect %{name: \"Sean\"} = sean\nend",
"Overwriting temp.exs\n"
],
[
"!elixir temp.exs",
"#Example.User<name: \"Sean\", ...>\n"
],
[
"# コンポジション(Composition)\n# コンポジションを用いてモジュールや構造体に既存の機能を追加する\n\n# alias モジュール名をエイリアスする\n\n%%writefile temp.exs\n\ndefmodule Sayings.Greetings do\n def basic(name), do: \"Hi, #{name}\"\nend\n\ndefmodule Example do\n alias Sayings.Greetings\n def greeting(name), do: Greetings.basic(name)\nend\n\nIO.puts Example.greeting \"Bob!!\"\n\n# aliasを使わない場合\n\n# defmodule Example do\n# def greeting(name), do: Sayings.Greetings.basic(name)\n# end",
"Overwriting temp.exs\n"
],
[
"!elixir temp.exs",
"Hi, Bob!!\n"
],
[
"# 別名で alias したい時は `:as` を使う\n%%writefile temp.exs\ndefmodule Sayings.Greetings do\n def basic(name), do: \"Hi, #{name}\"\nend\n\ndefmodule Example do\n alias Sayings.Greetings, as: Hi\n def print_message(name), do: Hi.basic(name)\nend\n\nIO.puts Example.print_message \"Chris!!\"",
"Overwriting temp.exs\n"
],
[
"!elixir temp.exs",
"Hi, Chris!!\n"
],
[
"# 複数のモジュールを一度に alias する\n\n# defmodule Example do\n# alias Sayings.{Greetings, Farewells}\n# end",
"_____no_output_____"
],
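  [
   "# Added example (editor's sketch, not executed): aliasing several modules from one namespace in a single line\n%%writefile temp.exs\ndefmodule Sayings.Greetings do\n  def basic(name), do: \"Hi, #{name}\"\nend\n\ndefmodule Sayings.Farewells do\n  def basic(name), do: \"Bye, #{name}\"\nend\n\ndefmodule Example do\n  alias Sayings.{Greetings, Farewells}\n\n  def hi(name), do: Greetings.basic(name)\n  def bye(name), do: Farewells.basic(name)\nend\n\nIO.puts Example.hi(\"Sean\")\nIO.puts Example.bye(\"Sean\")\n# run with: !elixir temp.exs",
   "_____no_output_____"
  ],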
[
"# import\n# 関数を取り込みたいという場合には、 import を使う\n!elixir -e 'import List; IO.inspect last([1,2,3])'",
"3\n"
],
[
"# フィルタリング\n# import のデフォルトでは全ての関数とマクロが取り込まれるが、 :only や :except でフィルタすることができる\n# アリティを付ける必要がある\n%%writefile temp.exs\nimport List, only: [last: 1]\nIO.inspect last([1,2,3])\n# IO.inspect first([1,2,3]) #=> (CompileError) temp.exs:3: undefined function first/1 (there is no such import)",
"Overwriting temp.exs\n"
],
[
"!elixir temp.exs",
"3\n"
],
[
"# import には :functions と :macros という2つの特別なアトムもありるこれらはそれぞれ関数とマクロのみを取り込む\n\n# import List, only: :functions\n# import List, only: :macros",
"_____no_output_____"
],
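  [
   "# Added example (editor's sketch, not executed): :functions imports only functions, :macros only macros\n# (Integer.is_even/1 is a macro, so it is only available through the :macros form or a plain import)\n!elixir -e 'import Enum, only: :functions; IO.inspect map([1, 2, 3], fn x -> x * x end)'\n!elixir -e 'import Integer, only: :macros; IO.puts is_even(4)'",
   "_____no_output_____"
  ],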
[
"# require と import の違いがわからない\n# まだロードされていないマクロを呼びだそうとすると、Elixirはエラーを発生させる\n# とのこと\n\n# defmodule Example do\n# require SuperMacros\n# \n# SuperMacros.do_stuff\n# end",
"_____no_output_____"
],
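  [
   "# Added example (editor's sketch, not executed): require makes a module's macros available, but they are\n# still called with the module prefix; import additionally brings the names into the current scope\n!elixir -e 'require Integer; IO.puts Integer.is_even(10)'\n# without the require this fails (Elixir asks you to require Integer before invoking the macro)\n# !elixir -e 'IO.puts Integer.is_even(10)'",
   "_____no_output_____"
  ],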
[
"# use\n# use マクロを用いることで他のモジュールを利用して現在のモジュールの定義を変更することができる\n# コード上で use を呼び出すと、実際には提供されたモジュールに定義されている \n# __using__/1 コールバックを呼び出している \n%%writefile temp.exs\ndefmodule Hello do\n defmacro __using__ _ do\n quote do\n def hello(name), do: \"Hi, #{name}\"\n end\n end\nend\n\ndefmodule Example do\n use Hello\nend\n\nIO.puts Example.hello(\"Sean\")",
"Overwriting temp.exs\n"
],
[
"!elixir temp.exs",
"Hi, Sean\n"
],
[
"# greeting オプションを追加する\n%%writefile temp.exs\ndefmodule Hello do\n defmacro __using__(opts) do\n greeting = Keyword.get(opts, :greeting, \"Hi\")\n\n quote do\n def hello(name), do: unquote(greeting) <> \", \" <> name\n end\n end\nend\n\ndefmodule Example do\n use Hello, greeting: \"Hola\"\nend\n\nIO.puts Example.hello(\"Sean\")",
"Overwriting temp.exs\n"
],
[
"!elixir temp.exs",
"Hola, Sean\n"
]
],
[
[
"# Mix",
"_____no_output_____"
]
],
[
[
"# mixとは Ruby の Bundler, RubyGems, Rake が組み合わさったようなもの\n# colab の環境でやってみる\n!mix new example\n#=> \n# * creating README.md\n# * creating .formatter.exs\n# * creating .gitignore\n# * creating mix.exs\n# * creating lib\n# * creating lib/example.ex\n# * creating test\n# * creating test/test_helper.exs\n# * creating test/example_test.exs\n# \n# Your Mix project was created successfully.\n# You can use \"mix\" to compile it, test it, and more:\n# \n# cd example\n# mix test\n# \n# Run \"mix help\" for more commands.",
"_____no_output_____"
],
[
"# colab 環境ではシステムコマンドを 1 行の中で書かないとディレクトリ内の処理ができない\n!cd example; mix test",
"\u001b[32m.\u001b[0m\u001b[32m.\u001b[0m\n\nFinished in 0.04 seconds (0.00s async, 0.04s sync)\n\u001b[32m1 doctest, 1 test, 0 failures\u001b[0m\n\nRandomized with seed 861553\n"
],
[
"!cd example; ls -la",
"total 36\ndrwxr-xr-x 5 root root 4096 Mar 17 18:47 .\ndrwxr-xr-x 1 root root 4096 Mar 17 18:42 ..\ndrwxr-xr-x 3 root root 4096 Mar 17 18:47 _build\n-rw-r--r-- 1 root root 97 Mar 17 18:42 .formatter.exs\n-rw-r--r-- 1 root root 629 Mar 17 18:42 .gitignore\ndrwxr-xr-x 2 root root 4096 Mar 17 18:42 lib\n-rw-r--r-- 1 root root 572 Mar 17 18:42 mix.exs\n-rw-r--r-- 1 root root 469 Mar 17 18:42 README.md\ndrwxr-xr-x 2 root root 4096 Mar 17 18:42 test\n"
],
[
"!cd example; cat mix.exs\n#=> 次のフォーマットのプログラムが出来る\n# defmodule Example.MixProject do\n# use Mix.Project\n# def project do # 名前(app)と依存関係(deps)が書かれている\n# def application do\n# defp deps do\n# end",
"_____no_output_____"
],
[
"!cd example; iex -S mix",
"_____no_output_____"
],
[
"# iex で対話的に使うことが出来るが colab 環境では出来ない\n# cd example\n# iex -S mix",
"_____no_output_____"
],
[
"# compile\n# mix はコードの変更を自動的にコンパイルする\n# 明示的にコンパイルすることも出来る\n# !cd example; mix compile \n# rootディレクトリ以外から実行する場合は、グローバルmix taskのみが実行可能",
"_____no_output_____"
],
[
"!cd example; mix compile ",
"_____no_output_____"
],
[
"!cd example; ls -la",
"_____no_output_____"
],
[
"!cd example; ls -laR _build",
"_____no_output_____"
],
[
"# 依存関係を管理する \n# 新しい依存関係を追加するには、 mix.exs の deps 内に追加する\n# パッケージ名のアトムと、バージョンを表す文字列)と1つの任意的な値(オプション)を持つタプル\n# 実例として、phoenix_slimのようなプロジェクトの依存関係を見る\n\n# def deps do\n# [\n# {:phoenix, \"~> 1.1 or ~> 1.2\"},\n# {:phoenix_html, \"~> 2.3\"},\n# {:cowboy, \"~> 1.0\", only: [:dev, :test]},\n# {:slime, \"~> 0.14\"}\n# ]\n# end\n\n# cowboy の依存は開発時とテスト時にのみ必要\n\n# 依存しているパッケージの取り込みは bundle install に似たもの\n# mix deps.get",
"_____no_output_____"
],
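  [
   "# Added example (editor's sketch, not executed): after deps are declared in mix.exs they can be listed\n# and fetched with the standard mix tasks\n!cd example; mix deps\n# !cd example; mix deps.get   # fetches the declared dependencies (needs network access)",
   "_____no_output_____"
  ],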
[
"!cd example/_build/test/lib/example/ebin; ./example.app #=> Permission denied\n# colab 環境ではアプリは起動できないと言う事か",
"_____no_output_____"
],
[
"# 環境 \n# Bundler に似て、様々な環境に対応している\n# mixは最初から 3 つの環境で動作するように構成されている\n\n# :dev - 初期状態での環境。\n# :test - mix testで用いられる環境。次のレッスンでさらに見ていきる\n# :prod - アプリケーションを製品に出荷するときに用いられる環境。\n\n# 現在の環境は Mix.env で取得することができる\n# この環境は MIX_ENV 環境変数によって変更することが出来る\n\n# MIX_ENV=prod mix compile",
"_____no_output_____"
]
],
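 [
  [
   "# Added example (editor's sketch, not executed): the current environment is returned by Mix.env/0 and can\n# be switched with the MIX_ENV environment variable\n!cd example; mix run -e 'IO.puts Mix.env()'\n!cd example; MIX_ENV=test mix run -e 'IO.puts Mix.env()'",
   "_____no_output_____"
  ]
 ],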
[
[
"# シギル sigil",
"_____no_output_____"
]
],
[
[
"# シギル sigil とは elixir で文字列リテラルを取り扱うための特別の構文\n# チルダ ~ で始まる\n\n# シギルのリスト\n\n# ~C エスケープや埋め込みを含まない文字のリストを生成する\n# ~c エスケープや埋め込みを含む文字のリストを生成する\n# ~R エスケープや埋め込みを含まない正規表現を生成する\n# ~r エスケープや埋め込みを含む正規表現を生成する\n# ~S エスケープや埋め込みを含まない文字列を生成する\n# ~s エスケープや埋め込みを含む文字列を生成する\n# ~W エスケープや埋め込みを含まない単語のリストを生成する\n# ~w エスケープや埋め込みを含む単語のリストを生成する\n# ~N NaiveDateTime 構造体を生成する\n\n# デリミタのリスト\n\n# <...> カギ括弧のペア angle bracket\n# {...} 中括弧のペア brace\n# [...] 大括弧のペア bracket\n# (...) 小括弧のペア parenthesis\n# |...| パイプ記号のペア pipe\n# /.../ スラッシュのペア slash\n# \"...\" ダブルクォートのペア double quote\n# '...' シングルクォートのペア single quote",
"_____no_output_____"
],
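  [
   "# Added example (editor's sketch, not executed): any of the delimiters listed above may be used\n!elixir -e 'IO.puts ~s(welcome to elixir school)'\n!elixir -e 'IO.inspect ~w[one two three]'\n!elixir -e 'IO.inspect ~w{one two three}'",
   "_____no_output_____"
  ],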
[
"# 文字のリスト #=> tutorial と結果が違う!!!!\n!elixir -e 'IO.puts ~c/2 + 7 = #{ 2 + 7 }/'\n!elixir -e 'IO.puts ~C/2 + 7 = #{ 2 + 7 }/'",
"2 + 7 = #9\n2 + 7 = #9\nwelcome to elixir school\nwelcome to elixir #{String.downcase \"SCHOOL\"}\n"
],
[
"# 正規表現 \n!elixir -e 'IO.puts 3 == 3'\n!elixir -e 'IO.puts \"Elixir\" =~ ~r/elixir/'\n!elixir -e 'IO.puts \"elixir\" =~ ~r/elixir/'\n!echo\n!elixir -e 'IO.puts \"Elixir\" =~ ~r/elixir/i'\n!elixir -e 'IO.puts \"elixir\" =~ ~r/elixir/i'",
"true\nfalse\ntrue\n\ntrue\ntrue\n"
],
[
"# Erlang の正規表現ライブラリを元に作られた Regex.split/2 を使う\n!elixir -e 'string=\"100_000_000\"; IO.inspect Regex.split(~r/_/, string)'",
"[\"100\", \"000\", \"000\"]\n"
],
[
"# 文字列\n!elixir -e 'IO.puts ~s/welcome to elixir #{String.downcase \"SCHOOL\"}/'\n!elixir -e 'IO.puts ~S/welcome to elixir #{String.downcase \"SCHOOL\"}/'",
"welcome to elixir school\nwelcome to elixir #{String.downcase \"SCHOOL\"}\n"
],
[
"# 単語のリスト\n!elixir -e 'IO.inspect ~w/i love elixir school/'\n!elixir -e 'IO.inspect ~w/i love\\telixir school/'\n!elixir -e 'IO.inspect ~W/i love\\telixir school/'\n!elixir -e 'name=\"Bob\"; IO.inspect ~w/i love #{name}lixir school/'\n!elixir -e 'name=\"Bob\"; IO.inspect ~W/i love #{name}lixir school/'",
"[\"i\", \"love\", \"elixir\", \"school\"]\n[\"i\", \"love\", \"elixir\", \"school\"]\n[\"i\", \"love\\\\telixir\", \"school\"]\n[\"i\", \"love\", \"Boblixir\", \"school\"]\n[\"i\", \"love\", \"\\#{name}lixir\", \"school\"]\n"
],
[
"# NaiveDateTime \n# NaiveDateTime は タイムゾーンがない DateTime を表現する構造体を手早く作るときに有用\n# NaiveDateTime 構造体を直接作ることは避けるべき\n# パターンマッチングには有用\n!elixir -e 'IO.inspect NaiveDateTime.from_iso8601(\"2015-01-23 23:50:07\") == {:ok, ~N[2015-01-23 23:50:07]}'",
"true\n"
],
[
"# シギルを作る\n%%writefile temp.exs\ndefmodule MySigils do\n def sigil_u(string, []), do: String.upcase(string)\nend\n\ndefmodule Main do\n import MySigils\n IO.puts (~u/elixir school/)\nend ",
"Overwriting temp.exs\n"
],
[
"!elixir temp.exs",
"ELIXIR SCHOOL\n"
]
],
[
[
"**ドキュメント**",
"_____no_output_____"
],
[
"**インラインドキュメント用の属性**\n\n* @moduledoc - モジュールレベルのドキュメント用\n* @doc - 関数レベルのドキュメント用\n\n省略",
"_____no_output_____"
],
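  [
   "# Added example (editor's sketch, not executed): module-level and function-level documentation attributes\n%%writefile temp.exs\ndefmodule Greeter do\n  @moduledoc \"\"\"\n  Functions for greeting people.\n  \"\"\"\n\n  @doc \"Returns a greeting for the given name.\"\n  def hello(name), do: \"Hello, \" <> name\nend\n\nIO.puts Greeter.hello(\"Sean\")\n# run with: !elixir temp.exs  (in a Mix project, h Greeter.hello inside iex shows the @doc text)",
   "_____no_output_____"
  ],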
[
"**テスト**\n\nExUnit\n\n省略",
"_____no_output_____"
],
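  [
   "# Added example (editor's sketch, not executed): a minimal ExUnit test that can run as a plain script\n%%writefile temp.exs\nExUnit.start()\n\ndefmodule ExampleTest do\n  use ExUnit.Case\n\n  test \"the truth\" do\n    assert 1 + 1 == 2\n  end\nend\n# run with: !elixir temp.exs  (inside a mix project the usual command is: mix test)",
   "_____no_output_____"
  ],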
[
"# 内包表記",
"_____no_output_____"
]
],
[
[
"# 内包表記 list comprehension\n# 内包表記は列挙体 enumerable をループするための糖衣構文である\n\n!elixir -e 'list=[1,2,3,4,5];IO.inspect for x <- list, do: x*x'",
"[1, 4, 9, 16, 25]\n"
],
[
"# for とジェネレータの使い方に留意する\n# ジェネレータとは `x <- list` の部分\n# Haskell だと [x * x | x <- list] と書き、数学の集合での表記に近いが Elixir ではこのように書く\n# 内包表記はリストに限定されない\n\n# キーワードリスト\n!elixir -e 'IO.inspect for {_key, val} <- [one: 1, two: 2, three: 3], do: val'",
"[1, 2, 3]\n"
],
[
"# マップ\n!elixir -e 'IO.inspect for {k, v} <- %{\"a\" => \"A\", \"b\" => \"B\"}, do: {k, v}'",
"[{\"a\", \"A\"}, {\"b\", \"B\"}]\n"
],
[
"# バイナリ\n!elixir -e 'IO.inspect for <<c <- \"hello\">>, do: <<c>>'",
"[\"h\", \"e\", \"l\", \"l\", \"o\"]\n"
],
[
"# ジェネレータは入力値セットと左辺の変数を比較するのにパターンマッチングを利用している\n# マッチするものが見つからない場合には、値は無視される\n!elixir -e 'IO.inspect for {:ok, val} <- [ok: \"Hello\", error: \"Unknown\", ok: \"World\"], do: val'",
"[\"Hello\", \"World\"]\n"
],
[
"# 入れ子\n%%writefile temp.exs\nlist = [1, 2, 3, 4]\nIO.inspect (\n for n <- list, times <- 1..n do\n String.duplicate(\"*\", times)\n end\n)",
"Writing temp.exs\n"
],
[
"!elixir temp.exs",
"[\"*\", \"*\", \"**\", \"*\", \"**\", \"***\", \"*\", \"**\", \"***\", \"****\"]\n"
],
[
"# ループの見える化\n!elixir -e 'list = [1, 2, 3, 4]; for n <- list, times <- 1..n, do: IO.puts \"#{n} - #{times}\"'",
"1 - 1\n2 - 1\n2 - 2\n3 - 1\n3 - 2\n3 - 3\n4 - 1\n4 - 2\n4 - 3\n4 - 4\n"
],
[
"# フィルタ\n!elixir -e 'import Integer; IO.inspect for x <- 1..10, is_even(x), do: x'",
"[2, 4, 6, 8, 10]\n"
],
[
"# 偶数かつ 3 で割り切れる値のみをフィルタ\n%%writefile temp.exs\nimport Integer\nIO.inspect (\n for x <- 1..100,\n is_even(x),\n rem(x, 3) == 0, do: x)",
"Overwriting temp.exs\n"
],
[
"!elixir temp.exs",
"[6, 12, 18, 24, 30, 36, 42, 48, 54, 60, 66, 72, 78, 84, 90, 96]\n"
],
[
"# :into の使用\n# 他のものを生成したい場合\n# :into は Collectable プロトコルを実装している構造体を指定する\n\n# :into を用いて、キーワードリストからマップを作成する\n\n!elixir -e 'IO.inspect for {k, v} <- [one: 1, two: 2, three: 3], into: %{}, do: {k, v}'\n!elixir -e 'IO.inspect %{:one => 1, :three => 2, :two => 2}'\n!elixir -e 'IO.inspect %{\"one\" => 1, \"three\" => 2, \"two\" => 2}'\n\n# なるほど、と言うかわからなくて当然ですね。多分、Erlang の仕様を引き継いでこのようになっているのだろう\n# map では高速なプログラムができなくて、キーワードリストを作って、キーワードリストはリストでありマップなのだろう",
"%{one: 1, three: 3, two: 2}\n%{one: 1, three: 2, two: 2}\n%{\"one\" => 1, \"three\" => 2, \"two\" => 2}\n"
],
[
"# ビット文字列 bitstring は列挙可能 enumerable なので、:into を用いて文字列を作成することが出来る\n!elixir -e \"IO.inspect for c <- [72, 101, 108, 108, 111], into: \\\"\\\", do: <<c>>\"",
"\"Hello\"\n"
]
],
[
[
"# 文字列",
"_____no_output_____"
]
],
[
[
"# 文字列 string\n# elixir の文字列はバイトのシーケンスである\n!elixir -e 'string = <<104,101,108,108,111>>;IO.puts string'\n!elixir -e 'string = <<104,101,108,108,111>>;IO.inspect string'\n!elixir -e 'IO.inspect <<104,101,108,108,111>>'\n!echo\n# 文字列に 0 バイトを追加するとバイナリとして表示される\n!elixir -e 'IO.inspect <<104,101,108,108,111,0>>'",
"hello\n\"hello\"\n\"hello\"\n\n<<104, 101, 108, 108, 111, 0>>\n<<104, 101, 108, 108, 111, 0>>\n"
],
[
"# 質問 文字列をバイナリ表示するにはどうするか\n!elixir -e 'IO.inspect \"hello\"<> <<0>>'",
"_____no_output_____"
],
[
"# 実験 日本語\n!elixir -e 'IO.inspect \"あ\"<> <<0>>' #=> <<227, 129, 130, 0>>\n!elixir -e 'IO.inspect <<227, 129, 130>>' #=> \"あ\"",
"<<227, 129, 130, 0>>\n\"あ\"\n"
],
[
"# 文字リスト\n# elixir は文字列と別に文字リストという型を別に持っている\n# 文字列はダブルクオートで生成され、文字リストはシングルクオートで生成される\n# 文字リストは utf-8 で、文字列はバイナリである\n!elixir -e \"IO.inspect 'hello'\"\n!elixir -e \"IO.inspect 'hello' ++ [0]\"\n!elixir -e 'IO.inspect \"hello\"<> <<0>>'\n!echo\n!elixir -e \"IO.inspect 'hełło' ++ [0]\"\n!elixir -e 'IO.inspect \"hełło\"<> <<0>>'\n!echo\n!elixir -e \"IO.inspect 'あ' ++ [0]\"\n!elixir -e 'IO.inspect \"あ\"<> <<0>>'",
"'hello'\n[104, 101, 108, 108, 111, 0]\n<<104, 101, 108, 108, 111, 0>>\n\n[104, 101, 322, 322, 111, 0]\n<<104, 101, 197, 130, 197, 130, 111, 0>>\n\n[12354, 0]\n<<227, 129, 130, 0>>\n"
],
[
"# クエスチョンマークによるコードポイントの取得\n# コードポイントは unicode なので 1 バイト以上のバイトである\n!elixir -e 'IO.inspect ?Z'\n!elixir -e 'IO.inspect ?あ'\n!elixir -e 'IO.inspect \"áñèane\" <> <<0>>'\n!elixir -e \"IO.inspect 'áñèane' ++ [0]\"\n!elixir -e \"IO.inspect 'あいう' ++ [0]\"",
"90\n12354\n<<195, 161, 195, 177, 195, 168, 97, 110, 101, 0>>\n[225, 241, 232, 97, 110, 101, 0]\n[12354, 12356, 12358, 0]\n"
],
[
"# シンボルには ? 表記が使える\n# elixir でプログラムする時は通常文字リストは使わず文字列を使う\n# 文字リストが必要なのは erlang のため\n# String モジュールにコードポイントを取得する関数 graphemes/1 と codepoints/1 がある\n!elixir -e 'string = \"\\u0061\\u0301\"; IO.puts string' #=> á\n!elixir -e 'string = \"\\u0061\\u0301\"; IO.inspect String.codepoints string'\n!elixir -e 'string = \"\\u0061\\u0301\"; IO.inspect String.graphemes string'",
"á\n[\"a\", \"́\"]\n[\"á\"]\n"
],
[
"# 下記の実験から á と あ は違う\n# á は graphemes では 1 文字だが codepoints では 2 文字\n# あ はどちらでも 1 文字\n!elixir -e 'string = \"あいう\"; IO.puts string' \n!elixir -e 'string = \"あいう\"; IO.inspect String.codepoints string'\n!elixir -e 'string = \"あいう\"; IO.inspect String.graphemes string'",
"あいう\n[\"あ\", \"い\", \"う\"]\n[\"あ\", \"い\", \"う\"]\n"
],
[
"# 文字列関数\n# length/1\n!elixir -e 'IO.puts String.length \"hello\"'\n!elixir -e 'IO.puts String.length \"あいう\"'\n# replace/3\n!elixir -e 'IO.puts String.replace(\"Hello\", \"e\", \"a\")'\n# duplicate/2\n!elixir -e 'IO.puts String.duplicate(\"Oh my \", 3)'\n# split/2\n!elixir -e 'IO.inspect String.split(\"Oh my \", \" \")'\n# split/1 # こちらが words 相当か\n!elixir -e 'IO.inspect String.split(\"Oh my \")'",
"5\n3\nHallo\nOh my Oh my Oh my \n[\"Oh\", \"my\", \"\"]\n[\"Oh\", \"my\"]\n"
],
[
"# 問題 アナグラムチェック\n# A = super\n# B = perus\n# 文字列 A を並び替えれば B に出来るので A は B のアナグラム\n%%writefile temp.exs\ndefmodule Anagram do\n def anagrams?(a, b) when is_binary(a) and is_binary(b) do\n sort_string(a) == sort_string(b)\n end\n\n def sort_string(string) do\n string\n |> String.downcase()\n |> String.graphemes()\n |> Enum.sort()\n end\nend\n\ndefmodule Main do\n IO.puts Anagram.anagrams?(\"Hello\", \"ohell\")\n IO.puts Anagram.anagrams?(\"María\", \"íMara\")\n IO.puts Anagram.anagrams?(3, 5) #=> エラー\nend ",
"Overwriting temp.exs\n"
],
[
"!elixir temp.exs",
"_____no_output_____"
]
],
[
[
"# 日付と時間",
"_____no_output_____"
]
],
[
[
"# 日付と時間\n# 現在時刻の取得\n!elixir -e 'IO.puts Time.utc_now' \n# シギルで Time 構造体を作る\n!elixir -e 'IO.puts ~T[21:00:27.472988]'\n# hour, minute, second\n!elixir -e 't = ~T[21:00:27.472988];IO.puts t.hour'\n!elixir -e 't = ~T[21:00:27.472988];IO.puts t.minute'\n!elixir -e 't = ~T[21:00:27.472988];IO.puts t.second'",
"21:10:05.960965\n21:00:27.472988\n21\n0\n27\n\n2022-03-22\n2022-03-22\n2020-12-12\n"
],
[
"# Date\n!elixir -e 'IO.puts Date.utc_today' \n# シギルで Date 構造体を作る\n!elixir -e 'IO.puts ~D[2022-03-22]'\n#\n!elixir -e '{:ok, date} = Date.new(2020, 12,12); IO.puts date'\n!elixir -e '{:ok, date} = Date.new(2020, 12,12); IO.puts Date.day_of_week date'\n!elixir -e '{:ok, date} = Date.new(2020, 12,12); IO.puts Date.leap_year? date'\n!echo\n# NaiveDateTime Date と Time の両方を扱えるがタイムゾーンのサポートがない\n!elixir -e 'IO.puts NaiveDateTime.utc_now'\n!elixir -e 'IO.puts ~N[2022-03-22 21:14:23.371420]'\n!elixir -e 'IO.puts NaiveDateTime.add(~N[2022-03-22 21:14:23.371420],30)'\n!elixir -e 'IO.puts NaiveDateTime.add(~N[2022-03-22 21:14:23],30)'",
"2022-03-22\n2022-03-22\n2020-12-12\n6\ntrue\n\n2022-03-22 21:18:07.169008\n2022-03-22 21:14:23.371420\n2022-03-22 21:14:53.371420\n2022-03-22 21:14:53\n"
],
[
"# DateTime\n# DateTime は Date と Time の両方を扱えタイムゾーンのサポートがある\n# しかし!!!! Elixir がデフォルトではタイムゾーンデータベースがない\n# デフォルトでは Calendar.get_time_zone_database/0 によって返されるタイムゾーンデータベースを使う\n# デフォルトでは Calendar.UTCOnlyTimeZoneDatabase で、Etc/UTC のみを処理し\n# 他のタイムゾーンでは {:error, :utc_only_time_zone_database} を返す\n# タイムゾーンを提供することにより NaiveDateTime から DateTimeのインスタンスを作ることができる\n!elixir -e 'IO.inspect DateTime.from_naive(~N[2016-05-24 13:26:08.003], \"Etc/UTC\")'",
"{:ok, ~U[2016-05-24 13:26:08.003Z]}\n"
],
[
"# タイムゾーンの利用\n# elixir でタイムゾーンを利用するには tzdata パッケージをインストールし\n# Tzdata タイムゾーンデータベースとして使用する\n# パリのタイムゾーンで時間を作成してそれをニューヨーク時間に変換してみる\n# パリとニューヨークの時差は 6 時間である\n# %%writefile temp.exs\n# config :elixir, :time_zone_database, Tzdata.TimeZoneDatabase\n# paris_datetime = DateTime.from_naive!(~N[2019-01-01 12:00:00], \"Europe/Paris\")\n# {:ok, ny_datetime} = DateTime.shift_zone(paris_datetime, \"America/New_York\")\n# IO.inspect paris_datetime\n# IO.inspect ny_datetime",
"Overwriting temp.exs\n"
]
],
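 [
  [
   "# Added example (editor's sketch, not executed): with the default Calendar.UTCOnlyTimeZoneDatabase any zone\n# other than Etc/UTC is rejected, which is why the tzdata package is needed for the code above\n!elixir -e 'IO.inspect DateTime.from_naive(~N[2019-01-01 12:00:00], \"Europe/Paris\")'\n# expected: {:error, :utc_only_time_zone_database}",
   "_____no_output_____"
  ]
 ],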
[
[
"# カスタムMixタスク 省略",
"_____no_output_____"
],
[
"# いまここ",
"_____no_output_____"
],
[
"# IEx Helpers 省略",
"_____no_output_____"
]
],
[
[
"",
"_____no_output_____"
]
]
] | [
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] | [
[
"markdown",
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown",
"markdown",
"markdown",
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code"
],
[
"markdown",
"markdown",
"markdown"
],
[
"code"
]
] |
d09adcdc316f1b20363a07ff55324d22c1727b7c | 2,621 | ipynb | Jupyter Notebook | Week 6/jupyter notebook/Section/Section_1.ipynb | Opal1031/Programming-Basics- | 9356191a40725ff0202f13afe34d814736815991 | [
"MIT"
] | 1 | 2022-03-07T11:34:56.000Z | 2022-03-07T11:34:56.000Z | Week 6/jupyter notebook/Section/Section_1.ipynb | Opal1031/Programming-Basics- | 9356191a40725ff0202f13afe34d814736815991 | [
"MIT"
] | null | null | null | Week 6/jupyter notebook/Section/Section_1.ipynb | Opal1031/Programming-Basics- | 9356191a40725ff0202f13afe34d814736815991 | [
"MIT"
] | null | null | null | 20.801587 | 77 | 0.416635 | [
[
[
"num = int(input(\"숫자를 하나 입력하세요 : \"))\n\nwhile num >= 0:\n print(\"입력된 숫자는\", num, \"입니다.\")\n num -= 1\n \nprint(\"끝\")",
"입력된 숫자는 5 입니다.\n입력된 숫자는 4 입니다.\n입력된 숫자는 3 입니다.\n입력된 숫자는 2 입니다.\n입력된 숫자는 1 입니다.\n입력된 숫자는 0 입니다.\n끝\n"
],
[
"i = 1\n\nwhile i < 11:\n print(\"나무를 %2d번 찍었습니다.\" %i, \"쾅, \"*(i-1)+\"쾅\")\n i += 1\n\nprint(\"나무가 넘어갔습니다.\")",
"나무를 1번 찍었습니다. 쾅\n나무를 2번 찍었습니다. 쾅, 쾅\n나무를 3번 찍었습니다. 쾅, 쾅, 쾅\n나무를 4번 찍었습니다. 쾅, 쾅, 쾅, 쾅\n나무를 5번 찍었습니다. 쾅, 쾅, 쾅, 쾅, 쾅\n나무를 6번 찍었습니다. 쾅, 쾅, 쾅, 쾅, 쾅, 쾅\n나무를 7번 찍었습니다. 쾅, 쾅, 쾅, 쾅, 쾅, 쾅, 쾅\n나무를 8번 찍었습니다. 쾅, 쾅, 쾅, 쾅, 쾅, 쾅, 쾅, 쾅\n나무를 9번 찍었습니다. 쾅, 쾅, 쾅, 쾅, 쾅, 쾅, 쾅, 쾅, 쾅\n나무를 10번 찍었습니다. 쾅, 쾅, 쾅, 쾅, 쾅, 쾅, 쾅, 쾅, 쾅, 쾅\n나무가 넘어갔습니다.\n"
],
[
"size = int(input(\"나무 크기 : \"))\nleaf = input(\"잎 모양 : \")\nstem = input(\"줄기 모양 : \")\n\ni = 1\n\nwhile i <= size:\n print(\" \" * (size-i) + leaf * i)\n i += 1\n\nprint(\" \" * (size - 1) + stem)\nprint(\" \" * (size - 1) + stem)",
" aa\n aaaa\n aaaaaa\n aaaaaaaa\naaaaaaaaaa\n ㅁ\n ㅁ\n"
]
]
] | [
"code"
] | [
[
"code",
"code",
"code"
]
] |
d09ae574d939557a3c1cfc7412eb55de3f854b69 | 11,773 | ipynb | Jupyter Notebook | docs/tutorials/neurotorch-tutorial.ipynb | jgornet/NeuroTorch | 86e32934821f3b7fea9155b83493a2dffcf45442 | [
"BSD-3-Clause"
] | 2 | 2019-04-01T18:45:44.000Z | 2020-09-03T20:25:44.000Z | docs/tutorials/neurotorch-tutorial.ipynb | jgornet/NeuroTorch | 86e32934821f3b7fea9155b83493a2dffcf45442 | [
"BSD-3-Clause"
] | null | null | null | docs/tutorials/neurotorch-tutorial.ipynb | jgornet/NeuroTorch | 86e32934821f3b7fea9155b83493a2dffcf45442 | [
"BSD-3-Clause"
] | 3 | 2018-10-31T20:02:35.000Z | 2019-02-19T20:45:34.000Z | 36.905956 | 1,466 | 0.602905 | [
[
[
"# Import modules\nfrom __future__ import print_function\nimport numpy as np\nimport matplotlib.pyplot as plt\n\n# Plot configurations\n%matplotlib inline\n\n# Notebook auto reloads code.\n%load_ext autoreload\n%autoreload 2",
"_____no_output_____"
]
],
[
[
"# NeuroTorch Tutorial\n\n**NeuroTorch** is a framework for reconstructing neuronal morphology from\noptical microscopy images. It interfaces PyTorch with different\nautomated neuron tracing algorithms for fast, accurate, scalable\nneuronal reconstructions. It uses deep learning to generate an initial\nsegmentation of neurons in optical microscopy images. This\nsegmentation is then traced using various automated neuron tracing\nalgorithms to convert the segmentation into an SWC file—the most\ncommon neuronal morphology file format. NeuroTorch is designed with\nscalability in mind and can handle teravoxel-sized images.\n\nThis IPython notebook will outline a brief tutorial for using NeuroTorch\nto train and predict on image volume datasets.",
"_____no_output_____"
],
[
"## Creating image datasets\n\nOne of NeuroTorch’s key features is its dynamic approach to volumetric datasets, which allows it to handle teravoxel-sized images without worrying about memory concerns and efficiency. Everything is loaded just-in-time based on when it is needed or expected to be needed. To load an image dataset, we need\nto specify the voxel coordinates of each image file as shown in files `inputs_spec.json` and `labels_spec.json`.\n\n### `inputs_spec.json`\n\n```json\n[\n {\n\t\"filename\" : \"inputs.tif\",\n\t\"bounding_box\" : [[0, 0, 0], [1024, 512, 50]]\n },\n {\n\t\"filename\" : \"inputs.tif\",\n\t\"bounding_box\" : [[0, 0, 50], [1024, 512, 100]]\n }\n]\n\n```\n\n### `labels_spec.json`\n\n```json\n[\n {\n\t\"filename\" : \"labels.tif\",\n\t\"bounding_box\" : [[0, 0, 0], [1024, 512, 50]]\n },\n {\n\t\"filename\" : \"labels.tif\",\n\t\"bounding_box\" : [[0, 0, 50], [1024, 512, 100]]\n }\n]\n```",
"_____no_output_____"
],
[
"## Loading image datasets\n\nNow that the image datasets for the inputs and labels have been specified,\nthese datasets can be loaded with NeuroTorch.",
"_____no_output_____"
]
],
[
[
"from neurotorch.datasets.specification import JsonSpec\nimport os\n\nIMAGE_PATH = '../../tests/images/'\n\njson_spec = JsonSpec() # Initialize the JSON specification\n\n# Create a dataset containing the inputs\ninputs = json_spec.open(os.path.join(IMAGE_PATH,\n \"inputs_spec.json\")) \n\n# Create a dataset containing the labels\nlabels = json_spec.open(os.path.join(IMAGE_PATH,\n \"labels_spec.json\"))\n",
"_____no_output_____"
]
],
[
[
"## Augmenting datasets\n\nWith the image datasets, it is possible to augment data on-the-fly. To implement an augmentation–such as branch occlusion—instantiate an aligned volume and specify the augmentation with the aligned volume.",
"_____no_output_____"
]
],
[
[
"from neurotorch.datasets.dataset import AlignedVolume\nfrom neurotorch.augmentations.occlusion import Occlusion\nfrom neurotorch.augmentations.blur import Blur\nfrom neurotorch.augmentations.brightness import Brightness\nfrom neurotorch.augmentations.dropped import Drop\nfrom neurotorch.augmentations.duplicate import Duplicate\nfrom neurotorch.augmentations.stitch import Stitch\nfrom neurotorch.augmentations.occlusion import Occlusion\n\nvolume = AlignedVolume([inputs, labels])\n\naugmented_volume = Occlusion(volume, frequency=0.5)\naugmented_volume = Stitch(augmented_volume, frequency=0.5)\naugmented_volume = Drop(volume, frequency=0.5)\naugmented_volume = Blur(augmented_volume, frequency=0.5)\naugmented_volume = Duplicate(augmented_volume, frequency=0.5)\n",
"_____no_output_____"
]
],
[
[
"## Training with the image datasets\n\nTo train a neural network using these image datasets, load the \nneural network architecture and initialize a `Trainer`. To save\ntraining checkpoints, add a `CheckpointWriter` to the `Trainer` object.\nLastly, call the `Trainer` object to run training.",
"_____no_output_____"
]
],
[
[
"from neurotorch.core.trainer import Trainer\nfrom neurotorch.nets.RSUNet import RSUNet\nfrom neurotorch.training.checkpoint import CheckpointWriter\nfrom neurotorch.training.logging import ImageWriter, LossWriter\n\nnet = RSUNet() # Initialize the U-Net architecture\n\n# Setup the trainer\ntrainer = Trainer(net, augmented_volume, max_epochs=10,\n gpu_device=0)\n\n# Setup the trainer the add a checkpoint every 500 epochs\ntrainer = LossWriter(trainer, \".\", \"tutorial_tensorboard\")\ntrainer = ImageWriter(trainer, \".\", \"tutorial_tensorboard\")\ntrainer = CheckpointWriter(trainer, checkpoint_dir='.',\n checkpoint_period=50)\n\ntrainer.run_training()\n",
"_____no_output_____"
]
],
[
[
"## Predicting using NeuroTorch\n\nOnce training has completed, we can use the training checkpoints\nto predict on image datasets. We first have to \nload the neural network architecture and image volume.\nWe then have to initialize a `Predictor` object and an output volume.\nOnce these have been specified, we can begin prediction.",
"_____no_output_____"
]
],
[
[
"from neurotorch.nets.RSUNet import RSUNet\nfrom neurotorch.core.predictor import Predictor\nfrom neurotorch.datasets.filetypes import TiffVolume\nfrom neurotorch.datasets.dataset import Array\nfrom neurotorch.datasets.datatypes import (BoundingBox, Vector)\nimport numpy as np\nimport tifffile as tif\nimport os\n\nIMAGE_PATH = '../../tests/images/'\n\nnet = RSUNet() # Initialize the U-Net architecture\n\ncheckpoint = './iteration_1000.ckpt' # Specify the checkpoint path\n\nwith TiffVolume(os.path.join(IMAGE_PATH,\n \"inputs.tif\"),\n BoundingBox(Vector(0, 0, 0),\n Vector(1024, 512, 50))) as inputs:\n predictor = Predictor(net, checkpoint, gpu_device=0)\n\n output_volume = Array(np.zeros(inputs.getBoundingBox()\n .getNumpyDim(), dtype=np.float32))\n\n predictor.run(inputs, output_volume, batch_size=5)\n\n tif.imsave(\"test_prediction.tif\",\n output_volume.getArray().astype(np.float32))\n\n",
"_____no_output_____"
]
],
[
[
"## Displaying the prediction\n\nPredictions are output in logits form. To map this to a\nprobability distribution, we need to apply a sigmoid function\nto the prediction. We can then evaluate the prediction and \nground-truth.",
"_____no_output_____"
]
],
[
[
"# Apply sigmoid function\nprobability_map = 1/(1+np.exp(-output_volume.getArray()))\n\n# Plot prediction and ground-truth\nplt.subplot(2, 1, 1)\nplt.title('Prediction')\nplt.imshow(output_volume.getArray()[25])\nplt.axis('off')\n\nplt.subplot(2, 1, 2)\nplt.title('Ground-Truth')\nplt.imshow(labels.get(\n BoundingBox(Vector(0, 0, 0),\n Vector(1024, 512, 50))).getArray()[25],\n cmap='gray'\n )\nplt.axis('off')\n\nplt.show()",
"_____no_output_____"
]
]
] | [
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] | [
[
"code"
],
[
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
]
] |
d09aeaa464e4b5025b6f3adc6e9e0961cecb97c4 | 120,115 | ipynb | Jupyter Notebook | examples/jupyter/mgxs-part-i.ipynb | cchaugen/temp_openmc_for_Sterling | 9f346c9c7ab3128fa40548e936290b97610a2235 | [
"MIT"
] | null | null | null | examples/jupyter/mgxs-part-i.ipynb | cchaugen/temp_openmc_for_Sterling | 9f346c9c7ab3128fa40548e936290b97610a2235 | [
"MIT"
] | 1 | 2021-09-14T20:44:02.000Z | 2021-09-14T20:44:02.000Z | examples/jupyter/mgxs-part-i.ipynb | cchaugen/temp_openmc_for_Sterling | 9f346c9c7ab3128fa40548e936290b97610a2235 | [
"MIT"
] | null | null | null | 102.051827 | 74,690 | 0.790809 | [
[
[
"This IPython Notebook introduces the use of the `openmc.mgxs` module to calculate multi-group cross sections for an infinite homogeneous medium. In particular, this Notebook introduces the the following features:\n\n* **General equations** for scalar-flux averaged multi-group cross sections\n* Creation of multi-group cross sections for an **infinite homogeneous medium**\n* Use of **tally arithmetic** to manipulate multi-group cross sections",
"_____no_output_____"
],
[
"## Introduction to Multi-Group Cross Sections (MGXS)",
"_____no_output_____"
],
[
"Many Monte Carlo particle transport codes, including OpenMC, use continuous-energy nuclear cross section data. However, most deterministic neutron transport codes use *multi-group cross sections* defined over discretized energy bins or *energy groups*. An example of U-235's continuous-energy fission cross section along with a 16-group cross section computed for a light water reactor spectrum is displayed below.",
"_____no_output_____"
]
],
[
[
"from IPython.display import Image\nImage(filename='images/mgxs.png', width=350)",
"_____no_output_____"
]
],
[
[
"A variety of tools employing different methodologies have been developed over the years to compute multi-group cross sections for certain applications, including NJOY (LANL), MC$^2$-3 (ANL), and Serpent (VTT). The `openmc.mgxs` Python module is designed to leverage OpenMC's tally system to calculate multi-group cross sections with arbitrary energy discretizations for fine-mesh heterogeneous deterministic neutron transport applications.\n\nBefore proceeding to illustrate how one may use the `openmc.mgxs` module, it is worthwhile to define the general equations used to calculate multi-group cross sections. This is only intended as a brief overview of the methodology used by `openmc.mgxs` - we refer the interested reader to the large body of literature on the subject for a more comprehensive understanding of this complex topic.",
"_____no_output_____"
],
[
"### Introductory Notation\nThe continuous real-valued microscopic cross section may be denoted $\\sigma_{n,x}(\\mathbf{r}, E)$ for position vector $\\mathbf{r}$, energy $E$, nuclide $n$ and interaction type $x$. Similarly, the scalar neutron flux may be denoted by $\\Phi(\\mathbf{r},E)$ for position $\\mathbf{r}$ and energy $E$. **Note**: Although nuclear cross sections are dependent on the temperature $T$ of the interacting medium, the temperature variable is neglected here for brevity.",
"_____no_output_____"
],
[
"### Spatial and Energy Discretization\nThe energy domain for critical systems such as thermal reactors spans more than 10 orders of magnitude of neutron energies from 10$^{-5}$ - 10$^7$ eV. The multi-group approximation discretization divides this energy range into one or more energy groups. In particular, for $G$ total groups, we denote an energy group index $g$ such that $g \\in \\{1, 2, ..., G\\}$. The energy group indices are defined such that the smaller group the higher the energy, and vice versa. The integration over neutron energies across a discrete energy group is commonly referred to as **energy condensation**.\n\nMulti-group cross sections are computed for discretized spatial zones in the geometry of interest. The spatial zones may be defined on a structured and regular fuel assembly or pin cell mesh, an arbitrary unstructured mesh or the constructive solid geometry used by OpenMC. For a geometry with $K$ distinct spatial zones, we designate each spatial zone an index $k$ such that $k \\in \\{1, 2, ..., K\\}$. The volume of each spatial zone is denoted by $V_{k}$. The integration over discrete spatial zones is commonly referred to as **spatial homogenization**.",
"_____no_output_____"
],
[
"### General Scalar-Flux Weighted MGXS\nThe multi-group cross sections computed by `openmc.mgxs` are defined as a *scalar flux-weighted average* of the microscopic cross sections across each discrete energy group. This formulation is employed in order to preserve the reaction rates within each energy group and spatial zone. In particular, spatial homogenization and energy condensation are used to compute the general multi-group cross section $\\sigma_{n,x,k,g}$ as follows:\n\n$$\\sigma_{n,x,k,g} = \\frac{\\int_{E_{g}}^{E_{g-1}}\\mathrm{d}E'\\int_{\\mathbf{r} \\in V_{k}}\\mathrm{d}\\mathbf{r}\\sigma_{n,x}(\\mathbf{r},E')\\Phi(\\mathbf{r},E')}{\\int_{E_{g}}^{E_{g-1}}\\mathrm{d}E'\\int_{\\mathbf{r} \\in V_{k}}\\mathrm{d}\\mathbf{r}\\Phi(\\mathbf{r},E')}$$\n\nThis scalar flux-weighted average microscopic cross section is computed by `openmc.mgxs` for most multi-group cross sections, including total, absorption, and fission reaction types. These double integrals are stochastically computed with OpenMC's tally system - in particular, [filters](http://openmc.readthedocs.io/en/latest/usersguide/tallies.html#filters) on the energy range and spatial zone (material, cell or universe) define the bounds of integration for both numerator and denominator.",
"_____no_output_____"
],
[
"### Multi-Group Scattering Matrices\nThe general multi-group cross section $\\sigma_{n,x,k,g}$ is a vector of $G$ values for each energy group $g$. The equation presented above only discretizes the energy of the incoming neutron and neglects the outgoing energy of the neutron (if any). Hence, this formulation must be extended to account for the outgoing energy of neutrons in the discretized scattering matrix cross section used by deterministic neutron transport codes. \n\nWe denote the incoming and outgoing neutron energy groups as $g$ and $g'$ for the microscopic scattering matrix cross section $\\sigma_{n,s}(\\mathbf{r},E)$. As before, spatial homogenization and energy condensation are used to find the multi-group scattering matrix cross section $\\sigma_{n,s,k,g \\to g'}$ as follows:\n\n$$\\sigma_{n,s,k,g\\rightarrow g'} = \\frac{\\int_{E_{g'}}^{E_{g'-1}}\\mathrm{d}E''\\int_{E_{g}}^{E_{g-1}}\\mathrm{d}E'\\int_{\\mathbf{r} \\in V_{k}}\\mathrm{d}\\mathbf{r}\\sigma_{n,s}(\\mathbf{r},E'\\rightarrow E'')\\Phi(\\mathbf{r},E')}{\\int_{E_{g}}^{E_{g-1}}\\mathrm{d}E'\\int_{\\mathbf{r} \\in V_{k}}\\mathrm{d}\\mathbf{r}\\Phi(\\mathbf{r},E')}$$\n\nThis scalar flux-weighted multi-group microscopic scattering matrix is computed using OpenMC tallies with both energy in and energy out filters.",
"_____no_output_____"
],
[
"### Multi-Group Fission Spectrum\nThe energy spectrum of neutrons emitted from fission is denoted by $\\chi_{n}(\\mathbf{r},E' \\rightarrow E'')$ for incoming and outgoing energies $E'$ and $E''$, respectively. Unlike the multi-group cross sections $\\sigma_{n,x,k,g}$ considered up to this point, the fission spectrum is a probability distribution and must sum to unity. The outgoing energy is typically much less dependent on the incoming energy for fission than for scattering interactions. As a result, it is common practice to integrate over the incoming neutron energy when computing the multi-group fission spectrum. The fission spectrum may be simplified as $\\chi_{n}(\\mathbf{r},E)$ with outgoing energy $E$.\n\nUnlike the multi-group cross sections defined up to this point, the multi-group fission spectrum is weighted by the fission production rate rather than the scalar flux. This formulation is intended to preserve the total fission production rate in the multi-group deterministic calculation. In order to mathematically define the multi-group fission spectrum, we denote the microscopic fission cross section as $\\sigma_{n,f}(\\mathbf{r},E)$ and the average number of neutrons emitted from fission interactions with nuclide $n$ as $\\nu_{n}(\\mathbf{r},E)$. The multi-group fission spectrum $\\chi_{n,k,g}$ is then the probability of fission neutrons emitted into energy group $g$. \n\nSimilar to before, spatial homogenization and energy condensation are used to find the multi-group fission spectrum $\\chi_{n,k,g}$ as follows:\n\n$$\\chi_{n,k,g'} = \\frac{\\int_{E_{g'}}^{E_{g'-1}}\\mathrm{d}E''\\int_{0}^{\\infty}\\mathrm{d}E'\\int_{\\mathbf{r} \\in V_{k}}\\mathrm{d}\\mathbf{r}\\chi_{n}(\\mathbf{r},E'\\rightarrow E'')\\nu_{n}(\\mathbf{r},E')\\sigma_{n,f}(\\mathbf{r},E')\\Phi(\\mathbf{r},E')}{\\int_{0}^{\\infty}\\mathrm{d}E'\\int_{\\mathbf{r} \\in V_{k}}\\mathrm{d}\\mathbf{r}\\nu_{n}(\\mathbf{r},E')\\sigma_{n,f}(\\mathbf{r},E')\\Phi(\\mathbf{r},E')}$$\n\nThe fission production-weighted multi-group fission spectrum is computed using OpenMC tallies with both energy in and energy out filters.\n\nThis concludes our brief overview on the methodology to compute multi-group cross sections. The following sections detail more concretely how users may employ the `openmc.mgxs` module to power simulation workflows requiring multi-group cross sections for downstream deterministic calculations.",
"_____no_output_____"
],
[
"## Generate Input Files",
"_____no_output_____"
]
],
[
[
"%matplotlib inline\nimport numpy as np\nimport matplotlib.pyplot as plt\n\nimport openmc\nimport openmc.mgxs as mgxs",
"_____no_output_____"
]
],
[
[
"We being by creating a material for the homogeneous medium.",
"_____no_output_____"
]
],
[
[
"# Instantiate a Material and register the Nuclides\ninf_medium = openmc.Material(name='moderator')\ninf_medium.set_density('g/cc', 5.)\ninf_medium.add_nuclide('H1', 0.028999667)\ninf_medium.add_nuclide('O16', 0.01450188)\ninf_medium.add_nuclide('U235', 0.000114142)\ninf_medium.add_nuclide('U238', 0.006886019)\ninf_medium.add_nuclide('Zr90', 0.002116053)",
"_____no_output_____"
]
],
[
[
"With our material, we can now create a `Materials` object that can be exported to an actual XML file.",
"_____no_output_____"
]
],
[
[
"# Instantiate a Materials collection and export to XML\nmaterials_file = openmc.Materials([inf_medium])\nmaterials_file.export_to_xml()",
"_____no_output_____"
]
],
[
[
"Now let's move on to the geometry. This problem will be a simple square cell with reflective boundary conditions to simulate an infinite homogeneous medium. The first step is to create the outer bounding surfaces of the problem.",
"_____no_output_____"
]
],
[
[
"# Instantiate boundary Planes\nmin_x = openmc.XPlane(boundary_type='reflective', x0=-0.63)\nmax_x = openmc.XPlane(boundary_type='reflective', x0=0.63)\nmin_y = openmc.YPlane(boundary_type='reflective', y0=-0.63)\nmax_y = openmc.YPlane(boundary_type='reflective', y0=0.63)",
"_____no_output_____"
]
],
[
[
"With the surfaces defined, we can now create a cell that is defined by intersections of half-spaces created by the surfaces.",
"_____no_output_____"
]
],
[
[
"# Instantiate a Cell\ncell = openmc.Cell(cell_id=1, name='cell')\n\n# Register bounding Surfaces with the Cell\ncell.region = +min_x & -max_x & +min_y & -max_y\n\n# Fill the Cell with the Material\ncell.fill = inf_medium",
"_____no_output_____"
]
],
[
[
"OpenMC requires that there is a \"root\" universe. Let us create a root universe and add our square cell to it.",
"_____no_output_____"
]
],
[
[
"# Create root universe\nroot_universe = openmc.Universe(name='root universe', cells=[cell])",
"_____no_output_____"
]
],
[
[
"We now must create a geometry that is assigned a root universe and export it to XML.",
"_____no_output_____"
]
],
[
[
"# Create Geometry and set root Universe\nopenmc_geometry = openmc.Geometry(root_universe)\n\n# Export to \"geometry.xml\"\nopenmc_geometry.export_to_xml()",
"_____no_output_____"
]
],
[
[
"Next, we must define simulation parameters. In this case, we will use 10 inactive batches and 40 active batches each with 2500 particles.",
"_____no_output_____"
]
],
[
[
"# OpenMC simulation parameters\nbatches = 50\ninactive = 10\nparticles = 2500\n\n# Instantiate a Settings object\nsettings_file = openmc.Settings()\nsettings_file.batches = batches\nsettings_file.inactive = inactive\nsettings_file.particles = particles\nsettings_file.output = {'tallies': True}\n\n# Create an initial uniform spatial source distribution over fissionable zones\nbounds = [-0.63, -0.63, -0.63, 0.63, 0.63, 0.63]\nuniform_dist = openmc.stats.Box(bounds[:3], bounds[3:], only_fissionable=True)\nsettings_file.source = openmc.source.Source(space=uniform_dist)\n\n# Export to \"settings.xml\"\nsettings_file.export_to_xml()",
"_____no_output_____"
]
],
[
[
"Now we are ready to generate multi-group cross sections! First, let's define a 2-group structure using the built-in `EnergyGroups` class.",
"_____no_output_____"
]
],
[
[
"# Instantiate a 2-group EnergyGroups object\ngroups = mgxs.EnergyGroups()\ngroups.group_edges = np.array([0., 0.625, 20.0e6])",
"_____no_output_____"
]
],
[
[
"We can now use the `EnergyGroups` object, along with our previously created materials and geometry, to instantiate some `MGXS` objects from the `openmc.mgxs` module. In particular, the following are subclasses of the generic and abstract `MGXS` class:\n\n* `TotalXS`\n* `TransportXS`\n* `AbsorptionXS`\n* `CaptureXS`\n* `FissionXS`\n* `KappaFissionXS`\n* `ScatterXS`\n* `ScatterMatrixXS`\n* `Chi`\n* `ChiPrompt`\n* `InverseVelocity`\n* `PromptNuFissionXS`\n\nOf course, we are aware that the fission cross section (`FissionXS`) can sometimes be paired with the fission neutron multiplication to become $\\nu\\sigma_f$. This can be accomodated in to the `FissionXS` class by setting the `nu` parameter to `True` as shown below.\n\nAdditionally, scattering reactions (like (n,2n)) can also be defined to take in to account the neutron multiplication to become $\\nu\\sigma_s$. This can be accomodated in the the transport (`TransportXS`), scattering (`ScatterXS`), and scattering-matrix (`ScatterMatrixXS`) cross sections types by setting the `nu` parameter to `True` as shown below.\n\nThese classes provide us with an interface to generate the tally inputs as well as perform post-processing of OpenMC's tally data to compute the respective multi-group cross sections. In this case, let's create the multi-group total, absorption and scattering cross sections with our 2-group structure.",
"_____no_output_____"
]
],
[
[
"# Instantiate a few different sections\ntotal = mgxs.TotalXS(domain=cell, groups=groups)\nabsorption = mgxs.AbsorptionXS(domain=cell, groups=groups)\nscattering = mgxs.ScatterXS(domain=cell, groups=groups)\n\n# Note that if we wanted to incorporate neutron multiplication in the\n# scattering cross section we would write the previous line as:\n# scattering = mgxs.ScatterXS(domain=cell, groups=groups, nu=True)",
"_____no_output_____"
]
],
[
[
"Each multi-group cross section object stores its tallies in a Python dictionary called `tallies`. We can inspect the tallies in the dictionary for our `Absorption` object as follows. ",
"_____no_output_____"
]
],
[
[
"absorption.tallies",
"_____no_output_____"
]
],
[
[
"The `Absorption` object includes tracklength tallies for the 'absorption' and 'flux' scores in the 2-group structure in cell 1. Now that each `MGXS` object contains the tallies that it needs, we must add these tallies to a `Tallies` object to generate the \"tallies.xml\" input file for OpenMC.",
"_____no_output_____"
]
],
[
[
"# Instantiate an empty Tallies object\ntallies_file = openmc.Tallies()\n\n# Add total tallies to the tallies file\ntallies_file += total.tallies.values()\n\n# Add absorption tallies to the tallies file\ntallies_file += absorption.tallies.values()\n\n# Add scattering tallies to the tallies file\ntallies_file += scattering.tallies.values()\n\n# Export to \"tallies.xml\"\ntallies_file.export_to_xml()",
"/home/romano/openmc/openmc/mixin.py:61: IDWarning: Another CellFilter instance already exists with id=3.\n warn(msg, IDWarning)\n/home/romano/openmc/openmc/mixin.py:61: IDWarning: Another EnergyFilter instance already exists with id=4.\n warn(msg, IDWarning)\n"
]
],
[
[
"Now we a have a complete set of inputs, so we can go ahead and run our simulation.",
"_____no_output_____"
]
],
[
[
"# Run OpenMC\nopenmc.run()",
"\n %%%%%%%%%%%%%%%\n %%%%%%%%%%%%%%%%%%%%%%%%\n %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%\n %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%\n %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%\n %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%\n %%%%%%%%%%%%%%%%%%%%%%%%\n %%%%%%%%%%%%%%%%%%%%%%%%\n ############### %%%%%%%%%%%%%%%%%%%%%%%%\n ################## %%%%%%%%%%%%%%%%%%%%%%%\n ################### %%%%%%%%%%%%%%%%%%%%%%%\n #################### %%%%%%%%%%%%%%%%%%%%%%\n ##################### %%%%%%%%%%%%%%%%%%%%%\n ###################### %%%%%%%%%%%%%%%%%%%%\n ####################### %%%%%%%%%%%%%%%%%%\n ####################### %%%%%%%%%%%%%%%%%\n ###################### %%%%%%%%%%%%%%%%%\n #################### %%%%%%%%%%%%%%%%%\n ################# %%%%%%%%%%%%%%%%%\n ############### %%%%%%%%%%%%%%%%\n ############ %%%%%%%%%%%%%%%\n ######## %%%%%%%%%%%%%%\n %%%%%%%%%%%\n\n | The OpenMC Monte Carlo Code\n Copyright | 2011-2017 Massachusetts Institute of Technology\n License | http://openmc.readthedocs.io/en/latest/license.html\n Version | 0.9.0\n Git SHA1 | 9b7cebf7bc34d60e0f1750c3d6cb103df11e8dc4\n Date/Time | 2017-12-04 20:56:46\n OpenMP Threads | 4\n\n Reading settings XML file...\n Reading cross sections XML file...\n Reading materials XML file...\n Reading geometry XML file...\n Building neighboring cells lists for each surface...\n Reading H1 from /home/romano/openmc/scripts/nndc_hdf5/H1.h5\n Reading O16 from /home/romano/openmc/scripts/nndc_hdf5/O16.h5\n Reading U235 from /home/romano/openmc/scripts/nndc_hdf5/U235.h5\n Reading U238 from /home/romano/openmc/scripts/nndc_hdf5/U238.h5\n Reading Zr90 from /home/romano/openmc/scripts/nndc_hdf5/Zr90.h5\n Maximum neutron transport energy: 2.00000E+07 eV for H1\n Reading tallies XML file...\n Writing summary.h5 file...\n Initializing source particles...\n\n ====================> K EIGENVALUE SIMULATION <====================\n\n Bat./Gen. 
k Average k \n ========= ======== ==================== \n 1/1 1.11184 \n 2/1 1.15820 \n 3/1 1.18468 \n 4/1 1.17492 \n 5/1 1.19645 \n 6/1 1.18436 \n 7/1 1.14070 \n 8/1 1.15150 \n 9/1 1.19202 \n 10/1 1.17677 \n 11/1 1.20272 \n 12/1 1.21366 1.20819 +/- 0.00547\n 13/1 1.15906 1.19181 +/- 0.01668\n 14/1 1.14687 1.18058 +/- 0.01629\n 15/1 1.14570 1.17360 +/- 0.01442\n 16/1 1.13480 1.16713 +/- 0.01343\n 17/1 1.17680 1.16852 +/- 0.01144\n 18/1 1.16866 1.16853 +/- 0.00990\n 19/1 1.19253 1.17120 +/- 0.00913\n 20/1 1.18124 1.17220 +/- 0.00823\n 21/1 1.19206 1.17401 +/- 0.00766\n 22/1 1.17681 1.17424 +/- 0.00700\n 23/1 1.17634 1.17440 +/- 0.00644\n 24/1 1.13659 1.17170 +/- 0.00654\n 25/1 1.17144 1.17169 +/- 0.00609\n 26/1 1.20649 1.17386 +/- 0.00610\n 27/1 1.11238 1.17024 +/- 0.00678\n 28/1 1.18911 1.17129 +/- 0.00647\n 29/1 1.14681 1.17000 +/- 0.00626\n 30/1 1.12152 1.16758 +/- 0.00641\n 31/1 1.12729 1.16566 +/- 0.00639\n 32/1 1.15399 1.16513 +/- 0.00612\n 33/1 1.13547 1.16384 +/- 0.00599\n 34/1 1.17723 1.16440 +/- 0.00576\n 35/1 1.09296 1.16154 +/- 0.00622\n 36/1 1.19621 1.16287 +/- 0.00612\n 37/1 1.12560 1.16149 +/- 0.00605\n 38/1 1.17872 1.16211 +/- 0.00586\n 39/1 1.17721 1.16263 +/- 0.00568\n 40/1 1.13724 1.16178 +/- 0.00555\n 41/1 1.18526 1.16254 +/- 0.00542\n 42/1 1.13779 1.16177 +/- 0.00531\n 43/1 1.15066 1.16143 +/- 0.00516\n 44/1 1.12174 1.16026 +/- 0.00514\n 45/1 1.17478 1.16068 +/- 0.00501\n 46/1 1.14146 1.16014 +/- 0.00489\n 47/1 1.20464 1.16135 +/- 0.00491\n 48/1 1.15119 1.16108 +/- 0.00479\n 49/1 1.17938 1.16155 +/- 0.00468\n 50/1 1.15798 1.16146 +/- 0.00457\n Creating state point statepoint.50.h5...\n\n =======================> TIMING STATISTICS <=======================\n\n Total time for initialization = 4.0504E-01 seconds\n Reading cross sections = 3.6457E-01 seconds\n Total time in simulation = 6.3478E+00 seconds\n Time in transport only = 6.0079E+00 seconds\n Time in inactive batches = 8.1713E-01 seconds\n Time in active batches = 5.5307E+00 seconds\n Time synchronizing fission bank = 5.4640E-03 seconds\n Sampling source sites = 4.0981E-03 seconds\n SEND/RECV source sites = 1.2606E-03 seconds\n Time accumulating tallies = 1.2030E-04 seconds\n Total time for finalization = 9.6554E-04 seconds\n Total time elapsed = 6.7713E+00 seconds\n Calculation Rate (inactive) = 30594.8 neutrons/second\n Calculation Rate (active) = 18080.8 neutrons/second\n\n ============================> RESULTS <============================\n\n k-effective (Collision) = 1.15984 +/- 0.00411\n k-effective (Track-length) = 1.16146 +/- 0.00457\n k-effective (Absorption) = 1.16177 +/- 0.00380\n Combined k-effective = 1.16105 +/- 0.00364\n Leakage Fraction = 0.00000 +/- 0.00000\n\n"
]
],
[
[
"## Tally Data Processing",
"_____no_output_____"
],
[
"Our simulation ran successfully and created statepoint and summary output files. We begin our analysis by instantiating a `StatePoint` object. ",
"_____no_output_____"
]
],
[
[
"# Load the last statepoint file\nsp = openmc.StatePoint('statepoint.50.h5')",
"_____no_output_____"
]
],
[
[
"In addition to the statepoint file, our simulation also created a summary file which encapsulates information about the materials and geometry. By default, a `Summary` object is automatically linked when a `StatePoint` is loaded. This is necessary for the `openmc.mgxs` module to properly process the tally data.",
"_____no_output_____"
],
[
"The statepoint is now ready to be analyzed by our multi-group cross sections. We simply have to load the tallies from the `StatePoint` into each object as follows and our `MGXS` objects will compute the cross sections for us under-the-hood.",
"_____no_output_____"
]
],
[
[
"# Load the tallies from the statepoint into each MGXS object\ntotal.load_from_statepoint(sp)\nabsorption.load_from_statepoint(sp)\nscattering.load_from_statepoint(sp)",
"_____no_output_____"
]
],
[
[
"Voila! Our multi-group cross sections are now ready to rock 'n roll!",
"_____no_output_____"
],
[
"## Extracting and Storing MGXS Data",
"_____no_output_____"
],
[
"Let's first inspect our total cross section by printing it to the screen.",
"_____no_output_____"
]
],
[
[
"total.print_xs()",
"Multi-Group XS\n\tReaction Type =\ttotal\n\tDomain Type =\tcell\n\tDomain ID =\t1\n\tCross Sections [cm^-1]:\n Group 1 [0.625 - 20000000.0eV]:\t6.81e-01 +/- 2.69e-01%\n Group 2 [0.0 - 0.625 eV]:\t1.40e+00 +/- 5.93e-01%\n\n\n\n"
]
],
[
[
"Since the `openmc.mgxs` module uses [tally arithmetic](http://openmc.readthedocs.io/en/latest/examples/tally-arithmetic.html) under-the-hood, the cross section is stored as a \"derived\" `Tally` object. This means that it can be queried and manipulated using all of the same methods supported for the `Tally` class in the OpenMC Python API. For example, we can construct a [Pandas](http://pandas.pydata.org/) `DataFrame` of the multi-group cross section data.",
"_____no_output_____"
]
],
[
[
"df = scattering.get_pandas_dataframe()\ndf.head(10)",
"_____no_output_____"
]
],
[
[
"Each multi-group cross section object can be easily exported to a variety of file formats, including CSV, Excel, and LaTeX for storage or data processing.",
"_____no_output_____"
]
],
[
[
"absorption.export_xs_data(filename='absorption-xs', format='excel')",
"_____no_output_____"
]
],
[
[
"The following code snippet shows how to export all three `MGXS` to the same HDF5 binary data store.",
"_____no_output_____"
]
],
[
[
"total.build_hdf5_store(filename='mgxs', append=True)\nabsorption.build_hdf5_store(filename='mgxs', append=True)\nscattering.build_hdf5_store(filename='mgxs', append=True)",
"_____no_output_____"
]
],
[
[
"## Comparing MGXS with Tally Arithmetic",
"_____no_output_____"
],
[
"Finally, we illustrate how one can leverage OpenMC's [tally arithmetic](http://openmc.readthedocs.io/en/latest/examples/tally-arithmetic.html) data processing feature with `MGXS` objects. The `openmc.mgxs` module uses tally arithmetic to compute multi-group cross sections with automated uncertainty propagation. Each `MGXS` object includes an `xs_tally` attribute which is a \"derived\" `Tally` based on the tallies needed to compute the cross section type of interest. These derived tallies can be used in subsequent tally arithmetic operations. For example, we can use tally artithmetic to confirm that the `TotalXS` is equal to the sum of the `AbsorptionXS` and `ScatterXS` objects.",
"_____no_output_____"
]
],
[
[
"# Use tally arithmetic to compute the difference between the total, absorption and scattering\ndifference = total.xs_tally - absorption.xs_tally - scattering.xs_tally\n\n# The difference is a derived tally which can generate Pandas DataFrames for inspection\ndifference.get_pandas_dataframe()",
"_____no_output_____"
]
],
[
[
"Similarly, we can use tally arithmetic to compute the ratio of `AbsorptionXS` and `ScatterXS` to the `TotalXS`.",
"_____no_output_____"
]
],
[
[
"# Use tally arithmetic to compute the absorption-to-total MGXS ratio\nabsorption_to_total = absorption.xs_tally / total.xs_tally\n\n# The absorption-to-total ratio is a derived tally which can generate Pandas DataFrames for inspection\nabsorption_to_total.get_pandas_dataframe()",
"_____no_output_____"
],
[
"# Use tally arithmetic to compute the scattering-to-total MGXS ratio\nscattering_to_total = scattering.xs_tally / total.xs_tally\n\n# The scattering-to-total ratio is a derived tally which can generate Pandas DataFrames for inspection\nscattering_to_total.get_pandas_dataframe()",
"_____no_output_____"
]
],
[
[
"Lastly, we sum the derived scatter-to-total and absorption-to-total ratios to confirm that they sum to unity.",
"_____no_output_____"
]
],
[
[
"# Use tally arithmetic to ensure that the absorption- and scattering-to-total MGXS ratios sum to unity\nsum_ratio = absorption_to_total + scattering_to_total\n\n# The sum ratio is a derived tally which can generate Pandas DataFrames for inspection\nsum_ratio.get_pandas_dataframe()",
"_____no_output_____"
]
]
] | [
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] | [
[
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
]
] |
d09afb0e2c8156fafb32f34a04dcdd46a68e0e42 | 90,903 | ipynb | Jupyter Notebook | Bungee/.ipynb_checkpoints/BungeeCharacLab-checkpoint.ipynb | JNichols-19/PH211 | 41098af2ecdd25609f75dddce5d2ceca9f45225a | [
"MIT"
] | null | null | null | Bungee/.ipynb_checkpoints/BungeeCharacLab-checkpoint.ipynb | JNichols-19/PH211 | 41098af2ecdd25609f75dddce5d2ceca9f45225a | [
"MIT"
] | null | null | null | Bungee/.ipynb_checkpoints/BungeeCharacLab-checkpoint.ipynb | JNichols-19/PH211 | 41098af2ecdd25609f75dddce5d2ceca9f45225a | [
"MIT"
] | 4 | 2021-01-14T00:39:48.000Z | 2021-06-07T19:40:34.000Z | 174.143678 | 26,356 | 0.889069 | [
[
[
"# Bungee Characterization Lab\n## PH 211 COCC\n### Bruce Emerson 1/20/2021\n\nThis notebook is meant to provide tools and discussion to support data analysis and presentation as you generate your lab reports. \n\n[Bungee Characterization (Bungee I)](http://coccweb.cocc.edu/bemerson/PhysicsGlobal/Courses/PH211/PH211Materials/PH211Labs/PH211LabbungeeI.html) and [Bungee I Lab Discussion](http://coccweb.cocc.edu/bemerson/PhysicsGlobal/Courses/PH211/PH211Materials/PH211Labs/PH211LabDbungeeI.html)\n\nIn this lab we are gathering some data, entering the data into the notebook, plotting the data as a scatterplot, plotting a physics model of the bungee, and finally looking for patterns through normalizing the data.\n\nFor the formal lab report you will want to create your own description of what you understand the process and intended outcome of the lab is. Please don't just copy the purpose statement from the lab page.",
"_____no_output_____"
],
[
"## Dependencies\n\nThis is where we load in the various libraries of python tools that are needed for the particular work we are undertaking. \n \n```numpy``` is a numerical tools library - often imported as np. ```numpy``` also contains the statistical tools that we will use in this lab. There are other libraries dedicated to statistical tools but 'numpy' has everything we need. \n```matplotlib```is a 'MATLAB like' library. \n```matplotlib.pyplot``` is often imported as ```plt``` to make it easier to use. ```matplotlib``` has the plotting tools that we need for this lab. \n\nThe following code cell will need to be run first before any other code cells.",
"_____no_output_____"
]
],
[
[
"import numpy as np\nimport matplotlib as mplot\nimport matplotlib.pyplot as plt\n",
"_____no_output_____"
]
],
[
[
"## Data Entry (Lists/Vectors)\n\nAs we learned last week we can manually enter our data in as lists. See last weeks lab for reminders if needed. In this lab we are working with data pairs (x,y data). There are a number of ways of doing this but the most conceptually direct approach is to create an ordered list of the xdata and the ydata separately. Notice that I can 'fold' long lines of data by entering a new line after the comma. This is handy when manually entering data. The data shown here is completely manufactured but has some of the same characteristics as the data you are gathering.\n\nBe aware that you will gathering two sets of data yourself and getting a third data set from another group. Plan out how you will keep track of each data set with thoughtful naming choices.",
"_____no_output_____"
],
[
"### Comments in Code:\n\nFrom this point going forward I will be looking for consistent description of what is happening in the code cells both within and before the code cell. You are of little value to a future employer if they can't hand you work to another employee who can make sense of what you did. A good metric is you should spend at least as much effort commenting and explaining what you're doing as you do actually doing the work.\n\nIn a python code cell any line that starts with a '#' will be ignored by python and interpreted as a comment. \n\n```# this is the actual data from your experiment```\n\nThis is a typical format of a comment that is easy to read in the code. It is sometimes helpful to comment at the end of a line to explain particular items in that line.\n\n```ydata2 = [2., 3.] # I can also comment at the end of a line```",
"_____no_output_____"
]
],
[
[
"# this is the actual data from your experiment\nxdata1 = [3.23961446, 12.3658087, 27.08638038, 36.88808393,\n 48.5373278, 43.90496472, 75.81073494, 105.42389529,\n 123.53497036, 158.87537602]\nydata1 = [0.62146893, 1.53513096, 3.97591135,\n 4.54284862, 6.23415512, 5.12951366,\n 6.1733864, 7.9524996, 8.90050684, 10.29383595]\n\n# these are a couple of specific data point I want to scatterplot on top of my plot\nxdata2 = [60., 100.]\nydata2 = [2., 3.] # I can also comment at the end of a line\n\n# print out and check my data\nprint(\"stretch data:\",xdata1)\nprint(\"force data:\",ydata1)",
"stretch data: [3.23961446, 12.3658087, 27.08638038, 36.88808393, 48.5373278, 43.90496472, 75.81073494, 105.42389529, 123.53497036, 158.87537602]\nforce data: [0.62146893, 1.53513096, 3.97591135, 4.54284862, 6.23415512, 5.12951366, 6.1733864, 7.9524996, 8.90050684, 10.29383595]\n"
]
],
[
[
"## Number of Data Points:\n\nBecause we are scatter plotting the data we need to be sure that every x value has a related y value or the plotting routines will complain. Previously we learned to use the ```len()``` function to determine the number of data points in a list. We do that again here.\n\n#### Extra: Conditional Statements:\n\nIt seems reasonable that we could ask python to check whether the two data sets are the same length and we can. There are a number of what are called conditional statements. The \"if-else' statement is one of these. \n\n[if-else examples](https://pythonguides.com/python-if-else/)\n\n```\nif (xdata1length = ydata1length):\n print(\"Looks good:)\")\n else\n print(\"Something is wrong here!!!\")\n```\nInside the parentheses is the conditional statement which, in this case, asks if ```xdata1length = ydata1length```. 'If' this statement is true then python will look at the next line(s) to see what it should do. If the conditional statement is false (not true) python will look for an ```else``` command and do whatever is on the lines after the else statement. Python expects that everything related to the ```if-else``` statement will be indented after the line where it begins. The next line of code (or even a blank line) that is NOT indented represents the end of the conditional statement. Just play with a few things in the statement if you have time and see what happens.",
"_____no_output_____"
],
[
"***\n## Lab Deliverable:\n\nFor your lab notebook you will include the usual 'header' information we talked about last week in the first cell of the lab (a markdown cell for sure). After the header cell describe the process by which you collected data from you micro-bungee cord. The actual data can be entered directly into the code. \n\nInsert an appropriate title and describe how you determined the varibility of your data across the range of your data points. At some point in your description you need to articulate, in percentage terms, a numerical value for variability of your data that matches your description and data.\n\n***",
"_____no_output_____"
]
],
[
[
"# determine the lengths of the data lists\nxdata1length = len(xdata1)\nydata1length = len(ydata1)\n\n# print out the lengths- visually check that they are the same\nprint(\"number of data points (x):\", xdata1length)\nprint(\"number of data points (y):\", ydata1length)\n\nif (xdata1length == ydata1length):\n print(\"Looks good:)\")\nelse:\n print(\"Something is wrong here!!!\")\n\n \n",
"number of data points (x): 10\nnumber of data points (y): 10\nLooks good:)\n"
]
],
[
[
"### Scatter Plots \n\nMost data that we will want to analyze in physics is (x,y) data. For this type of data the typical plot type is called a scatter plot which is just what you think of when you plot individual data points.\n\nTo begin the process in python we need to create a container for the multiple plots we will be creating. One way (not the only way) to dothis is with the ```plt.subplots``` function. This creates a container (called fig1 in this case) and a first set of axes called ax1 in this case. \n\n[pyplot subplots documentation](https://matplotlib.org/api/_as_gen/matplotlib.pyplot.subplots.html)\n\nWe can then layer multiple plots onto these axes (ax1) by plotting and replotting until we are ready to show the whole thing. In this cell I am only creating a single plot of the first data set.\n\n[pyplot scatter documentation](https://matplotlib.org/3.1.0/api/_as_gen/matplotlib.pyplot.scatter.html) \n\nTo try to keep things clearer for myself I have typically defined a new figure and a new set of axes for each plot. You will find that if you look at samples from the web that many coders jsut reuse the same labels over and over again. This works from a coding perspective but it violates a core expectation for all sciences that your code be clear in its communication. I encourage you to consider the choices you make in this regard.",
"_____no_output_____"
]
],
[
[
"# create a figure with a set of axes as we did with histograms\nfig1, ax1 = plt.subplots()\n\n# scatter plot data set 1\nax1.scatter(xdata1, ydata1)\n\n# set up labels and titles for the plot and turn on the grid lines\nax1.set(xlabel='independent variable (units)', ylabel='dependent variable (units)',\n title='My Data from Lab')\nax1.grid()\n\n# Set the size of my plot for better visibility\nfig1.set_size_inches(10, 9)\n\n# uncomment this line if I want to save a png of the plot for other purposes\n#fig1.savefig(\"myplot.png\")\nplt.show()",
"_____no_output_____"
]
],
[
[
"### Adding more data\n\nWhen I want to add more data I just make another plot on a new set of axes. I have to start a new container (fig) because the ```plt.show()``` call blocks me from adding more information to the plot (there is something in this that is still not clear to me and perhaps soon I will ",
"_____no_output_____"
]
],
[
[
"# a new set of axes\nfig2, ax2 = plt.subplots()\nax2.scatter(xdata1, ydata1, color = 'blue')\nax2.scatter(xdata2, ydata2, color = 'red')\n\nax2.set(xlabel='independent variable (units)', ylabel='dependent variable (units)',\n title='My Data from Lab')\nax2.grid()\n\n# Set the size of my plot for better visibility\nfig2.set_size_inches(10, 9)\n\n#fig.savefig(\"myplot.png\")\nplt.show()",
"_____no_output_____"
]
],
[
[
"### Discussion: Deliverable 2\n\nThe second deliverable asks you to consider the data from your plot(s) and describe whether it has features that are consistent with an ideal physics spring (Hooke's Law). Are some regions linear? ....sort of? Is the spring stiffer at the beginning or at the end of data? Explain your answer. Do both sets of data show similar behavior? How or how not?",
"_____no_output_____"
],
[
"### Add physics model...\n\nFor the lab you are asked to draw straight lines that 'model' (describe) the behavior of the early and latter parts of your data sets. When we are creating physics models we are now generating 'data points' from a mathematical description. Again, there are a number of ways to do this but what I will show here is typical of physics and engineering models.\n\nIt starts by defining a set of x values.```numpy.linspace()``` is a tool for doing this and because we did ```import numpy as np``` it shows in the code as ```np.linspace()```\n\n[numpy.linspace documentation](https://docs.scipy.org/doc/numpy/reference/generated/numpy.linspace.html)\n\nWhat the function does is generate a list of values that are evenly distributed between 'begin' and 'end' in ```np.linspace('begin','end',# values)``` \n\nIn this lab we are exploring linear models (Hooke's Law) for the behavior of the bungee (spring) which means we need a slope and a y intercept. One the nice features of lists is that if I multiply a list by a number I get a new list with the same number of elements each of which is multiplied by the number. Be careful. The calculation that looks like it's relating a single x and y value is really connecting a list of x and y values.",
"_____no_output_____"
]
],
[
[
"# actual model parameters - slope and intercept\nmodel1slope = .12\nmodel2slope = .045\nmodel1int = 0.\nmodel2int = 3.\n\n# range of x values -- choose lower and upper limits of range\nmodel1x = np.linspace(0.,50.,20)\nmodel2x = np.linspace(30.,170.,20)\n\n# in case you want to check how many values are generated\n# modellength = len(model1x)\n# print(modellength)\n\n# generate y values from model\nmodel1y = model1slope*model1x + model1int\nmodel2y = model2slope*model2x + model2int\n",
"_____no_output_____"
]
],
[
[
"### Plotting References\n\nThere are a range of different marks that you can use to plot your data points on the scatter plot. Here is the link... \n\n[marker types for scatter plot](https://matplotlib.org/3.1.0/api/markers_api.html#module-matplotlib.markers)\n\nThere are also a range of colors that you can use for all plots. I am not yet clear when some can or can't be used but here's the reference if you want to experiment...\n\n[matplotlib named colors](https://matplotlib.org/3.1.0/gallery/color/named_colors.html)\n\nWhen plotting lines (```ax2.plot()```) there are a few line styles you can use from solid lines to various dashed lines. Here's the reference....\n\n[matplotlib line styles for plot](https://matplotlib.org/gallery/lines_bars_and_markers/line_styles_reference.html)\n\nYou will notice that I added a label to each plot. This is then picked up and attached to each plot and displayed in the legend. You can decide where to place the legend on the plot by choosing different values for 'loc'. Play with this to get a helpful placement.",
"_____no_output_____"
]
],
[
[
"fig3, ax3 = plt.subplots()\n# scatter plot of the data\nax3.scatter(xdata1, ydata1, marker = 'x', color = 'black', label = \"82 cm Bungee\")\n\n# draw the two lines that represent my model\nax3.plot(model1x, model1y, color = 'red', linestyle = ':', linewidth = 3., label = \"initial\")\nax3.plot(model2x, model2y, color = 'green', linestyle = '--', linewidth = 2., label = \"tail\")\n\n# set up overall plot labels\nax2.set(xlabel='independent variable (units)', ylabel='dependent variable (units)',\n title='data and model')\nax3.grid()\n\n# Set the size of my plot for better visibility\nfig3.set_size_inches(10, 9)\n\n# this creates a key to the meaning of the different symbols on the plot\nplt.legend(loc= 4)\nplt.show()",
"_____no_output_____"
]
],
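[
[
"If you would like a cross-check on the slopes and intercepts you chose by eye, `np.polyfit` will do a least-squares straight-line fit. This is only a sketch: the choice to fit the first five points as the 'initial' region and the rest as the 'tail' is an assumption made to illustrate the call, so pick the split that matches your own data.\n\n```\n# Least-squares linear fits to the early and late portions of the data (illustrative split)\nearly_slope, early_int = np.polyfit(xdata1[:5], ydata1[:5], 1)\nlate_slope, late_int = np.polyfit(xdata1[5:], ydata1[5:], 1)\nprint(\"early fit: slope =\", early_slope, \" intercept =\", early_int)\nprint(\"late fit: slope =\", late_slope, \" intercept =\", late_int)\n```",
"_____no_output_____"
]
],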
[
[
"### Discussion: Deliverable III\n\nSo what does your plot above mean? What explanation of the behavior of the bungee is suggested by the two line fit?",
"_____no_output_____"
],
[
"### Normalization\n\nNormalization is the process of trying to see if a particular feature of the data has a simple dependence. In this case each bungee is a different length but otherwise they seem like would behave very similarly. To explore this question we normalize the stretch by dividing it by the original length of the cord. Do this for **both** sets of data and then replot.\n\nThe value of this normalization exercise is the impact of plotting data from multiple bungees. What I show here is the normalization of just one bungee. You will need to do 2 or 3 depending on how much data you have and plot them all simultaneously. Using different colors for each data set will help keep track of which ones are which.\n\nYou will note that I couldn't normalize by doing the obvious thing - ```xdata1norm = xdata1/length1```. Python doesn't like this (try it and look at the error message) so I had to hunt around and found this useful function. There may be other ways to accomplish this task but this works so that's where I'm going. As usual here is the documentation link:\n\n[numpy.true_divide](https://docs.scipy.org/doc/numpy-1.13.0/reference/generated/numpy.true_divide.html)",
"_____no_output_____"
]
],
[
[
"length1 = 75.\nxdata1norm = np.true_divide(xdata1,length1)",
"_____no_output_____"
],
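[
"An equivalent way to do the same normalization, shown only as a sketch: convert the list to a numpy array first, after which ordinary division works element by element (the name `xdata1norm_alt` is new and used only for this comparison).\n\n```\n# element-by-element division after converting the list to an array\nxdata1norm_alt = np.array(xdata1) / length1\n```",
"_____no_output_____"
],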
[
"\nfig, axn = plt.subplots()\naxn.scatter(xdata1norm, ydata1)\n\naxn.set(xlabel='independent variable (units)', ylabel='dependent variable (units)',\n title='My Data from Lab')\naxn.grid()\n\n#fig.savefig(\"myplot.png\")\nplt.show()",
"_____no_output_____"
]
],
[
[
"### Discussion: Deliverable V\n\nWhat does it mean? What we expect is that the data for all the different bungees makes a single shape as opposed to a family of similar shapes. How might this help us predict the behavior of a bungee of a different length?",
"_____no_output_____"
],
[
"## Reflection\n\nAs usual I learned a bunch of new stuff in the process of creating this notebook as a framework for your lab report. Thanks.",
"_____no_output_____"
],
[
"### Extensions\n\nExtensions are ideas that I didn't have time to explore or develop fully for this lab. These are offered as opportunities for students with more programming experience than is typical for students in the class.\n\n#### Create a separate legend for reporting slopes of the fit lines\n\nI feel like this might be nice at some future time.\n\n#### Least squares fit of straight line to data\n\nI am strongly in favor of drawing our own lines on the data because it makes us think about what the terms in the line mean in contect. Never the less I would love to know how to get numpy to do a least squares polynomial fit that I can hold to a linear function. Would also be interested in a higher order ($x^2$ or $x^3$) fit to the whole data set.\n\n#### Standard Deviation\n\nThis is a long way out. Each data point has a certain amount of uncertainty due to issues of reproducibility (mostly due to heating of the bungee). Is there a straightforward way to attach a bar of the correct length (either horizontally or vertically) to represent the standard deviation.",
"_____no_output_____"
]
]
] | [
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown"
] | [
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown",
"markdown",
"markdown"
]
] |
d09afdfbdbd8bc5d7c0a5e05d44765496294b6ba | 3,520 | ipynb | Jupyter Notebook | Assignment 1_Zubair.ipynb | Abdullahzz/cv | 5b6378063787232785416369d761e23535389b2a | [
"MIT"
] | null | null | null | Assignment 1_Zubair.ipynb | Abdullahzz/cv | 5b6378063787232785416369d761e23535389b2a | [
"MIT"
] | null | null | null | Assignment 1_Zubair.ipynb | Abdullahzz/cv | 5b6378063787232785416369d761e23535389b2a | [
"MIT"
] | null | null | null | 29.090909 | 429 | 0.571023 | [
[
[
"name =input(\"Please enter your name: \")\nage = int(input(\"Please enter your age: \"))\ndegree = input(\"Please enter your degree: \")\nuniversity = input(\"Please enter the university you graduated from: \")\njob = input(\"Please enter the position you wanted: \")\ntel_no = input(\"Please enter your phone number: \")\nemail =input(\"Please enter your email: \")\n\nprint(\"\\nHello Sir/Madam, my name is\", name, \"and I'm \", age, \"years old. I just graduated from \", university,\n \" with a bachelor degree in \", degree, \". I would like to apply for a job at Imagine AI for the position \",\n job, \" where I am very skilled in that work field. Within this application I inclucded my resume. You can email me at\",\n email, \" or contact \", tel_no, \", thank you.\")",
"Please enter your name: abdullah zubair zulkifli\nPlease enter your age: 23\nPlease enter your degree: intelligent system engineering\nPlease enter the university you graduated from: UiTM\nPlease enter the position you wanted: computer vision Engineer\nPlease enter your phone number: 012-345678\nPlease enter your email: [email protected]\n"
],
[
"a, b, c = 2,8,1",
"_____no_output_____"
],
[
"x1 = (-b + math.sqrt(b*b-4*a*c))/2*a\nx2 = (-b - math.sqrt(b*b-4*a*c))/2*a",
"_____no_output_____"
],
[
"print(x1,x2)",
"-0.5166852264521173 -15.483314773547882\n"
],
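[
"# Quick sanity check, a sketch only: with the formula above, each root should satisfy\n# a*x**2 + b*x + c = 0 to within floating-point error.\nassert abs(a*x1**2 + b*x1 + c) < 1e-9\nassert abs(a*x2**2 + b*x2 + c) < 1e-9",
"_____no_output_____"
],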
[
"#sorry for the late submit sir Goke. Im abdullah Zubair and currently undergo internship.\n#so i might miss some assignment due date but i'll try my best to catch up with others.",
"_____no_output_____"
]
]
] | [
"code"
] | [
[
"code",
"code",
"code",
"code",
"code"
]
] |
d09b13c518f7a63ad2347538123bef465e2b1b4e | 200,227 | ipynb | Jupyter Notebook | notebook/debugging/data_similarity.ipynb | ben-heil/saged | a2c039f00bfc97011c2ee1343c39af42e6f2e1e6 | [
"BSD-3-Clause"
] | 3 | 2020-07-24T01:06:04.000Z | 2021-12-20T14:35:21.000Z | notebook/debugging/data_similarity.ipynb | ben-heil/saged | a2c039f00bfc97011c2ee1343c39af42e6f2e1e6 | [
"BSD-3-Clause"
] | 39 | 2020-06-26T12:54:35.000Z | 2022-03-23T14:16:34.000Z | notebook/debugging/data_similarity.ipynb | ben-heil/saged | a2c039f00bfc97011c2ee1343c39af42e6f2e1e6 | [
"BSD-3-Clause"
] | 1 | 2020-05-29T14:24:16.000Z | 2020-05-29T14:24:16.000Z | 79.803507 | 44,324 | 0.767938 | [
[
[
"# Data Similarity\nPrevious experiments have had some strange results, with models occasionally performing abnormally well (or badly) on the out of sample set. To make sure that there are no duplicate samples or abnormally similar studies, I made this notebook",
"_____no_output_____"
]
],
[
[
"import json\n\nimport matplotlib.pyplot as plt\nimport numpy as np\nimport pandas as pd\nimport yaml\nfrom plotnine import *\nfrom sklearn.metrics.pairwise import euclidean_distances\n\nfrom saged import utils, datasets, models",
"_____no_output_____"
]
],
[
[
"## Load the data",
"_____no_output_____"
]
],
[
[
"dataset_config_file = '../../dataset_configs/refinebio_labeled_dataset.yml'",
"_____no_output_____"
],
[
"dataset_config_str = \"\"\"name: \"RefineBioMixedDataset\"\ncompendium_path: \"../../data/subset_compendium.pkl\"\nmetadata_path: \"../../data/aggregated_metadata.json\"\nlabel_path: \"../../data/sample_classifications.pkl\"\n\"\"\"\ndataset_config = yaml.safe_load(dataset_config_str)\n\ndataset_name = dataset_config.pop('name') \nMixedDatasetClass = datasets.RefineBioMixedDataset \n\nall_data = MixedDatasetClass.from_config(**dataset_config) ",
"_____no_output_____"
]
],
[
[
"## Look for samples that are very similar to each other despite having different IDs",
"_____no_output_____"
]
],
[
[
"sample_names = all_data.get_samples()\nassert len(sample_names) == len(set(sample_names))\n\nsample_names[:5]",
"_____no_output_____"
],
[
"expression = all_data.get_all_data()\nprint(len(sample_names))\nprint(expression.shape)",
"10234\n(10234, 14634)\n"
],
[
"sample_distance_matrix = euclidean_distances(expression, expression)\n\n# This is unrelated to debugging the data, I'm just curious\ngene_distance_matrix = euclidean_distances(expression.T, expression.T)",
"_____no_output_____"
],
[
"sample_distance_matrix.shape",
"_____no_output_____"
],
[
"sample_distance_matrix",
"_____no_output_____"
],
[
"# See if there are any zero distances outside the diagonal\nnum_zeros = 10234 * 10234 - np.count_nonzero(sample_distance_matrix)\nnum_zeros",
"_____no_output_____"
]
],
[
[
"Since there are as many zeros as elements in the diagonal, there are no duplicate samples with different IDs (unless noise was added somewhere)",
"_____no_output_____"
],
[
"### Get all distances\nBecause we know there aren't any zeros outside of the diagonal, we can zero out the lower diagonal and use the the non-zero entries of the upper diagonal to visualize the distance distribution",
"_____no_output_____"
]
],
[
[
"triangle = np.triu(sample_distance_matrix, k=0)\ntriangle",
"_____no_output_____"
],
[
"distances = triangle.flatten()\nnonzero_distances = distances[distances != 0]\nnonzero_distances.shape",
"_____no_output_____"
],
[
"plt.hist(nonzero_distances, bins=20)",
"_____no_output_____"
]
],
[
[
"Distribution looks bimodal, probably due to different platforms having different distances from each other?",
"_____no_output_____"
]
],
[
[
"plt.hist(nonzero_distances[nonzero_distances < 200])",
"_____no_output_____"
],
[
"plt.hist(nonzero_distances[nonzero_distances < 100])",
"_____no_output_____"
]
],
[
[
"Looks like there may be some samples that are abnormally close to each other. I wonder whether they're in the same study",
"_____no_output_____"
],
[
"## Correspondence between distance and study",
"_____no_output_____"
]
],
[
[
"# There is almost certainly a vectorized way of doing this but oh well\ndistances = []\nfirst_samples = []\nsecond_samples = []\n\nfor row_index in range(sample_distance_matrix.shape[0]):\n for col_index in range(sample_distance_matrix.shape[0]):\n distance = sample_distance_matrix[row_index, col_index]\n if distance == 0:\n continue\n distances.append(distance)\n first_samples.append(sample_names[row_index])\n second_samples.append(sample_names[col_index])",
"_____no_output_____"
],
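[
"A vectorized alternative to the loop above, included only as a sketch (it was not run here): `np.triu_indices` pulls out the upper-triangle index pairs in one shot. Note that this lists each sample pair once, whereas the loop records both orderings; the names `rows`, `cols` and `pair_df` are new and used only for this illustration.\n\n```\nrows, cols = np.triu_indices(len(sample_names), k=1)\npair_df = pd.DataFrame({'distance': sample_distance_matrix[rows, cols],\n                        'sample_1': np.array(sample_names)[rows],\n                        'sample_2': np.array(sample_names)[cols]})\n```",
"_____no_output_____"
],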
[
"distance_df = pd.DataFrame({'distance': distances, 'sample_1': first_samples,\n 'sample_2': second_samples})",
"_____no_output_____"
],
[
"# Free up memory to prevent swapping (probably hopeless if the user has < 32GB)\ndel(triangle)\ndel(sample_distance_matrix)\ndel(distances)\ndel(first_samples)\ndel(second_samples)\ndel(nonzero_distances)",
"_____no_output_____"
],
[
"distance_df",
"_____no_output_____"
],
[
"sample_to_study = all_data.sample_to_study",
"_____no_output_____"
],
[
"del(all_data)",
"_____no_output_____"
],
[
"distance_df['study_1'] = distance_df['sample_1'].map(sample_to_study)\ndistance_df['study_2'] = distance_df['sample_2'].map(sample_to_study)\ndistance_df['same_study'] = distance_df['study_1'] == distance_df['study_2']",
"_____no_output_____"
],
[
"distance_df.head()",
"_____no_output_____"
],
[
"print(len(distance_df))",
"104723274\n"
]
],
[
[
"For some reason my computer didn't want me to make a figure with 50 million points. We'll work with means instead",
"_____no_output_____"
]
],
[
[
"means_df = distance_df.groupby(['study_1', 'same_study']).mean()",
"_____no_output_____"
],
[
"means_df",
"_____no_output_____"
],
[
"means_df = means_df.unstack(level='same_study')\nmeans_df = means_df.reset_index()\nmeans_df.head()",
"_____no_output_____"
],
[
"# Get rid of the multilevel confusion\nmeans_df.columns = means_df.columns.droplevel()\n\nmeans_df.columns = ['study_name', 'distance_to_other', 'distance_to_same']\nmeans_df['difference'] = means_df['distance_to_other'] - means_df['distance_to_same']\nmeans_df.head()",
"_____no_output_____"
],
[
"plot = ggplot(means_df, aes(x='study_name', y='difference')) \nplot += geom_point()\nplot += ylab('out of study - in-study mean')\nplot",
"_____no_output_____"
],
[
"means_df.sort_values(by='difference')",
"_____no_output_____"
]
],
[
[
"These results indicate that most of the data is behaving as expected (the distance between pairs of samples from different studies is less than the distance between pairs of samples within the same study).\n\nThe outliers are mostly bead-chip, which makes sense (though they shouldn't be in the dataset and I'll need to look more closely at that later). The one exception is SRP049820 which is run on an Illumina Genome Analyzer II. Maybe it's due to the old tech?",
"_____no_output_____"
],
[
"## Without BE Correction",
"_____no_output_____"
]
],
[
[
"%reset -f\n# Calling reset because the notebook runs out of memory otherwise\nimport json\n\nimport matplotlib.pyplot as plt\nimport numpy as np\nimport pandas as pd\nimport yaml\nfrom plotnine import *\nfrom sklearn.metrics.pairwise import euclidean_distances\n\nfrom saged import utils, datasets, models",
"_____no_output_____"
],
[
"dataset_config_file = '../../dataset_configs/refinebio_labeled_dataset.yml'\n\ndataset_config_str = \"\"\"name: \"RefineBioMixedDataset\"\ncompendium_path: \"../../data/subset_compendium.pkl\"\nmetadata_path: \"../../data/aggregated_metadata.json\"\nlabel_path: \"../../data/sample_classifications.pkl\"\n\"\"\"\ndataset_config = yaml.safe_load(dataset_config_str)\n\ndataset_name = dataset_config.pop('name') \nMixedDatasetClass = datasets.RefineBioMixedDataset \n\nall_data = MixedDatasetClass.from_config(**dataset_config) ",
"_____no_output_____"
],
[
"# Correct for batch effects\nall_data = datasets.correct_batch_effects(all_data, 'limma')",
"_____no_output_____"
]
],
[
[
"## Look for samples that are very similar to each other despite having different IDs",
"_____no_output_____"
]
],
[
[
"sample_names = all_data.get_samples()\nassert len(sample_names) == len(set(sample_names))\n\nsample_names[:5]",
"_____no_output_____"
],
[
"expression = all_data.get_all_data()\nprint(len(sample_names))\nprint(expression.shape)",
"10234\n(10234, 14634)\n"
],
[
"sample_distance_matrix = euclidean_distances(expression, expression)\n\n# This is unrelated to debugging the data, I'm just curious\ngene_distance_matrix = euclidean_distances(expression.T, expression.T)",
"_____no_output_____"
],
[
"sample_distance_matrix.shape",
"_____no_output_____"
],
[
"sample_distance_matrix",
"_____no_output_____"
],
[
"# See if there are any zero distances outside the diagonal\nnum_zeros = 10234 * 10234 - np.count_nonzero(sample_distance_matrix)\nnum_zeros",
"_____no_output_____"
]
],
[
[
"Since there are as many zeros as elements in the diagonal, there are no duplicate samples with different IDs (unless noise was added somewhere)",
"_____no_output_____"
],
[
"### Get all distances\nBecause we know there aren't any zeros outside of the diagonal, we can zero out the lower diagonal and use the the non-zero entries of the upper diagonal to visualize the distance distribution",
"_____no_output_____"
]
],
[
[
"triangle = np.triu(sample_distance_matrix, k=0)\ntriangle",
"_____no_output_____"
],
[
"distances = triangle.flatten()\nnonzero_distances = distances[distances != 0]\nnonzero_distances.shape",
"_____no_output_____"
],
[
"plt.hist(nonzero_distances, bins=20)",
"_____no_output_____"
]
],
[
[
"Distribution looks bimodal, probably due to different platforms having different distances from each other?",
"_____no_output_____"
]
],
[
[
"plt.hist(nonzero_distances[nonzero_distances < 200])",
"_____no_output_____"
],
[
"plt.hist(nonzero_distances[nonzero_distances < 100])",
"_____no_output_____"
]
],
[
[
"Looks like there may be some samples that are abnormally close to each other. I wonder whether they're in the same study",
"_____no_output_____"
],
[
"## Correspondence between distance and study",
"_____no_output_____"
]
],
[
[
"# There is almost certainly a vectorized way of doing this but oh well\ndistances = []\nfirst_samples = []\nsecond_samples = []\n\nfor row_index in range(sample_distance_matrix.shape[0]):\n for col_index in range(sample_distance_matrix.shape[0]):\n distance = sample_distance_matrix[row_index, col_index]\n if distance == 0:\n continue\n distances.append(distance)\n first_samples.append(sample_names[row_index])\n second_samples.append(sample_names[col_index])",
"_____no_output_____"
],
[
"distance_df = pd.DataFrame({'distance': distances, 'sample_1': first_samples,\n 'sample_2': second_samples})",
"_____no_output_____"
],
[
"# Free up memory to prevent swapping (probably hopeless if the user has < 32GB)\ndel(triangle)\ndel(sample_distance_matrix)\ndel(distances)\ndel(first_samples)\ndel(second_samples)\ndel(nonzero_distances)",
"_____no_output_____"
],
[
"distance_df",
"_____no_output_____"
],
[
"sample_to_study = all_data.sample_to_study",
"_____no_output_____"
],
[
"del(all_data)",
"_____no_output_____"
],
[
"distance_df['study_1'] = distance_df['sample_1'].map(sample_to_study)\ndistance_df['study_2'] = distance_df['sample_2'].map(sample_to_study)\ndistance_df['same_study'] = distance_df['study_1'] == distance_df['study_2']",
"_____no_output_____"
],
[
"distance_df.head()",
"_____no_output_____"
],
[
"print(len(distance_df))",
"104724522\n"
]
],
[
[
"For some reason my computer didn't want me to make a figure with 50 million points. We'll work with means instead",
"_____no_output_____"
]
],
[
[
"means_df = distance_df.groupby(['study_1', 'same_study']).mean()",
"_____no_output_____"
],
[
"means_df",
"_____no_output_____"
],
[
"means_df = means_df.unstack(level='same_study')\nmeans_df = means_df.reset_index()\nmeans_df.head()",
"_____no_output_____"
],
[
"# Get rid of the multilevel confusion\nmeans_df.columns = means_df.columns.droplevel()\n\nmeans_df.columns = ['study_name', 'distance_to_other', 'distance_to_same']\nmeans_df['difference'] = means_df['distance_to_other'] - means_df['distance_to_same']\nmeans_df.head()",
"_____no_output_____"
],
[
"plot = ggplot(means_df, aes(x='study_name', y='difference')) \nplot += geom_point()\nplot += ylab('out of study - in-study mean')\nplot",
"_____no_output_____"
],
[
"means_df.sort_values(by='difference')",
"_____no_output_____"
]
],
[
[
"These results indicate that most of the data is behaving as expected (the distance between pairs of samples from different studies is less than the distance between pairs of samples within the same study).\n\nThe outliers are mostly bead-chip, which makes sense (though they shouldn't be in the dataset and I'll need to look more closely at that later). The one exception is SRP049820 which is run on an Illumina Genome Analyzer II. Maybe it's due to the old tech?",
"_____no_output_____"
]
]
] | [
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown"
] | [
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
]
] |
d09b1db319a1b15cceb8225f4b4f93b2f6fa3300 | 121,390 | ipynb | Jupyter Notebook | Notebook/Linear-Regression.ipynb | jason-neal/eniricML | 6f8daf3aede62166cd8a042a29bd60e073de2967 | [
"MIT"
] | null | null | null | Notebook/Linear-Regression.ipynb | jason-neal/eniricML | 6f8daf3aede62166cd8a042a29bd60e073de2967 | [
"MIT"
] | null | null | null | Notebook/Linear-Regression.ipynb | jason-neal/eniricML | 6f8daf3aede62166cd8a042a29bd60e073de2967 | [
"MIT"
] | null | null | null | 264.466231 | 62,088 | 0.915998 | [
[
[
"# LINEAR Regression on Precision table",
"_____no_output_____"
],
[
"import pandas as pd\nfrom sklearn import linear_model\nimport numpy as np\nimport seaborn as sns\nsns.set(color_codes=True)",
"_____no_output_____"
],
[
"def sk_linearReg_org(data): \n data_set = [[value[0], value[1], value[2], value[3]] for value in data]\n Y = [value[4] for value in data]\n clf = linear_model.LinearRegression()\n clf.fit(data_set, Y)\n\n # application of the model to the data\n model = [clf.intercept_+ np.sum(np.array(clf.coef_)*np.array(value)) for value in data_set] \n # calculation of the residuals\n res = np.array(Y)-np.array(model) \n \n return [clf.intercept_, clf.coef_, model, res]\n\ndef sk_linearReg(data_set, Y): \n # data_set = [[value[0], value[1], value[2], value[3]] for value in data]\n # Y = [value[4] for value in data]\n clf = linear_model.LinearRegression()\n clf.fit(data_set, Y)\n\n # application of the model to the data\n print(\"intercept=\",clf.intercept_,\"coef\", clf.coef_)\n df = data_set.copy()\n df = df.multiply(clf.coef_, axis=1)\n print(\"model\")\n model = clf.intercept_ + df.sum(axis=1)\n display(model.head())\n #model = [clf.intercept_+ np.sum(np.array(clf.coef_)*np.array(value)) for value in data_set] \n # calculation of the residuals\n res = Y.values - model.values \n \n return [clf.intercept_, clf.coef_, model, res]",
"_____no_output_____"
],
[
"# Using float quality from random forest\n\n# df = pd.read_csv(\"../metalicities/extended_quality.csv\")\ndf = pd.read_csv(\"random_forest/float_quality.csv\")\n\ndf.columns = df.columns.str.strip()\n#df.Resolution = df.Resolution.str.replace(\"k\",\"\").astype(float) * 1000\n#df.Band = df.Band.str.strip()\n#df = df[df.Band ==\"K\"]\nprint(df.head())\nlen(df)",
" Temp logg [Fe/H] Alpha Band Resolution vsini Sampling Quality \\\n0 4200 4.0 -1.0 0.0 0.90 60000 1.0 3.0 2899 \n1 4200 4.0 -1.0 0.0 0.90 60000 5.0 3.0 1946 \n2 4200 4.0 -1.0 0.0 0.90 60000 10.0 3.0 1141 \n3 4200 4.0 -1.0 0.0 1.25 60000 1.0 3.0 1543 \n4 4200 4.0 -1.0 0.0 1.25 60000 5.0 3.0 1090 \n\n Cond1 Cond2 Cond3 correctflag \n0 12.1 20.4 12.6 0 \n1 18.1 30.6 18.7 0 \n2 30.8 50.2 31.6 0 \n3 22.2 111.9 23.5 0 \n4 31.4 186.9 33.2 0 \n"
],
[
"# TODO apply the scaling/normalization to columns",
"_____no_output_____"
],
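[
"# One way to tackle the TODO above, shown as a sketch only (the later cells still use the raw df):\n# z-score each feature column so the regression coefficients end up on comparable scales.\n# The names feature_cols and df_scaled are new and used only for this illustration.\nfeature_cols = [\"Temp\", \"logg\", \"[Fe/H]\", \"Resolution\", \"Band\", \"vsini\"]\ndf_scaled = df.copy()\ndf_scaled[feature_cols] = (df[feature_cols] - df[feature_cols].mean()) / df[feature_cols].std()",
"_____no_output_____"
],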
[
"data_table = df[[\"Temp\", \"logg\", \"[Fe/H]\", \"Resolution\", \"Band\", \"vsini\"]]\nexpected = df[\"Quality\"].astype(float)\ndata_table = data_table.astype(np.float)",
"_____no_output_____"
],
[
"[intercept_, coef_, model, res] = sk_linearReg(data_table, expected)",
"intercept= 20408.446173711713 coef [-2.99193627e+00 -5.05856000e+02 1.05706931e+03 4.21585140e-02\n -2.24870695e+03 -6.73343385e+02]\nmodel\n"
],
[
"print(data_table.columns)\nprint(coef_)",
"Index(['Temp', 'logg', '[Fe/H]', 'Resolution', 'Band', 'vsini'], dtype='object')\n[-2.99193627e+00 -5.05856000e+02 1.05706931e+03 4.21585140e-02\n -2.24870695e+03 -6.73343385e+02]\n"
],
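[
"# Quick goodness-of-fit check, a sketch using the residuals (res) and observed qualities (expected)\n# from the cells above; printing r_squared would show the coefficient of determination.\nss_res = np.sum(res**2)\nss_tot = np.sum((expected - expected.mean())**2)\nr_squared = 1 - ss_res/ss_tot",
"_____no_output_____"
],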
[
"import matplotlib.pyplot as plt\nplt.style.use('presentation')\nplt.hist(res)data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAewAAAFcCAYAAAAK4I0VAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAAIABJREFUeJzsvXtcVNe5//+ZGwMDzAwy3B28IaCIRgNaAU9j1Ig2jcaeSlJN25jY803aGnMa4zltYpvk5Nta0+Z2mvwaj0naE5uYNBf6zQUTo6YiXiAYg+igAsJwZwbmAnO//P7Y7MXeey7MwCBI9vv1yivO3rPXXmvtYT/redZzEXi9Xi94eHh4eHh4JjXCie4ADw8PDw8Pz8jwApuHh4eHh+cGgBfYPDw8PDw8NwC8wObh4eHh4bkB4AU2Dw8PDw/PDQAvsHl4eHh4eG4AeIHNw8PDw8NzA8ALbB4eHh4enhsAXmDz8HDQarXIycnB/v37fc7l5ORgz549Aa+tr6/Hvffe63O8qqoKO3bsiEj/ItkWDbPf49H+jQg/DzyTDfFEd4CHZ7LyySefYPv27eRzRUVF2G3s27cPu3btQlFREdRqdUT6FWpb9L3Hq/2pDj8PPJMNXsPm4fGDWq2GyWSCyWQix6qqqrB27dqw2nn77bcBUFp7VVVVRPoWalv0vcer/akOPw88kw1eYPPwBGDt2rX45JNPyOeqqioUFxeTfzPNpfv37/cxoe/YsQMmk4n8/+TJkwCAe++9l6Wt33vvvaiqqsK+fftQWFiIwsJC0nZ9fT127NiBHTt2EFM8sy0Afq9j3hsADh06hNWrV2P16tVBTfrc9un779mzB6tXr8amTZvIIiZQm6GOg8bfuWBt0/2g5zyc63fs2EH6xnwG/o5z53n//v0+bQabHx6eSMMLbB6eAKxfv568vOvr6zF//vywrn/hhRcgl8vxwgsvsI6XlZXh448/BkAJhQsXLmDBggU4deoUqqurUV1djba2NqLdHT58GP/2b/+GJ5980uceJpPJ73XMe9fX12P//v147733cOTIEQCU4AuVw4cPo6ysDEeOHMH06dNx6NChgG0G6s9I42CeC9R2VVUVTp06hSNHjuD111/HoUOHYDAYQr7+0KFDAIDq6mq8/vrr5BkEOs6kqqoKhw4dIm1qtVpynb/54eEZD/g9bB6eAOTl5UGr1QIAPv74Y6xfvx5Go3HM7ZaWluLxxx8HQO2Tb968GXK5HK+//jqqqqpQX18PrVYLk8kEhUKBvLw85OXl+W0r0HVMPv74YxiNRvz4xz8GQAn5cPZm1Wo1uX9xcTFMJlPANkc7Dua5QG2bzWaUlZWRcTO3J0K5fv369di/fz/27NmD0tJSspBasGCB3+NMTp48ibKyMsjlcgDA9u3b8dZbb2HBggV+54eHZzzgBTYPTxCKiopQUVFBNDimiZyJwWCAUqkMud3ly5cTre35559HfX09HnroIWzfvh2lpaVkoQAACoUiYDvBrqNRKpXYvHkzywEtHKHiT7gHanO042CeC9T2M8884/M9WsMO5Xq5XI4jR46goqICFRUV2LNnD44cOYK8vDy/x0fCbDYHnB8envGAN4nz8AShtLQUr7zyCuRyOdGuAEpAXLx4kXw+fPhwWO3S2h5AvfCrqqpQVFSEsrIyKBSKkJ2dQrmutLQUhw8fJkJ6x44dYzbbBmpztOMIpe2ioiJWvwONIdD1+/fvx759+1BaWoonn3wSRqMRJpMp4HEmxcXFrPvt378fpaWlYY+Nh2cs8AKbhycIRUVFqK+vx7p161jH8/LyUFRUhNWrV+Pee+/F9u3b/WpaarXab1x2aWkpqqqqiIm3rKwMFy5cwOrVq/HYY4+hrKwMzzzzzIj9C3YdfW+1Wo3t27dj06ZNKCwshFKpZIWrjYZAbY52HKG0XVpaivnz5xPnrvnz5/u1agTr26lTp1BYWIjVq1fjkUcegVwuD3icCb0IoZ3O1Go1eXY8PNcLgdfr9U50J3h4eHhGor6+HlVVVWSxsWnTJjzyyCMoKiqa4J7x8FwfeIHNw8Nzw7Bjxw5cvHgRcrkcy5cvH1ViGB6eGxVeYPPw8PDw8NwAjOsetlarZSWXoGMpmQkmxnKMh4eHh4fnm8K4CmxmJqGKigoolUoUFRXBYDCQEIrRHuPh4eHh4fkmMW4Cu6KighX2UFVVRbxo8/PzUVVVNaZjwXC53OMxJB4eHh4engljXBKn1NfXIy8vj5XMQKvVEqErl8tJQoWxHAtEf78lgqMBkpLi0dtrjmibk4GpOi5g6o5tqo4L4Md2IzJVxwVM3NiSkuIDnhsXgf3MM89ArVbDYDDg1KlTOHToENRqNbRaLfLy8lipEcdyjIeHh4eH55vCuAjs1157DQCVDpBOnlBRUUE047q6OpSWlsJkMo36GA8PDw/P5EbT0g8AyJ2RMME9mRpct0xnpaWlMBgMZP+5qKhoTMd4eHh4eCY35ZXNKK9snuhuTBmmZBx2pPcdpuo+zVQdFzB1xzZVxwXwY7sR8TcuTUs/WrvNOHdFhwYtVZwlR63EhpJZIWnaXK18orT0b8weNg8PDw/PNxNao956WzYeP3CW+vfaHGSoYsO6nhbQ3M+hMFVN8bzA5uHh4eEZM5qWfpRXNhOt+rl3zqNoQSpUimjUaHqQUTIrrOt/feAMIBBA2zMAANh7sDZkLX00Qv5GgBfYPDw8PDxjJndGAuJlEqJVry5QY+3STABAtaYn7Ot/smEB4PWGpaVzhf7eg7VYPFeFzJT4KSG8eYHNw8PDwxMRqjU9uKN4JgDA5hhOYFWYmxz29TWaHni9XtbnkbR0rtDfujYHbxxuwLkrurBN6l1GO1IV0pCvuR7wApuHh4eHJyJkJMUR4RyKVh3K9eG2Rwt9ndGK594+D73JBiB8k7okSoR///6isMcwnvACm4eHh4cnIjA16VC16lCvD7U9ptA/fLYVh45eBTB6k3qoQv56cN3isHl4eHh4eMYbpmC32l24o3gm7iieiZoQ99G33pZNPm9dmwNg2Ot8ouE1bB4eHh6eKcloTPS0ST02VooaTc+kChHjNWweHh4ennFH09J/3TXV0ZjoM5LisHHFbOTPUaH6UjcatAY0aA3Ye7B2wjVtXmDz8PDw8Iw7N0qaUlqw52ep8MDGBeT41rU5E65l8ybxG5zHHtuN48c/D3i+srLmOvYmMGYzleIvPj5w2r2R2LZtK1atWoMtW34UqW7x8PCMM5PJkStc8zY3zGyksLLxhhfYU4Ds7Fzs3v3YRHcjKI8/vhvZ2bl48MEdE90VHh6e64i/2OhQ05RGmnAzoI01TC3S8AI7gkyUc4JcLkdOTu51vScPDw9PqERKUw30jh3p3TtaLX+sYWqRht/DjiCTdY+muvoMSkoK0N7eRo7t3Pkg7r33XgBAe3s
bdu58EO3tbdi2bStKSgqwbdtWVFefYbXz0ksvYPPmDSgpKcDOnQ+ioUHDOv/73z+N0tKVKC1did///mlyfNu2raipOYu//e2v2LZta0jtmc1m7Nz5IEpLV2Lz5g0oL38vonPCw8MTOUZyKKMduTaumI30MWjXgd6xI717/YVrTfR+9GjgNewIMNF7NCaTyUd4AkBcXBwyMqajsHAZ7rjjTjz++H/g1VffwLFjR6DRXMKxY0dht1Pfrak5i4cf/im2bPkR0tMzUF7+Hh5++Kc4dOgDZGRMx86dD8JkMmHXrl9CLlegvPxd3HffVnKeFsRPPfU7DAyY8fLLL2Lnzgfx3HMv4dVX38DOnQ+yTOIjtff979+B9PQMPPXU7wAA+/b9X3R0tGPVqjXXZU55eHhCZyRT80ia6mg1ZPreobx7J9t+9GjgBXYEmOg9msuXNbjvvq0+xwsKluK5514CADz66K9QWroSL730Av7xj/exe/evIJfLWfVet2z5ETZs2AQAKCxchm3btuLgwb9gy5YfoabmLD755BhxGnv00V9Bo7mE48c/R3Z2Li5f1rDOx8XF4+DBv/jtb3t7W9D20tMzMDBgxvPPv0zOP/vsn1BWtjFCM8bDwxMJ6q7q8JcP68esrIwk8IO9Y0N99460Hz2Z4q0DwQvsCDGRqzemYA7GU0/9Dg8//FPccssqrFy52m873M81NWdx+TKlva9bt9LnmvT0DPJ/pgd4YeEyFBYu89uPkdozGo3Izs5ltZeRMZ3ci4eHZ3KQn6Uadd1rIDzrZKB3bKjv3pG0/BuhJCcvsCPEZPMm9MfAAKVNd3S0h3VdXBwlOAOFiAXSpEfb3ksvvRBWezw8PBPHWJSVcKyTgd6xY333Blo0JCX5D0GdSE2cdzqLEJPNm5CL2WzG3r1PY9euX6Kjo92vkK2pOevzOT09g2i23H3ynTsfxLFjR5CenoGOjnYSaw1Qjm6lpb4aNIAR25s3bz4uX9aw2mtvbwt7ocHDwzP+jNahjHZUowX+SPm+ue9Y+vqxvnvDdUibSOficdOw9+3bh4sXL6KoqAjbt28nx4qLi1FfXx+RYzwUgZzOAJBwr8cf342CgqXYsGET0tMz8PDDP8X3vrcB0dFK8t2XX34RAJCbOx/l5e/i8mUNnnrqd8jImI5bblmFhx56ALt3/wpxcfEoL38PGs0lPPXUMsTHxyM9PQMPPfQAHnjg58TpLDd3HqsvtFAPtz2AcjqjNXMeHp7Jw2gFJi30br15+qg05EiasEOxEky0czEACLxerzfSjWq1WqjVagDApk2b8N5776GiogJarRbbt2/Hvn37kJ+fT747mmOlpaUB7890pIoESUnxEW8zUoyU6ezAgTeg0VzEyy+/iHfe+QfZF37ssd3o7e3Cn//8F7S3t6GsbCOeffZPePnlF3H5sgbZ2bl44IGfs/ahX3rpBRw//jk6OtpRULAUDzywgywIKA3+v4iWfuutq/HAAzvI/Y4dO4K9e59GenoGXn31jZDae/zx3dBoLkEul2PLlh9hYMCM7OzcgHvjTCbzMxsLU3VcAD+2G5Fg4woWM80UfDlqZViCb6zX+6Na08NaNBTmJqPLaIfBYGG12947QMz3T92/bFyciwOZ4oFxEtg09fX1uHDhAsrKyrBnzx4UFRWhtLQUFRUVqKqqAoBRH3vyyScD3tflckMsFo3XsKYcWq0Wq1evRnV1NeRy+UR3h4dnSlB3VQeAcsz6JvKfL1UCAH77YInPuZYuE3627xgA4E+7ViIzNbz3zliv9wf3efnr/98OD1syBQDuXnt9E1aNm0lcq9Xiz3/+My5evIiysjKW1i2Xy6HVagFgTMcC0d9viehYpvrqWK8fAADodAOw2wUT3KvIMNWf2VRkqo3tLx/WAwB2b1kyqrHdCGFG3HFpWvrR2m3GuSs6ogE/8twXPhrwp1XNxAT96alrJKaaO+ZAcxDo+rFAP68NJbNYGvwjz32BxXNVyEyJh1ImYWni4/F7DaZhj5vTmVqtxgsvvID58+ejvr4earWaCFqTyQS1Wj2mYzw8PDyTEU1LP/YerGWVZaS1t3BgOjdNRGnK0VBe2YxzV3QjOnEFclTjOnQFcvAK5ugW7lxxn1d5ZTOK81NZ/T93RYfyyuYJdy4e97AuWuAWFRURoVtXV4fS0lKYTKZRH+OJHBkZ0ydNVS8enhsdf6FKACUYQtGW/Tk3WewuyKTiSattc/v83DvnUbQgFSpFtF8nLn8e38zrf33gDCAQQNtDWf+4Dl7BBCfTGS2YlYJ5jvu8qi914+7bctDSYcBzb5+H3mTz24/rzbgIbHqfuaioCMXFxZDL5SgtLcW+fftYe9IAxnSMh4eHZzLC9Tpu7DTB6XCH9KLnChC70x1QcIXDeJrYuX1eXaDG2qWZAELz/OZe/5MNCwCvN6yELOEudJiCnfm8Kk63IFERjR+szUVvrxmHz7bi0NGrIfdjPBlXp7OJ4pvkJT4Wpuq4gKk7tqk6LuDGHxtTINKexpqWfrzxaQM69JRfTagezR+caCL/NlucOHaOykEwFs/kvQdrAVB76pGC+cyYfRYIBMhRUyGjoS4QuNczRZNAIAhpn5rpxT0zNR7Xuqi+Mefdn5d5TqYSG1fMBgA8tv804mVReGbnt9Hba/bpVyT2y4MRbA+bz3TGw8PDEwGYGhttps2dkYAHNi4IO3UnM3vXS+/XjSnt8fWKH+ZmHItE7elw47OZmrLZ4iQCmznvgbKr0fPUobcAegv+86VKrB+yEuRmJpCF2EQi+s1vfvObCe3BOGCxOCLaXmysNOJtTgam6riAqTu2qTou4MYdm6alHwc+uoQGrQF6kw2aln6oFNFQKWMAAEdr27BkXgpmp8ajU29BbubIAowl1AUCrLp5OnJnJMBscRLhojPayD2CoVLGYFZaPNHSd25ehNnpkQnfZD4z86ADOqMNOqMNFWdaA85HIJjjWpSlIu2plDEhLXLoa2lNuVrTjeL8NORkKn3m/WhtG3Iylaxz3Hn69f3fQlpCDLXYaTWgZGGaTz/CeQ6hEhsrDXiO17B5eHh4xsBI+bAzkuKwfsUc9PaaR6Wh+XOwCkV75ZromVq6OUxzdSjQfdq9ZUlADXakezLHFa6GXl7ZDIvNidxM6toBi5MhvNnzHij/OHOe3jt2FW3d5qCWietdMITfww6BG31vLRBTdVzA1B3bVB0XcGOPbaR9TubYxuL8FU6WL+aeNTeT19Ev28i5sZCUFI8TNa0+fUpUREOliAYwPB/B9tC544qRimC1u0cco79rhQLAMyTV1EmxuHt1dshzzZynhg4T4iRCn8xm/mLNI5FtjWZC4rB5eHh4vimEUwBjLMUjQilU4S8OPD5GQs4d/bKNdW6s8d3++rQoS0Xmw+v1+vSHe09uG/fdPj/oGLnXMuOmPQwVVCQSspKwjDTW+BgJ+U7Jogy/hUlCjTUfD3gNOwRu5JV/MKbquICpO7apOi5g6o/NnyY6Gq0sFK/lYDmvI5EPmxZqKwoycaKmFZV1nT4adaj98TeuhlYDcj
KVrPaCWSb+58OLsNic+OqqHgCgjIuCYYDaW89RK7F4rgonL3RBJhUHtSowrQBJSfH4+EQj0bg/ONGEhlYDeX6JcilyMhOgUkRH1Huc17CnMJs3b0BJSUHAal2PPbYbJSUFYdes5rJt29aQ2ygpKUB19Zkx3Y+HZ6oRbhnHQISizQcrWRlqOctgMK0E5ZXNuNZpCtqnUO7JHNeM1Hif9oJZJq51mqDtGcAdxTMxO12OxXOTyLni/DSUn2yGtmcgoIYfKDsdU+PeuGI26/mtLlDj/tvnh11WdCzwTmdThPLyd/Hoo7/yOR6skhfP9eNGyAvNM/6EUsZxJEJJjxnIqWqkcyPB3S8u+9VHsNhcAIadsgL1KVBoFP23QQvH3BkJuGvVXHI+PkZChCnzPsyYajrOvaHVgH/99hwcrW3DHcUzoTNaUV7ZTPbDAaA4P9Xv32Fxfiq5x9a1OcjPUuGR576g+j70febzszmG27xeaUp5gT0FyM7OxdGjR3wE9rFjR/ga0pOE6+1NyjM5GYuwDIdgQn0s+bC5HvEP370ET782cox5Q6sBDa0GVow6DVdr5v6NBPPCD3TObHWS+zAzla1cnAG9ye7Tv/LKZuiMVla2s0PHrvosEq7X8wsEL7CnAKtWrcHLL7+IhgYNqScNAJ9//hnuuONOv1r273//NI4f/xwejxe5ufOwa9cvkZExHYD/WtRcgtWy5hlmMhS955k8jCQsJ7Mlhu6bprWfCLbyLxqDWgwC/f4B+Hha0+w9WEuqY/nTbLn38XeOObdWuwsFOUmIkgjhcLpZ5mtu/xpaDdhQMgtmqxN5c5NICU9maBptBZiI4h/8HvYU4ZZbVqG8/F3WsePHP8eqVbf5fHfnzgdRU3MWzz//PJ566ncAgPvuu4ec//7374DJZMJTT/0Ou3b9EgcP/gWXL2t8rt+165c4cOANpKdn4L77tqK9vW2cRnfjEql9S56JhelhPJ6Vs8biQT4WQhkT3TfmXrMiLgq5mQkB93ED/f79eVozv0NXx2K1xbgPt7/B+pCRFIcH78yH3mhHc6eJJWgD9S8+RoL3jl3FHcUzUbQgBRWnW1hzMFHwGvYUYcOGTXj88f8gZnHaHM7VehsaNKipOYtDhz7ATTfNQ2+vGYWFy1BauhLl5e9BLpdjYMCM559/GfHxlDn92Wf/hLKyjQCA9vY21NScxSefHCPnH330V9BoLuH48c/9auPfdCKxb3mjEgmNcaxthFpjORhjSegRah+51aqK89Nw21BqzPEmWIUrbt8Aal85d0YCjIMOlFc2B9U4uYU1dEabT1UvndEKAQRIVEj9VsfimtRpb+7cGQlBze10XwPtgTP7pzNaUXG6BffdPp9KwuJw4Yl7l+LXr56FccAetI3rBS+wpwiFhcsAANXVZ1BYuIyYw7loNBcRFxdPzN80ubnz0NBwCXFx8cjOziXCGKDKb6anZwAA0bTXrVvp0zb9HR42E73vNZFEQriFWi4x1D6E0yeusPrps18QByZ/plvmdeH0k7sXKxIJce6Kjgjs8TKVa1r68ebnV1jVwPrMNkRHifHEtqXkvltvy2btE5sHHSELMO7vPz1RRtpaOEeFwtxkmK1OANQ2AXPPuTg/jdU/btlN7vPw1wfu3Bbnp/n0kcqOZoDOaGWNi9n+NPmwkxlzH/16bmPwAnsKceutq3Hs2BEUFi7D8eOf48CBNyJ+D9qJja+fHTqRKnqvaelHl9GOVEXgXMOThWB7l8DohGW4daHDrbHsD+7L/r7b5+O/360DQL203zjcgHNXdD5tjGahUq3pQdGCFDS0GkjRCrqPY134BBIquTMSIBIKyGe7041ew7B2S893TqaSZSXaUDIraDpWJtzf/wcnmkhbJ+u60KEbZMVGW+0ucl5vsrH6xy27yX0egfrA1PLLK5uhUkSTuTh3uRef1WiJYI6XRZHrEuKksNopD/RegxV5MxMwJ0PBspRdT4dSfg97CrFy5WocPXokoDkcAHJz52NgwOyz36zRXEJOzjzMmzcfly9rYDYPJ7Bob29DRweVEJ/Worlx3zt3Pohjx45Eekg8DMorm/G3T/3H208mmFoZDXPvMpQ9QH9t0HWhQ83Qxd2f/MmGBfjJd0PPoEXDjCH+7KyW7Gs+9/Z5n+xd/uJ5Pz3birqruqBj1bT0IyMpDvffnoed319EzhXnp5FFR7AY4lD3n7nX7T1YSxYHcpkEvQYrOX+ty0zmu0bT47NPXK3pwd235eCO4pmoON0S0r4+XSxDJhXj3BUd9Cabz7joPfLczARUX+om/UuUS1FxugWfnGlF0YJU1vMYKaY8IykOuZmU+Zy+5+6Xq7D7/6uCtneQFfbFnAPaNA8AXgAut5e1jz5SBrdIwwvsKQRtFt+792m/5nAAyMnJRXZ2Lh5++KeoqqpCQ4MGO3c+CIDaB1+5cjXS0zPw0EMPoLr6DKqrz+Dhh39KNOuMjOm45ZZVeOihB3Ds2BFUV5/BY49RHuUFBcuuz0C/YTBfDBca9dflxTAWaOHAFHQVp1vCern5a2NW2nCFqZGELS3EuAk7RpM0xF9Cj/tvz8PqguFtpa1rc4b/zVmonLuiC7rQosfKNBvTfdSbbCM6LQZbBAUTKtwFza4fLMHM1OGtsEVZieTfD9yZT+5L9zMjKQ4/WJuLjStmo7nTxOpDICc9ytmsF+eu6FiaM3Nc3NKkNDs334SFWSo0tBqg7RnAxhWzES+TBHU4oynMTfYZb2yMBLHREvJ55eIM3FE8k5XaNEkZg6IFKUiUS2GxuVipXifCoTSk1KSbNm3C3XffjXXr1iEuLm5cOxQJvkmpSTdv3oANGzYRZ6/f//5p/OMf7+PAgTeIhs39Dv29cMK6BgbMyM7OJYuCYGFdJSUFePbZP5HvTgST+ZmNhkikkxxvuCbo9EQZtt6WQ5JlMPcu/Y3BX/pOZhsvvV9HXsojpYKknZJuvXl60BrLgbYoQtmX5KYIpa9JHErRCXjR0GokWho3FWmgQh6a1n5WkpH23gHWfehx+7ueuadOEpIwTNfceeeOob13gKVBO5yU5lmyMN3vfHcZ7fjLh/U+Y2BW7tp7sBYWG7VHre0d9GkjUS5FbmYCK3+4v/7ph0p3Mu9Fm+y56UYDPb8PTjRBZ7QOadpUPLZcJoE8NgrK2Cg43V7SflJCDKLFQty9OjvgHIaSJjZcgqUmDUlga7VaHD58GIcOHUJmZibuuusurFmzZswdGy++SQJ7LEzVcQFTb2z0i4GuPxypvMWRJtjCItSKVoHa4FacChTHHIl83cEqS9HQfdC09OONTxtItq0osRAp02R4YttSlgOVv0WKv7Fy7/3BiSaWAKcLeeTOSPC5/o3DDeRauh16AUHnvM5hlNbkzikAMqb9H15Ev5kSasyFE5OkpHh8dbGT9GHb+nk4Wdfpt+rWzNR4Yt5m8tT9y9ChG/T7PJn9++BEE75u1JM2pBIR7EMLCnVSLMurPtDzo9tjztvM1HhIJSKYLQ48sHEBOf6nXSvx9KtnEC+LInnNAXZuc01rP6uEZyRis8css
Jm8/fbb2LNnDwQCAdauXYtHHnkE06dPH/nC6wgvsENjqo4LmHpjo18G3IIEk41gQnkkgUs/s7FqLWOxRoxW4DPvSZOjVkIsEkARJ8WMdAUsFgdLWAIja5BcbRVgCyP6+sZ2I7Q9AzBZKE2WKSiZC4hwSmuGMo9JSfHY/9558lkgEKAgJ4lc97Pv5ROnsJWLM9CuGyBjVSlikJOpDOsZ/+9hDY6d6wAALJ2XjLOXqEUGLXTp+Rrp+f3hrXOs+aKRyyRYMDsRgBdX203o6af2s+kFCwAipENZ1I2GYAJb9Jvf/OY3IzVw6tQp7N+/H7/97W9hs9nw85//HE8++SSSkpLw8MMP45577vG5ZseOHdizZw+MRiOKi4sBAPv27YNAIEBFRQVuvvnmMR8LhMXiGGlIYUFrNVONqTouYOqNjX5ZxsZKoZRJRvj2xGG2OrHq5unInZEAs8XJeskH+jcN/cyCtREKR2vbkJOpRE6mEp16C3IzQ9euVcoYzEqLx7FzlJPlzs2LMDtdPsJVwNvHrkKdHIc5GXKiAe7cvAgnzndsNwJ6AAAgAElEQVTA7fHiF1sL0KUbQMWZVjS0GlCykAotYo7V7fGiOD+V3HtDySx8VqNFg5ZylKpt6MHxrzrQ1GmC3mSDpqUfKkU0Nq6YjRPnO+F0eWCxU3m9/8/GBUSYuT1emAYdqG3oweU2o8/1KmVM2PNIO4/NSFegWzeIDFUsVIoY6IxWvHPsKlks1DXqsXReChbPVUFvsqNkYTrKbp2LPpMdN+ckY9XN08kzpttUKWNY/2Zysq4TBbnJyMlU4lJLPxbOmQaLzYVOvQV6E7XgKc5PxbkruqDP7+NTLYiTDVf0opHHRmH3D5ZgSXYyomVRONfQCwB4dMsSzE6X48BHl6DtNuPspR7yXEaax3CJjQ0cBRJSWNdbb72F9evX48knn2QdLyoqwi9+8Quf71dVVeGFF14AABQWFuKuu+5CfX09lEolioqKcPLkSVRUVADAqI+VlpaG0nUeHp7rSCRC2Mbaxljj3keT6OZap4mYTuk0mCQBiN6Cn//hGNwuj09IWbCQJ9rhjNZWuSFN/uKh6ZKP5f9sQtGCVERJhDg+pJH6uz7YYsjfPDKdxwCqvCYzkcnuLUsQIxWTbYDF2Um4f2hvmmlVYRb3oI+FkpymcF4K+b7N4cZdq+ayLAHF+WnQGa0oWpACgHp+ZoZVg1ssJFEuhUAgwKDNCaudCmmjn43V5gqY8IXpnDfSPEaSkEziBw4cwH333TfiMX/ce++9eO2117Bnzx4UFRWhtLQUFRUVqKqqAoBRH+MuHpi4XG6IxaIR+8bDcz2gQ3rys1QT3BOeUKg8346SRRk+//ZH3VXKA/xCI1WHWZ0Shwc2LUJ+lgrvf3EVr/6jHgC1H+oFSG7qP+1aicxUX82Pe+9Wxp6vAEBXHyVoUqbJIABw99pctHSZWO0aBxz43V+rkZkaj/w5KrR2mZCZKg94fSjQv+FXyuvQ02chFbpmplPtNneYAAAL5iQiOUGGlGky0ueR7lF3VYf95XWkDVm0mLS/YE4ifnBbbtC/nWffpBYLFxqpPq4qzMTpC52IjZHgO8Wz8NFJSvj/9sESAPCZr9ZuM3oNVvKsHiq7CauXzvB5FuqUeHLdsrwUxMZEhT2PYyWohk07mtXX16OqqgperxcCgQBerxdtbW0jCmyTyYT586nVlVarhVqtBgDI5XJotVoAGNOxQPT3W4KeD5epth9KM1XHBUyusf3lQ+pFEIm9rsk0LiaRSB+qVMomRVKYnHQ5mWPmv/2RqpCi7JY5RGD/5Lt5SFVI0dtrhk4/OJwg5HwHBgbtKFqQCgD49NQ1n31beg6Z9zaZbCwt9/iXesTLorB19VxUa3rQ22vGp1XN5D5/++QSaq/0wmp340KjHn0GK8tT39/1ofDy37+CzmRjxSsDwK4tN6Ovb5BouGUrs1gOZKHcI1UhhZcRS7XtO/PIvnfZyiwyn4E4f7kHFruL9O3QZw0kNKuxzUCOP/LcF8QLn54v+jlcatThjuKZOFnXiTc+0WDRrGkoWZTBehYfnKCsFsYBO76+okNmSnzY8xgKwfawgwrstWvXYvny5Xj88cfx9NNPg6mMM1NXBuLQoUPYtWsXAErgarVa5OXlwWQyEQE8lmM8PJOZb1KlrrFmeyqvbIYkSoR/ZyQNCZfRLBpCuYb5HX/fD2RCZ5qUGzpMMJkkaGilfguL/GiM/uaQvl7T0o+jX7ZRply9hZU5jr4P7a3OFKp0LHWw60ean/LKZp9wrJuyVJBFi/DesauIlYp8sqAx+x9o/jQt/aRiFzM5yqEjV1C0IBUqRXTQLQk6rSq3XCYzjrp0aSbeP0HNq7/Sm8wtE2ao196DtVixeDqmxQ77i2QkxeHc5V606wbh8YLEZF/PiI2gAvsPf/gDfvGLX6C4uBh//vOffc7727+mqaqqQllZGQBK0y4qKiKacV1dHUpLS2EymUZ9jIdnshOsju9UYayLkpGuD0cI+xN4IxX+CCVPeaC9VWZmLn8CgLk/rYiV4gNGfWVguIDGp2dbcfJCV9CUqcF+S9xEI/R3Vi7OwNlLvWjqMGP3liUBrw80bvp4cX4q6ffKxRmo1nRD2zMAlSIalt5B3L58ZlCfgUDzR/+buU+/c/NN+ONb56A32nD/7fOD+iBw06oqYqMAeGEcdJK+1mh6fIS/Px+JjStms/bCt67NwaFjV+F0UIufQHHkxfmp13UBHtRL3Gw2IysrCwKBAMnJyVCr1T7/+aOiogJPP/00qqqq8Ne//hXJyckoLS3F0aNHIRAIoNVqcddddyErK2vUx4LBe4mHxlQdFzB5xna0tg2Jimiok+NgHHCE5bHsj7GMK5Dn7VgYyat6JM/fka4/8NEl4lUdqP+aln7qe368dpnXM9tTKaJ9rjl3RYcLTXqkJcaS/jK/82l1K7r6rKzvN3WYsPnWLNKXQAuyWeoEJMulfsf59+ONsNicGBjatw3k2RyK9/vR2jbEyySYmRaPi9f6oTfZSH8vXuvD3OlUchX6elpLbeowoWRhGj4924rGdiPmZCjIXJ270guZVAx1chyudZphHHTAandBb7LBNOiAedBB5ps5fu78VZxpQXe/1Wcuv2zoQVaGAnOnK/BRVQv6zHbSZ2VcFBrbjXC6PKznTrfdOLTvLRRSTmh2pwfRUUIsnZeCafJoXGkzQCgQoGTIGQ1AwL8Ben7FIgE+Od0Cbc8AmT+TxYlp8miWV/nKxRno6bdCJhVH9G8qmJd4UKezZ555BgKBINDpoBr2RMLHYYfGVB0XMDnGRidWoM2gzKxbo2kLoLxyRzuu8YobDRY3zbxnoPtzk8LkqJXEVBpKVivAN2bYPFT2kb5enRTLKvyRo1aiOD8Vr35MpQxlJvWIkYrw800LkTsjAZVfd5DvMGOKmd8PJVbbX7xyjlrJqkQlFgmwdF4KkpQxfs2soSSOYR7nJm15+f06xMuisHvLEpKA5cX3viYm9By1Ete6TPACmJUqZ1kD6PNpKhnxOqfZtn4eq5gGE+b8
pSfKiHc2cy6TlNGYFh+N3VuW4K+HNaR9OhFMa48Zmcnx2L1lCT492woAuG1pJt45egWfnNX6tEc/f2YymxipCCpFDGRSyqjs7zdEz93eg7XoM9nQa7SxztNx5GaLE3JZFOUXcKmbzGmkGHUctkAg8KtVj6RhTzS8hh0aU3VcwOQY24vvfo0LzX1Ek2BqIuFCazu3fWtm2OMKpoFGAmYsMb2w0BltQbVT5v3p67+1MIPEKnf1WbD1tmyikcbLJGjtHgjYf6b2WdeoR2ZKPApzk8n1u36wBDdnJ7E03KttRuiMlIaUkiAjHtQut5fSwhTRePPzK5BJxVhToMbx2nYsmJ2IxXNVkEVLWPHWI8Vqx8ZK0a0bZMWXL8pS4dOzrRgc0qw9XgBeL8RiAZZk+wpjf3HsXKsD8zsXmvTIyVQiUSHFR1Ut6DFYWZrrx2daWfu/JosDDpcHbo8XRosdHg/7/js3L4LeaEOiQgqd0QaXm9L16pqo7GMrF2f49OnFd7+G3emGSCggpmqxSID2ngEM2JzwegGLzUX6dUVrQFyMBIvnqvDBP5vQobfA5fZSceiXe/FlQw8utxnwneUz8fzfv4bHS+17d+ktKM5PJdaHkoVpyMlUkuftclOx6EyLg0oRDd1QshqVMgbmQQf5zVrsLiQlxCBWKkaiXIpbFmeQOPJ71uZAZ7SiRtODjqH470j+TQXTsIMK7GCC+vTp08jLyxtz58YDXmCHxlQdFzCxY6MFZFOnmbzUgNCTcPhrixZ8dY06JMRJw3oxjDYZSKgwhUR5ZTMaWg1D6Rq9JIHFd4so71x/96evb+o044MvGslYaVPp8rzUEQUkc9Hw3heNaO40w+50E4EsFglxvlEPdXIcFs9Voa5Rj4vX+tBjoDyfB6wOzJ+ZgK4+ymy6dF4yPj7dAp3RTjTQHoMVYpEQ998+HyfrOjEzTR7yVgc34Q0tHFq6B1jfmyaPhsPpISb8kbYxgm0Z0HOyJDsZXq8X9c19ZP4Wz03CifMdxMS7cnEGEuXRaNdRe7SxUgniZRIkyqOhiI3CLYsz0Km3ICMpDt8tmgWxSEja83gB06CDCK3yymacu9yL4+fa0dVvhdfLdgTzeAGThRLWTOJlEnT3W2G1uyGTSpCdqWSlMrUPmbxdbi/KK5tJm1a7GxabE99ZPhMlC9NhtjhhHnTgaG07Fs9VIU0l80mJunPzIjicbrz2ySVoWg1YuTjD5+9k389X4OT5Dni9wK1LpsPudGPVzVRWz9wZCYiSCEdM0DIaRm0Sp3nnnXfwyiuvwGQyQaFQwGg0Yvr06Xj33Xcj0sFIw5vEQ2OqjguY+LExTbQrF2cgXiYZdXEAplnxT7tWIkYUeJsqEONRpICJv5SenfpBSMRCFOen4bNqLWTRYhTnpwW8PzcvNdNU+tL7dYiSiIaO+zcZc/sQJRbC4aLUxPREGexOD1SKaGISZhYjKbs1C9ahLGEdukE0d5oxYHXA7qSu55rAczKVZKsjJ1NJcn0Hwt/vkflcb8pS4Wq7EQNWSgtVJ8Xi7tXZrLSkTOcwrqPaSFsGwVKgioTAt+an4lJrP0wDDni8XiIMo8RCxMsk2PdgsU9ebwBweYGPq64B8M0jnpoQg67+4VKVcTFiDFipOV4yV4XaK8MlRxPlUmQmx+HcVSo87qn7l6H6Ujc+q6ZM3pkp8RDAC43WCACIjxHDPNQW/X3mwnHvwVqYLQ781/Zv4YMTTejQDZIFxppCNfRGGwl/o+dvQ8ksVNZ14kIT1YdoqZikJqVN6k9sW0ruseulkwAQ9Dc9GsacmnTPnj1488030d3djeeffx633HIL7HY7ioqKItLBSMNr2KExVccFTPzYaBNtvEyCJGUMNq6YPao0mwBlWqfNst19VsxOC38lP9Z0nyPB1U6Y2lJrjxlWh5u8HBMD7HfGxkrx2ocXIZdJ4PF4oTMOOx/NSpND2zMAq92FhVkqksqy9nIvcUiiNMthrf6BO4fTcyYqokn6Sk1LPxbOScTXjXpiRnd7vMhIikOGKhan67vQ1W+Fe0hqyWUSpCfKiPa9oWQW2V/Xm2xo7TGjqcOElYszWE5b3LFxf4/0c82bOQ3qlHhsXjmHzF+MVIyWLrOPUxztHMZ1VJOIhejQDfqYZ2mtWxolZqVATVfJyDxRZmknDAN2uD1U3Wcat8cLq91N5oyrvbfrLXA4XMjJTKD25TOVpF2RSAiv1wuRSAC3x0sWTwDQ2WeBSAiiZf/yngJc1hpI2lHaAtI/4IDL7YVUIoS2ZxDioTbphZRYKEBcjARSiYg40dEWKbPVObTI8cIw4EBXvxUutxcWmxOtPQNkKwLAUBERL85c7IbFTmny8bESssCgTeqaln5YbE78/Xgjq452oN/0aBhzalKz2Yz4+HgUFxfj9OnTWLNmDf74xz9GpHM8PFMRf6E+4Tqc0Rojvc/Y0GrAknkpo+pPJFKGjkS1pgeJcuplMyttOKf2fbfPJw5BI4W20Sk+d35/ESvdJFNzA6iQqPLKZuKQxAzTSpRLKa3+rJbEB5stTtKfQPG49L/XFKqJ5gtQ+9/lQVKGWu1uaHsGsPdgLa51mSAUCnDb0syAIWUAWPHDKoUTXq8Xn5xpQaJcCr3Jju5+K2Kkw69nu9NNtOmfPvsFERRikQAyqZhYB+j5ouEWDgGo5/8/H9ZDESsh+8oDVhdrz/qmofjtr4Y0XjoNqqalH7kzEshcZabGo/ZSN/RGGxZlqfD20SsQCgGhQADjILVAkUqE8IfbQ/U/SRmDGk0PK+1otaYHpcsyyRx7vIDLw7GtA1iSo4IyLpqUBA0U/sa0eHFTtALUguX0xW7WomKQocHT0O3lzZo2IeGaIZnEH3roISiVStx///146KGH8J3vfAcnT57Eq6++ej36GDa8STw0puq4gKkzNq73803zUifluLglJuNixFg4RwWVIhoNrQZSnpBbzxkYjmn++GwryRhG58SmS0IyK0BtWz8Pn1W3smJik5RUCclew3Dt6XiZBA/emQ8AYdXS3vXSSVhsLqhT4iCAALkzEuD1ev3WqDZZHMSrmVnuMUYqQlyMBNESEe5enY0VBZl45LkvAFDC88nXq8kCgjbncs30yrjh4hSK2CgiADf+yyx88M9mn36LRQJEiYUQi4RQxEZBFi0JWLWqWtMDTUuf38pXeTOnIWu6ApVfd8DudOPWJdNZ9b5p4c99ZlESIRxOjqcaA4EAEAkFmJEST8Kx6L7lZCpJmUoa2ux+sq4TLreXjJ9GJBRAIABe2bXS73XUPSlv/Mq6TqiGyozqjTZ4AZy92E0tAvywcnEGDIMOnLvcOzQnCZiToSC/HfoeOqMVAgj81vIeLWM2ia9btw5ZWVnE4ezKlSv42c9+BoVCMdKlEwJvEg+NqTouYOqMjRt7u2ReyoQ60zGdmujPuqHwl+L8NGLSvfNfZmPzyizkzkhAW+8A/vWWLB9zPNNhSme0ITNNgTP1XQCAO4pnkevNFic0rf1kHqwON75bPJPcCwDuunUuGtuNxIS5c/Mi4iAEABAIRtwSoM2p2t5
BuNxeUv4xIykONZoe0tcMVSw0rf3IzaTaatcNIl4mwbwZCcRpi/I6t8JkcaK6oQcnz3fgaruReMzTc5Yol8JgtmNJdhIyVLGs5+3yeNFroMzwWRlyLM+jvKA1LQbkz5nm40jl8QJON2UuNlmckIgEPrHd9DNblKXyqXyVm6lEQe6wg1qPwQaHywOLzYnGDiO0vcMmd4vNCUWcFKuWzSB72MwtCIASeiKhgNTUpvtotjggEQvJlsPOzYuw1I/liA6J1PYOwu50QywSsBRs2pntkzMt6DdTY6LHt3HFbPKsK860oqffggfvzEfujAT8tUKDa11mv8I6b+Y0LM9LQXSUGE63B4uzVENx7XJsXDGbLFroLYbPqtvQ3W/BrTdHrsT0qL3EmdDCWa1Wo6ioaNIKa4AX2KEyVccFTI2x+Xv5zJ0xbcLGFSgJCf2f3elm7QnTntNUfWEKeu/ZX9KSc1d6sfrm6T7X0+ZrpsDVtPbDCy8SFdGQScU4eaGT1DamhaBMKiYLjEClPbmLDmYo2M7NiyCXRVElMTkhcSfruqg0lb2D0JsoT/IegwXpibFITZTBaneRPVK3xwuVMhp9QyZwZunLX95TAFm0hPSJOc6vrvQSgWpzelCYmwyVIgY2hxux0RLkZCrR1GmCVCxE3qxp6O4bdvACqL1ouUyCNQVqkiiF9QwZi5hegxX3rp9H9rhvXZLBColLSYghe9MbSmZR5SVbDXC5vZidGo+cTCXOXuxBokKKREU0pBIRZqcryHbQ7HQ5EdweL4iwjosRY8DixJLsJJ/fW+6MBJZfxIOb8sm8rVoyHc2dlJaeOk2GfrMdaYmx1BaS0Ua85ulnZ7Y6UdvQg4qzreg3O7iWdUSJhVDGReGmuUnE32TujGkoWZCKwnkpxPP8s5o2XGjSI0kZg78fb2TtlV+PsK6QTOJr1qwhCVQMBgPMZjM2b96MJ554YsydGw94k3hoTNVxAVNjbP4SjYx2XGMpzsH1vuYmIaFJT5SxCk0E2ytnmvqnxUvRN/Qyp023zP1lf9CCID5GwjLFA8C29bmQRolx9Ms2AMETxdBzDFD7mOqUeGI6pc2fzL5yPaGZ3uNFC1JJKcn/+msNMlPiiLl8fdFMGEyUQNUb7WSLQG+0oTg/ze9z4SZKYY7ngxNN1EKJUVZTLBKie8irmWlOz1FTsdjankHyzNRJsSjOT8NtSzP9zgvXq7yuSQ+JWAiny4NBm4sIXHVKHO6+dS5yZyTgrc+vYE6GAvExEnx06hoatAZWWKM/nrp/GTp0g4iPoULeaA94gEqO8sGJJjS2GzFodaJDb2HtMQOAUDC8rc38N/07Yu5nb1s/Dwc/ayAOawIMO9hJJSKsXapm+Z4w/9Y0Lf2sJDMxUhHuXjWX+DpwvdTHwqiLf9B89tlnrM9VVVU4fPjw2HrFwzOFYToYAcFzVfu7NtJFQ8ZSnIPryOPPaQcAHrgz3ye/dSDoghkn6zphGBg2mYbqwMNsv3RZJnlxrlycgYZWAytsae/BWiyeq0JmSjyr6AQzLIqmu9+Knd9fRBYdzL4Cvg5ns9LkWDiHsiAws0KuXZqJ9t4B3FE8Ex26QZgGHNAbqXHOSI0n+7WP7T+N1m4z7l6d7fNs4mMk5Ddz9Ms2n99DYW4y6cfqAjWO1GgRFyNGbmYC3G4PCZGi5/TJ16tJ2yIRFUPszzkOYBcUee6d8z6CkuY/flhIwgzpGtf0IkgRG+VTmIPLE6+dxb9+ew5ZjOTOSED5UDnM25ZmIiMpDp/VaOH2eOHmZnKBb3w3TXF+Glq7zWjtGSAVtsorm4mw5mJ3utHQavAbT0//PTKLqljtbpRXNiNvZgIUcVEh100fKyFp2P743ve+x8dh3+BM1XEBEz82puYGBE/N6Q+usxktxMIdl7/46NEIf64jD/3aoONk1xSqQ4pFpYWDprUf5y73shzHYqPFmJUaj3+/a3FIbdBj2PXSSZZzVLoqluW8Rae4BNjPwThoJ6FaTOg5ouF6k9MOZwDQobfgwY0LyDnmQoL+rGnpx5tHr0DbPRwzvXiuipV6lU6HSpM7I4H1e2HGbNOpQOlENDqjFQ2t1P443X5rt5nEveuNNrT2DLBSoNKar7/4beZik/nb4ZIol+Km7GRsWT2XXBfs+zRymYRsX+RkKmGxuUjfWBqzEBBAQLT5QOSolaziJHanCxea+uB0e5AQJyUFQory00iqVpHQt13m3xm33Cvz75Gm7NYsfDW0TTCWtMNcxqxhc3OKt7a2Tuo9bB6eiYIupsDV3JihOKFozIFKNoZL7owE6oU+9EIbbQgKN0ytvXcADa0GWIeqGTE9wYPBDDNiaogAkJ4UR0zjwaps0W3Q7THD3nIylSjMTR6qXZwC44ADz719nggz5nOgEQgoQeEeUr7oECY6LSkzaQk9B7QJl+5b7owEohHTfWZ+3v3DQvxs3zHSfoYqFrJoMXkutMZmsbsAr5fl4b33YC069YMkXK28shkqRTRLSDDzhtudbiru3eEmc0I7wwFU1rn3hxZgzHAx+nf55udXAABPbFvKsqzMTKUEyXDGuZswMPT8/VX28ocAIMI6SizEtU4T0hKHf48rF0/H57WUtp02LRY2h8tHS2easqPEQjR3mpAol0IRJ4XD6Ub1pR44hxYkVvvwVsmRGi1Uimg4nG7SB4AKYROLhKy/s/LKZtjdHmz+9hy0dpvx8ekW8v2YKBESFdE4UtNGflfAcPW18SQkgZ2fn89aVRcVFWH58uXj2jEeHpqx7L9eb7gl/2jCiUUG/Mdxa1r60WW0k1V/qDBjk0cr/Llx3IW5yaj8uiPgQoCblau734IOnYUlhBIV0chWK0gu6SuMc31mG6IlIjxx3zIAwJtHLsPmcGOaPJq08drHlzArbVgbYfYhIymOZCJbXTCdCDPmc6DxegG3l9I8Z6fLUXG6hZUFa+/BWqKF0jHITAtKQ6vBp3QkPe/05/eOXSVlHitOtyBKIsSXDb2sfvQarGTBkpow7LxkHHQQAfNZjRZWuxt6kw0Wm5MICavd5TfmnI5hZ9ab/ujUNSTKpUidJoPdMbx4UcRFsRab9DMqyElCuiqW8hPwerFwTiI6dIOo0fTg/jsXorfXjDePXEaf2YZBG3sxxEUgGE6WIhAIYHe6ca3LTML4jg4JawCsRQbreTH+LYsWwzDggN5EORnWNemJsObicnsRJaHi1un5lEnFmJEqx4aSWajW9ODTs634vLaNhAe++N7XUCliIJWIAFDXfCsvhYoC6LcQH4XrFYsdksA2Go2or68HACxYsABr167FqVOnkJ+fj7i4uHHtIM+Nz1gF7lj2XyPVh1DvUV7ZPFz1KUoElSIai7OT8NlZLUkqEorQpAWkpqWfaHPllc2QRInw799fFFZ/uBpopOAuBOqjqNShty3NZGlp9J7kL7fezEqGojNasShLhcLcZB8NkX5h/vrAGcrBbch0HsvQbE0WJ2qv6IigqjjdAnUy9T5impt1RivkMgkkYiFJpnL4bKvPfqbL7UW/yY62nkGWFs4UpKQ/HAvKA384Tt
r79YEzsDHGsPdgLbr7LYiPkeD+2+fjsf2noTfZWIlR8mYmoI2xPWB1uMncfvHVcHWs0qUziHbscHnYNbOH4sRfer8u4J57vIzaV9ab7NSYvCC+BDUNPZCIhpOc0DHlty6ZztrTZy4ky79oxOHT13zqRAeCaYWm2weApfNSUNPQg3D3Z5kbugtmJ+LIl1ryWSISsIS32TLsHU5r6Ra7C9WXupGjpiwzvz5wBn0MrZlOigMM5wb4urEPFns3JCKhT63t8WbEeti33347oqOjUVRUBLVajZqaGvz2t7/Fl19+iXvuuWfcOzga+LCu0Lhe4+KGA4XKWKpMccc22j6EAzc95/e+PQeL5lLxtSfOd6DP7IDV7oZ50IF0VWxI4zjw0SWcu9xLhdFoDejpt4Y8D9xUnYHiXcOF8pitQ1eflZWa8eMzLbjY0o/65j40dZpgGnTg/51shtPlhcvtxYmvOzBvRgKW56Xis5o2WO0uUkv6QpMe/QN2iISU1mVz0N64Yni8XhJf7fUCadNiYLJQKSs9XqC1x4z8WYk4f1WHrxp1sNrdrEpfcTES9A/Nvd3hgmHQQQRwTJQIsdFiImyTE6i26f3NlYsz0KEbJMJl1w+WYFq8lMwpTUpCDMxDecB3/WAJLjT1ES9tiVgInZGqqfzxqWswDjrh9nhhc7gREyVCjlqJrj4r+oec70RCAUnleq3LRMLDxCIBLrcaIBRS8ciDNhcrzMxfuBYzhj1RIYVYKPDZt7+sNcBid1OWBoZEnZkmh2nQwYo/Z2qR5kEH3jl+Fd19Fp9CHlyEAvgVxisXZ2DhnEScON8Bi8094vv/KGQAACAASURBVH41Fzo+vVM/6OuVLmAL9EAtP7plCRxON/a9dQ6dfdaAY7l7VTbOXe4lKU7tTg+69INYNEeF5ARZxDTsUcdh7969Gz/60Y9w//33IysrC1lZWSgoKMBbb72F1NRUbNiwISIdjDS8wA6N8R7XWMs6jqXKFD228S4tyYWZ+MLt8SJ9aH/ulsXDca2Pblky4jhoodjUaYLJ4kS8TEIEQDjzwMxDTsfijhY6Zjl3RgKr0tPSeck4WtsGq5164TI1FJUimtqXBaV5r1iYjpN1XdD2DrCScJyq70JXnxV2pwcCAciLd+n8FKgU0cRqkTVdgf/cejM+On2NpNL8bvFMfPFVB7qHXqR6kw1nLnYhW63E8rxUKOOk5PrHf1zIKrP5vW/PgUAgwMy0ePQaqHhsWmjIpCJ091l84ruPn2uH2zNcIEMsGi4dKZdJ8M+v2lm1lOmCHgDtsDc8p9OT4nDnitlIVEST4hRp02RE+CvjosiiiM7MyfaM9uCy1oimThMrTzrJ+T3oILH85ZXX0No9gKXzkqEzDJfHTFJGw8LIq71ycQb6TDZoewehM9r8/t3QYU4Gs2NEYZ2SEE0WXEzEQgE8Hg869Bb0GmwsYR0XIw7omc7EOOCA2epEr8HGmhem89pInLnYjdRpMpy/qmf1ITMljpVdrbHdiO8Wz2Qt1jxeoLHDiBUL0yP2Thl1LvGLFy/iueeeYx0TCAR4/vnnsW3btoh0bqpzI+2/RppAeX3DYazOV5HoAxD6c+TuPdPmfLFIQNIbhjIO7l44HT4UGysN6fpA5vCx/B7LK5thsTlZpS4T5VKYBh247zvz8N/vXQAA5M2ehgtNlPBxuDzIVlMOqs2dJihjo6CIHTZrF+enoWRhGis38+w0OSmeQe+b0r+B2su9+OmzX8Dpol6sAgFw8usuxEZL0IthIRkbI4HL7cXGFbPxu4NfsvaPvQBpz+ZwkxzWXE/g/7ynABea9MRUv6FkNsorm1gOSwCVCIXeFy/OT4PV4SJ7m1y4GiQd0qbtMZM84h16C/Fi1pvsEIt803nSOJweLJiXQMy23N83/cwaWg3kO3qjHS73sDBkPiO90YavG/U+Y6TzkzND4rjOezRMD2yREOjut/n9XoJcCrPVCQ9D4sukYsxKi0f9tX4IAFI4JNCiQBYtxpqC6azc7wBQkJvMyromEg47FXLJnq7AyQtdLBN9bLQYnYzYfgBwON0sh0ca2mEQGP/3fFCB7S/iKy4uDnl5eZDLI1dPd6pCewzTzirfNCjt9iL5/Nzb53Hfd+aFNRf+nK/CJRIe16H+QTL3npmxswBVBOEXdy0ecRxcT/NEuRQdukFsKJkFpVIGbadxxP4GWqjQzlLhPANuf5gOURtKZkMaJcKn1Vry0r/WaSLzffpiN/5jy80AgLc+v4L3TjTB4fSQ/fy/H78KlSIalXWduLVAjTipiMTDMvN2M8tXioQCvH+Ceh7F+an46ooePUMpPKPEVDWnXoMNvQYb9h6shd5ohwACsn8MgCQ44SYnoccggAAVp1tI6BQw5EMgHt7jpcum0vviJ+s68VmNFjKpmFXaMxgZSXGU6drqxLplM8jzio0edoxyub1o6TZDIACiJSIIBIBlSFh6AZzV9EAuk2DB7Gnk9811npKIhxd/17rMZG83US6F3elGbmYCNq6YjWpND/pMNrJIWbk4A1836lBe2QxZNCUuZFIxHM7AzmXMRQlXSDL3lem+MbHYXai/1k/GNlLiFcOAA3893EAEckyUCGsK1T5/Y4GENQBcaulHQvywVpueKIPOZIOT8/xsTg/cXv8WyeL81Ovyjg9qEr969Sp0Oh3y8vJYx9955x1YrVasW7duvPs3KiaDSZw2GVEOHuNvih0N420SVylj0NVnIdrYHcUz8a281LDaYMZFSiWikOePOTazlcodrVLEQBolCkvDHq1Jnbt/DFAvsk+rWzEjJd6n/CL3WqbJ+e5V2ZBFi/FZTRuutBnwr9+eE1Lfmeb5ukY9PqvWhj0OWiO/1NJP+iMSCREXI4ZMKkZDqwE/XpeLtt5B/J8NC1CyMB0NWgN+vI5amA1YnWjtNqPqQif++VUH0YytdmqP1u70oPZKL0yDTgiFAty7bh4qzrSgudNM9k3LK5vR0GqAShGNz2racO6KDmmJMqQmyqBpNSBZGUP69pttS3H+qo6Y4SViIXoNVpLDu9/sIGkqnS4P/mVROqsE5ex0BcpunQtplAgp02RIS4wlpuq4GDF6hrTFuBgxkhQx+MGabHzdqMPVNiO6+q3weAG708MSWkIhIB6qJZmaKINQAMhlUVhdMJ3S8HOTSR7xRIUUFpuLjIeGbs7l9sLt8UIsFsLj8cLp8pB51BltkMdKoDPY8HltGwwDdrJtIBYO5+7Omq6Afshk/8t7ClDXqIfZ4iTzfaGJKjvqhRfXOs1DqVddMA06YBp0QG+ysUpThopYKIBAKIAnzH3qkfB4h/eqXW4vuvoG0Weyh+zA5vZ4WSZ7ajtiWKtnxnx4POzPNK3dA8hMjhv31KRBBXZBQQEeffRR1NbWwmw24+LFi3jllVdw+PBhvPLKK4iKivJ7XX19PXbv3o2NGzeSY/v27YNAIEBFRQVuvvnmMR8LxkQLbNocyXTu2FAyE0tyxqes4WgZT4FNvwT/38lrcLm9mBYvRboqLuw9VLod+qUdqtMYS2APxdQ2dZiIk1OocPfRN
5TMwpIc37zHzP7SNZr/eriBtdcJAN8tmon1y2cGvf7AR5eI+VMsEuBSSx8aO0zQm+xhOZ3RCxWLzQWJWIh1yzJD9geg5/3NI5dxtLaNmNaZDlHUf5Tj0+w0OalLrYyTkhze5ZXNOH2xC8ZBh48go3G5vbDaXejptxKhqjfZSO7n1h5qv/tUPdWO2+OFzemGVCKG3mSHYcCBRLkUs9PkOFLTBp1puLCGxeYimhIzh7csWoJ+sx0lC9Pw4rtf40KTHltuY4SFDTlY0cIrJ1PpsxdO5wAvnJcCiVjo44hGIxJQiUro3N59ZjtsDjfuKJ5Fabd0rHlrP9weLzr7LMTpLjZaDI/HwzIJS6NEfqtiudxedPdbcb5RD4uNXS6TOe96xv76sdp2mK1Olj+B0+3BxhWzUbIwHV19gz5FRrgoYiVBq3TRJCljfEztTKQS0YhOZzKpiOX57U94PvajQtRoegJmNRP4u4hDKOnEohgFTEajjARi1AJbKpXihz/8IQQCAS5fvgyj0YiSkhI88cQTAYU1ACQnJ+Ppp5/GT37yEwBARUUFHA4HNm7ciKNHj8JsNuPq1aujPpaVFfylO9ECm/uSvylLhaihAuvhwq2QFEnGU2A//b81OH6uHTYn9bKx2t3QGazITI4PeSyaln78+R8XcOJ8B3oM/p1fAsF0OhurpYPWVKmMUgbcVqgO+N0DH13CqYtdqNb0YNDmYglriUgAk8WJlYszAl7P/e3QFZiYpsFQF3/0QuXclV64XF5WcQ6uAxr3d/biu1/j+Fft0JvsrJco0yFquD+zcPyrDjR1mKBSROPNz6+gRtODI19q0dozALfHCwsj/3QwmEKV623NvNrtoRaB9Llf3lOAtUszSaUpgKqYRZtd5TIJzlzsJn0YGBJSFWda0Gem4pxprZtp/aCLcQBAtaYbxflpSFRI0dhmxO1FM8ncvVx+we9Lnrt3arY64QUlEE7Vd0FvskGdFIfay734vLYNje1GWO1uMlany+PTrosjsBLio4iAD9fLekjxB0At4j49q2UVz3j/n02IkYrhcvv2gyaQYOTicHkCatdioQBisZC1t+4Pl9sLsTC4Q9mJ8x0koU8g6Jrdo8vzScEcS5fecl007JDisNeuXYu1a9eGdVNmJrSqqioUFRUBoJKwVFVVAcCoj5WWlga9d0KCDGKxKKz+jkSwdHFM6q5Sq+xj5zshixbD6fKgQz+ItUUzQ26DyX8O7bn9zy/XhH0tt0/5WSqfc6Pp00j3+tunGuI1y2TruvlYUeC/2IC/dv77/TqW9yoA7LhrMTJTQ/Of6DLa8c4XjSznmNKiWSH3gSY2Ngp1jXqiZf7xnfP4wW25rPmsu6rDK+V1uObHMYgmWipGZmr8iHP+xpEruLVAjegoESldSLO+aCZsLq9PG8xnXHdVh6Z2I/73k4vkZdqgNaB/wI6ddy1BfpYKlefbWW388Z3zAAClUoa/farxG1crEQtZRTZoXvv4EhEwz7x1jrxM/SWQAYAVN6XjxFBs8TS5lFSxUsRFYf8/hn0enny9mmjHzOxWNCmJsVi+MB0AcKnVgJvmpUIoFuHu23IAAMYBO9EO1yzLxPkrvbjaxn4+zBe/zmTDGU0PFuYMh76p0xTkd+Rye7B90yL8/A/H0G+yostoR1O7ER+ebAq4RxpM/ni81D7us++c99kvDZWSm9Lh8XiRmSqH5poe5y771/IDQfdPEReFvQdriUf7L146CfOgY2hxEFz4BUMoBNH0g43R5fFC6B75PinTZOju9/0NMpFFi2Eaev/EySQYtDh9fjtUn/xLa6EAiI4SQxolYpUG5cK8etmC1LDfK6MhJIE9VrRaLdRqSiuRy+XQaqng9rEcC0b/CA80XMLJ3/zyu+epdHrG4TCDnn4r/lahgZdh/hoJ2qzePfSCfOS5L0aVA5rp+MbNYR3quMLxLP66oRuXhjQcJqkJMbjWbkBv78htaFr68donl3yEtVwmwd8+uRRSsfikpHgYDBZYbcMLh5WLM0LuAxN1YixixEJcaKSKKZStzEKqQsqau1SFFPety2V5GdOhJbSjjXlIiztR0xpwLjUt/Th/uRcqRTRyMpWYnhTLSqhx+HQLls1P8Xluf/mQSmy0e8sSvPz3r9BjsPpoPrcvnwmDwYLeXjNy0uXo7TX75H/+y4f1KM5PJWNdmpuMs0MOPP+2Ic8nSxjAfnExBWAgbe/MhS5kqxWQy6IQL4si1oT/+/+zd+bxcVRXvv/1vqoldUuyZS22sY0XWcYY2wQ5IQPEWGQmy0DAkBBCkmFmMu89yCT4MW8ghoRh5mUEmcBkXiY44MCExQSYrCCbLSSxABtswBaWvGBLbVvW3motvaiX90frlm5VV1VXb5K6Od/Pxx9Lrapb91ZV33PPcs/5xiY88eIRtB1O1MSeX24TFg5xAGajDuEpH/i8cit8/iA2f2YVOrqG0dE9jP7+UZTZTUL+7neOnEPT6nk4/OEQfvWHD2G3GFHqNGOE+eL1OpEQCYSiOHnWj3sffRNOmwlWsxH+8VDCrD6l1V79v38t/HzPT9+AzWwQhEOmZCqsDXqg49QQltWWoc5jR9dZ9Rzealxz6RI89/px4ffhFEU71DDqdTAZ9TAZ9aItbanQ6XTwuMzwlFpx1JsIrLQY9QhN3R+Py4JzQ+pzu14P0fMIhSKafdlscRGLJ342m7Qrfb8/cBpfuPQ8zcerkXUu8Wypq6uD1+tFQ0MD/H6/IICz+WyuwSY+aQYkxl9/tkFzsFNH1zD2d/SKfEenzo2iu3c07eheviRcplWftEZIsxSU0sT6VlPC17NA4/hXLCyHQcbRtO2L63BWIV2hlEPHB0Rj97gSeYa19oHnV386iYGRgGqkuTQiHpgWXrzPTW4LCF9sgfV50B9Ez+A4LGbxpGE1G4QIZ3YuL3DlcmUvml+CaDSGp185Co/LKqT7ZH2QRpPvP9IrVJkaGQ8LP7+0z4um1fMwEYzg3ePJAr3EZsSozH5bKeFIDOuWVeLKjfWirFx73zuLilIrPrtpEV54swve/nE4rEYhwMlmMWEyEkYcwKA/BINeL1qQJvpgwp593dh7+BxGxkI4cWZE0CKl/lN+Fww/Duafl4N/luHJmCbfrRpq241SUVlmR2gyioPH+uHtHcWgRBvU0vai+U44rCY81tqhyZzOC1Alaisd6OodS2mW5tEhYVoPTYZEWi0faZ+q8hcASAt6KaUplaOqzCbEHTmsJgyPym9Hk2MiGMlJVb1UzIjAbmpqEjTjQ4cOobm5GX6/P+PP5iohha0ORoNO83Yipl1azUZUlVkFDeOCpR7F2rVybXT3juLgsQHRxJPu1gPpdp5ULyTbsiMlHI1hZDysqZqNELA3VdeXN4W+3dGXshoUa+OFfd2isS+pccFTak2roo50a8zBo/244VPnJ/lwmcBtXOJR3H/Ls6lxvnAey0HN9jfzfbZbjUlWu3FuYmDwqSe3XFyPX/5heq+ozWwA4olgpHAkhon+8aTzO7qHRYsRYCrNZX05fvmnhGZ6+bpavHrgNLx945gI
TuK8BS6sXuzGwaP9sJoNCIajGA1EYDLqEI3GUyateO71E6ifV4INK+cJqVd1JgN27T4Db/+4IDz4aGQ+iUUkGsfZwQm0PH1Q8EPe/chbGBoNClm7UlHqtGDJAhcWVDjQ2e3D+uWVmIzG8N7UYiTf2C1GhCbTj7YGEtaBodGg8H2Tc2EwYW2cWkAbuCpdAFBiNwFxoKPbp9n3nUpYA8CpXnmlRQlpClG+K7mNJ1dn5UI3zlsQwbvHBoW64ukwE/nE8yKw29vb4fV60d7ejoaGBjQ3N6OlpSXJJ53NZ3MJNmEr7U2MRONCvlo1gSnViBlSs2gqmAbHT+SXXVijaYXKI03eofRC7tnXLSusWRm9WAwIx2L4H//2Oj63abHiwoMtNPiFD/+FffHNLtjMhpQLlxULyxGKQTDrOqxGvNM5gKU16ZkuDx4bgJ8TEgaDXrV61MBIAHaLATqdTnHbS8Oicgz6Q9h76JwgpJl2zKohAQlzulzpR2B6Yciuu7y+TBC47xzpw9qlHkEDdjnMON0/jgg3C46Mh5LO5/c9/3bvSbxx+BysFiMGRgLw9o3BaTXBPxEWzOwVpTasqC/H2YFxbN5QJySuYCbFVExG43jqlWO44YplwoLlG19Yi15fIK3AKV4wnxkcT9Kw1AiGovi7v2wEAPy//z6Ey9fV4k+HzooKVOQK3lLAYFvPMiFxj7R1kj176Z7m8cAkRlWitmeKdLRgNeTiHLSi1wPHT/tww6fOTyrKogWPy4LWN7s0ueuyIeN62HOZma6Hfcd/tmEiOKlaqeZrn16Bj08FyMjR0TWMp14+KrtS1gFYVleGf0hRR1lqHnXZTUKVocvW1WJBhUOkYaqNi5kV+eQdK+rLFV/IR3/7Af405XtU4i8/sRif2aSsIbOkHqMTIZwdTBZWqxe78a2ta1WvwQTpztYOjIwmskRNcAuguionbrhiWcqFk9KzYLWSea1YWk5QbeJgRSjY4mnR/BLB9bF2aQU6vcOK5ljG0gUuGAx64boelwWb19fhyo31uPXBP0Cv02H1ee4kCwtj7bIKBIIRUS3milIbbrhimWxpUDmuurgO1162DN9/4gAGRgIYHg1pTgXJmFdug9lk0HQ9OeaX2wRLjNWkRzBN87TTasRfNC3ClRvrExXC/EGEJKUXM0WHxH0NhqOIxbUJk0zGQExTV+nQXIREytqlHhz1JsrFZiIRv/bpFbCYjTmpia3mw9Yr/oVISUfXMO5+dB/6fepl5dYu9WjSblcvdst+HkfiC8/7OuVYsbAcN155vvD7WGASkWgc/olJ7D/SK6rhm4qDxwYwyWm637xuLdbIRJl3dA1jz77ulMJ6XrkNHyj0v6NrGN9/4gA6vT50en2ywtps1OPE2dQZvp56+Sj+/fn30c9MwBKBtXl9bUphDSRiDuRg9Yg7vT54+8dl3SBxTG0bkTn/vAUufJOrtrW42oXPblqEz25ahEAoAqtZPdBl5aJylLmsoudsMhqw91AP7n50H8YCEfgnJjE4EkKpY3rrZanDDKNBB6Neh4lgRHQ+q0j01MtHNcUIOKxG7D10Dnc/um8qEYt2YW3hMoVVVziwenFm/r7aSgdqq5w4b4ELDYvKNWUVk2I2GfDKO6fxrR/9CZ1eH/qnCnQk9dmU/jQZRyIbGbsv0tujY/+4UA0S1pmzaL4TvcMBmAwaNlnL8O7xQUVXyrzy1Fu1fvWnU2nNr5kyIz7sYmV/R68m7cDbN476ecqrJuYr5RMaSGFmzNR96oPFpEdoMiaaRLUGgEi1dEbLkwfwt59bnXTsT359WHYLl5RINK74QksDn6ToMBV8ElH2owtBfyorbJNBN5V8RdnSwczEnlIrHNZEggpmsnPZTejsHha5GsKRKOwWY5J5U8k0e/jkEOxWk2DCPjs4gQ0rqrC/oxfHz4ykNAf3DIzj0gsWYH9HH5pWz8O+I31C5Cw/VX3Y4xeij416ncj/e6rHj6dfOYam1fNhNukFv/vkVBYtKQZdol40IDbtKsVsqBGaylvtspvR0e3Dh2dG0jZlGg06zPc4YDbq4bKbcKRrOG3tXgcIFbtUr6XXad5nrBUj70suOvtmbkjnndDrgJ7BiYwWbVroHQ7AqNeJ3EpSUu0fzxUksDNkz75uYftJKtYscePzn5AP+e/oGsauV4+nfDlXLnKn1Ay7e0fx+4NnZCcYfsJWY8XC8kSCEInA1ut1out3dA3jh8++pzlKNhiexOXramX7DQC/e+OU4peUD5ZR8qOnEvqJMegVrQQsSI+N23g2cU2LSS9IK7fLijVLK/DiW91oWj0fQBxvtvdqypzEiETj6OweRv2UCfuXf/wQP/n1YfgnJjWZ4kYnJrGivhx/OnQW3r5xkV+SP91o0IHFM0knmnAkhiNdwyixmxIpM6fo901vmSl3mjE8tf1pxcJyBMJRuEssOHZ62sqhRZCZTfqkd6R3aAKBUDStLT/8uJw2E8xGHd764FzGEdZahYHaJJ0pqfJjzzb895BfrKmRTnUsLaTTVDyuPXmLVlghFiA5KE6OdcsrZz+XeKGS70xne/Z147nXTwh7QlMx5A9i0XxXUhacPfu68fiezpQvg9Ggg8mgV03L+cjvjuDtzl7ZMnZA4qUeGAmKsnwpZTr7xWvHYTLoRdHQwXAUB472o9ptT6SsfOWYalIBKdFYHF/782T/N6v3fPKcX/ELzz5n5Q3XnT+dGpTP0LXjN+0IhaOKX95oLI7R8XBSprNHfncE54YmRDWU2TV5jdM3Fkbv4Di8fWMYGQvj2OkRUR5jrdjMRpwbDqDabcfPXzqKCRV3ipR4HOjuHcXp/jHodTqMTWm7JZJyhGyxoaSxx5GY5MxGvXC/4lO+VpvZAKvZCLfLgvPrymAyGdC42I19HX3Cws9q0msSZnLX1+t12LiiCsfPKCeZUSIWB0LhKLx94zkPDPuoIi1laZrKUw5oF5zF8CgsJn3CauMwC8l8AG0LkQFfEIurk+f4TMg60xkxTUfXMP77jx+mFdko1U4ZV26sx+GTgzh8Ut03rUMiklepP3ImbDlSbTtgmuaR7mFZky6/l3wykp45NBaDKEo8nX4zNq6swqJqcdEMvtzdyR5/yudy45blGB0PC5o934eWpw5i0TwnhsdCimb+Pl9wyhScWWCSXpcwxQ6NhvBvz7ybUYTs6YGEsBoZnxSSPcjtf9ZiIgxNRrHAYxPiBuJIuE8C4SgsJj2+9/WLsb+jD8++dlxUkCIbf2sgFMWL+1InQFKiGISDFkxGnVAsRY1soqOBxPMosZswHphMpMLNwLScSR/0uoT1TMsYldBqAdACW7ims3+cMVOx26Rha4DPS73zxSNpR5Hy2ilfAP4nv25Hpzd1IBVb4cnlwK4os+HwyQGcGVDPAOS0GTE2MSnSTqUadstTB7Gvo09Wc9HrEpqN3WLEvz/3vmyKylTYLUZccVEdKspsqCizoWdgLC0t69S5UfzZ2hpUlNmSqmi90X4O4UhctRKQXg+EQtGE6bvbN+WmmK6oZbMYUeq0oJJLoCAl3VzNUvizDQa9qL/
pmNYZVrMhKxOrQa/DyJj8+xyNxfG7N07h/eMDOYmcLgRS5ameSdLZopYNC6uciXzvKXYn5BqXI5EDPZvbne2jWuBJzo2fCasXufHnU7nls4U07Bwy5Nee/Yan2m0Tadn/8d/vq0aWS5HTjplG/HZH6n2D1122FBaz/ONmC5F+laC3WDyhwY4HJzNKKgAkTK4rFpYL/d69Pz0tKzK1d3fT6vmon1ciCv7SMsnGYomgPKZ53v3IWxj0B4Xtb76xcFJZw3wizZOdySI9EIqm3Psth14HGDlzOM/SWpeQc9tuMRa1sBb5a/VAnuKW8oYW/6oael1Co+TrfM8UWuNq8oXJoMOQP1GSdGxiMuOFmtVsgGGG7h8JbI3s2deNF9/qyjjIpbrCCSAhHNMV1kBCuFx32VIhaQgTskP+oKYX7dnfn0iK8j50fAC+qUCjgIYkDs0X10+lMEyr6wKTUyfufEF9caDG5vW12HvoHA4eG4Cn1Aq7xYDQZFRzn0rsJiGYZNq8rf1Z5NIEx8i2uYlQFAp1NhSJxYG45MVxWI2oqXTgVM8oSuymnO1JnsvwSVIymbClaXhnEh0SuxmUrEGa2tAh4wV4oTMZjWMyGs/KvWMyJPKm52L/tRZIYGvg0PEBvHLgtKbtS0qwLF0A0hbWALB+eaVIWMtlRFNDTiA/uacDI/6gbKEIOVg2q0wJhqO45V9fzVjgA8DPXuwQJladN31h5xsLCcFYmWxLmqsBvpnIDGld4Xgc+IcvXYRb/vU16HVApNDUzQwosZvxybULcOLMCNpPqceSyDFbwhpIvPvZCGsg8zzmRILJaByTE5MzsgcboExnKenoGsYDu97N+ov58cZqjAXCQsrIdLFZDPhfV68BADz8m/aMTLeVZVZ89aqVAKA5o1WuydaElw/YvvVCIxcpNK1mPfQ6vbCPvFDvRSZ4XBbYLUYsrS3F7989S1HnRFZcduECfHnLiqzbmfVqXYUKi2TOVlgbDTp0eoeFIhKZEAhF0d07iis31gvlAdPlM1xQRLpR3rlirglrIPd7OGeCXO17DYZjAKbHXyj3wigpZJEJg/4QBhHKOJ0lQfAcP+MXCvrkCxLYwnNJYgAAIABJREFUKnT3juLY6cxrzDIi0XhWwhpIbGnq6B7G068eT32wAs++/iEsJj18o6E5KTgJ7cyVSObZYq4nHyE+eqRKe5wLKJe4CvXzSubMxLi/o09UHzsTwpNR9PuCJKwJgiByTGd39spdKkjDVoCVjJwrxOPIestRMIOEAARBEIQ6DYvdsmmPcw1p2GqQIkoQBJEx+ikJo0MiwDEbjFOVuExGHUwGXUaJhvi2DDmQfkaDDlazATZLbkprprxe3q9QwOSr+gtBEESxYjTooNfp4LAasaS2DO4SC84NTWBTYzV+98YprKgvh7vEoikeR6dL7Fxwl1ixoNKJ8GQUmxoTNRX27OvGibPp5aN32oxw2c2wWoxwl1hw4Gh/Rm5Pk1EHh9WEpVPjW1JTmvqkHEACmyAIgsgJdqsRqxa5RUKM1zzZzx1dw6KKWDw6ABazHlazEWaTAdd8cknS+QCw69VjKftjNeuxoMIplL/880sWCX8rsZlw1OtLKznQ1suX4kjXsLBokPYp35DAVqB3OP1c2QRBELmA32PPLL9MEUxVm1m2Pe5no8aiIkrt6HSJgkZAItNbTaUT/vEw7FajIBBTCbEVC8vh5LIOMsxGPWqrnNgylSRKqa2OrmFMqOQA1+uAZbVlWLGwHAsqHIr9Wbe8UqgHr9ZWeYkFFaU2BMNRfPPaC1SPzycksBWYV26f7S4QRYLZqP/IulfYfnGWOnWu7LrIJUwYGg06WMwGweQKAE6bCfPddthsJpw8M4L5bjuOnxnB2f4x2ZSYBn2iDafNhLVTQUznhhLKw3x3Yk4a8gdx4Fi/YnEQpqG6HBZYzQasqC8X2pnvtsNqNuC3b5xKWVyEjctiNsDlMGO+2y70AQCW1JTi059Yghf+eCIjLdOg16PaY8fwaBCRaByVZTbYLEZs2VivSeDfedN6oZYAT8NiN2oqHLIaPs+efd0phXVtlRPuEotIo55NCkZgt7S0YNOmTWhvb8ctt9yS9+vVz1PONpMKq9mA8hILejKoaAUkihAYDHpEo7GCTB2o1035nswJP9HpDBNT6HUAdDNXtSjXmI16mE0Jk9wxry+jGMZsSyfmE9Y3QeviooD0eh3MJj1WLHQjPBkVJvrX3z2jmJyFCXWdTod55TZYLUYsnZp0mdA6OzCOAZU89Ho9YDEZsay2FL6xhPZW5rQI1z9+ZgQfavR7GvSJd9hpM8E6lVZ4RX250BfGfLdd8NGeODOCJTWl2LCiCvs7+gRhUVlZImRg3N/Rh6dePopIbBKxWBzxOGA26VHqtGDt0oqUggYAtj/yVtL3ymE1wsEJetYPKR1dw3jpba9sauNEIJYOLrsJS2rLsGFFlWhMcmRqEua16Eza29/RB6fNiNBkDNFoDAaDDhWlCaF//RXLUp5/5cZ6+MZCaJWUejXoE+9MdYUTVrMBf/f51QotzDwFIbBbW1tRVlaGpqYm7N27F62trWhubs7rNVcsLIfVbEi5FcpsSvhaypxm0cSwpKYUv3jtuOrkwmM06GAxGeAptYr8LD/+5WHNfTbodXDYTEJfxgKTmicnKXo9YDMbEYvHU+YsN+ghTDruEiusU6tkxn/+6rCmtI8mgw5xAHarCR9bNQ9A4j7+5FeHM9LMrFN+sHAkhok0Klkx2IKhotSGIX9QU7IOs1EvBNvwk11FqRVth88pnscL5sSCJ6FpMX8gALz8jleTVhTn/uc1tr4MijwwYWw2ibUspq2x/wGxJrikplR2oucFL5DweQ6PhlBeYsGFy6sQmDJzKgmtf3r8bfjGQojF4sI7YTHpUVPphNNmEjQhtUn/tof+iFEZv6VW36kacv5auWPYvQEg+lnrdea77YjG4hgYCSAWi8NmMcLlsOBzH1+sSTs9r9qVlDvdbNSjosyK1Ys9ov7ky0ebbbs1lU40ra4W3jUAaQd/fdjjFxVw0esBl8OC6y5fBpfLCn+G1RnzRUEI7La2NjQ1NQEAGhsb0dbWpiqwy8vtMBoNWV934fwSnDzrF8yZOiRKEtbNc8LtsgIArtiQEEwfv6Am6fxXD5zG4EhQVkNyOc2oKLViYCSIy9bVYsUiNzpODeGvPtcoOi6VwDYZ9Sh3WVA/rwQ1lU6sWOQW9eUz3/6VprFWuRN1toPhqKg/Kxa58dNfHsboRBiTkRh0+oQpy2kz4dILa3Cmfww1lYlKZCsWuYX2+D7sfOGDqRSYyVjNBrhdVkRiMXz1Lxpkz//Vnz7EWYV63zoktDmr2YB5noTgkD6bjlND+NUf1PfU63SJCau0JHEvh/xBNC6pEMb0p3fPovucH71DE4hEY7CYDHDYTFi0wAUAwj3g7wM/Bt/EJN4/MYjwZBSxWBzRWByWqbFbLQY0LqkQ3Uu5dt5sP4exwHQZQL0uYYlhz3/IHxTGLtfOD558R9F3aTLqYbMmpoOKUivcLiu6e0dxyepq0b
sg956nw31/9/Gszr/2U+cLfek4NQRA/n6rsaDCgZ6BcUwEI4ghDovJiPISCzasmqf4DucCPkf0/7p+XVZtXXHxQgAQ7gX7+dOfWKJ2mkCZy4blU2vq0YkwNkwtkAEkzUGpUMt9nU8+nYPrrlzsgctpEb4v7Duo9T7ONAUhsL1eL+rq6gAALpcLXq96HeXhHAWMXb6uFntNPVhcU4oFbjv2HurBfLdd1twiV3Dk8nW1sJgM8I2FMBGMYEGFQzDT3fPVjUnHL1/gSmqnfp5T0A5HxsMwTNkNq8ptgm9K2h++DZfDhLHAJOJxcaEIvQ74zCfOE/xqcmNaPiWMLlpembY2wPfh0qmJ79zQBHxjIZQ5LfCNhbCivlyxLf78z338POHenxuawMkePxZXu4S/y2lVvAly+QIX9r53FvYpgdQ3HIDJqMdkJIbz68pEJk2lMfn9Qaw5z52kGamZ3vgxlNlN+PKW5cJ5vCaQStNg7SyqduGqTYtx8EivyASrxfwHACsXugVt+MDRftitRuEdApLfIx72LuSysI4U/pmp9YP1hf3P0Nq3y9fVymq4au9gtmgZWzrI3QO5+UOJNee5hfHypnsgvXHnelwzzee4+go8/f2jszY2tQVQQVTr2r59O5qamtDc3IzW1la0tbXhe9/7nuLxub7JhfxS7u/oE36WCopPf2JJwY4rFYX8zNQo1nEBNLZCpFjHBcze2Aq+WldTU5OgVR86dCjv/utiQotPjSAIgpj7FERq0ubmZvh8PrS1tQGA4M8mCIIgiI8KBaFhA8C2bdsAkLAmCIIgPpoUhIZNEARBEB91SGATBEEQRAFAApsgCIIgCgAS2ARBEARRAJDAJgiCIIgCgAQ2QRAEQRQAJLAJgiAIogAggU0QBEEQBQAJbIIgCIIoAEhgEwRBEEQBQAKbIAiCIAoAEtgEQRAEUQCQwCYIgiCIAoAENkEQBEEUALp4PB6f7U4QBEEQBKEOadgEQRAEUQCQwCYIgiCIAoAENkEQBEEUACSwCYIgCKIAIIFNEARBEAUACWyCIAiCKABIYBMEQRBEAUACmyAIgiAKAMM999xzz2x3Yq7S0tICnU6H1tZWXHTRRbPdHc20t7fjjjvuwOc//3nhM7mxaP1srnHrrbdi+/btGBkZwaZNmwAUz/haWlrw05/+FP39/WmPY66Pzev14jvf+Q6uuuoqAMUzrh07duC+++7Drl27cNVVV8FisRTN2Hbt2oXTp0+jqqqqaMa1a9cubN++Ha2trfjRj36EqqoqLF26tCDGRhq2Aq2trSgrK0NTUxN8Ph9aW1tnu0uaaWhowOHDh4Xf5cai9bO5RltbGx566CHs378fzzzzDLxeb9GMz+v1Ytu2bdi5cydefPFFAMX17Pg+FdO4fD4fnn/+eTz//PNwuVxFM7bt27dj9erVaG5uLqpxNTU14fnnn8fOnTuxZcsWNDU1FczYSGAr0NbWhrq6OgBAY2Mj2traZrlH6VFaWir8LDcWrZ/NNZqamoSfV69ejbq6uqIZH+tbe3s7tm7dCqB4nl1rayuam5uF34tlXF6vFx988AGWL18uTN7FMLa2tjZ4vV54vV6hX8UwLmD6ewYAo6OjcLlcBTM244xerYDwer3Cg3G5XPB6vbPco8xRGovWz+Yifr8fq1atAlBc4/N6vfjJT36CDz74AFu3bi2KsbW3t6OhoUG0iCyGcQGJvu3cuRPt7e24+eab0dTUVBRj27t3L1atWoWmpibcdttt8Pv9RTEuHvZeAoXzPpKGrUBdXZ3wMPx+v2hVVmjIjUXrZ3OVXbt2Ydu2bQCKa3x1dXV46KGHsGrVKrS3txfF2O6//37s2LEDd911F9544w3s2rWrKMbF09DQgKuuukqY+At9bKOjo2hsbITL5cLWrVsFzbLQx8XzwgsvCPEUhTI20rAVYCtlADh06JDInFdoyI2FrZhTfTYXaWtrE0zGfr+/6MYHQJgcimFsO3fuBJB4VnfddRe2bt2K1tbWgh+XHA0NDUXxzBoaGkR9kgqrQh0Xj9frhcvlAlA4cyRp2Ao0NzfD5/MJPgredzrXaW9vh9frRXt7OwD5sWj9bK7R2tqK7du347bbbsPVV1+Ntra2ohkfG1trays2bdoEl8tVNGOTUizj2rFjB7Zv3y5aRBbD2NhYWltbcejQIdxyyy1FMS6G1+tFY2Oj8HuhjI3qYRMEQRBEAUAaNkEQBEEUACSwCYIgCKIAIIFNEARBEAUACWyCIAiCKABIYBMEQRBEAUACmyAIgiAKABLYBEEQBFEAkMAmCIIgiAKABDZBEARBFAAksAmCIAiiACCBTRAEQRAFAAlsgiAIgigASGATBEEQRAFQlPWw+/tHc9peebkdw8MTOW1zLlCs4wKKd2zFOi6AxlaIFOu4gNkbW2VlieLfSMPWgNFomO0u5IViHRdQvGMr1nEBNLZCpFjHBczNsZHAJgiCIIgCgAQ2QRAEQRQAJLAJgiCIvBAMR3Di7AiC4Uhez/moUJRBZwRBEMTsEAxHcGZgHB6XFS1PHUTP4ASqPXZ85yvrYTWri5xgOIJ7H3s7rXPU+lBT4cjo/LlK8YyEIAiCmFV4getxWTDoDwEAegYncGZgHEsWlKqef2ZgHD2DE2mdo9aHbIT+XIRM4gRBEERO4AXuoD8ET6kVAFDtsaOmwpHy/JoKB6o9dtE56ZrI5YR+sVAcyw6CIAhi1mECl2m32264EIP+oGbTtNVsxHe+sl4wZwNIW1uW9kHLQkFKMBxBZ9cQ7EbdnNLO505PCIIgiIJGKnCtZiPKnJa022Bm8BNnR9I2kcsJ/RNnRzQvGuaySX1u9IIgCIIoCniBmy2ZasusD5kI31z40fMFCWyCIAhiTiKnsadDJsI3Fyb1fEECmyAIgpizZKOxZyJ82SJhIhInHzZBEARBzASZauhWsxF1NSU5LySVLSSwCYIgiKIllz712Yb2YRMEQRCEAnMpVSpp2ARBEAQhw1zb4kUaNkEQBEHIMNeyppHAJgiCIAgZ5FKlziZkEicIgiAIGbLdB55rSMMmCIIgCpp8BoaxKPPZFtYAadgEQRBEATPXAsPyCWnYBEEQRMEy1wLD8kneBPatt96KDRs2oKWlRfispaUFbW1t2LFjR04+IwiCIGYfLSbpfJmtcxUYNpf2WyuRF7tBW1sbHnroIQDAhg0bcP3116O9vR1lZWVoamrC3r170draCgAZf9bc3JyPrhMEQRBpoMUkneqYYDiScWBXOoFhSteR699cJC8Cu6mpSfh59erVqKurw44dO4TPGxsb0dbWJjo23c/UBHZ5uR1GoyGnY6qsLMlpe3OFYh0XULxjK9ZxATS2QmQiEheZpCcicdTViMfa2TWkeEwgFMH2H76O031jqK1y4gff/CRslvRFU11Nuerf1a4j1z9g7j2zvHrm/X4/Vq1aBQDwer2oq6sDALhcLni9XgDI6jMlhocncjqOysq5lwQ+FxTruIDiHVuxjgugsRUilZUlsBt1oopYdqMuaaxqx5w4O4LTfWMAgNN9Y3iv41xecn+rX
UeufwBm5ZmpLRLyKrB37dqFbdu2AUgIXK/Xi4aGBvj9fkEAZ/MZQRAEMbtoMUmrHZNOCcxsTOdq15lr+62VyFuv2trasHXrVgAJTbupqUnQjA8dOoTm5mb4/f6MPyMIgiDmBloqYikdo1VYZrt9K9V1CqGqV16ixFtbW7F9+3bcdtttuPrqqwWfs8/nE/mks/mMIAiCKA60JCfJxfatuZQEJRN08Xg8PtudyDW59jsUs/+pGMcFFO/YinVcAI2tEJnJceU7QYrU3D5bz2zWfNgEQRAEkQvy6WeWWwwEQol92XPJpz03ekEQBEEQKciXn1lqbj/Z48c///wATveNJWnz2QS+ZQsJbIIgCOIjjTSCHICwBYz5y5csKJ31vOUksAmCIIick4kmOlvaq9TcDgC1VU5Bw2afyQW+zWRkOQlsgiAIIicwgetxWdHy1MG0NNFcaa+ZCn2puf0H3/wk3us4J2onnT3j+YAENkEQRB7gBUe258+VoCc1eIHrcVkw6A8B0K6J5kJ7zYXQZ/f9ApdNuD7/LGYzwcrcfwsIgiAKDKngePDbl2V1fiHUeOYF7qA/BE+pFYMjQc2aqJz2mu6iJVuhz9/32ion/vHGdQCQ9CxmK8HK3H4DCIIgChCp4Og+54fbbsr4/Jn2lWaCVOBuu+FCDPqDisJWKozl/MjpVviS9sHjsqa1NYu/76f7xoTkLHPlWZDAJgiCyDFSwVE/34UxfyDj89MVPLOB3D7pMqdF9lglCwLvRz5xdkRRUKqdz/qQiR+dv++1VU5h4TCbfmueufnkCYIgCgypxscLL5vFiLE02spW8MwWWvdJa7EgqAV4qZ3P+qAm8NX6z+77BSvmC4usuVIYZG4+dYIgiAJCSePLxnSajeBR6uNMCh2162mJts62wlemEd3svvOLrLlSGIQENkEQRJak0hizSXOZi61EMxHEJo2KV7uekjCW82trrfAVDEdwsscPAFhc7cpZKtO5FK1PApsgCCJL1IRqMBzB9h++LpvmUgu5EDz5DmKTLghuvPL8lNeTCuN0FxX8+cFwBN/92X70DiVM2PPcNtx984a0NWPpomOuReurXvn++++HTqdT/Pu3v/3tnHeIIAii0FATqmcGxmXTXKbbfjYCNt8JP6QLAiD9QC25fN5mk0HTIuXMwLggrAGgdyiAfUf6sHFlVVpZ1qRb8eZatL7qSDZt2jRT/SAIgiholIRqTYVDNs1lvlELgsu1lihdECyudqV9Pb6NeW4bHt/did6hgCbN1uOywu2yYGgqWYtBr8PPXuzAb/aexJ03rZeNVpfeH7mteLOd2UxKxvWwf/GLX+Daa6/NdX9yAtXD1kaxjgso3rEV67iA4h6b02VLSnOZT2bKlMs/s1z4elkb4ckoWp56V/j8zpsuUtRs+bG6S8zYvL4eu147LvzdU2rFvV/fmLSHW3p/ACRp2GP+wIz7sLOuh/2LX/wCDz/8MPx+P0pLSzEyMoLa2to5K7AJgiDmEjbLzEYZz6Qplxdo2V6DWSmC4YhmzZYf69BoGPXznaLUqIMjwaTxK5nf+WQvLEp8rkSIAxoF9tNPP43nn38eLS0t2LZtG7q7u9Ha2prvvhFE0TCXIk2JwifV+5TKlJur9zEQyl3ubr4v6Zjw5czxd960Hvf91zuKqVGzMb/PJpp6NTo6ipKSEmzatAlvvvkmNm/ejB/84Af57htBFAVzLdKUKGy0vE9qAi+X72P3Ob9mTV5OMKv1RatmKzdWq9mIe7++UVHg8+fw5ne5jGpzaaGt13LQypUrcffdd2PVqlX48Y9/jEceeQQZur4J4iOHnHmSIDJF6/vEBJ5U0OTyfayf70K1xw4AqqZrJpjve/wd3PvY2wiGIznti9xYpZ8Fw4m98Oza7O+Lq+XHwKwH0j7z7UjbzDealgwPPvggvF4v6urqcPvtt6OtrQ3f/e538903gigK5lqkKVHYZPs+5aIqFsNm0Wa6VvKpS03T4ckoguFIXpK6qGnycmOQsx7UVDiEdua5bQAwo6Z0za3X1dUBAJqamtDU1JS3DhFEsZHvLTVEYZGtmTXb9yndqlhqY3C6bJpM10qLDNaXkz1+PL67Ey1PvavYB6X7puV+nuxRN91bzUbUVDhE9yQUjmKe2yYIZPZ31g6/73um9mhretKbN28WEqj4fD6Mjo7iuuuuIy2bIDQylyJNidkjV/7jXOUpB9SrYsnBj2FBhQNf2rxMSAWqdj2lRYbVbITZZBAEoFwflO6blvsZDEfw+O5O4fd5bltSJjq2YOgdCog053luG7bdsFYYn9QiwI6bKcuZpjflpZdeEv3e1taG3bt356VDBEEQxUqq7VazEeTkcVnhKbUKEdWpSnnyYzg7MK6qFfOoLTJSmfmV7puW7WvSLGg3bVkuG/TGkGZMM5sMiuZz1v5MPa+MrtDU1IQHHngg130hCIKYk+RKkKbKOT7TuwmC4QhanjqIwZEgPC4Lbr1mTcpSnvwYGJmahPn7qmbmV7pvmVTtWlztEv7GC3yGVHOWLmCkC4+ZtJxpehukOcW7u7tRWkrmPYKYSYLhCDq7hmA36sgPPoOoCdJ0BXmqnOO8tphuLuxM4K856A+h0+tT1FilwvVkjx9PvnwMZ/rHU5qEtW7pUtpOpXTftPjzeT+5FKmJ+6Yty7G42oVgOIqTfWNYUGYTLWD4xCqz8R3UdMXGxkbR701NTbjkkkvy0iGCIJKhvdyZk612rJQVy+OyKmqjatdUyznOhIfBkMiFvXtfd16ftVT7XF5XBoNBh2g0DoNBB4/LKoxH+v6tXOjGv/39n6VMuar07iqZs5WOV7pvWv35P99zVLZNuTKd7LnyGdN6BidEyVhm4zuoerWxsUSFGbkiIGNjY3A6nfnpFUEQIuZa1aCZJBuBm4uFjlJWLOlkriZwtI7vO19Zj31H+vCzFzuS2tVyfrYR42cGxhGNJnJsRKNxDPqDKHNaFN8/LSlXtWzp4jV0ueP5CO5cLLr4eyqNEOcjygf9IRj0OkRjcej1iTSncm3MFKojv/XWW6HT6eDz+fDBBx+grq4O8Xgcp0+fxtatW3HPPffMUDcJ4qPNR3Uvd7YCNxeTv1JWrEF/SBSspSZw6mrKNY9v48oq7N7XrelZ52JBwmuo0veM+W89LmvG71+qLV3SZyHXh1wuutRiB+a5bYhxScH4CmCxGGSf90yiOupHH30UAPC1r30Nzz33HFatWgUA8Hq9JKwJYgZhk9tEJF5QPuxMtT++alM2loVcTf5KRSl4nyaAtIWbkuYnZ6bV4vdWuj9anwMvRKUm/0z9t6m2dEn7K6f1Z2td0ho7wEeIA8CXrzwfz7x2orB82KdPnxaENZBIouL1ehWPb29vx/3334+dO3cKn7W0tGDTpk1ob2/HLbfckvVnBPFRw2o2oq5m7pegZMJBzceb6nxe45Emr0inD9Lo42wnf7mJv8xpSdJ0tU7sWrKOqWnRqTTHTJ4DE6LS/dmD/mDGJmCpYE61gFDT+pXuUaoFiZbYAT5CvLbKieX15bLPe7bQJLBXrFiBb37z
m9i6dSuARPWulStXKh7f0NCAw4cPC7+3trairKwMTU1N2Lt3r1DpK9PPmpubMxstQRB5hRcuSj7eVEg1nm03rIXZZBBpsekGOalN/kptpBM0Jl0IaBVuWrKOpfK/ykVA5+I51FQ4hMWSNNmIlHQsKema8VPdo203XJi0IAG0749W2lt9wYr5GBgYFX2e6t3LN5qu+tBDD+GnP/0pdu3aBQD4+Mc/nrIWNr/tq62tTUhn2tjYiLa2NgDI+DMS2AQxN5FuE8rE5ye3b1ZrVitpH5QEnNpknolfWC0ndqrteKmyjmlZZLAIaLY1iZ0PZP4ctJLu/UrXyiFdDEjv0fsnBpOi+OUiwpXaA5IXYezn2cwbLofqFR944AF8+9vfxjPPPAOfz4fa2loAQFdXl/A3LbDCIQDgcrkEc3o2n6lRXm6H0WjQ1DetVFaW5LS9uUKxjguYO2MLhCLoPudH/XwXbJbsv+RzZVxyOF021FY5cbpvDLVVTvzzNzahb3hCGHuqe8HG9uC3L0s6rrNrSDQxT0TiqKtJvhfSPlywYn7StZSCwNK5jpQHv30ZjnUP4/899x5annpXGP8/P7pP6MsPvvlJ1XdAqe9y90Ouv71DAbQ89S4WVDhQU+nAmf5x2eeghc6uIcGn2zsUULwPE5F4WvdLy/NhBEIRbP/h66L7d4Hk/Ms3LsTL75wWfi8rsyv2R649dm3puym9rwy5NnP5/VZDtfXVq1cL/zOhmQnM593Q0AC/3y+0lc1nagwPT6Q8Jh0qK+e+3zATinVcwNwZW673T8+VcanxjzeuEzSYSGgSbrsJY/4ABlLcC+nY2HljU7/bjTqRpmk36hTvBd8Hvg0tpHMdBh8kd6Y/USLydN8YXt3XhdN9Y8Lv73WcS2mOlvZ9gNMI5cbC95dxdmBccCV4XFZ0nhxI+15ouQ+VlSWyx3nPDKtaMeSej5zme+LsiOz9k75j/3jjOsEtYDMo91upPbnvKSsdKqdhszbzkR9BbUGu2vKWLVsAAPX19Whra8OVV16JZ555Bm1tbfibv/kbzR1oamoSNONDhw6hubkZfr8/488IolD4KO6fVgruyUfAV7p9yPV1APUguTVLPGlvh+L7rkUg8H5saQGLTAP/0rkPVrMR2264EO+fGMSaJR4AwHd/tl/ox903b0gZB6A0TrUtYfw94seuFvSn5LqQS45TV1Mu69vm25zp77emJ/eVr3wF119/PbxeL3bt2oXbb78dd911F5577jnZ49vb2+H1etHe3o6GhgY0NzejpaUlySedzWcEUQh8VPdPy5GLe5GNIJZDKVgqneuoBcllux1Pq0Cwmo1YudCNu2/eICpVKRdwJt2HnklWNh4+M1i1x47rLlsia6lVAAAgAElEQVQqMqV3dg/DaTerXk8pm5w0yl96PjDtZ2aoBf1JFzescMm2Gy4UFloA8PjuTmxorEkav/TezfT3W9PbMzo6imuvvRYPPPAArr/+elxyySUYHVU2ETU0NKCzs1P02bZt2wCIBW42nxFEIZCutlbMzLV7kStzplKQHLsGizge8wdStJS6bbktTTzsdyZ4pAFn0n3ochHW2S4qBqaygTH+a89RDPlDqPbYces1a3D/rneT0nvyFcP4bHJyOcZ57f2mLctFwhpIFO8YmwjjSNeQbNlPqzm5nOegP4ibtiwXkuL0DgXQfc4Pt90knKf0vszkO62pdafTiQceeAC7d+/G888/j2eeeQZxLhsMQRCEFnKtIWdDrsyZSpM2P8HXVjnxjzeuU53QlaKXU237kkZAS2s/3/HFdYJ5WDpmaYS11nvA+up02ZIWFRctr8TL73jROxQQZQrrGZzAvzxxAP7xsOh6NRUOUcWw6y9fhgeffV+2Tyd7/CLtPTwZFZm4r798KZ585RgefPaQ7PhT7V/nP6uf7xItspTel5l8pzUJ7Mceewy7du3CD3/4QzidTnR3dwtZ0AiCUCZXWhw/QRYjs1GJLBd7shmp9maf7htTFYZq70mqbV98m2cGkms/lzktQrIP6ZiX15VpqoWtZIZmCxHpguXumzckJW3hzfNAIs2ndBEx6A/BbNKnZWa+8crzAQCLq104MzCO/uFpDb93KCBbsENpkSVdHPH3Qilt60xaizRdpaSkRPBhA8Dtt9+e104RRLGQCy0uXU1NrZ18me5mu0BHJn2QC5Y60jUEAGnv/ZaDn+Brq5yq5myt70mqRYaceV46Zj716PefPIDBkSDcU7Wwv//kgaRgMWlQ3acuqpVdiPD95RcZ0usxzfv2rWuFz6V9VjIzL652Cb7mynIrnn71uCjQLDwZRWW5VRDabpdFsWAHE8L8dVi/g+EIvjW1/Ysvu6mUtnWm9mVrusLu3bvxne98BzqdDm+99RY2btyI++67D5s3b853/wiiYAmGIwhPRjNKrcmTjqam1pd8lefMR4GOXI1PTYjzwVKs6AOb6JnAUuublvSabIJnWbOU+qj1PUnlM031d77PvHl5yB/Cu8f6ZYPF+HzuvUMBPPHSMaEEZ2W5TSjBqQQvBBkGgw4/fPY92ahuQDlLGa+984VYeganS1/Oc9tw2xcaYTYZUO1xiAQra18uspx/HvuO9Anbv9jedtbPxPVmZ/eHpm/Vww8/jFdeeQW33XYbAODll1/GF77wBRLYBKGAVCvZdsNa2QAYLchpaumSz+0n2badi0hbuT7UVDhUFxLS6G6e3qGAbJYxvnqVFg2LCSubRb7+s8dlFQSNu8SM276wBsvryxTfEy05tJV8qtJFzXWXLRH9XRqVxILF+K1qjGg0DpfDhP7hAFqeOqi6SOP3qLM2eLM186VvXFkFQN1Hz4+vd0gcbMY06d6hAJx2c5KGz98zuchy6TtjNOgQicZFx/CLgmwX4pmgOUq8pKQEOp0OQCLjGAWdEYQyUmFgNhmy0miZn25DY03Ooo1zRbZtq2190mrmlutDqoWEdE+uVMNm42D3ntfWMsnPLSf873v8baGdodEwfv7SUdz79Y2y58sVGEm1aODvn/R+mE0GwXxcWW7Fxxrm47V3z6B/OIiyErMQLNY7FMCXNp+PilILnnzlmHCP/OOTKcevtEe9styKoZEQorGEHPnZix3Yva8bN155vuoz48fT6fWJrlXqMGNkPJykSattIWMYDDp4XFbR3yLROL60+XwhgM7Dmdfltu/NBJqucuWVV+JrX/saTp8+jV/84hd48cUXhaQqBEEkkysBKZ2kNzTWZNROPref5KJtq3m6ElkmVaakvlk536icD1kaZMSyZVV7HEkmU16YZJKfW3q9MwPjoiAsIKElKgm/dCO85QQ8fz+qPQ7op5Qw6f8mo14QrgaDDk+8dBTVHju+eMX5QgQ3Q238SnvUeXM2gx2n9L2RjufWa9YIpnmDQYd/+NI6jAUnZTVppWQsjGg0jp7BcSyuns5uVlvlxKbG+djUOD/pfXSXmBGejGVsNcsUTVe6/fbb8cYbb2Dv3r3o6urCLbfcgksuuSTffSOIgiVXAlI6SUv3hqbbp3z52Xg/JR85m24gWDZVpthkrFTmEpA
3t/JtrlzoVjSZAmJhcus1a9Dp9WHNEk9Gz5ffe2zQ6xCNxVWFn3QRyGdR87gscFpNonsvfXcG/cGkBQPvs36ns09ksv7S5vMRiUSx67UTQht8BPc8tw23br0Q5Taj4vhrKsQVv/hgPrmocbWAM+l4xoKTaPlGkxA0WOa0YB53benx+470YePKKlHylJ+1dggWg8d3d+KOL67DdZctwcBIEFuazsPYaEDUl203XIh/evxtDPlDePDZ9xUzueULzVe55JJLSEgTRBrkQkBKJ2np3tC5RCYmWynSLT6ptFjpgkA6SfcMjsNsMiS1rbYAkDOZMo1UzjyudWxS8zAAYe/x7ddfKNIO5ZCzCFx32RL8156jGPSHcNcjbyEajQv9kbPy8O+k1CWw5+3pwkoGfUKr1uunr88ELm/JiOn1CIajqpnIlMZy503rBZ+wx2XBnV++SLj/cvdBaTyXXrBA9hr88QaDTjC7s2e1cqEbNzevECVLufex/RgeTewTf/XgGcRicVFQ2qA/KLgK2DkzFXAGpBDYp0+fxo4dOzAyMoJNmzbh2muvxSOPPIKuri68+eab2LNnz4x0kiA+qkgnaZvFmFYhi5kklcmWTzepppFJE2GYTQZhexKvQfrGQkl7bNUyZklNwkoChW/DXWLGl7eswML5JSIBncrXmur+8AFcg/4QxoKTgoVCurWMh7dkSK0A0akAKb4/WiPKpSZq5luOxaaPv2nLclHiEXZ9ZpaWW6TdeOX5Ii2ev09lTgvu/fpG1aQwwLSbQk37loONb9+RPvzsxQ7h3vCadrXHIUruwoQ1APQMyAel8QF4qeqE5xrVEd92221oaGjAVVddhRdeeAE7duzAxz72MWzatAnXX3/9TPWRyJJ87r8l8s9cyg6mhprJVi7dpNK7eOOV5yM8GcXTrx7Hg88ekhUE2264UBSwxRYEP99zVDFjVs/guCjJhpzJnm31YmbqodEwnnnteJKABpR9rVruj7T6E/Ovs7SbAFTNrbKBU1NmdV6I8O+OWkR5MByR7RsvjPk93fz1+YWCdJHG3ydpnXBp/6RJYfgYAgCoKrfhnq9uSLtgzMaVVdi9rztJ02bv1JAkjoBRXWFHNBpH33BAuKdsWxkf6zCTc6vqFbxer1Dg45JLLsGnPvUpfO9738t7p+YSucjAlK3AnAtJKQgiFdJEJFazQRCQAER7ZuU00kBI2X8tFQTvnxhM8n+yvwHJGbPkFgxy3w2RIIrFRW0qJfdwWk3Yd6RPGLPWPdKA2Px74uyISPNWM7fywt9dYsbWK5bh2ddPoH84iFg8jpM9/qSc5mrzgFIAXngyKlg4+OOl5mYm1Ncs8Yh81u4SKy5fVwuXw4Tn//ChUJDkzpvWC9nX5Nqs9tiFe8DoGw7gZI8fKxe6k+6HFLngQqmmzb9TDN6q03j+PGz79z8AQNI9lcY6zNTcqtp6aen0i+JyuUS/fxSQeyCAso9FSxtKZd/ydb7S/tQzA8Wb5pKYHaSJSACkZY7uPudX9F9Ly1RKA67u/PJFsJoNikJVmmTjzMC48DP/mZIgUjLHelxWbPtxG6LROPR6oKLUhr5hZSuC1Foi3WYmNbcqpb9kiyPmEnjm1ePCAqZ/OChK9DHoD4qSnygtmKTa+M/3HE2a+/hj2Va8yeAkF3xnEI6JxmK466dvIRpL3BtmXh/0J1wZ9359Y8pFg7vEjCHOTD0WCOMP750VLY48LqtoTlQSpLymLX2HmOuD3wPfNzwhCsJj95S1l8/cBkqozvhs37XS78WO9IEwk1s6wlPaBu9zk56vpewci1CUW0DwP/O+JmkkKZ/m8lvXXaApw1Ax8FF2DeRi7KnaUPLRajFHA0AoLM72Jf1+8Np7mdMiqxFed9mSJI1QavJlC4aRsbCwd5eZawFxKk3p91s6Ib9/YlAwCcdiCS2QjTndCVzO3CqXKpQx6A8K+4IH/SGRL5b1IdNEH1qEkdVsREWFDbc98Jqsz3rAN90X3hcOJILt9h46hwUV4upm0gXNthvW4V9+/g78E5OoKLPg4V8fERYAbpcVAz5xhL3USsL3XW7nhmjR89px0cKkfv70Fi/+nrL28pnbQAnVb253dzcuvvhiAEA8Hoff78fFF1+MeDwupCktZqRmp+5zY5qFr1wbdotBlNeWF75yPrpBfxAel1W06uYr33R2D+OZ104oajSsP7deswb/94kDGPSHhJcfSKS5ZH2Qnp+JNSHfFJJrgN9LnI5FRK6NbO9/Lsau1gY/Vjk/aCpztPRYPiscM5tKay7zW7KUfL880sk6GI4KUdU6HRCLxUXmWj4wSo01SzyCJq7XJzTufl8w42AkZm4FEnnN+YAtqTlYKjCuu2ypaI+0y2FWTfSh9n7JCSO543mrCPtfmhVNDr0OeOKlo8Lxcr76YDiCh557H/6JSXhcFly6ZgGe/8NJAIkFwIAvMTbeddHZPQyzySC7OOHfUzYOftEjXZjYLOLa2XKLHekiNN+oXqGjoyPvHZjLWM1GISH+0GgYu147LvxNp4Oq8OUn6b/5bAO+97P9mAhFRe3zwvedzj7FxcDVl56HH/+yPal/O1/oEISvVKPhBfFkJIaRqZJ2/olJ6HQAS1THZzPiz8/EmqCGVJtKVxDl2jWgJWI5U/i+8mbVdARlLhcYuTDdKbWh9lzYeanM0VK/rVxWOLUx8PuJWRupTL77jvQJmnE8DvT7pjVVOXOtEmVOi7AXeHldGX747Hspz+HJZlEmZ0LmrWm3X38hHnrufZGLgF9k8XWlpQJTrm2595HXQqs9drhLrJiMxJI7y2G3GjERnM4rLn1efCpT3kXy2sEzKe/Jj54/jGgsjspyK760eRkuWl6VZCafzoNuxWVra1BVPu3G4IU7i11audAt5C9XM7vPBLOvNs1hguEI7n/6IHxj4aS/STOz8sKXF963XrMG//rUQcRSZHL9bdsplDnN8I2F4bKbRIuBXa8elz2HCWsgsYBwl0wH6sgJYqW+M5ifqTwNawKQWvgqaVPpaPLpuBbkkFpLHmvtFPkatfRBK0oRtOkIylz6x/htSpma7pQ0rn1HxAvNQX8wyUcrZ44GpoVLudME6PQYHg0p9k/N/OhxWUXmYC3a7fK6MsGUqtMBTpsJo1PfJ7lsY2qCtcxpwaUXLBAFjfG5yJXiRaRJYqSBWHxlqnluG6o9jiR/ttTvzGt8gLIGKK0rLRfMpRbBze4P00KZ5vq9x/bDN5o8XzL0OuB/33AhfvTf7wsmc4NeJxQQ8Y2FhPmzstwqpE6VmvuVYNp2/3AQT7x0DK8eOJMcTBidPuaZ105Ar4eQwx0A3jveL+RRl1pzGLPhvwZIYKtyZiA5daAWRAXbf/6OaDWpxMj4tPD1T0wKk4ndYtD0osbjUDxOr9chFotDr4PqwoH5mYY1WhO0Cl8136ZWTd7jsgorY2l/5IS3dIKU+qv4PvCuhVxYE0QZrDgNO536ubnyj/HblDxTJRQzWZioaVypsnTJ+Q4BiLJGAUBlmRXbbrhQtl9KbbDxDflDsoFDSvfkoefeRzQWR4nNCK
vFiH5fUPh+SMeh1dohfWZOqwnfeWQfBkeCsmVRpUlipJo982kzYaiWqEVL0ho1zg6MK5p1U1UTY8Jsf0evqrAGEvf32OkRUVGNaCyOQX8QVrNB9D70DwdR5jQnzovFhPdMiqfUjHhcJzv/sTkGmDbVs++k0KcYBAsk71ph58sJ49nwXwMksFWRRm1K4U3LSvBacDqwF1NqRldDqSuxKS0iFgd0KscpthuHJjO6kuBzWk0oLzELSQmYJu92WRCejGnSnHsGx0UmTLYIUYoLYBMkML2A6BkcFwlrBu9ayER755EKSJbBip9wlba18DABxSYbRiAU0Sz02dh5oXD/rndFiUbSFdpyGlc0FofLYVYUttJzGdKsUf2+IAb9QcX7Im1DquEPjYbhtJtSjom/J6OBCEYDiQV1LA7ROORMs2raFL+okBb2kCuLWlMhTtohp9mzMStpuHJj6hlUzjPOxlTtcQim4ERWs2N4+Z3TuGnLclR7HLLpXN0lZnzj8w1w2pJT4wbDETyxp1P1vgPTGdR4XA6z4FeWCl1m3fSNyc+jJTYjDHoD+oYDcNlNSfOt02bEw79ux8h4Imht2w1r4S6x4l+fOiDMRwa9DmuWeJJcK0BiuyBvJs80iUuuIIGtgtVsxE1bliclqWfMtYJlagsI9nmmXdYy1kdf6BDMirzg47d0ANM/D/lDeHx3ByrLEoE6SsK3styKiMQvFlNY0AxxEyS/gJBrgyF1LWiNTZBDKiBZBit+wmXa1J1fvihle1Lrw/ZH9+F031hGrgC+2hCbxJnJNt1Jp6bCIdor7R8PqwpbpTb4BXFNpUOzpsL7YJU0fCUzNn9PWIzHkGQcAFQjrJXa5gUsb52rKrcl9e1kjz8p9aeSFUaq0UmPk/59eV1ZkhtE6pqKT80GTDlgdZ95qxCfMGZoNIyHf/2BkJzlpi3LYbSYcKRrCN3nRuGfkLck6nTA1/98BUbHJ4W85Dz+8TBanjqIW69Zo9n0zeAXXP6JSThtRoxN/a7TQfgZSEStjwXCODswLspo9tefXYkypwVW83T1MiBh8fnWdWvR2T2MnsEJvHrwjBDoxvz+M53QSBcvwjqZ/f2jOWvLNxbCvY+9jeHR9E3jxOxSYp/2S6aD0sKHTSZq5n8l86lvLIR7Ht0nWhzwE6qctnvi7Ajue/wd4febr1ohJH7Q2h/WJ6lZVS4oKV1rglxq0Eyiz5nWkk7p0PeOD4gior+0eRk2NVZrDqriBW4wHBUJ5+svXyr4MBl8hDUA1bZZ+7x/+t/+/s+EQhL8c+C57QuNePrV44rt+sZCouA26XFKz5lZc450DSkqH0psu2EtHv3dEUXXoEEPRGXWwaVOM0am4nH+z40XodRpTjI3S3E5TPCPT6LcaUJoMpa0GC8vMUOnS5i+2SLNXWJOLLL90wK4zGnGlg31Ircew24xyLZ7/RXLMBmJ4ulXjmEsEEVZiRnf++tLcPfDb4iEO8+dN12U8YJXjcrKEsW/kYatQjAcwb888Q6GR0OazN9acFoN0Bv0Qi3ZXLX7UUKrWT8TYQ0oPw+lLXVSYSfO9mUUAml4YS21JshFrUu1pjVLPFhQ4cDZqShrrf3hTcm8H5/f4peJNUGaCzqTCYvfxgRAk7k/GI7g8d3iHSwLJFuVxibCqkFV/D2xmo249+sbhe07Dz57SNS2p9QqmImB5ICtvYd6RIsF1qboPbAYccePkrO4Mao9dgA6Ubud3cNw2s3CIoHtyebdS2xsi6tdIosJb81h/uHHd0+brCvLrdBBh77hAMpKzCLfM3M3sWIffJEOqf9XTlgDwMjY9K6UTq8PpQ6zSFjzY2CwOXF4bBJfaT4fj7ceFb7n5SWWqQVhIlmK02oS3DtSfGNhxAHZmB05F+PwaDhpF45vNIx/+I+9ivFHbklOi5nYKgqQwFals3tYMI/kQqjqAIwFowCmX5p4HCixGTAa0O6rZjisRoyrBLRlshgohAXEXOiekt9bGoh36zVr8C9PHIB/XDw58ROHWtT6thsuRM/guHDsZz9xHv7zv8UCRa0/UuHL7zv1T0yKJjWlnQ5yRRiUEl1oRS6H910/+D3ODoxjntuGO764TnHBcLLHL5rsy0vMWFztEmn8bpd20zwbh9lkSPZhuiy4fetaUQDXdZctER3DRyNLg+HYOX/9+UaREGXWkcpyK25uXoHF1S50dg+L2t3xmw8wEYpOaf3LhL5JBd1YIJwUcMZM+G6XRfAP82P74hXL8PTU7hO+9rW7xAy9QS+YfgHxwszjsqLrXGJho6R5VpRZMOwPC6b2n73YAT2Xc4stFtT4rz3HEEfCR33jluVYVlsmvA+Cy0FGWAMJn/QzEu16ZX0ZjnT7RJ+lCsJVEtZ6feK70vL0QdF3ZtYznX3UGVB4ITJF6d2QCmuTQYfJaGqxpCasgcwEbzwOOG0G6HU6RZ8Uoez3lgbi8VqsHDaLQZR6Uaotu0vMMBj06OcyOsktqnjhK90W+N2d+zEyHhYWEC6HWVhAKE1YUu2dmWoBcWKSTLRrObfByR6/YDnoHQokLRjU2r9py3IAEAV5DXFCkeW0Zikty5wWWR+01KfunjInD/qDogAuPjEHg49G5j9j///o2WlTtF6X0GITP+uELVhPvnJMdD5b1PUOBeDtFbv5+Hfg6VdOCC67nsEJdJ3zC1nbhvwJ687ffLZBeH8MBh3CkZgo7SYz+fN75nnLBJ+k5pnXTiQJ67/8xGIAgNlogLvUkqSx8u9ZQ305fv9eD9RgMSqjgQicNlOSK2dsIowyp0kUjFZiM2LzhjohuQrPkW6fYB1gi4B55Xbcs3O/aj8Y65Z5cODY4FTfEp8N+UOCNYLfmpZPSGCrcNHyKjzx0rHUB+YYLcI6n4wpaPu8KbrUYUI0FhcFdWghkyh1ORwWPSZCMaGtVKvlfKK2MOKFtdmoQzgiPjggY6J75HdHhPvKC3OmscQVov3Z+PltgTrd9JaVnsEJtDx9EP7xsPB3LdYdXnsHps21P3/pmMh/DainyWV0dvtEwkwq6ACoai7S/ckL57uw91BPUjEQFtTntJqErGYGgw7/9PWLFX33fJDp0JQ5WbqPfXG1C3d8cZ1oC1IibWa7sD2zoswiLBgSwX5cms74dJYutl97bCIsWPPkePXgacGMLA3MGh4Va+yP7e4UmbiH/CH8y88PCL9Ho3FRbgdm+maWDn4x8lhrJ+756rQvnTe3M3Q64I/vn8XA1Bj16spzSmHN47KbRDtJBv0h/OPDb8rOIX/7+dWo9jjw+ns9ssGu0WgcVrMBo4EInn39BC5bW6O5H7WVTpzqHUsKiGMLi2gsjp7B8bSCLjPBcM8999yT1yvMAhMT6nsBtWI1G6FHHB0SUwoBhCZjCKfIaKQFvS5Z8FjNetE+TTnYosZu1eOqi+vx8TXV+PDsCILh7PuUL6Ix+fFKycV9VVpEsAVCwpJiFC3OlEoFhCbF/XG7zDjQOSBsuRkLTGJpjQs//mU7XnizG2939uH1d8/ixTe7ceBoPzY1zofRkAiHZv58vn8d3cP41EV16Ogex
ujEJCrLrXDaTBgPJoTHZ5oWCecDgNGgx/rlVaj2OLB5fR1anj6IfUf6hL+77Cbc8cV1qChNaNYHjvbj4NEBYdxGgw7vHh8U+r6/ow979nnxdmcfLruwFu8eH0AglEj28qn1dWh56iD6hgPwuCz4hxsvgtNmxpmBcezZ5xWuGYvFMB6cvpcTwSgCoSjsFgP+x9WN+PCsX4ipkI5v8/o6PPTc+wiGlRdOwXAMwXAUbpcF266/EIdPDmJ8alFXWW6FQa/HeDACi0mfci+0TideKF65vg7nhibgsJow6A9i0fwSvN3RDyBhxVta48J4MAK7xQiX3YwDR/sxFhBbjXgXj9yrV2IzJC1WeSrLrfjMJQvRfkrsFghNxnD8zDCMBh3Ck8rfixK7CUtqXPjJrz8QFkm3feECvH98QPT+snllIhhJupbNYhDNOy6HSTi30zuCQCgKh9UAu8WI0GQMpU4TQtx8s6lxPirLsi+o5HAoC32KElchGI7g9v/Ym9ZeaGL2YCtqm1mP67ecj5/9tiMn/niX3UjuAQ45k7zadhw+wpovcSg9/4d//2c4dLQX4ckonnzlGPqHg7Lmd2B6b7BUi2KwbUeLq10IhqOiilplTjOGpqKKpZYKXiu+86b16BkcF0VW33nTRViyoFTkL08V96HX6/Cfd1yOY6cSiwS+qIfbZcGXr1wuinhPBcu13tk9jIGRICpKbWmdD6hbpKQVslj0tstuwre2rsV4cBLhyWhSJH266HTA334usa+bvyeZtCN3/1PF+Ej5xudX4Zd/PCVYXr7y56vwfx9/W/ZYh9UAo0EvWFQqy6347le1pbNNBUWJZ8iZgXES1gUEm7gD4Rh2/iZ3efBJWIuRmxyH/NM7KXQ6oLIskZRDWvjj1mvWJEUas/M/ODUo7DtnMD8qa4MJYnaMnLBm5/HFPFi+b5vFIPKvSociJDHxh/Cn987i9fen81czPyWfHEcuWYeUWCyOdzp6UVFiEaK4+WI+P/3NYdXzpfQNT+DswAReetuLvuEAKsosaQWLpnIfDY2G4bQZBOsLi972T0wKPl93iRlXf/I8/PKPpzAwEoR9ykWVDvF4ouIWCyLjhfXqxeU4fHJY5Wzgs5vqYTObZPd2A6ljfHhcdhOW1ZZj2w3lwta5B59TXgSNS4KHb25eMfvFPz7qSDMREUQu4ZM8FANCcp44cM0nFyMQiqHUYRa0v57BCZwbGsenL16I37adShKWDz59MMmn73FZMBaYFG13Gh4NKqaplMKn/Lz0ggU40jWkeTzP/1EcvBSNxfFW+znM9ziEBYN/YlKTsHr2lWMYnAqAu+OL60TbmsbTFHSP7z4qEs58GUstaIn1UIpjYQyNhvHT33YIsd7pCGu2R5ulb/3De2exvK4MFWVWwbd/4sxIynbe/KAfn920UPN1ldDpEs/x+08eQCweTyt3OTC9S2EmIIGdAj4TEUEwmJmQN6mmG1C3cWUF3mjvF4RUIWyp08pTLx+HbywRySsEwGG6mpIcUmGt1ycE7tOS6OmnXj4masNi0if52XkStZd7cNHyKoQno1kFKO567UTShqRUwspq0gsBcb1DARw7PSxEcWfCXHpHMupKPI6tly3FykXlQjCglICGWJS+4QAe+W32ljR2P3kNfx7BTIsAABTkSURBVMgfQqnDJKrxoEQkGkcwHJ0RDZt82CpIM00RRL5x2vQYC+QncM5kALTKCR0SWaxyEP+WU5Tuj5bFzmzuJOCxmvVzOjhyppDLOlaouF0W/NNfXZx3H3bB6I8tLS1oa2vDjh07ZuyaNRUOVJTmN0yfIHjyJawB7cIaSGhOMyGsS2zpTXCTCh4ELWrHTAlra3JtDBEkrBMUi7AGEhq53PbEXFMQAru1tRVlZWVoamqCz+dDa2vrjFx3ZCws7C0kCCL3jKbpw1czfc8VgpllxCWIlBSED7utrQ1NTU0AgMbGRrS1taG5uVnx+PJyO4xGQ9bX/c0bp7JugyAIgih+li3yoLLCmddrFITA9nq9qKurAwC4XC54vV7V44eHJ1T/rpX55dlvgicIgiCKn7Z3z+DSCxZk3U7B+7Dr6uoEIe33+wXhnW8az/PAaS2IW0QQBEHMIsvryvJ+jYKQRk1NTYLAPnTokKo5PJdYzUZ8bFX2KyaCIAiiuBkazW2xKDkKQmA3NzfD5/Ohra0NAAR/9kxwxfraGbsWQRAEUZhI86vnA9qHnYJgOIIX3urCb/d25axNgiAIorjI1V7sgvdhzxbBcATbH30rpbBOVU4uHeyW7BtzWBMR8k6bHjZzdtHyl6+rhsWUwwESBEEUIUP+EM5M1XTPFwURJT5bnOzxa8rTq5SQwWbWIRBOz4AxEcre4MHK/OUiCUcsCoQmi84IQxAEkVOqPXahmly+IA1bBbX6q1q46uJFuelICrJUolVJp9g8kT8ctFuBIPKKUj14pc9FxwC49Zo1ec8nTrOACmZT+reHPdt5bhvq5in7InKJSt37WcGZZrpJJUpsRnzxU0sFE3+mlDpS5IosAAKhGNwuc1ZtWDJ4nwmi0MjUgacUzVXuNKPMqf7diwPo9PoyvLJ26BuswuJqF9yu9HKJ3/qFRtx500W4++YNWF5fhspyq+ZzS2xG/NVfrBCuOc9tg8uRnfCzmWfmETusiX7qdMi4ZCS/knVYjRgNRPDky8cFE386VJYlkt6UOc3Y1Dg/o/7wsQmzLfRjcWA8y1KcRoNOk7ZAZMZ51TOzQM8HuYzDmW1y7cAbGg3DNxZOedyi+fl//iSwVbCajbjrpvUoL1EX2q6pybzaY8fy+nIsWVAKq9kIq9mIm5tXqJ5rt0xrj6OBCOa5Hfinv7pYEPpfvWpVWn12OYxCf6s9dtz315fg6ksXaT6f/96WOc34SvP5yteyTwsxViw+0z0HdotBdG46xefl6PclSuX5xsJ44U31zHhyOKwGUWyCljJ7cuRyIsw2j/Z4MDojpRlne/K3mmenA0P+QMEuiPh3faYW+cXGQ8+9j2A4v/Xt6cmkoMxpwff/58dhmJqF9HqItOBqjx33fHUj7rzpInznK+uTfBiLq12Y555OcWo16QQBDwB2m1HUVk2FA1azURD6y+vLVLV8vQ64+tLFwu/+8Qj+6i9WCv0pc1rwqfX18JRqM6e6nNN9842FUVVuR9VUilbL/2/v3oLbOK87gP9xvxBYgoQohdGAHlmWOCZEVY5IZwwm9jRSLNJNXY3sBFQaN1RTupNpLT5UHD2EpJWqbsemJlOp7mRsKoXiuo7gKJy004pQMnGT2ISb0BdJFBgrscpWK5eWJZLA8gaCALYP1C4XIAAuCF6wq/N7EQkRH76DvZzvtrv61IOZm5aXxLI9vchi1MB8p7xie3KPXqddkV51MTzOcS0ISVqrBezW9R2NiOa50HOlhKfiRfWs6uWaiSWXvFplNRtlJr0yWz1rsUqcErYM3FQMiTtn3mQSKb1gISkKCTad2ajHsy31aHuyFuWMCdE5Hnrdwtd+OzyLb/zB/VkT/kIvfyHhSnfnJA9s3VyKSqcVwHzS31LJpNTHbNTj
a1+sTinXbl34HOmcc2RyDs5Ss1gWMP+geACYjct7sLzFqBXnfMoZE/78j3Zm/LuZGC/rUYNNn3WJiX2tRKbmlt2rXgnFcsqy3lk/sNQJ2m414Cu/vxVffuS+FfnehMZSqU356w+UaKmrVZbTEK2rrpD1d08tMSopF2M15BwhXGkVDvOqrxKny7pkqPoUg0qnFSOj04t6wXKYjXrYrEaMcfOXiI1xs3CWmjEaiYoJNtfqQofNhD/7Ug26f3ARwPwcTfr7O79eh49uT4l1S2c0pC7ceuyz98D/n9cAzM85S8trP/gARrmouPM5GRNGueyXt5WYdZiJJZFM8tBqga6WB1FqM4r1AYByuxFjE0vPAwGAzayDXq9DeDKGSqcVf9iwBTvudYrx5+vA57dgk9OCsz/7EOMy6yCXw2YANx1HUnIGK7XqEJle3ogBU2IENxWD02FCIsHnXV+rSbdotMJRokdkKr6suT3NnaaDw27EXJzHRJZRlcjUHH76zg2MT8xCp9MgkeDBWA3Y//C9eCVwNe/PFRrIWo0GG0pNuB2ZhdWsRXQ2uexRC6tJh0ce2Iy+/7q+6P+0mtQkpNMACRX0ltdCqc2AyGTm/aKcMWGMm8WmcgueeGQr3v/wNhJLfLFldpN4vt1UbkGS53FrPIoNDhPGufnOk0az9PQbNz2HnwzcwLFD9Rj44BPs3OrEC6+9j0SSh1YDPPx7lfj5xfyvghE+O/1Ye7S+atVXidOdzmSoqLCD/Wg8Z0JcSjQWx/HvvyMmfWlSlFPeSrz/22cGcHNsBpvKLTj61c/gO69fwo1PJpcsLzw5i7955R2xwQHM9wClO07bk7WITM1h51YnHLbFQ/iXPryNk+cui7/bzHpMSuapSyw6TM0s7PztB3fBaNCJ9ZHWX45SmxGRyRg2lVvwbEu9WEb/4Aj+5ae/k1WG1OOee/Dm4McYn5iFVjs/0mK36PHFehd6fzks/l2mhAkARr0WsXjm0QStVoNkkofdosOE5Dv4XO2n8Nbgx3nXVWCz6ABek/I9r5USsx5T0XheDbV82cw6TM2u3Ly83arHxHQcdose9fdX4I33lHNJo7AP6bQaJJI87BY9dm3bgDcv595/SkxaQKNZ1sJOwTf31+DHb/6PmGAB4ObYTMbzSnhyFpevjcJi0uG7Pw4tqn+l04rOr9cBQEqDX/g5Gkvg8rVRVLsc+M4PL+LW+NL37/7Wn+wWO1fC5+/c6gQA/NU/9ue1/6Sf94TvW6fToPubnoznvnzlutMZJWwZKirsK1JmNBYvOOmv5PttjAWXPvhYVnmZkl1piRGRqZh4kOUqIxqL41nfr8UDrKLMjC/s2iz28oHUUYNM5S2VcEssekzNxFHptOL5v/w8rg7fXhSbtOFTUWZGIsGLPYADD2+B/41rGONmUWYzABotxidmUem04muPbl92D1+rAY7+8Wfw/GvvIVnAujGHzYjwZAxmgwbRAm9mIySn9J+VQgPgG1+6H6f//TfLLuPxhir8W/9Cb1v4fjMx6TWYjRffqZKx6hBPaDA9G4eTMeFI8wOYjM6Jya39u8GcPdrHG6pQZjfj+4Hf5vwcrQbY4LDgk/EZlNkM4KERvyuhAyAdlVvqPCVtgJczJrRL6i333Pab/x3LekwKPfulzk3BKyN570PS89ThJ3biKhvO2lFZDkrYBVqphF1s8o2r0F5++gHWfnAXXv3Jb/MqT1qHXK151+ayrLFJGy5A6skl2/8BQOfpX+WcGkhXYtZh34NV+NzOT8NhM+Hm2DT+7tV3ZS/WS9f2ZC1cn3ZAm0zi+dfew82xGThsBvB8fqvYN5ZZcHDPtpQRD+EkZLfoceCRe/Gvbw0jnGWYs1iU2Y1I8kBExiU3mezZvRmXro3idjias9GSafjVYTPi4N5tOPeLa2Ij1GbVY7LAhk+Z3VjQtI20Nwkga49WLptZh0fv7MNmo048HoZHuJRjOf1z5ViJDoicc0GusrMlfel2qCgzQ6vR5F32clHCLhAl7AWFHGTpCT996EtuebkSrmA1tll4chbH/unXshNu+8FduP+e8pTXorE4rl4fxysXruZ1Yq4oM+Pbhx4UGyLp38HwCIfYXAJT0Tl87z8+WJRgSm0GfHXvdtgsBmypZAAgZ+NLqOf1m5P4+cX/w/jE/KhDEhpEJmMoLTEgkeSXfc29xaTDjGTqwGEzYGJ6Dok8RyBsFj10Ws2iBkv6nHQupSWGJRs8wtoCKeF7GxmdXxlcbjej43u/QiIhb44VmJ9qeevKx2JvUCgvNpfAaz/7ndgYyDVPLNBqgBN/0ZCxpyc99hirHlyOhsXjDVXYUlkKo0GXdX1NpmN5tedvM5FzLljq/UJPXxjeFrbDTJxHODItHi+FNC7yQQm7QJSwV06hrWq5Vis26Xy+w2YAJEODTIkeOq1OHEbPdRKLxuIYHuEwORMTh+EdJXokeA0mpudQbjfCu2cbjHptyolTTlzhyVm8e/UWmBLDoven10HOtsh2Urx6fRwnzw3m8e3NK7Ob0PbkTpz60WWMcbMoZ0x46tHqlB5/PuxWA7xf2IreXw7PL+hkTHjmiZ34h95BjEYWz3GaDNqc17TbLDo079kmlicMfZ7wX1xUXrYebbXLgb8/dynnmgth3lPac02fvhke4QAAlc6ShVEVuxHhLI29XD1dYTs6GbNYVroNDhP++k/lPXEqGotjOs7DqtesS7JeKdLvRdpoXa/zPiXsAlHCVp7VjC1T7xbAslvi+fQSimmbSXsncnuUdut8TzrT1Qh/++p7uPHJJDaVWzAXT6YsckyX/nnOUjO+9dTuRaMEwyMczgQ+EHuqwtzmUtoP7sKWSmbRdMnwCIdXLlwVh0flNMpicwmc+8V/46NbU2CshpQRmnyGkqWJpfsH72NkdDqlkZdPT1daN0GuHnU2xbQ/rjRK2GuEErY8ao0LUG9sxRaXkERsZoPYA5WbvNOTlXQRZDSWwHP//C5GI1FxVX46YSV6tvKkdZT2VIVkl77iVyrTdEZ6zPk0yoTYpMm2kKHkQoeCV0qx7Y8riRL2GqGELY9a4wLUG1sxx5UpeUuvo01fvJOerNJjk/YohXnds298KL5fOkydb+8yUz15fv4mQdJLAVeKNLa1mhZaC8W8PxaKEvYaoYQtj1rjAtQbm1LiyrXaPluykhNberJbyUsdc9WtUErZbvlSa1xAcSZsZTfvCCFFKf1OgNl+LrTcfO44KKe8QsoiZLXRvcQJIYQQBaCETQghhCiAKuewCSGEELWhHjYhhBCiAJSwCSGEEAWghE0IIYQoACVsQgghRAEoYRNCCCEKQAmbEEIIUQBK2IQQQogC6I4dO3ZsvStRrLq7u6HRaBAIBLB79+71ro5soVAIR48exf79+8XXMsUi97Vic/jwYXR1dSESiaChoQGAeuLr7u7G6dOncevWrbzjKPbYWJZFZ2cnmpqaAKgnrp6eHjz33HPw+/1oamqCyWRSTWx+vx83btzAxo0bVROX3+9HV1cXAoEAXnzxRWzcuBH33XefImKjHnYWgUAADocDHo8H4XA
YgUBgvaskm9vtxpUrV8TfM8Ui97ViEwwGcerUKQwMDOD1118Hy7KqiY9lWbS3t8Pn86Gvrw+AuradtE5qiiscDqO3txe9vb1gGEY1sXV1dWHHjh1obGxUVVwejwe9vb3w+XzYt28fPB6PYmKjhJ1FMBiEy+UCANTW1iIYDK5zjfJTWrrwEINMsch9rdh4PB7x5x07dsDlcqkmPqFuoVAIXq8XgHq2XSAQQGNjo/i7WuJiWRZDQ0Oorq4WT95qiC0YDIJlWbAsK9ZLDXEBC8cZAExMTIBhGMXERk/ryoJlWXHDMAwDlmXXuUbLly0Wua8VI47jUFNTA0Bd8bEsi5deeglDQ0Pwer2qiC0UCsHtdqc0ItUQFzBfN5/Ph1AohJaWFng8HlXE1t/fj5qaGng8HrS1tYHjOFXEJSXsl4By9kfqYWfhcrnEjcFxXEqrTGkyxSL3tWLl9/vR3t4OQF3xuVwunDp1CjU1NQiFQqqI7cSJE+jp6UFHRwfefvtt+P1+VcQl5Xa70dTUJJ74lR7bxMQEamtrwTAMvF6v2LNUelxS58+fF9dTKCU26mFnIbSUAWBwcDBlOE9pMsUitJiXeq0YBYNBcciY4zjVxQdAPDmoITafzwdgflt1dHTA6/UiEAgoPq5M3G63KraZ2+1OqVN6slJqXFIsy4JhGADKOUdSDzuLxsZGhMNhcY5COnda7EKhEFiWRSgUApA5FrmvFZtAIICuri60tbXhwIEDCAaDqolPiC0QCKChoQEMw6gmtnRqiaunpwddXV0pjUg1xCbEEggEMDg4iNbWVlXEJWBZFrW1teLvSomNHq9JCCGEKAD1sAkhhBAFoIRNCCGEKAAlbEIIIUQBKGETQgghCkAJmxBCCFEAStiE3AWqq6uX9b5iuw80IXczStiEkIw4joPf71/vahBC7qA7nRFyFwkGgwgEAgiHwxgaGsK+ffvQ3t4OjuPQ0tICjuPAMAzOnDmDjo4OXLlyBT09PWhtbcXhw4cxNDQk3q5SuGVlpvIAiDcUYRgGx48fh9vtRnd3Ny5cuACGYfD0008X7V2wCClKPCFE9bZv387zPM/39/fzdXV14uvCzy+//DL/wgsv8DzP82fPnuX7+vr4SCTCt7S08DzP89evX+c7Ozt5nuf5SCTC79mzJ2d5fX19/DPPPCP+TUtLi/ivUEZdXR0fiURWLWZC1IZ62ITcZR566CHxZ+HhBcJTmQCgubkZLpcLHMel/F1raysCgQCCwSAikUjO8oLBIB577DGxbI/Hg+7ubrAsi0OHDol/z7Ks+MQkQkhuNIdNyF3G4XAses3tdqO3txdVVVVoa2tbNHcdDAbFhH7kyJGUR2VmKi8cDqf8LjwwobW1FT6fDz6fDwMDA5SsCckDJWxCCLq7u9HX1wev14sjR46gv78/5f/7+/vR1NSExsZGsCyb0sPOpKGhAefPnwcw/zCatrY2NDQ0iKvOOY5DfX396gRDiErRkDghBM3NzTh06BB6enrERWIMw4BlWfT09KC5uVlcRFZTUwOXyyU+1zoTr9eLUCiEvXv3AgBOnjwJt9uN/v5+8bXjx4+vWXyEqAE9rYsQQghRABoSJ4QQQhSAEjYhhBCiAJSwCSGEEAWghE0IIYQoACVsQgghRAEoYRNCCCEKQAmbEEIIUQBK2IQQQogC/D/LuG44QEFD9QAAAABJRU5ErkJggg==\nplt.show()",
"_____no_output_____"
],
[
"plt.style.use('presentation')\nplt.figure(figsize=(7,5))\na = plt.subplot(211)\nplt.plot(df.Quality.values, \"*\", label=\"True\")\nplt.plot(model.values, \".\", label=\"Model\", color=\"C1\")\nplt.legend(fontsize=16)\nplt.ylabel(\"Quality\")\n\nplt.title(\"Multivariate Linear Regression\")\nplt.subplot(212, sharex=a)\nplt.plot(res/df.Quality.values, \".\")\n#plt.legend()\nplt.xlabel(\"Instance\")\nplt.ylabel(\"Residual\\nFraction\")\n\nplt.tight_layout()\nplt.savefig(\"linear_regression_test.pdf\")",
"_____no_output_____"
],
[
"# LINEAR REGRESION with columns parameters changed",
"_____no_output_____"
],
[
"df = pd.read_csv(\"random_forest/float_quality.csv\")\n\ndf.columns = df.columns.str.strip()\n#df.Resolution = df.Resolution.str.replace(\"k\",\"\").astype(float) * 1000\n#df.Band = df.Band.str.strip()\n#df = df[df.Band ==\"K\"]\nprint(df.head())\nlen(df)",
" Temp logg [Fe/H] Alpha Band Resolution vsini Sampling Quality \\\n0 4200 4.0 -1.0 0.0 0.90 60000 1.0 3.0 2899 \n1 4200 4.0 -1.0 0.0 0.90 60000 5.0 3.0 1946 \n2 4200 4.0 -1.0 0.0 0.90 60000 10.0 3.0 1141 \n3 4200 4.0 -1.0 0.0 1.25 60000 1.0 3.0 1543 \n4 4200 4.0 -1.0 0.0 1.25 60000 5.0 3.0 1090 \n\n Cond1 Cond2 Cond3 correctflag \n0 12.1 20.4 12.6 0 \n1 18.1 30.6 18.7 0 \n2 30.8 50.2 31.6 0 \n3 22.2 111.9 23.5 0 \n4 31.4 186.9 33.2 0 \n"
],
[
"data_table = df[[\"Temp\", \"logg\", \"[Fe/H]\", \"Resolution\", \"Band\", \"vsini\"]]\nexpected = df[\"Quality\"].astype(float)\ndata_table = data_table.astype(np.float)\n",
"_____no_output_____"
],
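[
"# Added, hedged sketch: sk_linearReg is defined in an earlier cell of this notebook that is not shown here.\n# The guarded fallback below only illustrates the interface the following cells rely on\n# (intercept, coefficient vector, fitted values and residuals); it is skipped if the real helper already exists.\nimport pandas as pd\nfrom sklearn.linear_model import LinearRegression\n\nif 'sk_linearReg' not in globals():\n    def sk_linearReg(X, y):\n        reg = LinearRegression().fit(X, y)                 # ordinary least-squares fit\n        model = pd.Series(reg.predict(X), index=y.index)   # fitted values, aligned with y\n        res = y - model                                    # residuals\n        return [reg.intercept_, reg.coef_, model, res]",
"_____no_output_____"
],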
[
"\n# Limit broadening\nmodified_data_table = datatable\nmod_expected = expected\nmodified_data_table = modified_data_table[modified_data_table[\"Resolution\"]==100000]\nmodified_data_table = modified_data_table[modified_data_table[\"vsini\"]==1]\nmod_expected = mod_expected [modified_data_table[\"Resolution\"]==100000]\nmod_expected = mod_expected [modified_data_table[\"vsini\"]==1]\n\n[intercept_, coef_, model, res] = sk_linearReg(modified_data_table, mod_expected)\nprint(modified_data_table.columns)\nprint(coef_)\n\n\nplt.style.use('presentation')\nplt.figure(figsize=(7,5))\na = plt.subplot(211)\nplt.plot(mod_expected.values, \"*\", label=\"True\")\nplt.plot(model.values, \".\", label=\"Model\", color=\"C1\")\nplt.legend(fontsize=16)\nplt.ylabel(\"Quality\")\n\nplt.title(\"Multivariate Linear Regression\")\nplt.subplot(212, sharex=a)\nplt.plot(res/mod_expected, \".\")\n#plt.legend()\nplt.xlabel(\"Instance\")\nplt.ylabel(\"Residual\\nFraction\")\n\nplt.tight_layout()\n#plt.savefig(\"linear_regression_test.pdf\")",
"_____no_output_____"
],
[
"# broadening to power 1.5\nmodified_data_table = datatable\nmod_expected = expected\nmodified_data_table[\"Resolution\"] = modified_data_table[\"Resolution\"] ** 1.5\nmodified_data_table = modified_data_table[modified_data_table[\"vsini\"]==1]\n#mod_expected = mod_expected [modified_data_table[\"Resolution\"]==100000]\nmod_expected = mod_expected [modified_data_table[\"vsini\"]==1]\n\n\n[intercept_, coef_, model, res] = sk_linearReg(modified_data_table, mod_expected)\nprint(modified_data_table.columns)\nprint(coef_)",
"_____no_output_____"
],
[
"\n\nplt.style.use('presentation')\nplt.figure(figsize=(7,5))\na = plt.subplot(211)\nplt.plot(mod_expected.values, \"*\", label=\"True\")\nplt.plot(model.values, \".\", label=\"Model\", color=\"C1\")\nplt.legend(fontsize=16)\nplt.ylabel(\"Quality\")\n\nplt.title(\"Multivariate Linear Regression\")\nplt.subplot(212, sharex=a)\nplt.plot(res/mod_expected, \".\")\n#plt.legend()\nplt.xlabel(\"Instance\")\nplt.ylabel(\"Residual\\nFraction\")\n\nplt.tight_layout()\n#plt.savefig(\"linear_regression_test.pdf\")",
"_____no_output_____"
]
]
] | [
"code"
] | [
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
d09b1de263b0da27ad4b237eace44f8437867db1 | 9,920 | ipynb | Jupyter Notebook | chapter_4/Surname-MLP/SurName-Dataset.ipynb | ManuLasker/nlp-with-pytorch | de32643e86930862fdecf99bf706482a50be7f99 | [
"MIT"
] | null | null | null | chapter_4/Surname-MLP/SurName-Dataset.ipynb | ManuLasker/nlp-with-pytorch | de32643e86930862fdecf99bf706482a50be7f99 | [
"MIT"
] | null | null | null | chapter_4/Surname-MLP/SurName-Dataset.ipynb | ManuLasker/nlp-with-pytorch | de32643e86930862fdecf99bf706482a50be7f99 | [
"MIT"
] | null | null | null | 22.142857 | 100 | 0.434778 | [
[
[
"# Data Preprocessing Notebook",
"_____no_output_____"
]
],
[
[
"import pandas as pd\n# import modin.pandas as pd\nimport numpy as np\nimport os\nimport re\nimport warnings\n\nwarnings.filterwarnings(\"ignore\")\n# os.environ[\"MODIN_ENGINE\"] = \"ray\" # Modin will use Ray\n# os.environ[\"MODIN_ENGINE\"] = \"dask\" # Modin will use Dask\n\nfrom sklearn.model_selection import train_test_split\nfrom argparse import Namespace\nfrom tqdm.notebook import tqdm as tqdm_notebook\n\ntqdm_notebook.pandas(desc=\"Preprocessin Data\")",
"_____no_output_____"
]
],
[
[
"# Define Args",
"_____no_output_____"
]
],
[
[
"args = Namespace(\n raw_dataset_csv=\"data/surnames/surnames.csv\",\n train_proportion=0.7,\n val_proportion=0.15,\n test_proportion=0.15,\n output_munged_csv=\"data/surnames/surnames_with_splits.csv\",\n seed=1337\n)",
"_____no_output_____"
]
],
[
[
"# Load Data suing modin ray backend",
"_____no_output_____"
]
],
[
[
"surnames = pd.read_csv(args.raw_dataset_csv, header=0)",
"_____no_output_____"
],
[
"surnames.head()",
"_____no_output_____"
]
],
[
[
"# Data Insights",
"_____no_output_____"
]
],
[
[
"surnames.describe()",
"_____no_output_____"
],
[
"set(surnames.nationality.unique())",
"_____no_output_____"
]
],
[
[
"# Split data Train/Test/Val",
"_____no_output_____"
]
],
[
[
"train_surnames, val_surnames = train_test_split(surnames, train_size=args.train_proportion,\n stratify=surnames.nationality.values)",
"_____no_output_____"
],
[
"val_surnames, test_surnames = train_test_split(val_surnames, train_size=0.5, \n stratify=val_surnames.nationality.values)",
"_____no_output_____"
],
[
"len(train_surnames.nationality.value_counts())",
"_____no_output_____"
],
[
"len(val_surnames.nationality.value_counts())",
"_____no_output_____"
],
[
"len(test_surnames.nationality.value_counts())",
"_____no_output_____"
],
[
"train_surnames.reset_index(drop=True, inplace=True)\nval_surnames.reset_index(drop=True, inplace=True)\ntest_surnames.reset_index(drop=True, inplace=True)",
"_____no_output_____"
],
[
"train_surnames[\"split\"] = \"train\"\nval_surnames[\"split\"] = \"val\"\ntest_surnames[\"split\"] = \"test\"",
"_____no_output_____"
],
[
"final_surnames = pd.concat([train_surnames, val_surnames, test_surnames], axis=0, copy=True)",
"_____no_output_____"
],
[
"final_surnames.split.value_counts()",
"_____no_output_____"
]
],
[
[
"# Save Data",
"_____no_output_____"
]
],
[
[
"final_surnames.to_csv(args.output_munged_csv, index=False)",
"_____no_output_____"
]
]
] | [
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] | [
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code"
]
] |
d09b1deca1b3f9b815b204c979399d55894d9cdb | 131 | ipynb | Jupyter Notebook | 01-Lesson-Plans/04-Pandas/1/Activities/05-Ins_Beta/Unsolved/beta.ipynb | tatianegercina/FinTech | b40687aa362d78674e223eb15ecf14bc59f90b62 | [
"ADSL"
] | 1 | 2021-04-13T07:14:34.000Z | 2021-04-13T07:14:34.000Z | 01-Lesson-Plans/04-Pandas/1/Activities/05-Ins_Beta/Unsolved/beta.ipynb | tatianegercina/FinTech | b40687aa362d78674e223eb15ecf14bc59f90b62 | [
"ADSL"
] | 2 | 2021-06-02T03:14:19.000Z | 2022-02-11T23:21:24.000Z | 01-Lesson-Plans/04-Pandas/1/Activities/05-Ins_Beta/Unsolved/beta.ipynb | tatianegercina/FinTech | b40687aa362d78674e223eb15ecf14bc59f90b62 | [
"ADSL"
] | 1 | 2021-05-07T13:26:50.000Z | 2021-05-07T13:26:50.000Z | 32.75 | 75 | 0.885496 | [
[
[
"empty"
]
]
] | [
"empty"
] | [
[
"empty"
]
] |
d09b1f7d36e74c7aaee4b4a54debf6661bde507c | 98,723 | ipynb | Jupyter Notebook | cs224w/time-lag-clstm.ipynb | kidrabit/Data-Visualization-Lab-RND | baa19ee4e9f3422a052794e50791495632290b36 | [
"Apache-2.0"
] | 1 | 2022-01-18T01:53:34.000Z | 2022-01-18T01:53:34.000Z | cs224w/time-lag-clstm.ipynb | kidrabit/Data-Visualization-Lab-RND | baa19ee4e9f3422a052794e50791495632290b36 | [
"Apache-2.0"
] | null | null | null | cs224w/time-lag-clstm.ipynb | kidrabit/Data-Visualization-Lab-RND | baa19ee4e9f3422a052794e50791495632290b36 | [
"Apache-2.0"
] | null | null | null | 829.605042 | 96,264 | 0.966624 | [
[
[
"import numpy as np",
"_____no_output_____"
],
[
"loss = np.load('loss_list.npy')\nGC_list = np.load('GC_list.npy')",
"_____no_output_____"
],
[
"import matplotlib.pyplot as plt",
"_____no_output_____"
],
[
"loss",
"_____no_output_____"
],
[
"fig, axarr = plt.subplots(4,3, figsize=(50, 50))\nfor i in range(4):\n for j in range(3):\n axarr[i,j].imshow(GC_list[i+j], cmap='Blues', vmin=0, vmax=1, extent=(0, len(GC_list[i+j]), len(GC_list[i+j]), 0))\n axarr[i,j].set_ylabel('Affected series')\n axarr[i,j].set_xlabel('Causal series')\n #axarr[i,j].set_title(100 * np.mean(GC_list[i+j]),'%', min(loss_list[i+j]).cpu().numpy())",
"_____no_output_____"
]
]
] | [
"code"
] | [
[
"code",
"code",
"code",
"code",
"code"
]
] |
d09b2608e0cf3d31503b4f3eaf3e49b472311aa1 | 41,375 | ipynb | Jupyter Notebook | notebooks/v2_resnet.ipynb | dmitry-vorobiev/kaggle-deepfake-detection-challenge | d8b545e1944342ba25209f1f62d9ca70314ab73a | [
"Apache-2.0"
] | null | null | null | notebooks/v2_resnet.ipynb | dmitry-vorobiev/kaggle-deepfake-detection-challenge | d8b545e1944342ba25209f1f62d9ca70314ab73a | [
"Apache-2.0"
] | null | null | null | notebooks/v2_resnet.ipynb | dmitry-vorobiev/kaggle-deepfake-detection-challenge | d8b545e1944342ba25209f1f62d9ca70314ab73a | [
"Apache-2.0"
] | null | null | null | 39.069877 | 266 | 0.521934 | [
[
[
"from __future__ import print_function \nfrom __future__ import division\n\nFASTPART=False\nif FASTPART:\n num_frames = 4\nelse:\n num_frames = 16\n \nis_alchemy_used = True\nfrom datetime import datetime\nimport pandas as pd\nimport torch\nimport torch.nn as nn\nimport torch.optim as optim\nimport numpy as np\nimport torchvision\nfrom torchvision import datasets, models, transforms\nimport matplotlib.pyplot as plt\nimport time\nimport os\nimport copy\nfrom skimage import io, transform\nimport torch\nfrom torch.utils import data\nfrom torch.utils.data import DataLoader, SubsetRandomSampler,Dataset\nfrom random import randint\nfrom tqdm import tqdm\nfrom PIL import Image\nfrom random import shuffle\nif is_alchemy_used:\n from catalyst.dl import SupervisedAlchemyRunner as SupervisedRunner\nelse:\n from catalyst.dl import SupervisedRunner\n\nimport random\nfrom scipy import ndimage\nimport torch.nn as nn\nimport torch.nn.functional as F\nfrom network.models import model_selection\nimport math\n\nimport cv2\nfrom albumentations import Compose, RandomCrop, Normalize, HorizontalFlip, Resize, RandomResizedCrop, CenterCrop,PadIfNeeded\nfrom albumentations.pytorch import ToTensor\nfrom alchemy import Logger\ntoken = \"d1dd16f08d518293bcbeddd313b49aa4\"\n\nprint(\"PyTorch Version: \",torch.__version__)\nprint(\"Torchvision Version: \",torchvision.__version__)\n\ndef seed_everything(seed=12345):\n random.seed(seed)\n os.environ['PYTHONHASHSEED'] = str(seed)\n np.random.seed(seed)\n torch.manual_seed(seed)\n torch.cuda.manual_seed(seed)\n torch.backends.cudnn.deterministic = True\n# seed_everything()",
"/home/kb/df_env/lib/python3.7/site-packages/tensorboard/compat/tensorflow_stub/dtypes.py:541: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n _np_qint8 = np.dtype([(\"qint8\", np.int8, 1)])\n/home/kb/df_env/lib/python3.7/site-packages/tensorboard/compat/tensorflow_stub/dtypes.py:542: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n _np_quint8 = np.dtype([(\"quint8\", np.uint8, 1)])\n/home/kb/df_env/lib/python3.7/site-packages/tensorboard/compat/tensorflow_stub/dtypes.py:543: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n _np_qint16 = np.dtype([(\"qint16\", np.int16, 1)])\n/home/kb/df_env/lib/python3.7/site-packages/tensorboard/compat/tensorflow_stub/dtypes.py:544: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n _np_quint16 = np.dtype([(\"quint16\", np.uint16, 1)])\n/home/kb/df_env/lib/python3.7/site-packages/tensorboard/compat/tensorflow_stub/dtypes.py:545: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n _np_qint32 = np.dtype([(\"qint32\", np.int32, 1)])\n/home/kb/df_env/lib/python3.7/site-packages/tensorboard/compat/tensorflow_stub/dtypes.py:550: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n np_resource = np.dtype([(\"resource\", np.ubyte, 1)])\n"
],
[
"from typing import Callable, List, Tuple \n\nimport os\nimport torch\nimport catalyst\n\nfrom catalyst.dl import utils\n\nprint(f\"torch: {torch.__version__}, catalyst: {catalyst.__version__}\")\n\n# os.environ[\"CUDA_VISIBLE_DEVICES\"] = \"0\" # \"\" - CPU, \"0\" - 1 GPU, \"0,1\" - MultiGPU\n\nSEED = 42\nutils.set_global_seed(SEED)\nutils.prepare_cudnn(deterministic=True)",
"torch: 1.3.1, catalyst: 20.02.3\n"
],
[
"BASE_DIR = f'/home/{os.environ[\"USER\"]}/projects/dfdc'\nDATA_DIR = os.path.join(BASE_DIR, 'data/dfdc-videos')\nHDF5_DIR = f'/home/{os.environ[\"USER\"]}/projects/dfdc/data/dfdc-crops/hdf5'\nIMG_DIR = f'/home/{os.environ[\"USER\"]}/projects/dfdc/data/dfdc-crops/webp'\n\n# Models to choose from [resnet, alexnet, vgg, squeezenet, densenet, inception]\nmodel_name = \"resnet\"\n\n# Number of classes in the dataset\nnum_classes = 2\n\n# Batch size for training (change depending on how much memory you have)\nbatch_size = 24#24#32\n\n# Number of epochs to train for \nnum_epochs = 10\n\n# Flag for feature extracting. When False, we finetune the whole model, \n# when True we only update the reshaped layer params\nfeature_extract = False",
"_____no_output_____"
],
[
"def set_parameter_requires_grad(model, feature_extracting):\n if feature_extracting:\n for param in model.parameters():\n param.requires_grad = False\ndef initialize_model(model_name, num_classes, feature_extract, use_pretrained=True):\n # Initialize these variables which will be set in this if statement. Each of these\n # variables is model specific.\n model_ft = None\n input_size = 0\n\n if model_name == \"resnet\":\n \"\"\" Resnet18\n \"\"\"\n model_ft = models.resnet18(pretrained=use_pretrained)\n set_parameter_requires_grad(model_ft, feature_extract)\n num_ftrs = model_ft.fc.in_features\n model_ft.fc = nn.Linear(num_ftrs, num_classes)\n input_size = 224\n\n elif model_name == \"alexnet\":\n \"\"\" Alexnet\n \"\"\"\n model_ft = models.alexnet(pretrained=use_pretrained)\n set_parameter_requires_grad(model_ft, feature_extract)\n num_ftrs = model_ft.classifier[6].in_features\n model_ft.classifier[6] = nn.Linear(num_ftrs,num_classes)\n input_size = 224\n\n elif model_name == \"vgg\":\n \"\"\" VGG11_bn\n \"\"\"\n model_ft = models.vgg11_bn(pretrained=use_pretrained)\n set_parameter_requires_grad(model_ft, feature_extract)\n num_ftrs = model_ft.classifier[6].in_features\n model_ft.classifier[6] = nn.Linear(num_ftrs,num_classes)\n input_size = 224\n\n elif model_name == \"squeezenet\":\n \"\"\" Squeezenet\n \"\"\"\n model_ft = models.squeezenet1_0(pretrained=use_pretrained)\n set_parameter_requires_grad(model_ft, feature_extract)\n model_ft.classifier[1] = nn.Conv2d(512, num_classes, kernel_size=(1,1), stride=(1,1))\n model_ft.num_classes = num_classes\n input_size = 224\n\n elif model_name == \"densenet\":\n \"\"\" Densenet\n \"\"\"\n model_ft = models.densenet121(pretrained=use_pretrained)\n set_parameter_requires_grad(model_ft, feature_extract)\n num_ftrs = model_ft.classifier.in_features\n model_ft.classifier = nn.Linear(num_ftrs, num_classes) \n input_size = 224\n\n elif model_name == \"inception\":\n \"\"\" Inception v3 \n Be careful, expects (299,299) sized images and has auxiliary output\n \"\"\"\n model_ft = models.inception_v3(pretrained=use_pretrained)\n set_parameter_requires_grad(model_ft, feature_extract)\n # Handle the auxilary net\n num_ftrs = model_ft.AuxLogits.fc.in_features\n model_ft.AuxLogits.fc = nn.Linear(num_ftrs, num_classes)\n # Handle the primary net\n num_ftrs = model_ft.fc.in_features\n model_ft.fc = nn.Linear(num_ftrs,num_classes)\n input_size = 299\n\n else:\n print(\"Invalid model name, exiting...\")\n exit()\n \n return model_ft, input_size\n\ndef my_initialize_model(file_checkpoint, model_name, feature_extract, emb_len):\n \n\n model, input_size = initialize_model(model_name, 2, feature_extract, use_pretrained=True)\n# model = model.to(device)\n if file_checkpoint != None:\n print(f'Loading checkpoint {file_checkpoint}')\n checkpoint = torch.load(file_checkpoint)#, map_location=device)\n model.load_state_dict(checkpoint['model_state_dict'])\n \n _ = model.eval()\n \n if file_checkpoint != None:\n del checkpoint\n\n# emb_len = 128\n if emb_len > 2:\n num_ftrs = model.fc.in_features\n model.fc = nn.Linear(num_ftrs, emb_len)\n return model, input_size",
"_____no_output_____"
],
[
"# model, input_size = initialize_model(model_name, num_classes, feature_extract, use_pretrained=True)",
"_____no_output_____"
],
[
"def create_filter(kernel_size = 7, sigma = 3, channels = 3):\n # Create a x, y coordinate grid of shape (kernel_size, kernel_size, 2)\n x_cord = torch.arange(kernel_size)\n x_grid = x_cord.repeat(kernel_size).view(kernel_size, kernel_size)\n y_grid = x_grid.t()\n xy_grid = torch.stack([x_grid, y_grid], dim=-1)\n\n mean = (kernel_size - 1)/2.\n variance = sigma**2.\n\n # Calculate the 2-dimensional gaussian kernel which is\n # the product of two gaussian distributions for two different\n # variables (in this case called x and y)\n gaussian_kernel = (1./(2.*math.pi*variance)) *\\\n torch.exp(\n -torch.sum((xy_grid - mean)**2., dim=-1) /\\\n (2*variance)\n )\n\n # Make sure sum of values in gaussian kernel equals 1.\n gaussian_kernel = gaussian_kernel / torch.sum(gaussian_kernel)\n\n # Reshape to 2d depthwise convolutional weight\n gaussian_kernel = gaussian_kernel.view(1, 1, kernel_size, kernel_size)\n gaussian_kernel = gaussian_kernel.repeat(channels, 1, 1, 1)\n\n gaussian_filter = nn.Conv2d(in_channels=channels, out_channels=channels,\n kernel_size=kernel_size, groups=channels, bias=False\n , padding=(int(kernel_size/2),int(kernel_size/2))\n )\n\n gaussian_filter.weight.data = gaussian_kernel\n gaussian_filter.weight.requires_grad = False\n return gaussian_filter\n",
"_____no_output_____"
],
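[
"# Illustrative sketch, added for clarity; not part of the original notebook.\n# The fixed Gaussian filter defined above can be used to build a high-pass\n# residual (frame minus its blurred copy), which is what the Net* models below\n# feed to their backbones. The tensor here is just a dummy placeholder.\ndemo_filter = create_filter(kernel_size=7, sigma=3, channels=3)\ndummy = torch.rand(1, 3, 224, 224)  # (batch, channels, height, width)\nhigh_pass = dummy - demo_filter(dummy)\nassert high_pass.shape == dummy.shape  # padding of kernel_size // 2 keeps the spatial size",
"_____no_output_____"
],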
[
"def k_to_ij(num_frames, k):\n num_in_row = int(np.sqrt(num_frames))\n q = 0\n for i in range(num_in_row):\n for j in range(num_in_row):\n if q == k:\n return (i, j)\n q += 1\ndef ij_to_k(num_frames, i_in, j_in):\n num_in_row = int(np.sqrt(num_frames))\n q = 0\n for i in range(num_in_row):\n for j in range(num_in_row):\n if i == i_in and j == j_in:\n return q\n q += 1\n ",
"_____no_output_____"
],
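[
"# Quick sanity check, added for clarity; not part of the original notebook.\n# For num_frames=4 the helpers above map a flat frame index k to its (row, col)\n# position in a 2x2 grid and back, which NetResThr uses to tile frames.\nassert k_to_ij(4, 0) == (0, 0) and k_to_ij(4, 3) == (1, 1)\nassert ij_to_k(4, 1, 0) == 2\nfor _k in range(4):\n    assert ij_to_k(4, *k_to_ij(4, _k)) == _k",
"_____no_output_____"
],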
[
"\n\nclass NetLstm(nn.Module):\n def __init__(self, checkpoint_file, model_name, emb_len, hidden_dim):\n super(NetLstm, self).__init__()\n self.backbone, self.input_size = my_initialize_model(checkpoint_file, model_name, False, emb_len)\n self.lstm = nn.LSTM(emb_len, hidden_dim)\n self.hidden2tag = nn.Linear(hidden_dim, 2)\n# self.out2tag = nn.Linear(self.input_size, 2)\n self.filter = create_filter(kernel_size = 7, sigma = 3, channels = 3)\n\n def forward(self, sentences):\n self.lstm.flatten_parameters()\n \n tag_scores_list = torch.zeros((sentences.shape[0], 2), dtype=torch.float32 ).cuda()\n# print(tag_scores_list.shape)\n for i, sentence in enumerate(sentences): \n \n sentence = sentence.permute(0, 3, 1, 2)\n embeds = self.backbone(sentence - self.filter(sentence))\n# print(embeds.shape)\n lstm_out, _ = self.lstm(embeds.view(len(sentence), 1, -1))\n# print(lstm_out.shape)\n tag_space = self.hidden2tag(lstm_out.view(len(sentence), -1)) \n# print(tag_space.shape)\n tag_scores_list[i] = tag_space[-1,:]\n return tag_scores_list\n \nclass NetRes(nn.Module):\n def __init__(self, checkpoint_file, model_name):\n super(NetRes, self).__init__()\n self.backbone, self.input_size = my_initialize_model(checkpoint_file, model_name, False, 2)\n \n self.filter = create_filter(kernel_size = 7, sigma = 3, channels = 3)\n self.norm = transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])\n \n\n def forward(self, sentences):\n \n tag_scores_list = torch.zeros((sentences.shape[0], 2), dtype=torch.float32 ).cuda()\n# print(tag_scores_list.shape)\n for i, sentence in enumerate(sentences): \n \n sentence = sentence.permute(0, 3, 1, 2)\n sentence = sentence - self.filter(sentence)\n for j in range(sentence.shape[0]):\n sentence[j] = self.norm(sentence[j])\n embeds = self.backbone(sentence)\n# print(embeds.shape)\n tag_scores_list[i] = embeds.mean(axis=0)\n return tag_scores_list\n\nclass NetResThr(nn.Module):\n def __init__(self, checkpoint_file, model_name, emb_len, num_frames=4 ):\n super(NetResThr, self).__init__()\n self.backbone, self.input_size = my_initialize_model(checkpoint_file, model_name, False, emb_len)\n self.emb_len = emb_len\n self.num_frames = num_frames\n self.img_in_row = int(np.sqrt(self.num_frames))\n self.sz_in_row = int(self.input_size/self.img_in_row)\n# self.filter = create_filter(kernel_size = 7, sigma = 3, channels = 3)\n self.norm = transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])\n self.fc = nn.Linear(self.num_frames * emb_len, 2)\n\n def forward(self, sentences):\n \n tag_scores_list = torch.zeros((sentences.shape[0], 2), dtype=torch.float32 ).cuda()\n \n for k, sentence in enumerate(sentences): \n \n sentence = sentence.permute(0, 3, 1, 2)\n sentence_generated = torch.zeros(sentence.shape, dtype=torch.float32).cuda()\n\n for frame_out in range(num_frames):\n for frame_in in range(num_frames):\n for pt_out in range(num_frames): \n\n i_in, j_in = k_to_ij(self.num_frames, frame_out)\n i_out, j_out = k_to_ij(self.num_frames, pt_out)\n sentence_generated[frame_out,:,i_out*self.sz_in_row:(i_out+1)*self.sz_in_row, j_out*self.sz_in_row:(j_out+1)*self.sz_in_row] = \\\n sentence[frame_in, :, i_in*self.sz_in_row:(i_in+1)*self.sz_in_row, j_in*self.sz_in_row:(j_in+1)*self.sz_in_row] \n sentence_generated[frame_out] = self.norm(sentence_generated[frame_out] )\n embeds = self.backbone(sentence_generated)\n embeds = torch.flatten(embeds)\n embeds = self.fc( embeds )\n tag_scores_list[k] = embeds#.mean(axis=0)\n return tag_scores_list \n\n# model = 
NetLstm('/home/kb/Documents/best0.pth', 'resnet', 16, 16)\n# model = NetLstm(None, 'resnet', 4, 4)\n# model = NetRes(None, 'resnet')\nemb_len = 32\nmodel = NetResThr(None, 'resnet', emb_len, num_frames)\ninput_size = model.input_size\n\n\n",
"_____no_output_____"
],
[
"import math\n# import os\nimport gc\nimport sys\nimport time\n\nfrom pathlib import Path\n\nfrom functools import partial\nfrom typing import Callable, Dict, Iterator, List, Optional, Tuple, Union\n\n# from tqdm.notebook import tqdm\n\nimport cv2\nimport h5py\nimport numpy as np\nimport pandas as pd\nimport matplotlib.pyplot as plt\n\nimport torch\nimport torchvision\nfrom torch import Tensor",
"_____no_output_____"
],
[
"sys.path.insert(0, os.path.join(BASE_DIR, 'src'))\nfrom dataset.utils import read_labels\nfrom prepare_data import get_file_list",
"_____no_output_____"
],
[
"def show_images(images, cols = 1, titles = None):\n \"\"\"Display a list of images in a single figure with matplotlib.\n \n Parameters\n ---------\n images: List of np.arrays compatible with plt.imshow.\n \n cols (Default = 1): Number of columns in figure (number of rows is \n set to np.ceil(n_images/float(cols))).\n \n titles: List of titles corresponding to each image. Must have\n the same length as titles.\n \"\"\"\n assert((titles is None)or (len(images) == len(titles)))\n n_images = len(images)\n if titles is None: titles = ['Image (%d)' % i for i in range(1,n_images + 1)]\n fig = plt.figure()\n for n, (image, title) in enumerate(zip(images, titles)):\n a = fig.add_subplot(cols, np.ceil(n_images/float(cols)), n + 1)\n if image.ndim == 2:\n plt.gray()\n plt.imshow(image)\n a.set_title(title)\n fig.set_size_inches(np.array(fig.get_size_inches()) * n_images)\n plt.show()",
"_____no_output_____"
],
[
"def check_len_hdf5(path):\n lens = dict()\n for name in os.listdir(path):\n full_path = os.path.join(path, name)\n if os.path.isfile(full_path):\n with h5py.File(full_path, 'r+') as f:\n lens[name] = len(f)\n return lens\n\n\ndef check_len_images(path):\n lens = dict()\n for name in os.listdir(path):\n full_path = os.path.join(path, name)\n if os.path.isdir(full_path):\n lens[name] = len(os.listdir(full_path))\n return lens",
"_____no_output_____"
],
[
"def sparse_frames(n: int, total: int) -> np.ndarray:\n idxs = np.linspace(0, total, min(n, total), dtype=int, endpoint=False)\n rnd_shift = np.random.randint(0, (total - idxs[-1]))\n return idxs + rnd_shift\n\n\ndef rnd_slice_frames(n: int, total: int, stride=1.) -> np.ndarray:\n idxs = np.arange(0, total, stride)[:n].astype(np.uint16)\n rnd_shift = np.random.randint(0, (total - idxs[-1]))\n return idxs + rnd_shift\n\n\ndef create_mask(idxs: np.ndarray, total: int) -> np.ndarray:\n mask = np.zeros(total, dtype=np.bool)\n mask[idxs] = 1\n return mask\n\n\ndef pad(frames: np.ndarray, amount: int, where :str='start') -> np.ndarray:\n dims = np.zeros((frames.ndim, 2), dtype=np.int8)\n pad_dim = 1 if where == 'end' else 0\n dims[0, pad_dim] = amount\n return np.pad(frames, dims, 'constant')",
"_____no_output_____"
],
[
"class FrameSampler():\n def __init__(self, num_frames: int, real_fake_ratio: float, \n p_sparse: float):\n self.num_frames = num_frames\n self.real_fake_ratio = real_fake_ratio\n self.p_sparse = p_sparse\n \n def __call__(self, label: Tuple[int, bool]) -> Callable[[int], np.ndarray]:\n dice = np.random.rand()\n if dice < self.p_sparse:\n return partial(sparse_frames, self.num_frames)\n else:\n # Stored frames: fake - 30, real - 150, \n # the real_fake_ratio should be set to 150 / 30 = 5\n # stride for fake: 5 - (4 * 1) = 1\n # stride for real: 5 - (4 * 0) = 5\n n = self.real_fake_ratio\n stride = n - ((n-1) * int(label))\n return partial(rnd_slice_frames, self.num_frames, stride=stride)",
"_____no_output_____"
],
[
"# sampler = FrameSampler(num_frames=15, real_fake_ratio=100/30, p_sparse=1.)\n",
"_____no_output_____"
],
[
"class ImagesDataset(torch.utils.data.Dataset):\n def __init__(self, base_path: str, size: Tuple[int, int], \n sampler: FrameSampler, \n sub_dirs: Optional[List[str]]=None):\n self.base_path = base_path\n self.size = size\n self.sampler = sampler\n self.df = ImagesDataset._read_annotations(base_path, sub_dirs)\n \n @staticmethod\n def _read_annotations(base_path: str, \n sub_dirs: Optional[List[str]]) -> pd.DataFrame:\n if not os.path.isdir(base_path):\n raise RuntimeError('Unable to access %s' % base_path)\n parts = []\n load_all = sub_dirs is None\n if load_all:\n sub_dirs = os.listdir(base_path)\n for chunk_dir in sub_dirs:\n chunk_path = Path(base_path)/chunk_dir\n if not chunk_path.is_dir():\n if not load_all:\n print('Invalid dir: %s' % str(chunk_path))\n continue\n files = os.listdir(chunk_path)\n df = pd.DataFrame(files, columns=['video'])\n df['label'] = df['video'].str.endswith('_1')\n df['dir'] = chunk_dir\n parts.append(df)\n if len(parts) < 1:\n raise AttributeError('No images were found')\n return pd.concat(parts).reset_index()\n \n @staticmethod\n def read_image_folder(path: str, num_frames: int, size: int,\n sample_fn: Callable[[int], np.ndarray]) -> np.ndarray:\n img_size = (size, size)\n images = []\n files = sorted(os.listdir(path))\n total_frames = len(files)\n if total_frames > 0:\n idxs = sample_fn(total_frames)\n pick = create_mask(idxs, total_frames)\n for i, file in enumerate(files):\n if pick[i]:\n img_path = os.path.join(path, file)\n img = cv2.imread(img_path, cv2.IMREAD_COLOR)\n img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)\n if img.shape[0] > input_size:\n img = img[int(img.shape[0]/2)-int(input_size/2):int(img.shape[0]/2)+int(input_size/2),:,:]\n if img.shape[1] > input_size:\n img = img[:, int(img.shape[1]/2)-int(input_size/2):int(img.shape[1]/2)+int(input_size/2),:]\n img = PadIfNeeded(min_height=input_size, min_width=input_size)(image=img)['image']\n \n# img = cv2.resize(img, img_size, \n# interpolation=cv2.INTER_NEAREST)\n images.append(img)\n return np.stack(images)\n else:\n return np.empty((0, size, size, 3), dtype=np.uint8)\n \n def __len__(self) :\n return len(self.df)\n \n def __getitem__(self, idx) -> Tuple[np.ndarray, int]:\n num_frames, size = self.size\n meta = self.df.iloc[idx]\n label = int(meta.label)\n path = os.path.join(self.base_path, meta.dir, meta.video)\n \n if os.path.isdir(path):\n sample_fn = self.sampler(meta.label)\n frames = ImagesDataset.read_image_folder(\n path, num_frames, size, sample_fn=sample_fn)\n else:\n print('Dir not found: {}'.format(path))\n frames = np.zeros((num_frames, size, size, 3), dtype=np.uint8)\n \n if len(frames) > 0:\n pad_amount = num_frames - len(frames)\n if pad_amount > 0:\n frames = pad(frames, pad_amount, 'start')\n else:\n print('Empty file {}'.format(path))\n frames = np.zeros((num_frames, size, size, 3), dtype=np.uint8)\n \n frames = np.array(frames, dtype=np.float32)\n tr = Compose([\n \n CenterCrop(170, 80),\n Resize(input_size, input_size, interpolation=3, p=1),\n# RandomResizedCrop(input_size, input_size, scale=(0.2, 0.3), \n# ratio=(0.8, 1.2), \n# interpolation=3, always_apply=True, p=1.0),\n \n Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),\n# ToTensor()\n\n ])\n frames =np.asarray([tr(image=frame)['image'] for frame in frames ], dtype=np.float32)\n\n# print(frames.shape)\n# kernel = np.array([[-1, -1, -1],\n# [-1, 8, -1],\n# [-1, -1, -1]])\n# for i in range(frames.shape[0]):\n# for j in range(3):\n# frames[i,:,:,j] = ndimage.convolve(frames[i,:,:,j], kernel)\n \n \n return frames, 
label",
"_____no_output_____"
],
[
"\n\n\n",
"_____no_output_____"
],
[
"def shuffled_idxs(values: np.ndarray, val: int) -> List[int]:\n idxs = (values == val).nonzero()[0]\n idxs = np.random.permutation(idxs)\n return idxs\n\n\nclass BalancedSampler(torch.utils.data.RandomSampler):\n def __init__(self, data_source, replacement=False, num_samples=None):\n \n super().__init__(data_source, replacement, num_samples)\n if not hasattr(data_source, 'df'):\n raise ValueError(\"DataSource must have a 'df' property\")\n \n if not 'label' in data_source.df: \n raise ValueError(\"DataSource.df must have a 'label' column\")\n \n def __iter__(self):\n df = self.data_source.df\n all_labels = df['label'].values\n uniq_labels, label_freq = np.unique(all_labels, return_counts=True)\n rev_freq = (len(all_labels) / label_freq)\n \n idxs = []\n for freq, label in zip(rev_freq, uniq_labels):\n fraction, times = np.modf(freq)\n label_idxs = (all_labels == label).nonzero()[0]\n for _ in range(int(times)):\n label_idxs = np.random.permutation(label_idxs)\n idxs.append(label_idxs)\n if fraction > 0.05:\n label_idxs = np.random.permutation(label_idxs)\n chunk = int(len(label_idxs) * fraction)\n idxs.append(label_idxs[:chunk])\n idxs = np.concatenate(idxs)\n idxs = np.random.permutation(idxs)[:self.num_samples]\n return iter(idxs.tolist())",
"_____no_output_____"
],
[
"\n\ndef get_loader(num_frames=15, real_fake_ratio=1, p_sparse=0.5, input_size=input_size, img_dir=None, sub_dirs=None):\n \n sampler = FrameSampler(num_frames, real_fake_ratio=real_fake_ratio, p_sparse=p_sparse)\n ds = ImagesDataset(img_dir, size=(num_frames, input_size), sampler=sampler,\n sub_dirs =sub_dirs)\n print(len(ds))\n s = BalancedSampler(ds)\n batch_sampler = torch.utils.data.BatchSampler(\n BalancedSampler(ds), \n batch_size=batch_size, \n drop_last=True\n \n )\n dl = torch.utils.data.DataLoader(ds, batch_sampler=batch_sampler)\n return dl\n \nloaders = {}\nloaders['train'] = get_loader(num_frames=num_frames, real_fake_ratio=100/30, p_sparse=1.0, input_size=input_size, \n img_dir='/home/kb/projects/dfdc/data/dfdc-crops/webp',\n sub_dirs= ['dfdc_train_part_%d' % i for i in [1,5,10,15,20,25,30,35]]\n )\nloaders['valid'] = get_loader(num_frames=num_frames, real_fake_ratio=100/30, p_sparse=1.0, input_size=input_size, \n img_dir='/home/kb/projects/dfdc/data/dfdc-crops/webp',\n sub_dirs= ['dfdc_train_part_%d' % i for i in range(40,50)]\n )\n# loaders['test'] = get_loader(num_frames=num_frames, real_fake_ratio=100/30, p_sparse=1.0, input_size=input_size, \n# img_dir='/home/kb/projects/dfdc/data/dfdc-crops/webp')",
"19114\nInvalid dir: /home/kb/projects/dfdc/data/dfdc-crops/webp/dfdc_train_part_42\nInvalid dir: /home/kb/projects/dfdc/data/dfdc-crops/webp/dfdc_train_part_43\nInvalid dir: /home/kb/projects/dfdc/data/dfdc-crops/webp/dfdc_train_part_44\nInvalid dir: /home/kb/projects/dfdc/data/dfdc-crops/webp/dfdc_train_part_47\nInvalid dir: /home/kb/projects/dfdc/data/dfdc-crops/webp/dfdc_train_part_48\nInvalid dir: /home/kb/projects/dfdc/data/dfdc-crops/webp/dfdc_train_part_49\n8394\n"
],
[
"device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")",
"_____no_output_____"
],
[
"\n\nproject = 'dfdc_v2_resnet'\nnum_epochs = 25\n\ngroup = datetime.now().strftime(\"%m_%d_%Y__%H_%M_%S\")\n\nif FASTPART:\n group = f'fast_{group}'\n \nexpnum = 0\nexperiment = f\"exp{expnum}\"\nlogdir = f\"/home/kb/hdd/logs/deepfake/{project}/{group}/{experiment}\"\n\n\nmodel = model.to(device)\nparams_to_update = model.parameters()\nif feature_extract:\n params_to_update = []\n for name,param in model.named_parameters():\n if param.requires_grad == True:\n params_to_update.append(param)\nelse:\n for name,param in model.named_parameters():\n if param.requires_grad == True:\n pass\n\n\ncriterion = torch.nn.CrossEntropyLoss()\noptimizer = optim.AdamW(params=model.parameters(), lr=0.00001)\nscheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer)\n\n# model runner\nrunner = SupervisedRunner()\n\n\n\n\n\nprint(f'----------------Experiment: {experiment}')\nlogger = Logger(\n token=token,\n experiment=experiment,\n group=group,\n project=project,\n)\n\nlogger.close()\n\nrunner.train(\n model=model,\n criterion=criterion,\n optimizer=optimizer,\n scheduler=scheduler,\n loaders=loaders,\n logdir=logdir,\n num_epochs=num_epochs,\n verbose=True,\n monitoring_params={\n \"token\": token,\n \"project\": project,\n \"experiment\": experiment,\n \"group\": group,\n }\n)",
"----------------Experiment: exp0\n1/25 * Epoch (train): 100% 796/796 [51:56<00:00, 3.92s/it, loss=0.507]\n1/25 * Epoch (valid): 100% 349/349 [20:36<00:00, 3.54s/it, loss=0.684]\n[2020-03-09 12:27:47,754] \n1/25 * Epoch 1 (train): _base/lr=1.000e-05 | _base/momentum=0.9000 | _timers/_fps=6.7718 | _timers/batch_time=3.5455 | _timers/data_time=0.9787 | _timers/model_time=2.5668 | loss=0.6262\n1/25 * Epoch 1 (valid): _base/lr=1.000e-05 | _base/momentum=0.9000 | _timers/_fps=6.7853 | _timers/batch_time=3.5381 | _timers/data_time=1.0160 | _timers/model_time=2.5220 | loss=0.6964\n2/25 * Epoch (train): 100% 796/796 [52:19<00:00, 3.94s/it, loss=0.434]\n2/25 * Epoch (valid): 20% 69/349 [04:08<16:36, 3.56s/it, loss=0.669]"
],
[
"# num_frames = num_frames\n# img_in_row = int(np.sqrt(num_frames))\n# sz_in_row = int(input_size/img_in_row)",
"_____no_output_____"
],
[
"# for sentences, labels in loaders['train']:\n \n# for k, sentence in enumerate(sentences): \n# print(f'showing {k}-th video')\n# sentence = sentence.permute(0, 3, 1, 2)\n# sentence_generated = torch.zeros(sentence.shape, dtype=torch.float32).cuda()\n \n# for frame_out in range(num_frames):\n# for frame_in in range(num_frames):\n# for pt_out in range(num_frames): \n \n# i_in, j_in = k_to_ij(num_frames, frame_out)\n# i_out, j_out = k_to_ij(num_frames, pt_out)\n# sentence_generated[frame_out,:,i_out*sz_in_row:(i_out+1)*sz_in_row, j_out*sz_in_row:(j_out+1)*sz_in_row] = \\\n# sentence[frame_in, :, i_in*sz_in_row:(i_in+1)*sz_in_row, j_in*sz_in_row:(j_in+1)*sz_in_row]\n\n \n \n \n \n# for j in range(sentence_generated.shape[0]):\n# print(f'---showing {j}-th frame')\n \n# plt.figure()\n# img = sentence_generated[j,:,:].permute(1,2,0).cpu().numpy() \n# img -= img.min()\n# img /= img.max() / 255.\n# img = np.array(img, dtype=np.uint8)\n# print(f'max {img[j,:,:].max()}, min {img[j,:,:].min()}')\n# plt.imshow( img)\n# plt.show()\n \n \n# break\n# break",
"_____no_output_____"
],
[
"# sentence_generated.shape",
"_____no_output_____"
],
[
"# sentence_generated[0].max()",
"_____no_output_____"
],
[
"# sentence[0].min()",
"_____no_output_____"
]
]
] | [
"code"
] | [
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
d09b34950af924723ed491a032ed039dd16b74cb | 2,279 | ipynb | Jupyter Notebook | Spectrogram/ipynb/spectrogram.ipynb | mdazharuddin1011999/audio_event_detection | 74d484e4981df4be07db123989d5759f524653da | [
"MIT"
] | null | null | null | Spectrogram/ipynb/spectrogram.ipynb | mdazharuddin1011999/audio_event_detection | 74d484e4981df4be07db123989d5759f524653da | [
"MIT"
] | null | null | null | Spectrogram/ipynb/spectrogram.ipynb | mdazharuddin1011999/audio_event_detection | 74d484e4981df4be07db123989d5759f524653da | [
"MIT"
] | null | null | null | 23.020202 | 111 | 0.549364 | [
[
[
"import pandas as pd\nimport librosa\nimport numpy as np\nfrom tqdm import tqdm",
"_____no_output_____"
],
[
"root_audio = '/media/azhar/DATA/iSmriti/internship/audio_event_detection/audio/'",
"_____no_output_____"
],
[
"data = pd.read_csv('main.csv')\ndata.head()",
"_____no_output_____"
],
[
"spectrogram=[]\nfor audio_name in tqdm(data.iloc[:,0]):\n audio = f'{root_audio}{audio_name}'\n y, sr = librosa.load(audio)\n S = librosa.feature.melspectrogram(y=y, sr=sr, n_mels=128, fmax=8000, n_fft=2024, hop_length=1012)\n p = librosa.power_to_db(S, ref=np.max).astype('float32')\n spectrogram.append(p)",
"_____no_output_____"
],
[
"import matplotlib.pyplot as plt\nfrom librosa.display import specshow\nplt.figure(figsize=(20, 4))\nspecshow(spectrogram[0], y_axis='mel', fmax=8000, x_axis='time')\nplt.colorbar(format='%+2.0f dB')\nplt.title('Babycry')\nplt.tight_layout()",
"_____no_output_____"
],
[
"df = pd.DataFrame(columns=['spectrogram','label'])\ndf = df.assign(spectrogram = spectrogram)\ndf = df.assign(label=data.iloc[:,1].tolist())\ndf.to_pickle('spectrogram.csv')",
"_____no_output_____"
]
]
] | [
"code"
] | [
[
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
d09b3658297e7f2cc0584cc09ca1b1f8296cac49 | 18,172 | ipynb | Jupyter Notebook | tests/scikit-learn/03_getting_started_with_iris.ipynb | gopala-kr/ds-notebooks | bc35430ecdd851f2ceab8f2437eec4d77cb59423 | [
"MIT"
] | 1 | 2019-05-10T09:16:23.000Z | 2019-05-10T09:16:23.000Z | tests/scikit-learn/03_getting_started_with_iris.ipynb | gopala-kr/ds-notebooks | bc35430ecdd851f2ceab8f2437eec4d77cb59423 | [
"MIT"
] | null | null | null | tests/scikit-learn/03_getting_started_with_iris.ipynb | gopala-kr/ds-notebooks | bc35430ecdd851f2ceab8f2437eec4d77cb59423 | [
"MIT"
] | 1 | 2019-05-10T09:17:28.000Z | 2019-05-10T09:17:28.000Z | 27.785933 | 226 | 0.424554 | [
[
[
"# Getting started in scikit-learn with the famous iris dataset\n*From the video series: [Introduction to machine learning with scikit-learn](https://github.com/justmarkham/scikit-learn-videos)*",
"_____no_output_____"
]
],
[
[
"#environment setup with watermark\n%load_ext watermark\n%watermark -a 'Gopala KR' -u -d -v -p watermark,numpy,pandas,matplotlib,nltk,sklearn,tensorflow,theano,mxnet,chainer",
"WARNING (theano.tensor.blas): Using NumPy C-API based implementation for BLAS functions.\n"
]
],
[
[
"## Agenda\n\n- What is the famous iris dataset, and how does it relate to machine learning?\n- How do we load the iris dataset into scikit-learn?\n- How do we describe a dataset using machine learning terminology?\n- What are scikit-learn's four key requirements for working with data?",
"_____no_output_____"
],
[
"## Introducing the iris dataset",
"_____no_output_____"
],
[
"",
"_____no_output_____"
],
[
"- 50 samples of 3 different species of iris (150 samples total)\n- Measurements: sepal length, sepal width, petal length, petal width",
"_____no_output_____"
]
],
[
[
"from IPython.display import IFrame\nIFrame('http://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data', width=300, height=200)",
"_____no_output_____"
]
],
[
[
"## Machine learning on the iris dataset\n\n- Framed as a **supervised learning** problem: Predict the species of an iris using the measurements\n- Famous dataset for machine learning because prediction is **easy**\n- Learn more about the iris dataset: [UCI Machine Learning Repository](http://archive.ics.uci.edu/ml/datasets/Iris)",
"_____no_output_____"
],
[
"## Loading the iris dataset into scikit-learn",
"_____no_output_____"
]
],
[
[
"# import load_iris function from datasets module\nfrom sklearn.datasets import load_iris",
"_____no_output_____"
],
[
"# save \"bunch\" object containing iris dataset and its attributes\niris = load_iris()\ntype(iris)",
"_____no_output_____"
],
[
"# print the iris data\nprint(iris.data)",
"[[ 5.1 3.5 1.4 0.2]\n [ 4.9 3. 1.4 0.2]\n [ 4.7 3.2 1.3 0.2]\n [ 4.6 3.1 1.5 0.2]\n [ 5. 3.6 1.4 0.2]\n [ 5.4 3.9 1.7 0.4]\n [ 4.6 3.4 1.4 0.3]\n [ 5. 3.4 1.5 0.2]\n [ 4.4 2.9 1.4 0.2]\n [ 4.9 3.1 1.5 0.1]\n [ 5.4 3.7 1.5 0.2]\n [ 4.8 3.4 1.6 0.2]\n [ 4.8 3. 1.4 0.1]\n [ 4.3 3. 1.1 0.1]\n [ 5.8 4. 1.2 0.2]\n [ 5.7 4.4 1.5 0.4]\n [ 5.4 3.9 1.3 0.4]\n [ 5.1 3.5 1.4 0.3]\n [ 5.7 3.8 1.7 0.3]\n [ 5.1 3.8 1.5 0.3]\n [ 5.4 3.4 1.7 0.2]\n [ 5.1 3.7 1.5 0.4]\n [ 4.6 3.6 1. 0.2]\n [ 5.1 3.3 1.7 0.5]\n [ 4.8 3.4 1.9 0.2]\n [ 5. 3. 1.6 0.2]\n [ 5. 3.4 1.6 0.4]\n [ 5.2 3.5 1.5 0.2]\n [ 5.2 3.4 1.4 0.2]\n [ 4.7 3.2 1.6 0.2]\n [ 4.8 3.1 1.6 0.2]\n [ 5.4 3.4 1.5 0.4]\n [ 5.2 4.1 1.5 0.1]\n [ 5.5 4.2 1.4 0.2]\n [ 4.9 3.1 1.5 0.1]\n [ 5. 3.2 1.2 0.2]\n [ 5.5 3.5 1.3 0.2]\n [ 4.9 3.1 1.5 0.1]\n [ 4.4 3. 1.3 0.2]\n [ 5.1 3.4 1.5 0.2]\n [ 5. 3.5 1.3 0.3]\n [ 4.5 2.3 1.3 0.3]\n [ 4.4 3.2 1.3 0.2]\n [ 5. 3.5 1.6 0.6]\n [ 5.1 3.8 1.9 0.4]\n [ 4.8 3. 1.4 0.3]\n [ 5.1 3.8 1.6 0.2]\n [ 4.6 3.2 1.4 0.2]\n [ 5.3 3.7 1.5 0.2]\n [ 5. 3.3 1.4 0.2]\n [ 7. 3.2 4.7 1.4]\n [ 6.4 3.2 4.5 1.5]\n [ 6.9 3.1 4.9 1.5]\n [ 5.5 2.3 4. 1.3]\n [ 6.5 2.8 4.6 1.5]\n [ 5.7 2.8 4.5 1.3]\n [ 6.3 3.3 4.7 1.6]\n [ 4.9 2.4 3.3 1. ]\n [ 6.6 2.9 4.6 1.3]\n [ 5.2 2.7 3.9 1.4]\n [ 5. 2. 3.5 1. ]\n [ 5.9 3. 4.2 1.5]\n [ 6. 2.2 4. 1. ]\n [ 6.1 2.9 4.7 1.4]\n [ 5.6 2.9 3.6 1.3]\n [ 6.7 3.1 4.4 1.4]\n [ 5.6 3. 4.5 1.5]\n [ 5.8 2.7 4.1 1. ]\n [ 6.2 2.2 4.5 1.5]\n [ 5.6 2.5 3.9 1.1]\n [ 5.9 3.2 4.8 1.8]\n [ 6.1 2.8 4. 1.3]\n [ 6.3 2.5 4.9 1.5]\n [ 6.1 2.8 4.7 1.2]\n [ 6.4 2.9 4.3 1.3]\n [ 6.6 3. 4.4 1.4]\n [ 6.8 2.8 4.8 1.4]\n [ 6.7 3. 5. 1.7]\n [ 6. 2.9 4.5 1.5]\n [ 5.7 2.6 3.5 1. ]\n [ 5.5 2.4 3.8 1.1]\n [ 5.5 2.4 3.7 1. ]\n [ 5.8 2.7 3.9 1.2]\n [ 6. 2.7 5.1 1.6]\n [ 5.4 3. 4.5 1.5]\n [ 6. 3.4 4.5 1.6]\n [ 6.7 3.1 4.7 1.5]\n [ 6.3 2.3 4.4 1.3]\n [ 5.6 3. 4.1 1.3]\n [ 5.5 2.5 4. 1.3]\n [ 5.5 2.6 4.4 1.2]\n [ 6.1 3. 4.6 1.4]\n [ 5.8 2.6 4. 1.2]\n [ 5. 2.3 3.3 1. ]\n [ 5.6 2.7 4.2 1.3]\n [ 5.7 3. 4.2 1.2]\n [ 5.7 2.9 4.2 1.3]\n [ 6.2 2.9 4.3 1.3]\n [ 5.1 2.5 3. 1.1]\n [ 5.7 2.8 4.1 1.3]\n [ 6.3 3.3 6. 2.5]\n [ 5.8 2.7 5.1 1.9]\n [ 7.1 3. 5.9 2.1]\n [ 6.3 2.9 5.6 1.8]\n [ 6.5 3. 5.8 2.2]\n [ 7.6 3. 6.6 2.1]\n [ 4.9 2.5 4.5 1.7]\n [ 7.3 2.9 6.3 1.8]\n [ 6.7 2.5 5.8 1.8]\n [ 7.2 3.6 6.1 2.5]\n [ 6.5 3.2 5.1 2. ]\n [ 6.4 2.7 5.3 1.9]\n [ 6.8 3. 5.5 2.1]\n [ 5.7 2.5 5. 2. ]\n [ 5.8 2.8 5.1 2.4]\n [ 6.4 3.2 5.3 2.3]\n [ 6.5 3. 5.5 1.8]\n [ 7.7 3.8 6.7 2.2]\n [ 7.7 2.6 6.9 2.3]\n [ 6. 2.2 5. 1.5]\n [ 6.9 3.2 5.7 2.3]\n [ 5.6 2.8 4.9 2. ]\n [ 7.7 2.8 6.7 2. ]\n [ 6.3 2.7 4.9 1.8]\n [ 6.7 3.3 5.7 2.1]\n [ 7.2 3.2 6. 1.8]\n [ 6.2 2.8 4.8 1.8]\n [ 6.1 3. 4.9 1.8]\n [ 6.4 2.8 5.6 2.1]\n [ 7.2 3. 5.8 1.6]\n [ 7.4 2.8 6.1 1.9]\n [ 7.9 3.8 6.4 2. ]\n [ 6.4 2.8 5.6 2.2]\n [ 6.3 2.8 5.1 1.5]\n [ 6.1 2.6 5.6 1.4]\n [ 7.7 3. 6.1 2.3]\n [ 6.3 3.4 5.6 2.4]\n [ 6.4 3.1 5.5 1.8]\n [ 6. 3. 4.8 1.8]\n [ 6.9 3.1 5.4 2.1]\n [ 6.7 3.1 5.6 2.4]\n [ 6.9 3.1 5.1 2.3]\n [ 5.8 2.7 5.1 1.9]\n [ 6.8 3.2 5.9 2.3]\n [ 6.7 3.3 5.7 2.5]\n [ 6.7 3. 5.2 2.3]\n [ 6.3 2.5 5. 1.9]\n [ 6.5 3. 5.2 2. ]\n [ 6.2 3.4 5.4 2.3]\n [ 5.9 3. 5.1 1.8]]\n"
]
],
[
[
"## Machine learning terminology\n\n- Each row is an **observation** (also known as: sample, example, instance, record)\n- Each column is a **feature** (also known as: predictor, attribute, independent variable, input, regressor, covariate)",
"_____no_output_____"
]
],
[
[
"# print the names of the four features\nprint(iris.feature_names)",
"['sepal length (cm)', 'sepal width (cm)', 'petal length (cm)', 'petal width (cm)']\n"
],
[
"# print integers representing the species of each observation\nprint(iris.target)",
"[0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0\n 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1\n 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 2 2 2 2 2 2 2 2 2 2 2\n 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2\n 2 2]\n"
],
[
"# print the encoding scheme for species: 0 = setosa, 1 = versicolor, 2 = virginica\nprint(iris.target_names)",
"['setosa' 'versicolor' 'virginica']\n"
]
],
[
[
"- Each value we are predicting is the **response** (also known as: target, outcome, label, dependent variable)\n- **Classification** is supervised learning in which the response is categorical\n- **Regression** is supervised learning in which the response is ordered and continuous",
"_____no_output_____"
],
[
"## Requirements for working with data in scikit-learn\n\n1. Features and response are **separate objects**\n2. Features and response should be **numeric**\n3. Features and response should be **NumPy arrays**\n4. Features and response should have **specific shapes**",
"_____no_output_____"
]
],
[
[
"# check the types of the features and response\nprint(type(iris.data))\nprint(type(iris.target))",
"<class 'numpy.ndarray'>\n<class 'numpy.ndarray'>\n"
],
[
"# check the shape of the features (first dimension = number of observations, second dimensions = number of features)\nprint(iris.data.shape)",
"(150, 4)\n"
],
[
"# check the shape of the response (single dimension matching the number of observations)\nprint(iris.target.shape)",
"(150,)\n"
],
[
"# store feature matrix in \"X\"\nX = iris.data\n\n# store response vector in \"y\"\ny = iris.target",
"_____no_output_____"
]
],
[
[
"## Resources\n\n- scikit-learn documentation: [Dataset loading utilities](http://scikit-learn.org/stable/datasets/)\n- Jake VanderPlas: Fast Numerical Computing with NumPy ([slides](https://speakerdeck.com/jakevdp/losing-your-loops-fast-numerical-computing-with-numpy-pycon-2015), [video](https://www.youtube.com/watch?v=EEUXKG97YRw))\n- Scott Shell: [An Introduction to NumPy](http://www.engr.ucsb.edu/~shell/che210d/numpy.pdf) (PDF)",
"_____no_output_____"
],
[
"## Comments or Questions?\n\n- Email: <[email protected]>\n- Website: http://dataschool.io\n- Twitter: [@justmarkham](https://twitter.com/justmarkham)",
"_____no_output_____"
]
],
[
[
"from IPython.core.display import HTML\ndef css_styling():\n styles = open(\"styles/custom.css\", \"r\").read()\n return HTML(styles)\ncss_styling()",
"_____no_output_____"
],
[
"test complete; Gopal",
"_____no_output_____"
]
]
] | [
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] | [
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code",
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code"
]
] |
d09b3a5ca021b5fde2deee37c8b27dce4786cc73 | 500,920 | ipynb | Jupyter Notebook | examples/forecast/1_ForecastFeatures.ipynb | ankitakashyap05/Merlion | 7dc95fbf64002e22bfce89625bdb76b7a3cbfbfc | [
"BSD-3-Clause"
] | 1 | 2021-09-24T11:03:42.000Z | 2021-09-24T11:03:42.000Z | examples/forecast/1_ForecastFeatures.ipynb | ankitakashyap05/Merlion | 7dc95fbf64002e22bfce89625bdb76b7a3cbfbfc | [
"BSD-3-Clause"
] | null | null | null | examples/forecast/1_ForecastFeatures.ipynb | ankitakashyap05/Merlion | 7dc95fbf64002e22bfce89625bdb76b7a3cbfbfc | [
"BSD-3-Clause"
] | 1 | 2021-12-01T16:20:23.000Z | 2021-12-01T16:20:23.000Z | 577.096774 | 82,696 | 0.942648 | [
[
[
"# How to Use Forecasters in Merlion\n\nThis notebook will guide you through using all the key features of forecasters in Merlion. Specifically, we will explain\n\n1. Initializing a forecasting model (including ensembles and automatic model selectors)\n1. Training the model\n1. Producing a forecast with the model\n1. Visualizing the model's predictions\n1. Quantitatively evaluating the model\n1. Saving and loading a trained model\n1. Simulating the live deployment of a model using a `ForecastEvaluator`\n\nWe will be using a single example time series for this whole notebook. We load it now:",
"_____no_output_____"
]
],
[
[
"import matplotlib.pyplot as plt\nimport numpy as np\n\nfrom merlion.utils.time_series import TimeSeries\nfrom ts_datasets.forecast import M4\n\n# Load the time series\n# time_series is a time-indexed pandas.DataFrame\n# trainval is a time-indexed pandas.Series indicating whether each timestamp is for training or testing\ntime_series, metadata = M4(subset=\"Hourly\")[5]\ntrainval = metadata[\"trainval\"]\n\n# Is there any missing data?\ntimedeltas = np.diff(time_series.index)\nprint(f\"Has missing data: {any(timedeltas != timedeltas[0])}\")\n\n# Visualize the time series and draw a dotted line to indicate the train/test split\nfig = plt.figure(figsize=(10, 6))\nax = fig.add_subplot(111)\nax.plot(time_series)\nax.axvline(time_series[trainval].index[-1], ls=\"--\", lw=\"2\", c=\"k\")\nplt.show()\n\n# Split the time series into train/test splits, and convert it to Merlion format\ntrain_data = TimeSeries.from_pd(time_series[trainval])\ntest_data = TimeSeries.from_pd(time_series[~trainval])\nprint(f\"{len(train_data)} points in train split, \"\n f\"{len(test_data)} points in test split.\")",
"100%|██████████| 414/414 [00:00<00:00, 513.64it/s]\n"
]
],
[
[
"## Model Initialization\n\nIn this notebook, we will use three different forecasting models:\n1. ARIMA (a classic stochastic process model)\n2. Prophet (Facebook's popular time series forecasting model)\n3. MSES (the Multi-Scale Exponential Smoothing model, developed in-house)\n\nLet's start by initializing each of them.",
"_____no_output_____"
]
],
[
[
"# Import models & configs\nfrom merlion.models.forecast.arima import Arima, ArimaConfig\nfrom merlion.models.forecast.prophet import Prophet, ProphetConfig\nfrom merlion.models.forecast.smoother import MSES, MSESConfig\n\n# Import data pre-processing transforms\nfrom merlion.transform.base import Identity\nfrom merlion.transform.resample import TemporalResample\n\n# All models are initialized using the syntax ModelClass(config),\n# where config is a model-specific configuration object. This is where\n# you specify any algorithm-specific hyperparameters, as well as any\n# data pre-processing transforms.\n\n# ARIMA assumes that input data is sampled at a regular interval,\n# so we set its transform to resample at that interval. We must also specify\n# a maximum prediction horizon.\nconfig1 = ArimaConfig(max_forecast_steps=100, order=(20, 1, 5),\n transform=TemporalResample(granularity=\"1h\"))\nmodel1 = Arima(config1)\n\n\n# Prophet has no real assumptions on the input data (and doesn't require\n# a maximum prediction horizon), so we skip data pre-processing by using\n# the Identity transform.\nconfig2 = ProphetConfig(max_forecast_steps=None, transform=Identity())\nmodel2 = Prophet(config2)\n\n\n# MSES assumes that the input data is sampled at a regular interval,\n# and requires us to specify a maximum prediction horizon. We will\n# also specify its look-back hyperparameter to be 60 here\nconfig3 = MSESConfig(max_forecast_steps=100, max_backstep=60,\n transform=TemporalResample(granularity=\"1h\"))\nmodel3 = MSES(config3)",
"_____no_output_____"
]
],
[
[
"Now that we have initialized the individual models, we will also combine them in two different ensembles: `ensemble` simply takes the mean prediction of each individual model, and `selector` selects the best individual model based on its sMAPE (symmetric Mean Average Precision Error). The sMAPE is a metric used to evaluate the quality of a continuous forecast. For ground truth $y \\in \\mathbb{R}^T$ and prediction $\\hat{y} \\in \\mathbb{R}^T$, the sMAPE is computed as\n\n$$\n\\mathrm{sMAPE}(y, \\hat{y}) = \\frac{200}{T} \\sum_{t = 1}^{T} \\frac{\\lvert \\hat{y}_t - y_t \\rvert}{\\lvert\\hat{y}_t\\rvert + \\lvert y_t \\rvert}\n$$",
"_____no_output_____"
]
],
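[
[
"# Reference implementation of the sMAPE formula above, added for illustration\n# only; it is not part of the original notebook. The actual evaluation below\n# uses Merlion's built-in ForecastMetric.sMAPE.\nimport numpy as np\n\ndef smape_example(y_true, y_pred):\n    y_true = np.asarray(y_true, dtype=float)\n    y_pred = np.asarray(y_pred, dtype=float)\n    return 200.0 / len(y_true) * np.sum(np.abs(y_pred - y_true) / (np.abs(y_pred) + np.abs(y_true)))\n\nsmape_example([100, 200, 300], [110, 190, 330])  # ~ 8.06",
"_____no_output_____"
]
],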
[
[
"from merlion.evaluate.forecast import ForecastMetric\nfrom merlion.models.ensemble.combine import Mean, ModelSelector\nfrom merlion.models.ensemble.forecast import ForecasterEnsemble, ForecasterEnsembleConfig\n\n# The ForecasterEnsemble is a forecaster, and we treat it as a first-class model.\n# Its config takes a combiner object, specifying how you want to combine the \n# predictions of individual models in the ensemble. There are two ways to specify\n# the actual models in the ensemble, which we cover below.\n\n# The first way to specify the models in the ensemble is to provide their individual\n# configs when initializing the ForecasterEnsembleConfig. Note that if using this\n# syntax, you must also provide the names of the model classes.\n#\n# The combiner here will simply take the mean prediction of the ensembles here\nensemble_config = ForecasterEnsembleConfig(\n combiner=Mean(),\n model_configs=[(type(model1).__name__, config1),\n (type(model2).__name__, config2),\n (type(model3).__name__, config3)])\nensemble = ForecasterEnsemble(config=ensemble_config)\n\n\n# Alternatively, you can skip giving the individual model configs to the\n# ForecasterEnsembleConfig, and instead directly specify the models when\n# initializing the ForecasterEnsemble itself.\n#\n# The combiner here uses the sMAPE to compare individual models, and\n# selects the model with the lowest sMAPE\nselector_config = ForecasterEnsembleConfig(\n combiner=ModelSelector(metric=ForecastMetric.sMAPE))\nselector = ForecasterEnsemble(\n config=selector_config, models=[model1, model2, model3])",
"_____no_output_____"
]
],
[
[
"## Model Training\n\nAll forecasting models (and ensembles) share the same API for training. The `train()` method returns the model's predictions and standard error of those predictions on the training data. Note that the standard error is just `None` if the model doesn't support uncertainty estimation (this is the case for MSES and ensembles).",
"_____no_output_____"
]
],
[
[
"print(f\"Training {type(model1).__name__}...\")\nforecast1, stderr1 = model1.train(train_data)\n\nprint(f\"\\nTraining {type(model2).__name__}...\")\nforecast2, stderr2 = model2.train(train_data)\n\nprint(f\"\\nTraining {type(model3).__name__}...\")\nforecast3, stderr3 = model3.train(train_data)\n\nprint(\"\\nTraining ensemble...\")\nforecast_e, stderr_e = ensemble.train(train_data)\n\nprint(\"\\nTraining model selector...\")\nforecast_s, stderr_s = selector.train(train_data)\n\nprint(\"Done!\")",
"Training Arima...\n"
]
],
[
[
"## Model Inference\n\nTo obtain a forecast from a trained model, we simply call `model.forecast()` with the Unix timestamps at which we the model to generate a forecast. In many cases, you may obtain these directly from a time series as shown below.",
"_____no_output_____"
]
],
[
[
"# Truncate the test data to ensure that we are within each model's maximum\n# forecast horizon.\nsub_test_data = test_data[:50]\n\n# Obtain the time stamps corresponding to the test data\ntime_stamps = sub_test_data.univariates[sub_test_data.names[0]].time_stamps\n\n# Get the forecast & standard error of each model. These are both\n# merlion.utils.TimeSeries objects. Note that the standard error is None for\n# models which don't support uncertainty estimation (like MSES and all\n# ensembles).\nforecast1, stderr1 = model1.forecast(time_stamps=time_stamps)\nforecast2, stderr2 = model2.forecast(time_stamps=time_stamps)\n\n# You may optionally specify a time series prefix as context. If one isn't\n# specified, the prefix is assumed to be the training data. Here, we just make\n# this dependence explicit. More generally, this feature is useful if you want\n# to use a pre-trained model to make predictions on data further in the future\n# from the last time it was trained.\nforecast3, stderr3 = model3.forecast(time_stamps=time_stamps, time_series_prev=train_data)\n\n# The same options are available for ensembles as well, though the stderr is None\nforecast_e, stderr_e = ensemble.forecast(time_stamps=time_stamps)\nforecast_s, stderr_s = selector.forecast(time_stamps=time_stamps, time_series_prev=train_data)",
"_____no_output_____"
]
],
[
[
"## Model Visualization and Quantitative Evaluation\n\nIt is fairly transparent to visualize a model's forecast and also quantitatively evaluate the forecast, using standard metrics like sMAPE. We show examples for all five models below.\n\nBelow, we quantitatively evaluate the models using the sMAPE metric. However, the `ForecastMetric` enum includes a number of other options as well. In general, you may use the syntax\n```\nForecastMetric.<metric_name>.value(ground_truth=ground_truth, predict=forecast)\n```\nwhere `<metric_name>` is the name of the evaluation metric (see the API docs for details and more options), `ground_truth` is the original time series, and `forecast` is the forecast returned by the model. We show concrete examples with `ForecastMetric.sMAPE` below.",
"_____no_output_____"
]
],
[
[
"from merlion.evaluate.forecast import ForecastMetric\n\n# We begin by computing the sMAPE of ARIMA's forecast (scale is 0 to 100)\nsmape1 = ForecastMetric.sMAPE.value(ground_truth=sub_test_data,\n predict=forecast1)\nprint(f\"{type(model1).__name__} sMAPE is {smape1:.3f}\")\n\n# Next, we can visualize the actual forecast, and understand why it\n# attains this particular sMAPE. Since ARIMA supports uncertainty\n# estimation, we plot its error bars too.\nfig, ax = model1.plot_forecast(time_series=sub_test_data,\n plot_forecast_uncertainty=True)\nplt.show()",
"Arima sMAPE is 3.768\n"
],
[
"# We begin by computing the sMAPE of Prophet's forecast (scale is 0 to 100)\nsmape2 = ForecastMetric.sMAPE.value(sub_test_data, forecast2)\nprint(f\"{type(model2).__name__} sMAPE is {smape2:.3f}\")\n\n# Next, we can visualize the actual forecast, and understand why it\n# attains this particular sMAPE. Since Prophet supports uncertainty\n# estimation, we plot its error bars too.\n# Note that we can specify time_series_prev here as well, though it\n# will not be visualized unless we also supply the keyword argument\n# plot_time_series_prev=True.\nfig, ax = model2.plot_forecast(time_series=sub_test_data,\n time_series_prev=train_data,\n plot_forecast_uncertainty=True)\nplt.show()",
"Prophet sMAPE is 3.087\n"
],
[
"# We begin by computing the sMAPE of MSES's forecast (scale is 0 to 100)\nsmape3 = ForecastMetric.sMAPE.value(sub_test_data, forecast3)\nprint(f\"{type(model3).__name__} sMAPE is {smape3:.3f}\")\n\n# Next, we visualize the actual forecast, and understand why it \n# attains this particular sMAPE.\nfig, ax = model3.plot_forecast(time_series=sub_test_data,\n plot_forecast_uncertainty=True)\nplt.show()",
"MSES sMAPE is 4.377\n"
],
[
"# Compute the sMAPE of the ensemble's forecast (scale is 0 to 100)\nsmape_e = ForecastMetric.sMAPE.value(sub_test_data, forecast_e)\nprint(f\"Ensemble sMAPE is {smape_e:.3f}\")\n\n# Visualize the forecast.\nfig, ax = ensemble.plot_forecast(time_series=sub_test_data,\n plot_forecast_uncertainty=True)\nplt.show()",
"Ensemble sMAPE is 2.497\n"
],
[
"# Compute the sMAPE of the selector's forecast (scale is 0 to 100)\nsmape_s = ForecastMetric.sMAPE.value(sub_test_data, forecast_s)\nprint(f\"Selector sMAPE is {smape_s:.3f}\")\n\n# Visualize the forecast.\nfig, ax = selector.plot_forecast(time_series=sub_test_data,\n plot_forecast_uncertainty=True)\nplt.show()",
"Selector sMAPE is 3.768\n"
]
],
[
[
"## Saving & Loading Models\n\nAll models have a `save()` method and `load()` class method. Models may also be loaded with the assistance of the `ModelFactory`, which works for arbitrary models. The `save()` method creates a new directory at the specified path, where it saves a `json` file representing the model's config, as well as a binary file for the model's state.\n\nWe will demonstrate these behaviors using our `Prophet` model (`model2`) for concreteness.",
"_____no_output_____"
]
],
[
[
"import json\nimport os\nimport pprint\nfrom merlion.models.factory import ModelFactory\n\n# Save the model\nos.makedirs(\"models\", exist_ok=True)\npath = os.path.join(\"models\", \"prophet\")\nmodel2.save(path)\n\n# Print the config saved\npp = pprint.PrettyPrinter()\nwith open(os.path.join(path, \"config.json\")) as f:\n print(f\"{type(model2).__name__} Config\")\n pp.pprint(json.load(f))\n\n# Load the model using Prophet.load()\nmodel2_loaded = Prophet.load(dirname=path)\n\n# Load the model using the ModelFactory\nmodel2_factory_loaded = ModelFactory.load(name=\"Prophet\", model_path=path)",
"Prophet Config\n{'add_seasonality': 'auto',\n 'daily_seasonality': 'auto',\n 'dim': 1,\n 'max_forecast_steps': None,\n 'model_path': '/Users/abhatnagar/Desktop/Merlion_public/examples/forecast/models/prophet/model.pkl',\n 'target_seq_index': 0,\n 'transform': {'name': 'Identity'},\n 'uncertainty_samples': 100,\n 'weekly_seasonality': 'auto',\n 'yearly_seasonality': 'auto'}\n"
]
],
[
[
"We can do the same exact thing with ensembles! Note that the ensemble saves each of its sub-models in a different sub-directory, which it tracks manually. Additionally, the combiner (which is saved in the `ForecasterEnsembleConfig`), keeps track of the sMAPE achieved by each model (the `metric_values` key).",
"_____no_output_____"
]
],
[
[
"# Save the selector\npath = os.path.join(\"models\", \"selector\")\nselector.save(path)\n\n# Print the config saved. Note that we've saved all individual models,\n# and their paths are specified under the model_paths key.\npp = pprint.PrettyPrinter()\nwith open(os.path.join(path, \"config.json\")) as f:\n print(f\"Selector Config\")\n pp.pprint(json.load(f))\n\n# Load the selector\nselector_loaded = ForecasterEnsemble.load(dirname=path)\n\n# Load the selector using the ModelFactory\nselector_factory_loaded = ModelFactory.load(name=\"ForecasterEnsemble\", model_path=path)",
"Selector Config\n{'combiner': {'abs_score': False,\n 'metric': 'ForecastMetric.sMAPE',\n 'metric_values': [5.479063255045728,\n 8.611665684950744,\n 17.72980301555831],\n 'n_models': 3,\n 'name': 'ModelSelector'},\n 'dim': 1,\n 'max_forecast_steps': None,\n 'model_paths': [['Arima',\n '/Users/abhatnagar/Desktop/Merlion_public/examples/forecast/models/selector/0'],\n ['Prophet',\n '/Users/abhatnagar/Desktop/Merlion_public/examples/forecast/models/selector/1'],\n ['MSES',\n '/Users/abhatnagar/Desktop/Merlion_public/examples/forecast/models/selector/2']],\n 'target_seq_index': 0,\n 'transform': {'name': 'Identity'}}\n"
]
],
[
[
"## Simulating Live Model Deployment\n\nA typical model deployment scenario is as follows:\n1. Train an initial model on some recent historical data\n1. At a regular interval `cadence`, obtain the model's forecast for a certain `horizon`\n1. At a regular interval `retrain_freq`, retrain the entire model on the most recent data\n1. Optionally, specify a maximum amount of data (`train_window`) that the model should use for training\n\nWe provide a `ForecastEvaluator` object which simulates the above deployment scenario, and also allows a user to evaluate the quality of the forecaster according to an evaluation metric of their choice. We illustrate two examples below, using ARIMA for the first example, and the model selector for the second.",
"_____no_output_____"
]
],
[
[
"from merlion.evaluate.forecast import ForecastEvaluator, ForecastEvaluatorConfig, ForecastMetric\n\ndef create_evaluator(model):\n # Re-initialize the model, so we can re-train it from scratch\n model.reset()\n\n # Create an evaluation pipeline for the model, where we\n # -- get the model's forecast every hour\n # -- have the model forecast for a horizon of 6 hours\n # -- re-train the model every 12 hours\n # -- when we re-train the model, retrain it on only the past 2 weeks of data\n evaluator = ForecastEvaluator(\n model=model, config=ForecastEvaluatorConfig(\n cadence=\"1h\", horizon=\"6h\", retrain_freq=\"12h\", train_window=\"14d\")\n )\n return evaluator",
"_____no_output_____"
]
],
[
[
"First, let's evaluate ARIMA.",
"_____no_output_____"
]
],
[
[
"# Obtain the results of running the evaluation pipeline for ARIMA.\n# These result objects are to be treated as a black box, and should be\n# passed directly to the evaluator's evaluate() method.\nmodel1_evaluator = create_evaluator(model1)\nmodel1_train_result, model1_test_result = model1_evaluator.get_predict(\n train_vals=train_data, test_vals=test_data)",
"ForecastEvaluator: 100%|██████████| 169200/169200 [00:12<00:00, 13577.69it/s]\n"
],
[
"# Evaluate ARIMA's sMAPE and RMSE\nsmape = model1_evaluator.evaluate(\n ground_truth=test_data,\n predict=model1_test_result,\n metric=ForecastMetric.sMAPE)\nrmse = model1_evaluator.evaluate(\n ground_truth=test_data,\n predict=model1_test_result,\n metric=ForecastMetric.RMSE)\nprint(f\"{type(model1).__name__} sMAPE: {smape:.3f}\")\nprint(f\"{type(model1).__name__} RMSE: {rmse:.3f}\")",
"Arima sMAPE: 2.015\nArima RMSE: 143.416\n"
]
],
[
[
"Next, we will evaluate the ensemble (taking the mean prediction of ARIMA, Prophet, and MSES every time the models are called).",
"_____no_output_____"
]
],
[
[
"# Obtain the results of running the evaluation pipeline for the ensemble.\n# These result objects are to be treated as a black box, and should be\n# passed directly to the evaluator's evaluate() method.\nensemble_evaluator = create_evaluator(ensemble)\nensemble_train_result, ensemble_test_result = ensemble_evaluator.get_predict(\n train_vals=train_data, test_vals=test_data)",
"INFO:merlion.models.ensemble.forecast:Training model 1/3...\nINFO:merlion.models.ensemble.forecast:Training model 2/3...\nINFO:merlion.models.forecast.prophet:Add seasonality 24\nINFO:fbprophet:Disabling yearly seasonality. Run prophet with yearly_seasonality=True to override this.\nINFO:merlion.models.ensemble.forecast:Training model 3/3...\nForecastEvaluator: 26%|██▌ | 43200/169200 [00:06<00:21, 5956.61it/s]INFO:merlion.models.ensemble.forecast:Training model 1/3...\nINFO:merlion.models.ensemble.forecast:Training model 2/3...\nINFO:merlion.models.forecast.prophet:Add seasonality 24\nINFO:fbprophet:Disabling yearly seasonality. Run prophet with yearly_seasonality=True to override this.\nINFO:fbprophet:Disabling weekly seasonality. Run prophet with weekly_seasonality=True to override this.\nINFO:merlion.models.ensemble.forecast:Training model 3/3...\nForecastEvaluator: 51%|█████ | 86400/169200 [00:20<00:14, 5592.43it/s]INFO:merlion.models.ensemble.forecast:Training model 1/3...\nINFO:merlion.models.ensemble.forecast:Training model 2/3...\nINFO:merlion.models.forecast.prophet:Add seasonality 24\nINFO:fbprophet:Disabling yearly seasonality. Run prophet with yearly_seasonality=True to override this.\nINFO:fbprophet:Disabling weekly seasonality. Run prophet with weekly_seasonality=True to override this.\nINFO:merlion.models.ensemble.forecast:Training model 3/3...\nForecastEvaluator: 77%|███████▋ | 129600/169200 [00:32<00:06, 6221.19it/s]INFO:merlion.models.ensemble.forecast:Training model 1/3...\nINFO:merlion.models.ensemble.forecast:Training model 2/3...\nINFO:merlion.models.forecast.prophet:Add seasonality 24\nINFO:fbprophet:Disabling yearly seasonality. Run prophet with yearly_seasonality=True to override this.\nINFO:fbprophet:Disabling weekly seasonality. Run prophet with weekly_seasonality=True to override this.\nINFO:merlion.models.ensemble.forecast:Training model 3/3...\nForecastEvaluator: 100%|██████████| 169200/169200 [00:44<00:00, 3763.04it/s]\n"
],
[
"# Evaluate the selector's sMAPE and RMSE\nsmape = ensemble_evaluator.evaluate(\n ground_truth=test_data,\n predict=ensemble_test_result,\n metric=ForecastMetric.sMAPE)\nrmse = ensemble_evaluator.evaluate(\n ground_truth=test_data,\n predict=ensemble_test_result,\n metric=ForecastMetric.RMSE)\nprint(f\"Ensemble sMAPE: {smape:.3f}\")\nprint(f\"Ensemble RMSE: {rmse:.3f}\")",
"Ensemble sMAPE: 2.893\nEnsemble RMSE: 210.927\n"
]
]
] | [
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] | [
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
]
] |
d09b3b3cbbbacabcc759e6594426870b8ecb2dff | 28,982 | ipynb | Jupyter Notebook | Lecture-Notes/2018/Day2.ipynb | unmeshvrije/python-for-beginners | d8943130bfd2499a458d92d5f6db97170fd53810 | [
"Apache-2.0"
] | 7 | 2019-08-13T15:36:50.000Z | 2021-09-09T20:37:21.000Z | Lecture-Notes/2018/Day2.ipynb | unmeshvrije/python-for-beginners | d8943130bfd2499a458d92d5f6db97170fd53810 | [
"Apache-2.0"
] | 2 | 2019-07-04T08:30:38.000Z | 2019-07-16T13:44:45.000Z | Lecture-Notes/2018/Day2.ipynb | unmeshvrije/python-for-beginners | d8943130bfd2499a458d92d5f6db97170fd53810 | [
"Apache-2.0"
] | 4 | 2019-07-29T10:57:24.000Z | 2021-03-17T15:02:36.000Z | 18.86849 | 497 | 0.454662 | [
[
[
"# Strings",
"_____no_output_____"
]
],
[
[
"name = \"Robin\"",
"_____no_output_____"
]
],
[
[
"## Multi line strings",
"_____no_output_____"
]
],
[
[
"paragraph = \"I am thinking of writing something that spans\"\\\n\"multiple lines and Nobody is helping me with that. So here\"\\\n\"is me typing something random\"",
"_____no_output_____"
],
[
"print(paragraph)",
"I am thinking of writing something that spansmultiple lines and Nobody is helping me with that. So hereis me typing something random\n"
],
[
"# \\n represents Newline\n",
"_____no_output_____"
],
[
"paragraph = \"I am thinking of writing something that spans\\n\\\nmultiple lines and Nobody is helping me with that. So here\\n\\\nis me typing something random\"",
"_____no_output_____"
],
[
"print(paragraph)",
"I am thinking of writing something that spans\nmultiple lines and Nobody is helping me with that. So here\nis me typing something random\n"
]
],
[
[
"## String indices",
"_____no_output_____"
]
],
[
[
"sample_string = \"Sorry Madam\"",
"_____no_output_____"
],
[
"# Subscipt operator : []\nsample_string[1] # sample_string of 1",
"_____no_output_____"
],
[
"sample_string[2]",
"_____no_output_____"
],
[
"'''\n*******************************************\nExample of a multi-line comment:\nTo access the first character of the string\nyou need to use the index 0\n*******************************************\n'''\nsample_string[0]",
"_____no_output_____"
],
[
"'''\nTo access a part of string, use a colon notation in the\nsubscript operator []\n\n'''\nsample_string[0:5]",
"_____no_output_____"
],
[
"# give me the string madam from the sample_string\nsample_string[6:11]",
"_____no_output_____"
],
[
"# Slice the string from index 6 and go until the end\nsample_string[6:]",
"_____no_output_____"
],
[
"# give me string \"Sorry\" without writing 0 as index\nsample_string[:5]",
"_____no_output_____"
],
[
"print(sample_string)",
"Sorry Madam\n"
],
[
"# Negative index: -1 will access the last element\nprint(sample_string[-1])",
"m\n"
],
[
"# access first element with negative index\nprint (sample_string[-11])",
"S\n"
],
[
"# This index is invalid\nprint (sample_string[-12])",
"_____no_output_____"
],
[
"sample_string[11]",
"_____no_output_____"
],
[
"# Python tries to slice the string\n# by reading from left to right\n# Indices in the statement below are wrong\nsample_string[-4:-10]",
"_____no_output_____"
],
[
"sample_string[-10:-4]",
"_____no_output_____"
],
[
"sample_string[0:5]",
"_____no_output_____"
],
[
"'''\nSlice the string from index 0 to 4\nwith the jump of 2\n'''\n\nsample_string[0:5:2]",
"_____no_output_____"
],
[
"sample_string[-5:0] # This will not work\nsample_string[-5:] # will give you the desired result",
"_____no_output_____"
],
[
"sample_string2 = \"I love Python\"",
"_____no_output_____"
],
[
"# Slice this string and give me every third characater\n\n# Expected outout : \"Io tn\"",
"_____no_output_____"
],
[
"# Pythonic\nprint(sample_string2[0::3])\nprint(sample_string2[::3]) # most pythonic\nprint(sample_string2[0:14:3])\nprint(sample_string2[0:15:3])",
"Io tn\nIo tn\nIo tn\nIo tn\n"
],
[
"num1 = \"5\"\nnum2 = \"3\"\nprint(num1+ num2)",
"53\n"
],
[
"sample_string2",
"_____no_output_____"
],
[
"print(sample_string2[0]+sample_string2[7:14])",
"IPython\n"
],
[
"print(sample_string2[0]+ sample_string2[2]+sample_string2[7:14])",
"IlPython\n"
],
[
"print(sample_string, sample_string2)",
"Sorry Madam I love Python\n"
],
[
"print(sample_string + sample_string2)",
"Sorry MadamI love Python\n"
],
[
"print(sample_string + \"!! \"+ sample_string2)",
"Sorry Madam!! I love Python\n"
],
[
"# to convert a string into lower case characters\nsample_string.lower()",
"_____no_output_____"
],
[
"sample_string.upper()",
"_____no_output_____"
],
[
"sample_string.count()",
"_____no_output_____"
],
[
"type(sample_string)",
"_____no_output_____"
],
[
"help(str.count)",
"Help on method_descriptor:\n\ncount(...)\n S.count(sub[, start[, end]]) -> int\n \n Return the number of non-overlapping occurrences of substring sub in\n string S[start:end]. Optional arguments start and end are\n interpreted as in slice notation.\n\n"
],
[
"sample_string",
"_____no_output_____"
],
[
"sample_string.count('a')",
"_____no_output_____"
],
[
"fruit = \"banana\"",
"_____no_output_____"
],
[
"#it has overlapping word ana\nfruit.count('ana')",
"_____no_output_____"
],
[
"sample_string.count('r',0,3)",
"_____no_output_____"
],
[
"sample_string\n",
"_____no_output_____"
],
[
"# Find length of the string\n# i.e. number of characters in the string\nlen(sample_string)",
"_____no_output_____"
],
[
"help(len)",
"Help on built-in function len in module builtins:\n\nlen(obj, /)\n Return the number of items in a container.\n\n"
],
[
"name = \"Jeroen\"\nage = 27\ncountry = \"Netherlands\"\n",
"_____no_output_____"
],
[
"print(\"Hoi, I am {}. I am {} years old.I come from {}\".format(name,age, country) )",
"Hoi, I am Jeroen. I am 27 years old.I come from Netherlands\n"
],
[
"fruit",
"_____no_output_____"
],
[
"fruit2=\"guanabana\"",
"_____no_output_____"
],
[
"fruit == 'banana'",
"_____no_output_____"
],
[
"is_it_raining = False",
"_____no_output_____"
]
],
[
[
"### Conditional operators\n```\n== : Compare two expressions for equality\n!= : compare for inequality\n< : compare less than\n> : greater than\n<= : less than or equal to\n>= : greater than or equal to\n```\n\n",
"_____no_output_____"
]
],
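    [
        [
            "A few quick examples of the remaining comparison operators from the list above, using plain numbers that are made up just for illustration:",
            "_____no_output_____"
        ],
        [
            "x = 7\nprint(x < 10)   # less than\nprint(x > 10)   # greater than\nprint(x <= 7)   # less than or equal to\nprint(x >= 8)   # greater than or equal to",
            "_____no_output_____"
        ]
    ],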
[
[
"fruit == 'banana'",
"_____no_output_____"
],
[
"fruit != 'orange'",
"_____no_output_____"
],
[
"print(\"fruit =\", fruit)\nprint(\"fruit2 =\", fruit2)",
"fruit = banana\nfruit2 = guanabana\n"
],
[
"fruit[0:4] == fruit2[5:9]",
"_____no_output_____"
]
],
[
[
"### Conditional statements",
"_____no_output_____"
]
],
[
[
"it_is_raining = False\nit_is_sunny = not it_is_raining\n\nif it_is_sunny:\n print(\"I will go swimming in Sloterplas\")\nelse:\n print(\"I will work on Python (coding)\")\n ",
"I will go swimming in Sloterplas\n"
],
[
"it_is_raining = True\nit_is_sunny = not it_is_raining\n\nif it_is_sunny:\n print(\"I will go swimming in Sloterplas\")\n print(\"I will run\")\nelse:\n print(\"I will work on Python (coding)\")\n ",
"I will work on Python (coding)\n"
],
[
"# Accept a number from user (input)\n# If the number is even, print \"Hurray\"\n# Else print \"Meah\"",
"_____no_output_____"
],
[
"number = int(input(\"Enter a number : \"))\nif number%2 == 0:\n print (\"Hurray\")\nelse:\n print(\"Meah\")",
"Enter a number : -5\nMeah\n"
],
[
"x = 3 # Assignment\nprint(x)\nprint(x%2)",
"3\n1\n"
],
[
"time = float(input(\"Enter a number between 0 and 23\"))\nif time >= 0 and time <= 8:\n print(\"I am asleep\")\nelif time >8 and time <= 10:\n print(\"Morning rituals\")\nelif time > 10 and time <= 13:\n print(\"I am Pythoning\")\nelif time >13 and time <= 14:\n print(\"I am lunching\")\nelif time >14 and time < 17:\n print(\"I am researching\")\nelse:\n print(\"I am having fun\")",
"Enter a number between 0 and 2316\nI am researching\n"
]
],
[
[
"### Loops\n",
"_____no_output_____"
]
],
[
[
"# Not so smart way of printing Hello 5 times\nprint(\"Hello\")\nprint(\"Hello\")\nprint(\"Hello\")\nprint(\"Hello\")\nprint(\"Hello\")",
"Hello\nHello\nHello\nHello\nHello\n"
],
[
"# Smart way of printing Hello 5 times\nfor i in range(5):\n print(\"Hello\")",
"Hello\nHello\nHello\nHello\nHello\n"
],
[
"for i in range(5):\n print(i)",
"0\n1\n2\n3\n4\n"
],
[
"for i in range(1,6):\n print(i)",
"1\n2\n3\n4\n5\n"
],
[
"for u in range(1,6):\n print(u, \")\", \"Hello\")",
"1 ) Hello\n2 ) Hello\n3 ) Hello\n4 ) Hello\n5 ) Hello\n"
],
[
"sample_string\n\n",
"_____no_output_____"
],
[
"'''\n a way of accessing individual characters in string\n by index\n'''\nsome_number = 15\nfor i in range(len(sample_string)):\n print(\"[\",str(i),\"]:\", sample_string[i], some_number)",
"[ 0 ]: S 15\n[ 1 ]: o 15\n[ 2 ]: r 15\n[ 3 ]: r 15\n[ 4 ]: y 15\n[ 5 ]: 15\n[ 6 ]: M 15\n[ 7 ]: a 15\n[ 8 ]: d 15\n[ 9 ]: a 15\n[ 10 ]: m 15\n"
]
],
[
[
"```\n i = 0\n print(\"[\",str(i),\"]:\", sample_string[0], some_number)\n i = 1\n print(\"[\",str(i),\"]:\", sample_string[1], some_number)\n i = 2\n print(\"[\",str(i),\"]:\", sample_string[2], some_number)\n i = 3\n print(\"[\",str(i),\"]:\", sample_string[3], some_number)\n ...\n ...\n \n i = 10\n print(\"[\",str(i),\"]:\", sample_string[10], some_number)\n```",
"_____no_output_____"
]
],
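    [
        [
            "As an aside, the same index-plus-character loop can also be written with the built-in enumerate(), which hands you the index and the character together:",
            "_____no_output_____"
        ],
        [
            "# same idea as the range(len(...)) loop above, written with enumerate\nfor i, ch in enumerate(sample_string):\n    print(\"[\", str(i), \"]:\", ch)",
            "_____no_output_____"
        ]
    ],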
[
[
"len(sample_string)",
"_____no_output_____"
]
],
[
[
"```\n n = input()\n\nn= 12\n\n12\n24\n36\n48\n60\n72\n84\n96\n108\n120\n\nn = 4\n\n4\n8\n12\n16\n20\n24\n\n.\n40\n\n```",
"_____no_output_____"
]
],
[
[
"n = int(input())\nfor i in range(1,11):\n print(i*n)",
"12\n12\n24\n36\n48\n60\n72\n84\n96\n108\n120\n"
]
]
] | [
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] | [
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
]
] |
d09b407f0362b6a710e346fe44b06b4c23d77efe | 21,544 | ipynb | Jupyter Notebook | notebooks/ssu-search-Copy4.ipynb | smoe/SSUsearch | ecc39e9526cd30505afe558c5f563680f8adbfbd | [
"BSD-3-Clause"
] | 1 | 2016-05-18T02:36:31.000Z | 2016-05-18T02:36:31.000Z | notebooks/ssu-search-Copy4.ipynb | smoe/SSUsearch | ecc39e9526cd30505afe558c5f563680f8adbfbd | [
"BSD-3-Clause"
] | 4 | 2015-03-13T01:01:11.000Z | 2019-11-26T23:19:23.000Z | notebooks/ssu-search-Copy4.ipynb | smoe/SSUsearch | ecc39e9526cd30505afe558c5f563680f8adbfbd | [
"BSD-3-Clause"
] | 3 | 2015-01-22T11:37:15.000Z | 2021-08-30T11:17:34.000Z | 28.384717 | 269 | 0.561781 | [
[
[
"###Set up working directory",
"_____no_output_____"
]
],
[
[
"cd /usr/local/notebooks",
"/usr/local/notebooks\n"
],
[
"mkdir -p ./workdir",
"_____no_output_____"
],
[
"#check seqfile files to process in data directory (make sure you still remember the data directory)\n!ls ./data/test/data",
"1c.fa 1d.fa 2c.fa 2d.fa\r\n"
]
],
[
[
"#README\n\n## This part of pipeline search for the SSU rRNA gene fragments, classify them, and extract reads aligned specific region. It is also heavy lifting part of the whole pipeline (more cpu will help).\n\n## This part works with one seqfile a time. You just need to change the \"Seqfile\" and maybe other parameters in the two cells bellow.\n\n## To run commands, click \"Cell\" then \"Run All\". After it finishes, you will see \"\\*** pipeline runs successsfully :)\" at bottom of this pape.\n\n##If your computer has many processors, there are two ways to make use of the resource:\n\n1. Set \"Cpu\" higher number.\n\n2. make more copies of this notebook (click \"File\" then \"Make a copy\" in menu bar), so you can run the step on multiple files at the same time.\n\n(Again we assume the \"Seqfile\" is quality trimmed.)\n\n###Here we will process one file at a time; set the \"Seqfile\" variable to the seqfile name to be be processed\n###First part of seqfile basename (separated by \".\") will be the label of this sample, so named it properly.\ne.g. for \"/usr/local/notebooks/data/test/data/1c.fa\", \"1c\" will the label of this sample.",
"_____no_output_____"
]
],
[
[
"Seqfile='./data/test/data/2d.fa'",
"_____no_output_____"
]
],
[
[
"###Other parameters to set",
"_____no_output_____"
]
],
[
[
"Cpu='2' # number of maxixum threads for search and alignment\nHmm='./data/SSUsearch_db/Hmm.ssu.hmm' # hmm model for ssu\nGene='ssu'\nScript_dir='./SSUsearch/scripts'\nGene_model_org='./data/SSUsearch_db/Gene_model_org.16s_ecoli_J01695.fasta'\nAli_template='./data/SSUsearch_db/Ali_template.silva_ssu.fasta'\n\nStart='577' #pick regions for de novo clustering\nEnd='727'\nLen_cutoff='100' # min length for reads picked for the region\n\nGene_tax='./data/SSUsearch_db/Gene_tax.silva_taxa_family.tax' # silva 108 ref\nGene_db='./data/SSUsearch_db/Gene_db.silva_108_rep_set.fasta'\n\nGene_tax_cc='./data/SSUsearch_db/Gene_tax_cc.greengene_97_otus.tax' # greengene 2012.10 ref for copy correction\nGene_db_cc='./data/SSUsearch_db/Gene_db_cc.greengene_97_otus.fasta'",
"_____no_output_____"
],
[
"# first part of file basename will the label of this sample\nimport os\nFilename=os.path.basename(Seqfile)\nTag=Filename.split('.')[0]",
"_____no_output_____"
],
[
"import os\nHmm=os.path.abspath(Hmm)\nSeqfile=os.path.abspath(Seqfile)\nScript_dir=os.path.abspath(Script_dir)\nGene_model_org=os.path.abspath(Gene_model_org)\nAli_template=os.path.abspath(Ali_template)\nGene_tax=os.path.abspath(Gene_tax)\nGene_db=os.path.abspath(Gene_db)\nGene_tax_cc=os.path.abspath(Gene_tax_cc)\nGene_db_cc=os.path.abspath(Gene_db_cc)\n\nos.environ.update(\n {'Cpu':Cpu, \n 'Hmm':os.path.abspath(Hmm), \n 'Gene':Gene, \n 'Seqfile':os.path.abspath(Seqfile), \n 'Filename':Filename, \n 'Tag':Tag, \n 'Script_dir':os.path.abspath(Script_dir), \n 'Gene_model_org':os.path.abspath(Gene_model_org), \n 'Ali_template':os.path.abspath(Ali_template), \n 'Start':Start, \n 'End':End,\n 'Len_cutoff':Len_cutoff,\n 'Gene_tax':os.path.abspath(Gene_tax), \n 'Gene_db':os.path.abspath(Gene_db), \n 'Gene_tax_cc':os.path.abspath(Gene_tax_cc), \n 'Gene_db_cc':os.path.abspath(Gene_db_cc)})",
"_____no_output_____"
],
[
"!echo \"*** make sure: parameters are right\"\n!echo \"Seqfile: $Seqfile\\nCpu: $Cpu\\nFilename: $Filename\\nTag: $Tag\"",
"*** make sure: parameters are right\nSeqfile: /usr/local/notebooks/data/test/data/1c.fa\nCpu: 2\nFilename: 1c.fa\nTag: 1c\n"
],
[
"cd workdir",
"/usr/local/notebooks/workdir\n"
],
[
"mkdir -p $Tag.ssu.out",
"_____no_output_____"
],
[
"### start hmmsearch",
"_____no_output_____"
],
[
"!echo \"*** hmmsearch starting\"\n!time hmmsearch --incE 10 --incdomE 10 --cpu $Cpu \\\n --domtblout $Tag.ssu.out/$Tag.qc.$Gene.hmmdomtblout \\\n -o /dev/null -A $Tag.ssu.out/$Tag.qc.$Gene.sto \\\n $Hmm $Seqfile\n!echo \"*** hmmsearch finished\"",
"*** hmmsearch starting\n0.95user 0.04system 0:00.99elapsed 99%CPU (0avgtext+0avgdata 65712maxresident)k\n0inputs+1080outputs (0major+7774minor)pagefaults 0swaps\n*** hmmsearch finished\n"
],
[
"!python $Script_dir/get-seq-from-hmmout.py \\\n $Tag.ssu.out/$Tag.qc.$Gene.hmmdomtblout \\\n $Tag.ssu.out/$Tag.qc.$Gene.sto \\\n $Tag.ssu.out/$Tag.qc.$Gene",
"parsing hmmdotblout done..\r\n50 of 114 seqs are kept after hmm parser\r\n"
]
],
[
[
"### Pass hits to mothur aligner",
"_____no_output_____"
]
],
[
[
"!echo \"*** Starting mothur align\"\n!cat $Gene_model_org $Tag.ssu.out/$Tag.qc.$Gene > $Tag.ssu.out/$Tag.qc.$Gene.RFadded\n\n# mothur does not allow tab between its flags, thus no indents here\n!time mothur \"#align.seqs(candidate=$Tag.ssu.out/$Tag.qc.$Gene.RFadded, template=$Ali_template, threshold=0.5, flip=t, processors=$Cpu)\"\n\n!rm -f mothur.*.logfile",
"*** Starting mothur align\n\u001b[H\u001b[2J\n\n\n\n\n\nmothur v.1.34.4\nLast updated: 12/22/2014\n\nby\nPatrick D. Schloss\n\nDepartment of Microbiology & Immunology\nUniversity of Michigan\[email protected]\nhttp://www.mothur.org\n\nWhen using, please cite:\nSchloss, P.D., et al., Introducing mothur: Open-source, platform-independent, community-supported software for describing and comparing microbial communities. Appl Environ Microbiol, 2009. 75(23):7537-41.\n\nDistributed under the GNU General Public License\n\nType 'help()' for information on the commands that are available\n\nType 'quit()' to exit program\n\n\n\nmothur > align.seqs(candidate=1c.ssu.out/1c.qc.ssu.RFadded, template=/usr/local/notebooks/data/SSUsearch_db/Ali_template.silva_ssu.fasta, threshold=0.5, flip=t, processors=2)\n\nUsing 2 processors.\n\nReading in the /usr/local/notebooks/data/SSUsearch_db/Ali_template.silva_ssu.fasta template sequences...\tDONE.\nIt took 25 to read 18491 sequences.\nAligning sequences from 1c.ssu.out/1c.qc.ssu.RFadded ...\n23\n28\nIt took 1 secs to align 51 sequences.\n\n\nOutput File Names: \n1c.ssu.out/1c.qc.ssu.align\n1c.ssu.out/1c.qc.ssu.align.report\n\n[WARNING]: your sequence names contained ':'. I changed them to '_' to avoid problems in your downstream analysis.\n\nmothur > quit()\n26.96user 2.61system 0:29.14elapsed 101%CPU (0avgtext+0avgdata 4881984maxresident)k\n0inputs+7792outputs (0major+399013minor)pagefaults 0swaps\n"
]
],
[
[
"### Get aligned seqs that have > 50% matched to references",
"_____no_output_____"
]
],
[
[
"!python $Script_dir/mothur-align-report-parser-cutoff.py \\\n $Tag.ssu.out/$Tag.qc.$Gene.align.report \\\n $Tag.ssu.out/$Tag.qc.$Gene.align \\\n $Tag.ssu.out/$Tag.qc.$Gene.align.filter \\\n 0.5\n ",
"0 bad seqs out of 51 total are removed from alignment\r\n"
],
[
"!python $Script_dir/remove-gap.py $Tag.ssu.out/$Tag.qc.$Gene.align.filter $Tag.ssu.out/$Tag.qc.$Gene.align.filter.fa",
"_____no_output_____"
]
],
[
[
"### Search is done here (the computational intensive part). Hooray!\n\n- \\$Tag.ssu.out/\\$Tag.qc.\\$Gene.align.filter: \n aligned SSU rRNA gene fragments\n \n \n\n- \\$Tag.ssu.out/\\$Tag.qc.\\$Gene.align.filter.fa: \n unaligned SSU rRNA gene fragments\n ",
"_____no_output_____"
],
[
"### Extract the reads mapped 150bp region in V4 (577-727 in *E.coli* SSU rRNA gene position) for unsupervised clustering",
"_____no_output_____"
]
],
[
[
"!python $Script_dir/region-cut.py $Tag.ssu.out/$Tag.qc.$Gene.align.filter $Start $End $Len_cutoff\n\n!mv $Tag.ssu.out/$Tag.qc.$Gene.align.filter.\"$Start\"to\"$End\".cut.lenscreen $Tag.ssu.out/$Tag.forclust",
"28 sequences are matched to 577-727 region\r\n"
]
],
[
[
"### Classify SSU rRNA gene seqs using SILVA",
"_____no_output_____"
]
],
[
[
"!rm -f $Tag.ssu.out/$Tag.qc.$Gene.align.filter.*.wang.taxonomy\n!mothur \"#classify.seqs(fasta=$Tag.ssu.out/$Tag.qc.$Gene.align.filter.fa, template=$Gene_db, taxonomy=$Gene_tax, cutoff=50, processors=$Cpu)\"\n!mv $Tag.ssu.out/$Tag.qc.$Gene.align.filter.*.wang.taxonomy \\\n $Tag.ssu.out/$Tag.qc.$Gene.align.filter.wang.silva.taxonomy",
"\u001b[H\u001b[2J\n\n\n\n\n\nmothur v.1.34.4\nLast updated: 12/22/2014\n\nby\nPatrick D. Schloss\n\nDepartment of Microbiology & Immunology\nUniversity of Michigan\[email protected]\nhttp://www.mothur.org\n\nWhen using, please cite:\nSchloss, P.D., et al., Introducing mothur: Open-source, platform-independent, community-supported software for describing and comparing microbial communities. Appl Environ Microbiol, 2009. 75(23):7537-41.\n\nDistributed under the GNU General Public License\n\nType 'help()' for information on the commands that are available\n\nType 'quit()' to exit program\n\n\n\nmothur > classify.seqs(fasta=1c.ssu.out/1c.qc.ssu.align.filter.fa, template=/usr/local/notebooks/data/SSUsearch_db/Gene_db.silva_108_rep_set.fasta, taxonomy=/usr/local/notebooks/data/SSUsearch_db/Gene_tax.silva_taxa_family.tax, cutoff=50, processors=2)\n\nUsing 2 processors.\nReading template taxonomy... DONE.\nReading template probabilities... DONE.\nIt took 20 seconds get probabilities. \nClassifying sequences from 1c.ssu.out/1c.qc.ssu.align.filter.fa ...\nProcessing sequence: 25\nProcessing sequence: 25\n\nIt took 0 secs to classify 50 sequences.\n\n\nIt took 1 secs to create the summary file for 50 sequences.\n\n\nOutput File Names: \n1c.ssu.out/1c.qc.ssu.align.filter.silva_taxa_family.wang.taxonomy\n1c.ssu.out/1c.qc.ssu.align.filter.silva_taxa_family.wang.tax.summary\n\n\nmothur > quit()\n"
],
[
"!python $Script_dir/count-taxon.py \\\n $Tag.ssu.out/$Tag.qc.$Gene.align.filter.wang.silva.taxonomy \\\n $Tag.ssu.out/$Tag.qc.$Gene.align.filter.wang.silva.taxonomy.count\n!rm -f mothur.*.logfile",
"_____no_output_____"
]
],
[
[
"### Classify SSU rRNA gene seqs with Greengene for copy correction later",
"_____no_output_____"
]
],
[
[
"!rm -f $Tag.ssu.out/$Tag.qc.$Gene.align.filter.*.wang.taxonomy\n!mothur \"#classify.seqs(fasta=$Tag.ssu.out/$Tag.qc.$Gene.align.filter.fa, template=$Gene_db_cc, taxonomy=$Gene_tax_cc, cutoff=50, processors=$Cpu)\"\n!mv $Tag.ssu.out/$Tag.qc.$Gene.align.filter.*.wang.taxonomy \\\n $Tag.ssu.out/$Tag.qc.$Gene.align.filter.wang.gg.taxonomy",
"\u001b[H\u001b[2J\n\n\n\n\n\nmothur v.1.34.4\nLast updated: 12/22/2014\n\nby\nPatrick D. Schloss\n\nDepartment of Microbiology & Immunology\nUniversity of Michigan\[email protected]\nhttp://www.mothur.org\n\nWhen using, please cite:\nSchloss, P.D., et al., Introducing mothur: Open-source, platform-independent, community-supported software for describing and comparing microbial communities. Appl Environ Microbiol, 2009. 75(23):7537-41.\n\nDistributed under the GNU General Public License\n\nType 'help()' for information on the commands that are available\n\nType 'quit()' to exit program\n\n\n\nmothur > classify.seqs(fasta=1c.ssu.out/1c.qc.ssu.align.filter.fa, template=/usr/local/notebooks/data/SSUsearch_db/Gene_db_cc.greengene_97_otus.fasta, taxonomy=/usr/local/notebooks/data/SSUsearch_db/Gene_tax_cc.greengene_97_otus.tax, cutoff=50, processors=2)\n\nUsing 2 processors.\nReading template taxonomy... DONE.\nReading template probabilities... DONE.\nIt took 14 seconds get probabilities. \nClassifying sequences from 1c.ssu.out/1c.qc.ssu.align.filter.fa ...\nProcessing sequence: 25\nProcessing sequence: 25\n\nIt took 1 secs to classify 50 sequences.\n\n\nIt took 0 secs to create the summary file for 50 sequences.\n\n\nOutput File Names: \n1c.ssu.out/1c.qc.ssu.align.filter.greengene_97_otus.wang.taxonomy\n1c.ssu.out/1c.qc.ssu.align.filter.greengene_97_otus.wang.tax.summary\n\n\nmothur > quit()\n"
],
[
"!python $Script_dir/count-taxon.py \\\n $Tag.ssu.out/$Tag.qc.$Gene.align.filter.wang.gg.taxonomy \\\n $Tag.ssu.out/$Tag.qc.$Gene.align.filter.wang.gg.taxonomy.count\n!rm -f mothur.*.logfile",
"_____no_output_____"
],
[
"# check the output directory\n!ls $Tag.ssu.out",
"1c.577to727\r\n1c.cut\r\n1c.forclust\r\n1c.qc.ssu\r\n1c.qc.ssu.align\r\n1c.qc.ssu.align.filter\r\n1c.qc.ssu.align.filter.577to727.cut\r\n1c.qc.ssu.align.filter.577to727.cut.lenscreen.fa\r\n1c.qc.ssu.align.filter.fa\r\n1c.qc.ssu.align.filter.greengene_97_otus.wang.tax.summary\r\n1c.qc.ssu.align.filter.silva_taxa_family.wang.tax.summary\r\n1c.qc.ssu.align.filter.wang.gg.taxonomy\r\n1c.qc.ssu.align.filter.wang.gg.taxonomy.count\r\n1c.qc.ssu.align.filter.wang.silva.taxonomy\r\n1c.qc.ssu.align.filter.wang.silva.taxonomy.count\r\n1c.qc.ssu.align.report\r\n1c.qc.ssu.hmmdomtblout\r\n1c.qc.ssu.hmmdomtblout.parsedToDictWithScore.pickle\r\n1c.qc.ssu.hmmtblout\r\n1c.qc.ssu.RFadded\r\n1c.qc.ssu.sto\r\n"
]
],
[
[
"### This part of pipeline (working with one sequence file) finishes here. Next we will combine samples for community analysis (see unsupervised analysis).\n\nFollowing are files useful for community analysis:\n\n* 1c.577to727: aligned fasta file of seqs mapped to target region for de novo clustering\n* 1c.qc.ssu.align.filter: aligned fasta file of all SSU rRNA gene fragments\n* 1c.qc.ssu.align.filter.wang.gg.taxonomy: Greengene taxonomy (for copy correction)\n* 1c.qc.ssu.align.filter.wang.silva.taxonomy: SILVA taxonomy",
"_____no_output_____"
]
],
[
[
"!echo \"*** pipeline runs successsfully :)\"",
"*** pipeline runs successsfully :)\r\n"
]
]
] | [
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] | [
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code"
]
] |
d09b40964b61f24908bae90a2165b9f22518a5ab | 620,529 | ipynb | Jupyter Notebook | SL2_Regression_and_Classification.ipynb | tleonhardt/machine_learning | cd21d16a2ee003c182edf4bb94bef99b18ac0c40 | [
"Apache-2.0"
] | null | null | null | SL2_Regression_and_Classification.ipynb | tleonhardt/machine_learning | cd21d16a2ee003c182edf4bb94bef99b18ac0c40 | [
"Apache-2.0"
] | null | null | null | SL2_Regression_and_Classification.ipynb | tleonhardt/machine_learning | cd21d16a2ee003c182edf4bb94bef99b18ac0c40 | [
"Apache-2.0"
] | null | null | null | 302.107595 | 481,636 | 0.900988 | [
[
[
"# Classification and Regression\nThere are two major types of supervised machine learning problems, called *classification* and *regression*.\n\nIn classification, the goal is to predict a *class label*, which is a choice from a predefined list of possibilities. In *Intro_to_Decision_Trees.ipynb* we used the example of classifying irises into one of three possible species. Classification is sometimes separated into binary classification, which is the special case of distinguishing between exactly two classes, and multiclass classification, which is classification between more than two classes. You can think of binary classification as trying to answer a yes/no question. Classifying emails as either spam or not spam is an example of a binary classification problem. In this binary classification task, the yes/no question being asked would be “Is this email spam?”\n\nFor regression tasks, the goal is to predict a *continuous number*, or a floating-point number in programming terms (or real number in mathematical terms). Predicting a person’s annual income from their education, their age, and where they live is an example of a regression task. When predicting income, the predicted value is an amount, and can be any number in a given range. Another example of a regression task is predicting the yield of a corn farm given attributes such as previous yields, weather, and number of employees working on the farm. The yield again can be an arbitrary number.\n\n**An easy way to distinguish between classification and regression tasks is to ask whether there is some kind of continuity in the output. If there is continuity between possible outcomes, then the problem is a regression problem.** Think about predicting annual income. There is a clear continuity in the output. Whether a person makes $40,000 or $40,001 a year does not make a tangible difference, even though these are different amounts of money; if our algorithm predicts $39,999 or $40,001 when it should have predicted $40,000, we don’t mind that much.\n\nBy contrast, for the task of recognizing the language of a website (which is a classification problem), there is no matter of degree. A website is in one language, or it is in another. There is no continuity between languages, and there is no language that is between English and French.\n\n*Disclaimer*: Much of the code in this notebook was lifted from the excellent book [Introduction to Machine Learning with Python](http://shop.oreilly.com/product/0636920030515.do) by Andreas Muller and Sarah Guido.",
"_____no_output_____"
],
[
"# Generalization, Overfitting, and Underfitting\nIn supervised learning, we want to build a model on the training data and then be able to make accurate predictions on new, unseen data that has the same characteristics as the training set that we used. If a model is able to make accurate predictions on unseen data, we say it is able to *generalize* from the training set to the test set. We want to build a model that is able to generalize as accurately as possible.\n\nUsually we build a model in such a way that it can make accurate predictions on the training set. If the training and test sets have enough in common, we expect the model to also be accurate on the test set. However, there are some cases where this can go wrong. For example, if we allow ourselves to build very complex models, we can always be as accurate as we like on the training set.\n\nThe only measure of whether an algorithm will perform well on new data is the evaluation on the test set. However, intuitively we expect simple models to generalize better to new data. Therefore, we always want to find the simplest model. Building a model that is too complex for the amount of information we have, as our novice data scientist did, is called *overfitting*. Overfitting occurs when you fit a model too closely to the particularities of the training set and obtain a model that works well on the training set but is not able to generalize to new data. On the other hand, if your model is too simple, then you might not be able to capture all the aspects of and variability in the data, and your model will do badly even on the training set. Choosing too simple a model is called *underfitting*.\n\nThe more complex we allow our model to be, the better we will be able to predict on the training data. However, if our model becomes too complex, we start focusing too much on each individual data point in our training set, and the model will not generalize well to new data.\n\nThere is a sweet spot in between that will yield the best generalization performance. This is the model we want to find.",
"_____no_output_____"
],
[
"# Relation of Model Complexity to Dataset Size\nIt’s important to note that model complexity is intimately tied to the variation of inputs contained in your training dataset: the larger variety of data points your dataset contains, the more complex a model you can use without overfitting. Usually, collecting more data points will yield more variety, so larger datasets allow building more complex models. However, simply duplicating the same data points or collecting very similar data will not help.\n\nHaving more data and building appropriately more complex models can often work wonders for supervised learning tasks. In the real world, you often have the ability to decide how much data to collect, which might be more beneficial than tweaking and tuning your model. Never underestimate the power of more data.",
"_____no_output_____"
],
[
"# Linear Models\nLinear models are a class of models that are widely used in practice and have been studied extensively in the last few decades, with roots going back over a hundred years. Linear models make a prediction using a linear function of the input features.\n\n## Linear Models for Regression\n\nFor regression, the general prediction formula for a linear model looks as follows:\n\n ŷ = w[0] * x[0] + w[1] * x[1] + ... + w[p] * x[p] + b\nHere, x[0] to x[p] denotes the features (in this example, the number of features is p) of a single data point, w and b are parameters of the model that are learned, and ŷ is the prediction the model makes. For a dataset with a single feature, this is:\n\n ŷ = w[0] * x[0] + b\nwhich you might remember from high school mathematics as the equation for a line. Here, w[0] is the slope and b is the y-axis offset. For more features, w contains the slopes along each feature axis. Alternatively, you can think of the predicted response as being a weighted sum of the input features, with weights (which can be negative) given by the entries of w.\n\nLinear models for regression can be characterized as regression models for which the prediction is a line for a single feature, a plane when using two features, or a hyperplane in higher dimensions (that is, when using more features).\n\nFor datasets with many features, linear models can be very powerful. In particular, if you have more features than training data points, any target y can be perfectly modeled (on the training set) as a linear function.\n\nThere are many different linear models for regression. The difference between these models lies in how the model parameters w and b are learned from the training data, and how model complexity can be controlled.",
"_____no_output_____"
],
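        [
            "To make the prediction formula above concrete, here is a minimal sketch with made-up numbers (the weights, intercept, and data point below are purely illustrative, not learned from any data):",
            "_____no_output_____"
        ],
        [
            "import numpy as np\n\n# purely illustrative weights w, intercept b, and one data point x with three features\nw = np.array([0.5, -1.2, 3.0])\nb = 4.0\nx = np.array([2.0, 1.0, 0.5])\n\n# y_hat = w[0] * x[0] + w[1] * x[1] + w[2] * x[2] + b\ny_hat = np.dot(w, x) + b\nprint(y_hat)",
            "_____no_output_____"
        ],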
[
"# Linear Regression (aka Ordinary Least Squares)\nLinear regression, or *ordinary least squares* (OLS), is the simplest and most classic linear method for regression. Linear regression finds the parameters w and b that minimize the *mean squared error* between predictions and the true regression targets, y, on the training set. The mean squared error is the sum of the squared differences between the predictions and the true values. Linear regression has no parameters, which is a benefit, but it also has no way to control model complexity.\n\nThe scikit-learn documentation on [Linear Regression]http://scikit-learn.org/stable/modules/linear_model.html#ordinary-least-squares) has a decent basic example of its use.\n\n## Advantages of Linear Regression (general, not specific to OLS)\n* Simple to understand and to interpret, at least for a small number of features/dimensions\n * Easy to visualize for 2 or 3 features\n* Very fast to train and also fast to predict\n* Doesn't suffer from the *curse of dimensionality* that methods such as KNearsetNeighbors does\n * Actually linear methods tend to work better with lots of features than with a small number of features\n \n## Big Disadvantage specific to OLS, but not applicable to linear regresison in general\n* OLS has no way to control model complexity and can suffer from overfitting, particularly if there are a large number of features\n * Modified versions of Linear Regression such as *Ridge Regression* and *Lasso* can mitigate or fix this issue\n\n## Disadvantages of Linear Regression in general, not specific to OLS\n* In lower-dimensional spaces, other models might yield better generalization performance\n* Requires more data preparation than some other techniques\n * Feature normalization is required for best results (for any algorithm which includes regularization)\n * Non-ordinal categorical features need to be one-hot encoded\n * Ordinal features need to be numerically encoded",
"_____no_output_____"
]
],
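    [
        [
            "As a small illustration of the one-hot encoding bullet above, here is a sketch of what pandas produces for a made-up categorical column (the column name and values are invented purely for illustration):",
            "_____no_output_____"
        ],
        [
            "import pandas as pd\n\n# a made-up categorical feature, just to show what one-hot encoding produces\ntoy = pd.DataFrame({'neighborhood': ['A', 'B', 'A', 'C']})\nprint(pd.get_dummies(toy))",
            "_____no_output_____"
        ]
    ],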
[
[
"import numpy as np\nimport pandas as pd\nimport matplotlib.pyplot as plt\n%matplotlib inline",
"_____no_output_____"
]
],
[
[
"### A First Application: Predicting Boston Housing Prices\nOne of the most famous datasets for regression in a supervised learning setting is the [Boston Housing data set](https://archive.ics.uci.edu/ml/datasets/Housing). It is a multivariate dataset introduced in a 1978 paper which records 13 attributes concerning housing values in the suburbs of Boston. NOTE: The data is very, very old and the house prices are ridiculously low by today's standards.\n\nscikit-learn has a number of small toy datasets included with it which makes it quick and easy to experiment with different machine learning algorithms on these datasets.\n\nThe [sklearn.datasets.load_boston()](http://scikit-learn.org/stable/modules/generated/sklearn.datasets.load_boston.html#sklearn.datasets.load_boston) method can be used to load the this dataset.\n\n#### Meet the data\nThe *boston* object that is returned by **load_boston** is a **Bunch** object, which is very similar to a dictionary. It contains keys and values.\n\nFeature Information:\n1. CRIM: per capita crime rate by town \n2. ZN: proportion of residential land zoned for lots over 25,000 sq.ft. \n3. INDUS: proportion of non-retail business acres per town \n4. CHAS: Charles River dummy variable (= 1 if tract bounds river; 0 otherwise) \n5. NOX: nitric oxides concentration (parts per 10 million) \n6. RM: average number of rooms per dwelling \n7. AGE: proportion of owner-occupied units built prior to 1940 \n8. DIS: weighted distances to five Boston employment centres \n9. RAD: index of accessibility to radial highways \n10. TAX: full-value property-tax rate per $10,000 \n11. PTRATIO: pupil-teacher ratio by town \n12. B: 1000(Bk - 0.63)^2 where Bk is the proportion of blacks by town \n13. LSTAT: % lower status of the population \n\nTarget Information\n14. MEDV: Median value of owner-occupied homes in $1000's",
"_____no_output_____"
]
],
[
[
"from sklearn.datasets import load_boston\nboston = load_boston()",
"_____no_output_____"
],
[
"print(\"Keys of boston: {}\".format(boston.keys()))",
"Keys of boston: dict_keys(['DESCR', 'feature_names', 'data', 'target'])\n"
],
[
"# The value of the key DESCR is a short description of the dataset. Here we show the beinning of the description.\nprint(boston['DESCR'][:193] + \"\\n...\")",
"Boston House Prices dataset\n===========================\n\nNotes\n------\nData Set Characteristics: \n\n :Number of Instances: 506 \n\n :Number of Attributes: 13 numeric/categorical predictive\n \n...\n"
],
[
"# The value of feature_names is a list of strings, giving the abbreviated name of each feature\nprint(\"Feature names: {}\".format(boston['feature_names']))",
"Feature names: ['CRIM' 'ZN' 'INDUS' 'CHAS' 'NOX' 'RM' 'AGE' 'DIS' 'RAD' 'TAX' 'PTRATIO'\n 'B' 'LSTAT']\n"
],
[
"# The data itself is contained in the target and data fields.\n# data contains the numeric measurements of features in a NumPy array\nprint(\"Type of data: {}\".format(type(boston['data'])))",
"Type of data: <class 'numpy.ndarray'>\n"
],
[
"# The rows in the data array correspond to neighborhoods, while the columns represent the features\nprint(\"Shape of data: {}\".format(boston['data'].shape))",
"Shape of data: (506, 13)\n"
],
[
"# We see that the array contains measurements for 506 different neighborhoods. Here are values for the first 5.\nprint(\"First five columns of data:\\n{}\".format(boston['data'][:5]))",
"First five columns of data:\n[[ 6.32000000e-03 1.80000000e+01 2.31000000e+00 0.00000000e+00\n 5.38000000e-01 6.57500000e+00 6.52000000e+01 4.09000000e+00\n 1.00000000e+00 2.96000000e+02 1.53000000e+01 3.96900000e+02\n 4.98000000e+00]\n [ 2.73100000e-02 0.00000000e+00 7.07000000e+00 0.00000000e+00\n 4.69000000e-01 6.42100000e+00 7.89000000e+01 4.96710000e+00\n 2.00000000e+00 2.42000000e+02 1.78000000e+01 3.96900000e+02\n 9.14000000e+00]\n [ 2.72900000e-02 0.00000000e+00 7.07000000e+00 0.00000000e+00\n 4.69000000e-01 7.18500000e+00 6.11000000e+01 4.96710000e+00\n 2.00000000e+00 2.42000000e+02 1.78000000e+01 3.92830000e+02\n 4.03000000e+00]\n [ 3.23700000e-02 0.00000000e+00 2.18000000e+00 0.00000000e+00\n 4.58000000e-01 6.99800000e+00 4.58000000e+01 6.06220000e+00\n 3.00000000e+00 2.22000000e+02 1.87000000e+01 3.94630000e+02\n 2.94000000e+00]\n [ 6.90500000e-02 0.00000000e+00 2.18000000e+00 0.00000000e+00\n 4.58000000e-01 7.14700000e+00 5.42000000e+01 6.06220000e+00\n 3.00000000e+00 2.22000000e+02 1.87000000e+01 3.96900000e+02\n 5.33000000e+00]]\n"
],
[
"# The target array contains the Median value of owner-occupied homes in $1000's, also as a NumPy array\nprint(\"Type of target: {}\".format(type(boston['target'])))",
"Type of target: <class 'numpy.ndarray'>\n"
],
[
"# target is a one-dimensional array, with one entry per sample\nprint(\"Shape of target: {}\".format(boston['target'].shape))",
"Shape of target: (506,)\n"
],
[
"# The target values are positive floating point numbers which represent a median house value in thousands of dollars.\nprint(\"Target:\\n{}\".format(boston['target']))",
"Target:\n[ 24. 21.6 34.7 33.4 36.2 28.7 22.9 27.1 16.5 18.9 15. 18.9\n 21.7 20.4 18.2 19.9 23.1 17.5 20.2 18.2 13.6 19.6 15.2 14.5\n 15.6 13.9 16.6 14.8 18.4 21. 12.7 14.5 13.2 13.1 13.5 18.9\n 20. 21. 24.7 30.8 34.9 26.6 25.3 24.7 21.2 19.3 20. 16.6\n 14.4 19.4 19.7 20.5 25. 23.4 18.9 35.4 24.7 31.6 23.3 19.6\n 18.7 16. 22.2 25. 33. 23.5 19.4 22. 17.4 20.9 24.2 21.7\n 22.8 23.4 24.1 21.4 20. 20.8 21.2 20.3 28. 23.9 24.8 22.9\n 23.9 26.6 22.5 22.2 23.6 28.7 22.6 22. 22.9 25. 20.6 28.4\n 21.4 38.7 43.8 33.2 27.5 26.5 18.6 19.3 20.1 19.5 19.5 20.4\n 19.8 19.4 21.7 22.8 18.8 18.7 18.5 18.3 21.2 19.2 20.4 19.3\n 22. 20.3 20.5 17.3 18.8 21.4 15.7 16.2 18. 14.3 19.2 19.6\n 23. 18.4 15.6 18.1 17.4 17.1 13.3 17.8 14. 14.4 13.4 15.6\n 11.8 13.8 15.6 14.6 17.8 15.4 21.5 19.6 15.3 19.4 17. 15.6\n 13.1 41.3 24.3 23.3 27. 50. 50. 50. 22.7 25. 50. 23.8\n 23.8 22.3 17.4 19.1 23.1 23.6 22.6 29.4 23.2 24.6 29.9 37.2\n 39.8 36.2 37.9 32.5 26.4 29.6 50. 32. 29.8 34.9 37. 30.5\n 36.4 31.1 29.1 50. 33.3 30.3 34.6 34.9 32.9 24.1 42.3 48.5\n 50. 22.6 24.4 22.5 24.4 20. 21.7 19.3 22.4 28.1 23.7 25.\n 23.3 28.7 21.5 23. 26.7 21.7 27.5 30.1 44.8 50. 37.6 31.6\n 46.7 31.5 24.3 31.7 41.7 48.3 29. 24. 25.1 31.5 23.7 23.3\n 22. 20.1 22.2 23.7 17.6 18.5 24.3 20.5 24.5 26.2 24.4 24.8\n 29.6 42.8 21.9 20.9 44. 50. 36. 30.1 33.8 43.1 48.8 31.\n 36.5 22.8 30.7 50. 43.5 20.7 21.1 25.2 24.4 35.2 32.4 32.\n 33.2 33.1 29.1 35.1 45.4 35.4 46. 50. 32.2 22. 20.1 23.2\n 22.3 24.8 28.5 37.3 27.9 23.9 21.7 28.6 27.1 20.3 22.5 29.\n 24.8 22. 26.4 33.1 36.1 28.4 33.4 28.2 22.8 20.3 16.1 22.1\n 19.4 21.6 23.8 16.2 17.8 19.8 23.1 21. 23.8 23.1 20.4 18.5\n 25. 24.6 23. 22.2 19.3 22.6 19.8 17.1 19.4 22.2 20.7 21.1\n 19.5 18.5 20.6 19. 18.7 32.7 16.5 23.9 31.2 17.5 17.2 23.1\n 24.5 26.6 22.9 24.1 18.6 30.1 18.2 20.6 17.8 21.7 22.7 22.6\n 25. 19.9 20.8 16.8 21.9 27.5 21.9 23.1 50. 50. 50. 50.\n 50. 13.8 13.8 15. 13.9 13.3 13.1 10.2 10.4 10.9 11.3 12.3\n 8.8 7.2 10.5 7.4 10.2 11.5 15.1 23.2 9.7 13.8 12.7 13.1\n 12.5 8.5 5. 6.3 5.6 7.2 12.1 8.3 8.5 5. 11.9 27.9\n 17.2 27.5 15. 17.2 17.9 16.3 7. 7.2 7.5 10.4 8.8 8.4\n 16.7 14.2 20.8 13.4 11.7 8.3 10.2 10.9 11. 9.5 14.5 14.1\n 16.1 14.3 11.7 13.4 9.6 8.7 8.4 12.8 10.5 17.1 18.4 15.4\n 10.8 11.8 14.9 12.6 14.1 13. 13.4 15.2 16.1 17.8 14.9 14.1\n 12.7 13.5 14.9 20. 16.4 17.7 19.5 20.2 21.4 19.9 19. 19.1\n 19.1 20.1 19.9 19.6 23.2 29.8 13.8 13.3 16.7 12. 14.6 21.4\n 23. 23.7 25. 21.8 20.6 21.2 19.1 20.6 15.2 7. 8.1 13.6\n 20.1 21.8 24.5 23.1 19.7 18.3 21.2 17.5 16.8 22.4 20.6 23.9\n 22. 11.9]\n"
]
],
[
[
"#### Measuring Success: Training and testing data\nWe want to build a machine learning model from this data that can predict the species of iris for a new set of measurements. But before we can apply our model to new measurements, we need to know whether it actually works -- that is, whether we should trust its predictions.\n\nUnfortunately, we cannot use the data we used to build the model to evaluate it. This is because our model can always simply remember the whole training set, and will therefore always predict the correct label for any point in the training set. This \"remembering\" does not indicate to us whether the model will *generalize* well (in other words, whether it will also perform well on new data).\n\nTo assess the model's performance, we show it new data (data that it hasn't seen before) for which we have labels. This is usually done by splitting the labeled data we have collected (here, our 150 flower measurements) into two parts. One part of the data is used to build our machine learning model, and is called the *training data* or *training set*. The rest of the data will be used to assess how well the model works; this is called the *test data*, *test set*, or *hold-out set*.\n\nscikit-learn contains a function that shuffles the dataset and splits it for you: the [train_test_split](http://scikit-learn.org/stable/modules/generated/sklearn.model_selection.train_test_split.html) function. This function extracts 75% of the rows in the data as the training set, together with the corresponding labels for this data. The remaining 25% of the data, together with the remaining labels, is declared as the test set. Deciding how much data you want to put into the training and the test set respectively is somewhat arbitrary, but scikit-learn's default 75/25 split is a reasonable starting point.\n\nIn scikit-learn, data is usually denoted with a capital X, while labels are denoted by a lowercase y. This is inspired by the standard formulation *f(x)=y* in mathematics, where *x* is the input to a function and *y* is the output. Following more conventions from mathematics, we use a capital *X* because the data is a two-dimensional array (a matrix) and a lowercase *y* because the target is a one-dimensional array (a vector).\n\nBefore making the split, the **train_test_split** function shuffles the dataset using a pseudorandom number generator. If we just took the last 25% of the data as a test set, all the data points would have the label 2, as the data points are sorted by the label.\n\nTo make sure this example code will always get the same output if run multiple times, we provide the pseudorandom number generator with a fixed seed using the **random_state** parameter.\n\nThe output of the **train_test_split** function is **X_train**, **X_test**, **y_train**, and **y_test**, which are all NumPy arrays. **X_train** contains 75% of the rows of the dataset, and **X_test** contains the remaining 25%.",
"_____no_output_____"
]
],
[
[
"from sklearn.model_selection import train_test_split\nX_train, X_test, y_train, y_test = train_test_split(boston['data'], boston['target'], random_state=0)",
"_____no_output_____"
],
[
"print(\"X_train shape: {}\".format(X_train.shape))\nprint(\"y_train shape: {}\".format(y_train.shape))",
"X_train shape: (379, 13)\ny_train shape: (379,)\n"
],
[
"print(\"X_test shape: {}\".format(X_test.shape))\nprint(\"y_test shape: {}\".format(y_test.shape))",
"X_test shape: (127, 13)\ny_test shape: (127,)\n"
]
],
[
[
"#### First things first: Look at your data\nBefore building a machine learning model, it is often a good idea to inspect the data, to see if the task is easily solvable without machine learning, or if the desired information might not be contained in the data.\n\nAdditionally, inspecting the data is a good way to find abnormalities and peculiarities. Maybe some of your irises were measured using inches and not centimeters, for example. In the real world, inconsistencies in the data and unexpected measurements are very common, as are missing data and not-a-number (NaN) or infinite values.\n\nOne of the best ways to inspect data is to visualize it. One way to do this is by using a *scatter plot*. A scatter plot of the data puts one feature along the x-axis and another along the y-axis, and draws a dot for each data point. Unfortunately, computer screens have only two dimensions, which allows us to plot only two (or maybe three) features at a time. It is difficult to plot datasets with more than three features this way. One way around this problem is to do a *pair plot*, which looks at all possible pairs of features. If you have a small number of features, such as the four we have here, this is quite reasonable. You should keep in mind, however, that a pair plot does not show the interaction of all of the features at once, so some interesting aspects of the data may not be revealed when visualizing it this way.\n\nIn Python, the *pandas* library has a convenient function called [scatter_matrix](http://pandas.pydata.org/pandas-docs/version/0.18.1/visualization.html#scatter-matrix-plot) for creating pair plots for a DataFrame.",
"_____no_output_____"
]
],
[
[
"# create dataframe from data in X_train\nboston_df = pd.DataFrame(X_train, columns=boston.feature_names)\n\n# Add in the target data\nboston_df['MEDV'] = y_train\n\n# Look at the first few rows\nboston_df.head()",
"_____no_output_____"
],
[
"# create a scatter matrix from the dataframe\ntmp = pd.scatter_matrix(boston_df, figsize=(15, 15))",
"_____no_output_____"
]
],
[
[
"From the plots, we can see RM has a strong positive linear relationship with MEDV and LSTAT has a strong negative one. This makes sense - the housing price should go up as the number of rooms increases and the housing prices should go down as the percentage of lower class/income families in the neighborhood increases.",
"_____no_output_____"
]
],
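    [
        [
            "To look at those two relationships on their own, rather than hunting for them in the full pair plot, here is a quick sketch that plots RM and LSTAT against MEDV using the training DataFrame built above:",
            "_____no_output_____"
        ],
        [
            "fig, axes = plt.subplots(1, 2, figsize=(10, 4))\n\n# RM vs MEDV: expect a positive trend\naxes[0].scatter(boston_df['RM'], boston_df['MEDV'], alpha=0.5)\naxes[0].set_xlabel('RM (average rooms per dwelling)')\naxes[0].set_ylabel('MEDV ($1000s)')\n\n# LSTAT vs MEDV: expect a negative trend\naxes[1].scatter(boston_df['LSTAT'], boston_df['MEDV'], alpha=0.5)\naxes[1].set_xlabel('LSTAT (% lower status of the population)')\naxes[1].set_ylabel('MEDV ($1000s)')\n\nplt.tight_layout()",
            "_____no_output_____"
        ]
    ],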
[
[
"# Get a high-level overview of the data\nboston_df.describe()",
"_____no_output_____"
],
[
"# Find which features are most highly correlated with the housing prices\ndf = boston_df\ndf['MEDV'] = y_train\ndf.corr()['MEDV']",
"_____no_output_____"
]
],
[
[
"#### Building your model: Linear Regression\nNow we can start building the actual machine learning model. There are many regression algorithms in *scikit-learn* that we could use. Here we will use Ordinary Least Squares (OLS) Linear Regression because it is easy to understand and interpret.\n\nAll machine learning models in *scikit-learn* are implemented in their own classes, which are called *Estimator* classes. The Linear Regression algorithm is implemented in the [LinearRegression](http://scikit-learn.org/stable/modules/generated/sklearn.linear_model.LinearRegression.html) class in the **linear_model** module. Before we can use the model, we need to instantiate the class into an object. This is when we will set any parameters of the model. The LinearRegression model doesn't have any particular parameters of importance.",
"_____no_output_____"
]
],
[
[
"from sklearn.linear_model import LinearRegression\nlr = LinearRegression()",
"_____no_output_____"
]
],
[
[
"The *lr* object encapsulates the algorithm that will be used to build the model from the training data, as well the algorithm to make predictions on new data points. It will also hold the information that the algorithm has extracted from the training data.\n\nTo build the model on the training set, we call the **fit** method of the *lr* object, which takes as arguments the NumPy array *X_train* containing the training data and the NumPy array *y_train* of the corresponding training labels.",
"_____no_output_____"
]
],
[
[
"lr.fit(X_train, y_train)",
"_____no_output_____"
]
],
[
[
"The “slope” parameters (w), also called weights or coefficients, are stored in the coef_ attribute, while the offset or intercept (b) is stored in the intercept_ attribute:",
"_____no_output_____"
]
],
[
[
"print(\"lr.coef_: {}\".format(lr.coef_))\nprint(\"lr.intercept_: {}\".format(lr.intercept_))",
"lr.coef_: [ -1.16869578e-01 4.39939421e-02 -5.34808462e-03 2.39455391e+00\n -1.56298371e+01 3.76145473e+00 -6.95007160e-03 -1.43520477e+00\n 2.39755946e-01 -1.12937318e-02 -9.86626289e-01 8.55687565e-03\n -5.00029440e-01]\nlr.intercept_: 36.980455337620576\n"
]
],
[
[
"The intercept_ attribute is always a single float number, while the coef_ attribute is a NumPy array with one entry per input feature. As we only have 13 input features in this dataset, lr.coef_ has 13 entries.\n\nLet’s look at the training set and test set performance:",
"_____no_output_____"
]
],
[
[
"print(\"Training set score: {:.2f}\".format(lr.score(X_train, y_train)))\nprint(\"Test set score: {:.2f}\".format(lr.score(X_test, y_test)))",
"Training set score: 0.77\nTest set score: 0.64\n"
]
],
[
[
"An R^2 of around 0.64 on the test set is not very good, but we can see that the scores on the training and test sets are are a decent distance apart. This means we are likely overfitting. With higher-dimensional datasets (meaning datasets with a large number of features), linear models become more powerful, and there is a higher chance of overfitting. More complicated linear models such as *Ridge Regression* and *Lasso* have been designed to help control this overfitting problem.\n\nAn R^2 of around 0.77 on the training set is OK, but not great. For a really good fit, we would want an R^2 of around 0.95 or so. This tells us we are missing someting. One possibility is we could do some feature engineering and either include polynomial powers of some of the features and/or include products of some of the features.\n\nAlso, linear models tend ot work better when all of the features exist on roughly the same scale, we could attempt to scale our data as well.",
"_____no_output_____"
],
[
"# Preprocessing and Scaling\nSome algorithms, like neural networks, SVMs, and k-NearestNeighbors are very sensitive to the scaling of the data; while many others such as linear models with regularization (Ridge, Lasso, etc.) are moderately sensitive to the scaling of the data. Therefore, a common practice is to adjust the features so that the data representation is more suitable for these algorithms. Often, this is a simple per-feature rescaling and shift of the data.\n\n## Different Kinds of Preprocessing\nDiffernt algorithms benefit from different kinds of scaling and thus Scikit-Learn supports a variety of scaling methods, though they all have a similar API.\n\n### StandardScaler\nNeural networks expect all input features to vary in a similar way, and ideally to have a mean of 0, and a variance of 1. When using ANN, we must rescale our data so that it fulfills these requirements. For doing this automatically, *scikit-learn* has the [StandardScaler](http://scikit-learn.org/stable/modules/generated/sklearn.preprocessing.StandardScaler.html#sklearn.preprocessing.StandardScaler). The **StandardScaler** in *scikit-learn* ensures that for each feature the mean is 0 and the variance is 1, bringing all features to the same magnitude. However, this scaling does not ensure any particular minimum and maximum values for the features.\n\n### MinMaxScaler\nA common rescaling method for kernel SVMs is to scale the data such that all features are between 0 and 1. We can do this in *scikit-learn* by using the [MinMaxScaler](http://scikit-learn.org/stable/modules/generated/sklearn.preprocessing.MinMaxScaler.html#sklearn.preprocessing.MinMaxScaler) preprocessing method. The **MinMaxScaler** shifts the data such that all features are exactly between 0 and 1. For a two-dimensional dataset this means all of the data is contained within the rectangle created by the x-axis between 0 and 1 and the y-axis between 0 and 1.\n\n### RobustScaler\nStandard scaling does not ensure any particular minimum and maximum values for the features. The [RobustScaler](http://scikit-learn.org/stable/modules/generated/sklearn.preprocessing.RobustScaler.html#sklearn.preprocessing.RobustScaler) works similarly to the **StandardScaler** in that it ensures statistical properties for each feature that guarantee that they are on the same scale. However, the **RobustScaler** uses the median and quartiles, instead of mean and variance. This makes the **RobustScaler** ignore data points that are very different from the rest (like measurement errors). These odd data points are also called *outliers*, and can lead to trouble for other scaling techniques.\n",
"_____no_output_____"
]
],
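    [
        [
            "Here is a small sketch of how the three scalers described above treat the same toy feature column; the numbers are made up, and the value 100 is included as an outlier to show how differently RobustScaler reacts to it:",
            "_____no_output_____"
        ],
        [
            "import numpy as np\nfrom sklearn.preprocessing import StandardScaler, MinMaxScaler, RobustScaler\n\n# a made-up single feature with one outlier (100), reshaped into a column vector\nvalues = np.array([1.0, 2.0, 3.0, 4.0, 5.0, 100.0]).reshape(-1, 1)\n\nfor scaler in [StandardScaler(), MinMaxScaler(), RobustScaler()]:\n    print(type(scaler).__name__, scaler.fit_transform(values).ravel().round(2))",
            "_____no_output_____"
        ]
    ],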
[
[
"# Scale the boston dataset\nfrom sklearn.preprocessing import MinMaxScaler\nX = MinMaxScaler().fit_transform(boston.data)",
"_____no_output_____"
],
[
"X_train, X_test, y_train, y_test = train_test_split(X, boston['target'], random_state=0)\nlr = LinearRegression().fit(X_train, y_train)\nprint(\"Training set score: {:.2f}\".format(lr.score(X_train, y_train)))\nprint(\"Test set score: {:.2f}\".format(lr.score(X_test, y_test)))",
"Training set score: 0.77\nTest set score: 0.64\n"
]
],
[
[
"Ordinary Least Squares (OLS) regression is not sensitive to feature scaling, but all of the regularized linear methods which help reduce the overfitting present in OLS are sensitive to feature scaling.",
"_____no_output_____"
],
[
"# Feature Engineering\nFeature engineering is the process of using domain knowledge of the data to create features that make machine learning algorithms work. Feature engineering is fundamental to the application of machine learning, and is both difficult and expensive. The need for manual feature engineering can be obviated by automated feature learning.\n\nIn particular, linear models might benefit greatly from generating new features via techniques such as binning, and adding polynomials and interactions. However, more complex models like random forests and SVMs might be able to learn more complex tasks without explicitly expanding the feature space.\n\nIn practice, the features that are used (and the match between features and method) is often the most important piece in making a machine learning approach work well.\n\n## Interactions and Polynomials\nOne way to enrich a feature representation, particularly for linear models, is adding *interaction features* - products of individual original features. Another way to enrich a feature representation is to use *polynomials* of the original features - for a given feature x, we might want to consider x^2, x^3, x^4, and so on. This kind of feature engineering is often used in statistical modeling, but it’s also common in many practical machine learning applications.\n\nWithin *scikit-learn*, the addition of both *interaction features* and *polynomial features* is implemented in [PolynomialFeatures](http://scikit-learn.org/stable/modules/generated/sklearn.preprocessing.PolynomialFeatures.html#sklearn.preprocessing.PolynomialFeatures) in the **preprocessing** module.\n\nIn the code below, we modify the boston housing dataset by addig all polynomial features and interactions up to a degree of 2. The data originally had 13 features, which were expanded into 105 interaction features. These new features represent all possible interactions between two different original features, as well as the square of each original feature. degree=2 here means that we look at all features that are the product of up to two original features. The exact correspondence between input and output features can be found using the **get_feature_names** method.",
"_____no_output_____"
]
],
[
[
"from sklearn.datasets import load_boston\nfrom sklearn.preprocessing import MinMaxScaler, PolynomialFeatures, StandardScaler, RobustScaler\ndef load_extended_boston(scaler='minmax'):\n boston = load_boston()\n X = boston.data\n\n if 'standard' == scaler:\n X = StandardScaler().fit_transform(boston.data)\n elif 'robust' == scaler:\n X = RobustScaler().fit_transform(boston.data)\n else:\n X = MinMaxScaler().fit_transform(boston.data)\n X = PolynomialFeatures(degree=2).fit_transform(X)\n return X, boston.target",
"_____no_output_____"
],
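[
"# Added illustration: the input/output correspondence mentioned above can be inspected\n# with get_feature_names() (renamed get_feature_names_out() in newer scikit-learn releases).\nfrom sklearn.datasets import load_boston\nfrom sklearn.preprocessing import MinMaxScaler, PolynomialFeatures\n\nboston = load_boston()\npoly = PolynomialFeatures(degree=2).fit(MinMaxScaler().fit_transform(boston.data))\nprint(\"Number of generated features: {}\".format(len(poly.get_feature_names())))\nprint(\"First few feature names: {}\".format(poly.get_feature_names()[:6]))",
"_____no_output_____"
],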
[
"X, y = load_extended_boston()\nX.shape",
"_____no_output_____"
],
[
"# What if we fit this new dataset with a vastly expanded set of features using OLS?\nX_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)\nlr = LinearRegression().fit(X_train, y_train)\nprint(\"Training set score: {:.2f}\".format(lr.score(X_train, y_train)))\nprint(\"Test set score: {:.2f}\".format(lr.score(X_test, y_test)))",
"Training set score: 0.95\nTest set score: 0.61\n"
]
],
[
[
"Now the basic OLS model is doing a dramatically better job fitting the training set (R^2 of 0.95 vs 0.77).\n\nThis discrepancy between performance on the training set and the test set is a clear sign of overfitting, and therefore we should try to find a model that allows us to control complexity. One of the most commonly used alternatives to standard linear regression is *ridge regression*, which we will look into next.",
"_____no_output_____"
],
[
"# Ridge Regression\nRidge regression is also a linear model for regression, so the formula it uses to make predictions is the same one used for ordinary least squares. In ridge regression, though, the coefficients (w) are chosen not only so that they predict well on the training data, but also to fit an additional constraint. We also want the magnitude of coefficients to be as small as possible; in other words, all entries of w should be close to zero. Intuitively, this means each feature should have as little effect on the outcome as possible (which translates to having a small slope), while still predicting well. This constraint is an example of what is called *regularization*. Regularization means explicitly restricting a model to avoid overfitting. The particular kind used by ridge regression is known as L2 regularization.\n\nRidge regression is implemented in [linear_model.Ridge](http://scikit-learn.org/stable/modules/generated/sklearn.linear_model.Ridge.html#sklearn.linear_model.Ridge). Let’s see how well it does on the extended Boston Housing dataset:",
"_____no_output_____"
]
],
[
[
"from sklearn.linear_model import Ridge\n\nridge = Ridge().fit(X_train, y_train)\nprint(\"Training set score: {:.2f}\".format(ridge.score(X_train, y_train)))\nprint(\"Test set score: {:.2f}\".format(ridge.score(X_test, y_test)))",
"Training set score: 0.89\nTest set score: 0.75\n"
]
],
[
[
"As you can see, the training set score of Ridge is *lower* than for LinearRegression, while the test set score is *higher*. This is consistent with our expectation. With linear regression, we were overfitting our data. Ridge is a more restricted model, so we are less likely to overfit. A less complex model means worse performance on the training set, but better generalization. As we are only interested in generalization performance, we should choose the Ridge model over the LinearRegression model.",
"_____no_output_____"
],
[
"The Ridge model makes a trade-off between the simplicity of the model (near-zero coefficients) and its performance on the training set. How much importance the model places on simplicity versus training set performance can be specified by the user, using the **alpha** parameter. In the previous example, we used the default parameter alpha=1.0. There is no reason why this will give us the best trade-off, though. The optimum setting of alpha depends on the particular dataset we are using. Increasing alpha forces coefficients to move more toward zero, which decreases training set performance but might help generalization. For example:",
"_____no_output_____"
]
],
[
[
"ridge10 = Ridge(alpha=10).fit(X_train, y_train)\nprint(\"Training set score: {:.2f}\".format(ridge10.score(X_train, y_train)))\nprint(\"Test set score: {:.2f}\".format(ridge10.score(X_test, y_test)))\n",
"Training set score: 0.79\nTest set score: 0.64\n"
]
],
[
[
"Decreasing alpha allows the coefficients to be less restricted. For very small values of alpha, coefficients are barely restricted at all, and we end up with a model that resembles LinearRegression:",
"_____no_output_____"
]
],
[
[
"ridge01 = Ridge(alpha=0.1).fit(X_train, y_train)\nprint(\"Training set score: {:.2f}\".format(ridge01.score(X_train, y_train)))\nprint(\"Test set score: {:.2f}\".format(ridge01.score(X_test, y_test)))",
"Training set score: 0.93\nTest set score: 0.77\n"
]
],
[
[
"Here, alpha=0.1 seems to be working well. We could try decreasing alpha even more to improve generalization. For now, notice how the parameter alpha corresponds to the model complexity. \n\nVery shortly we need to think about systematic methods for properly select optimal values for parameters such as **alpha**.\n\nWe can also get a more qualitative insight into how the alpha parameter changes the model by inspecting the coef_ attribute of models with different values of alpha. A higher alpha means a more restricted model, so we expect the entries of coef_ to have smaller magnitude for a high value of alpha than for a low value of alpha. This is confirmed in the plot below:",
"_____no_output_____"
]
],
[
[
"plt.figure(figsize=(15, 10))\nplt.plot(ridge.coef_, 's', label=\"Ridge alpha=1\")\nplt.plot(ridge10.coef_, '^', label=\"Ridge alpha=10\")\nplt.plot(ridge01.coef_, 'v', label=\"Ridge alpha=0.1\")\n\nplt.plot(lr.coef_, 'o', label=\"LinearRegression\")\nplt.xlabel(\"Coefficient index\")\nplt.ylabel(\"Coefficient magnitude\")\nplt.hlines(0, 0, len(lr.coef_))\nplt.ylim(-25, 25)\nplt.legend()\nplt.show()",
"_____no_output_____"
]
],
[
[
"Clearly, the interactions and polynomial features gave us a good boost in performance when using Ridge. When using a more complex model like a random forest, the story can be a bit different, though. Adding features will benefit linear models the most. For very complex models, adding features may actually slightly decrease the performance.\n\nMachine learning is complex. Often you have to try several experiments and just see what works best.",
"_____no_output_____"
],
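[
"# Added sketch: comparing a random forest on the original vs. the polynomial-expanded\n# features, to illustrate the point above. Exact scores vary with the random state;\n# the expansion typically helps the forest little, if at all.\n# load_extended_boston is the helper defined earlier in this notebook.\nfrom sklearn.datasets import load_boston\nfrom sklearn.ensemble import RandomForestRegressor\nfrom sklearn.model_selection import train_test_split\n\nboston = load_boston()\nX_poly, y = load_extended_boston()\nfor name, X_variant in [('original 13 features', boston.data),\n                        ('105 polynomial features', X_poly)]:\n    X_tr, X_te, y_tr, y_te = train_test_split(X_variant, y, random_state=0)\n    rf = RandomForestRegressor(n_estimators=100, random_state=0).fit(X_tr, y_tr)\n    print(\"{:>24}: test R^2 = {:.2f}\".format(name, rf.score(X_te, y_te)))",
"_____no_output_____"
],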
[
"# Model Evaluation and Improvement\nTo evaluate our supervised models, so far we have split our dataset into a training set and a test set using the **train_test_split function**, built a model on the training set by calling the fit method, and evaluated it on the test set using the score method, which for classification computes the fraction of correctly classified samples and for regression computes the R^2.\n\nRemember, the reason we split our data into training and test sets is that we are interested in measuring how well our model *generalizes* to new, previously unseen data. We are not interested in how well our model fit the training set, but rather in how well it can make predictions for data that was not observed during training.\n\nAs we saw when exploring Ridge regression, we need a more robust way to assess generalization performance which is capable of automatically choosing optimal values for hyper-parameters such as **alpha**.",
"_____no_output_____"
],
[
"## Cross-Validation\n*Cross-validation* is a statistical method of evaluating generalization performance that is more stable and thorough than using a split into a training and a test set. In cross-validation, the data is instead split repeatedly and multiple models are trained. The most commonly used version of cross-validation is *k-fold cross-validation*, where *k* is a user-specified number, usually 5 or 10. When performing five-fold cross-validation, the data is first partitioned into five parts of (approximately) equal size, called *folds*. Next, a sequence of models is trained. The first model is trained using the first fold as the test set, and the remaining folds (2–5) are used as the training set. The model is built using the data in folds 2–5, and then the accuracy is evaluated on fold 1. Then another model is built, this time using fold 2 as the test set and the data in folds 1, 3, 4, and 5 as the training set. This process is repeated using folds 3, 4, and 5 as test sets. For each of these five splits of the data into training and test sets, we compute the accuracy. In the end, we have collected five accuracy values.\n\nUsually, the first fifth of the data is the first fold, the second fifth of the data is the second fold, and so on.\n\nThe whole point of cross-validation is to be more robust than a simple train/test split so that the results are not likely to be influenced by a particularly good or bad split of the data. The main disadvantage is that it requires more computation.\n\n### Cross-Validation in scikit-learn\nCross-validation is implemented in scikit-learn using the [cross_val_score](http://scikit-learn.org/stable/modules/generated/sklearn.model_selection.cross_val_score.html#sklearn.model_selection.cross_val_score) function from the *model_selection* module. The parameters of the **cross_val_score** function are the model we want to evaluate, the training data, and the ground-truth labels.",
"_____no_output_____"
]
],
[
[
"# Let's evaluate cross-validation on the iris dataset using logistic regression (which is actually classification)\nfrom sklearn.model_selection import cross_val_score\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\n\niris = load_iris()\nlogreg = LogisticRegression()\n\nscores = cross_val_score(logreg, iris.data, iris.target)\nprint(\"Cross-validation scores: {}\".format(scores))",
"Cross-validation scores: [ 0.96078431 0.92156863 0.95833333]\n"
]
],
[
[
"By default, cross_val_score performs three-fold cross-validation, returning three accuracy values. We can change the number of folds used by changing the cv parameter:",
"_____no_output_____"
]
],
[
[
"scores = cross_val_score(logreg, iris.data, iris.target, cv=5)\nprint(\"Cross-validation scores: {}\".format(scores))",
"Cross-validation scores: [ 1. 0.96666667 0.93333333 0.9 1. ]\n"
]
],
[
[
"A common way to summarize the cross-validation accuracy is to compute the mean:",
"_____no_output_____"
]
],
[
[
"print(\"Average cross-validation score: {:.2f}\".format(scores.mean()))",
"Average cross-validation score: 0.96\n"
]
],
[
[
"Using the mean cross-validation we can conclude that we expect the model to be around 96% accurate on average. Looking at all five scores produced by the five-fold cross-validation, we can also conclude that there is a relatively high variance in the accuracy between folds, ranging from 100% accuracy to 90% accuracy. This could imply that the model is very dependent on the particular folds used for training, but it could also just be a consequence of the small size of the dataset.",
"_____no_output_____"
],
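[
"# Added illustration: quantifying the fold-to-fold spread mentioned above,\n# using the five-fold `scores` array computed a few cells earlier.\nprint(\"Mean: {:.2f}  Std: {:.2f}  Min: {:.2f}  Max: {:.2f}\".format(\n    scores.mean(), scores.std(), scores.min(), scores.max()))",
"_____no_output_____"
],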
[
"### Benefits of Cross-Validation\nThere are several benefits to using cross-validation instead of a single split into a training and a test set. First, remember that train_test_split performs a random split of the data. Imagine that we are “lucky” when randomly splitting the data, and all examples that are hard to classify end up in the training set. In that case, the test set will only contain “easy” examples, and our test set accuracy will be unrealistically high. Conversely, if we are “unlucky,” we might have randomly put all the hard-to-classify examples in the test set and consequently obtain an unrealistically low score. However, when using cross-validation, each example will be in the training set exactly once: each example is in one of the folds, and each fold is the test set once. Therefore, the model needs to generalize well to all of the samples in the dataset for all of the cross-validation scores (and their mean) to be high.\n\nHaving multiple splits of the data also provides some information about how sensitive our model is to the selection of the training dataset. For the iris dataset, we saw accuracies between 90% and 100%. This is quite a range, and it provides us with an idea about how the model might perform in the worst case and best case scenarios when applied to new data.\n\nAnother benefit of cross-validation as compared to using a single split of the data is that we use our data more a single split of the data is that we use our data more effectively. When using train_test_split, we usually use 75% of the data for training and 25% of the data for evaluation. When using five-fold cross-validation, in each iteration we can use four-fifths of the data (80%) to fit the model. When using 10-fold cross-validation, we can use nine-tenths of the data (90%) to fit the model. More data will usually result in more accurate models.\n\nThe main disadvantage of cross-validation is increased computational cost. As we are now training k models instead of a single model, cross-validation will be roughly k times slower than doing a single split of the data.\n\nIt is important to keep in mind that cross-validation is not a way to build a model that can be applied to new data. Cross-validation does not return a model. When calling cross_val_score, multiple models are built internally, but the purpose of cross-validation is only to evaluate how well a given algorithm will generalize when trained on a specific dataset.",
"_____no_output_____"
],
[
"# Stratified k-Fold Cross-Validation and Other Strategies\nSplitting the dataset into k folds by starting with the first one-k-th part of the data, as described in the previous section, might not always be a good idea. For example, let’s have a look at the boston housing dataset:",
"_____no_output_____"
]
],
[
[
"lr = LinearRegression()\nscores = cross_val_score(lr, boston.data, boston.target)\nprint(\"Cross-validation scores: {}\".format(scores))",
"Cross-validation scores: [ 0.5828011 0.53193819 -5.85104986]\n"
]
],
[
[
"As we can see, a default 3-fold cross-validation performed ok for the first two folds, but horribly bad for the third one. \n\nThe fundamental problem here is that if that data isn't organized in a random way, then just taking folds in order doesn't represent a random sampling for each fold. There are multiple possible ways to mitigate this issue.\n\n### Stratified k-Fold Cross-Validation\nAs the simple k-fold strategy would obviously fail for classification problems if the data is organized by target category, *scikit-learn* does not use it for classification, but rather uses *stratified k-fold cross-validation*. In stratified cross-validation, we split the data such that the proportions between classes are the same in each fold as they are in the whole dataset.\n\n*scikit-learn* supports startified k-fold cross-validation via the [StratifiedKFold](http://scikit-learn.org/stable/modules/generated/sklearn.model_selection.StratifiedKFold.html#sklearn.model_selection.StratifiedKFold) class in the *model_selection* module.\n\nFor example, if 90% of your samples belong to class A and 10% of your samples belong to class B, then stratified cross-validation ensures that in each fold, 90% of samples belong to class A and 10% of samples belong to class B.\n\nFor regression, *scikit-learn* uses the standard k-fold cross-validation by default.\n\n### Shuffle-split cross-validation\nAnother, very flexible strategy for cross-validation is *shuffle-split cross-validation*. In shuffle-split cross-validation, each split samples **train_size** many points for the training set and **test_size** many (disjoint) point for the test set. This splitting is repeated **n_iter** times. You can use integers for **train_size** and **test_size** to use absolute sizes for these sets, or floating-point numbers to use fractions of the whole dataset.\n\nSince the sampling in *shuffle-split cross-validation* is done in a random fashion, this is a safer alternative to default *k-Fold Cross-Validation* when the data isn't truly randomized.\n\n*scikit-learn* supports shuffle-split cross-validation via the [ShuffleSplit](http://scikit-learn.org/stable/modules/generated/sklearn.model_selection.ShuffleSplit.html#sklearn.model_selection.ShuffleSplit) class in the *model_selection* module.\n\nThere is also a stratified variant of ShuffleSplit, aptly named [StratifiedShuffleSplit](http://scikit-learn.org/stable/modules/generated/sklearn.model_selection.StratifiedShuffleSplit.html#sklearn.model_selection.StratifiedShuffleSplit), which can provide more reliable results for classification tasks.",
"_____no_output_____"
]
],
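[
[
"# Added illustration: on the iris data (which is sorted by class), plain KFold without\n# shuffling puts an entire class into each test fold, while StratifiedKFold preserves the\n# class proportions -- hence the very different scores.\nfrom sklearn.datasets import load_iris\nfrom sklearn.linear_model import LogisticRegression\nfrom sklearn.model_selection import KFold, StratifiedKFold, cross_val_score\n\niris = load_iris()\nlogreg = LogisticRegression()\nprint(\"KFold(3):           {}\".format(\n    cross_val_score(logreg, iris.data, iris.target, cv=KFold(n_splits=3))))\nprint(\"StratifiedKFold(3): {}\".format(\n    cross_val_score(logreg, iris.data, iris.target, cv=StratifiedKFold(n_splits=3))))",
"_____no_output_____"
]
],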
[
[
"# Let's look at the boston housing dataset again using shuffle-split cross-validation to ensure random sampling\n# The following code splits the dataset into 80% training set and 20% test set for 3 iterations:\nfrom sklearn.model_selection import ShuffleSplit\nshuffle_split = ShuffleSplit(test_size=.8, train_size=.2, n_splits=3)\nscores = cross_val_score(lr, boston.data, boston.target, cv=shuffle_split)\nprint(\"Cross-validation scores:\\n{}\".format(scores))",
"Cross-validation scores:\n[ 0.69514428 0.69350546 0.63047915]\n"
]
],
[
[
"## Grid Search\nNow that we know how to evaluate how well a model generalizes, we can take the next step and improve the model’s generalization performance by tuning its parameters. We discussed the parameter settings of the Ridge model for ridge regression earlier. Finding the values of the important parameters of a model (the ones that provide the best generalization performance) is a tricky task, but necessary for almost all models and datasets. Because it is such a common task, there are standard methods in *scikit-learn* to help you with it. The most commonly used method is grid search, which basically means trying all possible combinations of the parameters of interest.\n\nConsider the case of ridge regression, as implemented in the Ridge class. As we discussed earlier, there is one important parameters: the regularization parameter, *alpha*. Say we want to try the values 0.001, 0.005, 0.01, 0.05, 0.1, 0.5, 1, 5, 10, 50, and 100 for *alpha*. Because we have eleven different settings for *alpha* and *alpha* is the only parameter, we have 11 combinations of parameters in total. Looking at all possible combinations creates a table (or grid) of parameter settings for the Ridge regression model.\n\n### Simple Grid Search\nWe can implement a simple grid search just as a for loop over the parameter, training and evaluating a classifier for each value:",
"_____no_output_____"
]
],
[
[
"X, y = load_extended_boston(scaler='standard')\nX_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)\nprint(\"Size of training set: {} size of test set: {}\".format(X_train.shape[0], X_test.shape[0]))\n\nbest_score = 0\n\nfor alpha in [0.001, 0.005, 0.01, 0.05, 0.1, 0.5, 1, 5, 10, 50, 100]:\n # for each combination of parameters, train an SVC\n ridge = Ridge(alpha=alpha)\n ridge.fit(X_train, y_train)\n # evaluate the SVC on the test set\n score = ridge.score(X_test, y_test)\n # if we got a better score, store the score and parameters\n if score > best_score:\n best_score = score\n best_parameters = {'alpha': alpha}\n\nprint(\"Best score: {:.2f}\".format(best_score))\nprint(\"Best parameters: {}\".format(best_parameters))",
"Size of training set: 379 size of test set: 127\nBest score: 0.78\nBest parameters: {'alpha': 50}\n"
]
],
[
[
"### The Danger of Overfitting the Parameters and the Validation Set\nGiven this result, we might be tempted to report that we found a model that performs with 78% accuracy on our dataset. However, this claim could be overly optimistic (or just wrong), for the following reason: we tried many different parameters and selected the one with best accuracy on the test set, but this accuracy won’t necessarily carry over to new data. Because we used the test data to adjust the parameters, we can no longer use it to assess how good the model is. This is the same reason we needed to split the data into training and test sets in the first place; we need an independent dataset to evaluate, one that was not used to create the model.\n\nOne way to resolve this problem is to split the data again, so we have three sets: the training set to build the model, the validation (or development) set to select the parameters of the model, and the test set to evaluate the performance of the selected parameters. \n\nAfter selecting the best parameters using the validation set, we can rebuild a model using the parameter settings we found, but now training on both the training data and the validation data. This way, we can use as much data as possible to build our model. This leads to the following implementation:",
"_____no_output_____"
]
],
[
[
"X, y = load_extended_boston(scaler='standard')\n# split data into train+validation set and test set\nX_trainval, X_test, y_trainval, y_test = train_test_split(X, y, random_state=0)\n# split train+validation set into training and validation sets\nX_train, X_valid, y_train, y_valid = train_test_split(X_trainval, y_trainval, random_state=1)\nprint(\"Size of training set: {} size of validation set: {} size of test set:\"\n \" {}\\n\".format(X_train.shape[0], X_valid.shape[0], X_test.shape[0]))\n\nbest_score = 0\n\nfor alpha in [0.001, 0.005, 0.01, 0.05, 0.1, 0.5, 1, 5, 10, 50, 100]:\n # for each combination of parameters, train an SVC\n ridge = Ridge(alpha=alpha)\n ridge.fit(X_train, y_train)\n # evaluate the Ridge on the test set\n score = ridge.score(X_valid, y_valid)\n # if we got a better score, store the score and parameters\n if score > best_score:\n best_score = score\n best_parameters = {'alpha': alpha}\n\n# rebuild a model on the combined training and validation set,\n# and evaluate it on the test set\nridge = Ridge(**best_parameters)\nridge.fit(X_trainval, y_trainval)\ntest_score = ridge.score(X_test, y_test)\nprint(\"Best score on validation set: {:.2f}\".format(best_score))\nprint(\"Best parameters: \", best_parameters)\nprint(\"Test set score with best parameters: {:.2f}\".format(test_score))",
"Size of training set: 284 size of validation set: 95 size of test set: 127\n\nBest score on validation set: 0.92\nBest parameters: {'alpha': 50}\nTest set score with best parameters: 0.78\n"
]
],
[
[
"The best score on the validation set is 92%. However, the score on the test set—the score that actually tells us how well we generalize—is lower, at 78%. So we can claim to classify new data 78% correctly. This happens to be the same as before, now we can make a stronger claim since the final test set wasn't used in any way shape or form during hyper-parameter tuning.\n\nThe distinction between the training set, validation set, and test set is fundamentally important to applying machine learning methods in practice. Any choices made based on the test set accuracy “leak” information from the test set into the model. Therefore, it is important to keep a separate test set, which is only used for the final evaluation. It is good practice to do all exploratory analysis and model selection using the combination of a training and a validation set, and reserve the test set for a final evaluation—this is even true for exploratory visualization. Strictly speaking, evaluating more than one model on the test set and choosing the better of the two will result in an overly optimistic estimate of how accurate the model is.",
"_____no_output_____"
],
[
"### Grid Search with Cross-Validation\nWhile the method of splitting the data into a training, a validation, and a test set that we just saw is workable, and relatively commonly used, it is quite sensitive to how exactly the data is split. From the output of the previous code snippet we can see that GridSearchCV selects 'alhpa': 50 as the best parameter. But if we were to take a different part of the training data as the validation set, it may optimize for a different value. For a better estimate of the generalization performance, instead of using a single split into a training and a validation set, we can use cross-validation to evaluate the performance of each parameter combination. This method can be coded up as follows:",
"_____no_output_____"
]
],
[
[
"best_score = 0\nfor alpha in [0.001, 0.005, 0.01, 0.05, 0.1, 0.5, 1, 5, 10, 50, 100]:\n # for each combination of parameters, train an SVC\n ridge = Ridge(alpha=alpha)\n \n # perform cross-validation\n scores = cross_val_score(ridge, X_trainval, y_trainval, cv=5)\n \n # compute mean cross-validation accuracy\n score = np.mean(scores)\n \n # if we got a better score, store the score and parameters\n if score > best_score:\n best_score = score\n best_parameters = {'alpha': alpha}\n \n# rebuild a model on the combined training and validation set,\n# and evaluate it on the test set\nridge = Ridge(**best_parameters)\nridge.fit(X_trainval, y_trainval)\ntest_score = ridge.score(X_test, y_test)\nprint(\"Best score on validation set: {:.2f}\".format(best_score))\nprint(\"Best parameters: \", best_parameters)\nprint(\"Test set score with best parameters: {:.2f}\".format(test_score))",
"Best score on validation set: 0.83\nBest parameters: {'alpha': 10}\nTest set score with best parameters: 0.77\n"
]
],
[
[
"To evaluate the accuracy of the Ridge Regression model using a particular setting of alpha using five-fold cross-validation, we need to train 11 * 5 = 55 models. As you can imagine, the main downside of the use of cross-validation is the time it takes to train all these models. However, as you can see here, it is a more reliable method which is less sensitive to how precisely the validation set is sampled from the overall trainin set, and thus more likely to generalize well.",
"_____no_output_____"
],
[
"### GridSearchCV\nBecause grid search with cross-validation is such a commonly used method to adjust parameters, *scikit-learn* provides the [GridSearchCV](http://scikit-learn.org/stable/modules/generated/sklearn.model_selection.GridSearchCV.html#sklearn.model_selection.GridSearchCV) class, which implements it in the form of an estimator. To use the **GridSearchCV** class, you first need to specify the parameters you want to search over using a dictionary. GridSearchCV will then perform all the necessary model fits. The keys of the dictionary are the names of parameters we want to adjust (as given when constructing the model—in this case, alpha), and the values are the parameter settings we want to try out. Trying the values 0.001, 0.005, 0.01, 0.05, 0.1, 0.5, 1, 5, 10, 50, and 100 for alpha translates to the following dictionary:",
"_____no_output_____"
]
],
[
[
"param_grid = {'alpha': [0.001, 0.005, 0.01, 0.05, 0.1, 0.5, 1, 5, 10, 50, 100]}\nprint(\"Parameter grid:\\n{}\".format(param_grid))",
"Parameter grid:\n{'alpha': [0.001, 0.005, 0.01, 0.05, 0.1, 0.5, 1, 5, 10, 50, 100]}\n"
]
],
[
[
"We can now instantiate the **GridSearchCV** class with the model (*Ridge*), the parameter grid to search (*param_grid*), and the cross-validation strategy we want to use (say, five-fold stratified cross-validation):",
"_____no_output_____"
]
],
[
[
"from sklearn.model_selection import GridSearchCV\nfrom sklearn.linear_model import Ridge\ngrid_search = GridSearchCV(Ridge(), param_grid, cv=5)",
"_____no_output_____"
]
],
[
[
"**GridSearchCV** will use cross-validation in place of the split into a training and validation set that we used before. However, we still need to split the data into a training and a test set, to avoid overfitting the parameters:",
"_____no_output_____"
]
],
[
[
"X, y = load_extended_boston(scaler='standard')\nX_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)",
"_____no_output_____"
]
],
[
[
"The *grid_search* object that we created behaves just like a classifier; we can call the standard methods **fit**, **predict**, and **score** on it. However, when we call **fit**, it will run cross-validation for each combination of parameters we specified in param_grid:",
"_____no_output_____"
]
],
[
[
"grid_search.fit(X_train, y_train)",
"_____no_output_____"
]
],
[
[
"Fitting the **GridSearchCV** object not only searches for the best parameters, but also automatically fits a new model on the whole training dataset with the parameters that yielded the best cross-validation performance. What happens in fit is therefore equivalent to the result of the code we saw at the beginning of this section. The **GridSearchCV** class provides a very convenient interface to access the retrained model using the predict and score methods. To evaluate how well the best found parameters generalize, we can call score on the test set:",
"_____no_output_____"
]
],
[
[
"print(\"Test set score: {:.2f}\".format(grid_search.score(X_test, y_test)))",
"Test set score: 0.77\n"
]
],
[
[
"Choosing the parameters using cross-validation, we actually found a model that achieves 77% accuracy on the test set. The important thing here is that we *did not use the test set* to choose the parameters. The parameters that were found are scored in the **`best_params_`** attribute, and the best cross-validation accuracy (the mean accuracy over the different splits for this parameter setting) is stored in **`best_score_`**:",
"_____no_output_____"
]
],
[
[
"print(\"Best parameters: {}\".format(grid_search.best_params_))\nprint(\"Best cross-validation score: {:.2f}\".format(grid_search.best_score_))",
"Best parameters: {'alpha': 10}\nBest cross-validation score: 0.83\n"
]
],
[
[
"Sometimes it is helpful to have access to the actual model that was found—for example, to look at coefficients or feature importances. You can access the model with the best parameters trained on the whole training set using the **`best_estimator_`** attribute:",
"_____no_output_____"
]
],
[
[
"print(\"Best estimator:\\n{}\".format(grid_search.best_estimator_))",
"Best estimator:\nRidge(alpha=10, copy_X=True, fit_intercept=True, max_iter=None,\n normalize=False, random_state=None, solver='auto', tol=0.001)\n"
]
],
[
[
"Because *grid_search* itself has **predict** and **score** methods, using **`best_estimator_`** is not needed to make predictions or evaluate the model.",
"_____no_output_____"
],
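[
"# Added illustration: since grid_search is itself an estimator, we can call predict and\n# score on it directly; this reuses the fitted grid_search and the X_test/y_test split above.\nprint(\"Predictions for the first five test samples: {}\".format(grid_search.predict(X_test[:5])))\nprint(\"Test set score via grid_search.score: {:.2f}\".format(grid_search.score(X_test, y_test)))",
"_____no_output_____"
],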
[
"### Putting it all together\nThe one thing we didn't do was experiment with different train/test splits. Let's run it with randomness a bunch of times and see how consistent it is:",
"_____no_output_____"
]
],
[
[
"from sklearn.model_selection import GridSearchCV\nfrom sklearn.linear_model import Ridge\n\nparam_grid = {'alpha': [0.001, 0.005, 0.01, 0.05, 0.1, 0.5, 1, 5, 10, 50, 100]}\ngrid_search = GridSearchCV(Ridge(), param_grid, cv=5)\n\nX, y = load_extended_boston(scaler='standard')\n\nfor i in range(10):\n X_train, X_test, y_train, y_test = train_test_split(X, y)\n grid_search.fit(X_train, y_train)\n \n print(\"Run {} - Test set score: {:.2f} Best parameters: {}\".format(i, grid_search.score(X_test, y_test), \n grid_search.best_params_))",
"Run 0 - Test set score: 0.81 Best parameters: {'alpha': 1}\nRun 1 - Test set score: 0.82 Best parameters: {'alpha': 5}\nRun 2 - Test set score: 0.81 Best parameters: {'alpha': 10}\nRun 3 - Test set score: 0.89 Best parameters: {'alpha': 10}\nRun 4 - Test set score: 0.87 Best parameters: {'alpha': 10}\nRun 5 - Test set score: 0.84 Best parameters: {'alpha': 10}\nRun 6 - Test set score: 0.87 Best parameters: {'alpha': 50}\nRun 7 - Test set score: 0.88 Best parameters: {'alpha': 10}\nRun 8 - Test set score: 0.88 Best parameters: {'alpha': 10}\nRun 9 - Test set score: 0.85 Best parameters: {'alpha': 50}\n"
]
]
] | [
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] | [
[
"markdown",
"markdown",
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
]
] |
d09b5474d77fa593754b30e1d5ac667ebab1bfe3 | 28,875 | ipynb | Jupyter Notebook | chapter_deep-learning-computation/parameters.ipynb | dltech-xyz/d2l-java | 746af284f86cb1424516d4923713e40d838c1672 | [
"MIT-0"
] | null | null | null | chapter_deep-learning-computation/parameters.ipynb | dltech-xyz/d2l-java | 746af284f86cb1424516d4923713e40d838c1672 | [
"MIT-0"
] | null | null | null | chapter_deep-learning-computation/parameters.ipynb | dltech-xyz/d2l-java | 746af284f86cb1424516d4923713e40d838c1672 | [
"MIT-0"
] | 1 | 2020-09-15T06:01:30.000Z | 2020-09-15T06:01:30.000Z | 36.138924 | 235 | 0.605991 | [
[
[
"# Parameter Management\n\nOnce we have chosen an architecture\nand set our hyperparameters,\nwe proceed to the training loop,\nwhere our goal is to find parameter values\nthat minimize our objective function. \nAfter training, we will need these parameters \nin order to make future predictions.\nAdditionally, we will sometimes wish \nto extract the parameters \neither to reuse them in some other context,\nto save our model to disk so that \nit may be exectuted in other software,\nor for examination in the hopes of \ngaining scientific understanding.\n\nMost of the time, we will be able \nto ignore the nitty-gritty details\nof how parameters are declared\nand manipulated, relying on DJL\nto do the heavy lifting.\nHowever, when we move away from \nstacked architectures with standard layers, \nwe will sometimes need to get into the weeds\nof declaring and manipulating parameters. \nIn this section, we cover the following:\n\n* Accessing parameters for debugging, diagnostics, and visualiziations.\n* Parameter initialization.\n* Sharing parameters across different model components.\n\nWe start by focusing on an MLP with one hidden layer.",
"_____no_output_____"
]
],
[
[
"%mavenRepo snapshots https://oss.sonatype.org/content/repositories/snapshots/\n\n%maven ai.djl:api:0.7.0-SNAPSHOT\n%maven ai.djl:model-zoo:0.7.0-SNAPSHOT\n%maven org.slf4j:slf4j-api:1.7.26\n%maven org.slf4j:slf4j-simple:1.7.26\n \n%maven net.java.dev.jna:jna:5.3.0\n%maven ai.djl.mxnet:mxnet-engine:0.7.0-SNAPSHOT\n%maven ai.djl.mxnet:mxnet-native-auto:1.7.0-a",
"_____no_output_____"
],
[
"import ai.djl.*;\nimport ai.djl.ndarray.*;\nimport ai.djl.ndarray.types.*;\nimport ai.djl.ndarray.index.*;\nimport ai.djl.nn.*;\nimport ai.djl.nn.core.*;\nimport ai.djl.training.*;\nimport ai.djl.training.initializer.*;\nimport ai.djl.training.dataset.*;\nimport ai.djl.util.*;\nimport ai.djl.translate.*;\nimport ai.djl.inference.Predictor;",
"_____no_output_____"
],
[
"NDManager manager = NDManager.newBaseManager();\n\nNDArray x = manager.randomUniform(0, 1, new Shape(2, 4));\n\nModel model = Model.newInstance(\"lin-reg\");\n\nSequentialBlock net = new SequentialBlock();\n\nnet.add(Linear.builder().setUnits(8).build());\nnet.add(Activation.reluBlock());\nnet.add(Linear.builder().setUnits(1).build());\nnet.setInitializer(new NormalInitializer());\nnet.initialize(manager, DataType.FLOAT32, x.getShape());\n\nmodel.setBlock(net);\n\nPredictor<NDList, NDList> predictor = model.newPredictor(new NoopTranslator());\n\npredictor.predict(new NDList(x)).singletonOrThrow(); // forward computation",
"_____no_output_____"
]
],
[
[
"## Parameter Access\n\nLet us start with how to access parameters\nfrom the models that you already know.\nEach layer's parameters are conveniently stored in a `Pair<String, Parameter>` consisting of a unique\n`String` that serves as a key for the layer and the `Parameter` itself.\nThe `ParameterList` is an extension of `PairList` and is returned with a call to the `getParameters()` method on a `Block`. \nWe can inspect the parameters of the `net` defined above.\nWhen a model is defined via the `SequentialBlock` class,\nwe can access any layer's `Pair<String, Parameter>` by calling `get()` on the `ParameterList` and passing in the index\nof the parameter we want. Calling `getKey()` and `getValue()` on a `Pair<String, Parameter>` will get the parameter's name and `Parameter` respectively. We can also directly get the `Parameter` we want from the `ParameterList`\nby calling `get()` and passing in its unique key(the `String` portion of the `Pair<String, Parameter>`. If we call `valueAt()` and pass in\nthe index, we will get the `Parameter` directly as well.",
"_____no_output_____"
]
],
[
[
"ParameterList params = net.getParameters();\n// Print out all the keys (unique!)\nfor (var pair : params) {\n System.out.println(pair.getKey());\n}\n\n// Use the unique key to access the Parameter\nNDArray dense0Weight = params.get(\"01Linear_weight\").getArray();\nNDArray dense0Bias = params.get(\"01Linear_bias\").getArray();\n\n// Use indexing to access the Parameter\nNDArray dense1Weight = params.valueAt(2).getArray();\nNDArray dense1Bias = params.valueAt(3).getArray();\n\nSystem.out.println(dense0Weight);\nSystem.out.println(dense0Bias);\n\nSystem.out.println(dense1Weight);\nSystem.out.println(dense1Bias);",
"_____no_output_____"
]
],
[
[
"The output tells us a few important things.\nFirst, each fully-connected layer \nhas two parameters, e.g., \n`dense0Weight` and `dense0Bias`,\ncorresponding to that layer's \nweights and biases, respectively.\nThe `params` variable is a `ParameterList` which contain the\nkey-value pairs of the layer name and a parameter of the \n`Parameter` class.\nWith a `Parameter`, we can get the underlying numerical values as `NDArray`s by calling \n`getArray()` on them!\nBoth the weights and biases are stored as single precision floats(`FLOAT32`).\n\n\n### Targeted Parameters\n\nParameters are complex objects,\ncontaining data, gradients,\nand additional information.\nThat's why we need to request the data explicitly.\nNote that the bias vector consists of zeroes\nbecause we have not updated the network\nsince it was initialized.",
"_____no_output_____"
],
[
"Note that unlike the biases, the weights are nonzero. \nThis is because unlike biases, \nweights are initialized randomly. \nIn addition to `getArray()`, each `Parameter`\nalso provides a `requireGradient()` method which\nreturns whether the parameter needs gradients to be computed\n(which we set on the `NDArray` with `attachGradient()`).\nThe gradient has the same shape as the weight. \nTo actually access the gradient, we simply call `getGradient()` on the\n`NDArray`.\nBecause we have not invoked backpropagation \nfor this network yet, its values are all 0.\nWe would invoke it by creating a `GradientCollector` instance and\nrun our calculations inside it.",
"_____no_output_____"
]
],
[
[
"dense0Weight.getGradient();",
"_____no_output_____"
]
],
[
[
"### Collecting Parameters from Nested Blocks\n\nLet us see how the parameter naming conventions work \nif we nest multiple blocks inside each other. \nFor that we first define a function that produces Blocks \n(a Block factory, so to speak) and then \ncombine these inside yet larger Blocks.",
"_____no_output_____"
]
],
[
[
"public SequentialBlock block1() {\n SequentialBlock net = new SequentialBlock();\n net.add(Linear.builder().setUnits(32).build());\n net.add(Activation.reluBlock());\n net.add(Linear.builder().setUnits(16).build());\n net.add(Activation.reluBlock());\n return net;\n}\n\npublic SequentialBlock block2() {\n SequentialBlock net = new SequentialBlock();\n for (int i = 0; i < 4; i++) {\n net.add(block1());\n }\n return net;\n}\n\nSequentialBlock rgnet = new SequentialBlock();\nrgnet.add(block2());\nrgnet.add(Linear.builder().setUnits(10).build());\nrgnet.setInitializer(new NormalInitializer());\nrgnet.initialize(manager, DataType.FLOAT32, x.getShape());\n\nModel model = Model.newInstance(\"rgnet\");\nmodel.setBlock(rgnet);\n\nPredictor<NDList, NDList> predictor = model.newPredictor(new NoopTranslator());\n\npredictor.predict(new NDList(x)).singletonOrThrow();",
"_____no_output_____"
]
],
[
[
"Now that we have designed the network, \nlet us see how it is organized.\nWe can get the list of named parameters by calling `getParameters()`.\nHowever, we not only want to see the parameters, but also how\nour network is structured.\nTo see our network architecture, we can simply print out the block whose architecture we want to see.",
"_____no_output_____"
]
],
[
[
"/* Network Architecture for RgNet */\nrgnet",
"_____no_output_____"
],
[
"/* Parameters for RgNet */\nfor (var param : rgnet.getParameters()) {\n System.out.println(param.getValue().getArray());\n}",
"_____no_output_____"
]
],
[
[
"Since the layers are hierarchically nested,\nwe can also access them by calling their `getChildren()` method\nto get a `BlockList`(also an extension of `PairList`) of their inner blocks.\nIt shares methods with `ParameterList` and as such we can use their\nfamiliar structure to access the blocks. We can call `get(i)` to get the\n`Pair<String, Block>` at the index `i` we want, and then finally `getValue()` to get the actual\nblock. We can do this in one step as shown above with `valueAt(i)`. Then we have to repeat that to get that blocks child and so on.\n\nHere, we access the first major block, \nwithin it the second subblock, \nand within that the bias of the first layer,\nwith as follows:",
"_____no_output_____"
]
],
[
[
"Block majorBlock1 = rgnet.getChildren().get(0).getValue();\nBlock subBlock2 = majorBlock1.getChildren().valueAt(1);\nBlock linearLayer1 = subBlock2.getChildren().valueAt(0);\nNDArray bias = linearLayer1.getParameters().valueAt(1).getArray();\nbias",
"_____no_output_____"
]
],
[
[
"## Parameter Initialization\n\nNow that we know how to access the parameters,\nlet us look at how to initialize them properly.\nWe discussed the need for initialization in :numref:`sec_numerical_stability`. \nBy default, DJL initializes weight matrices\nbased on your set initializer \nand the bias parameters are all set to $0$.\nHowever, we will often want to initialize our weights\naccording to various other protocols. \nDJL's `ai.djl.training.initializer` package provides a variety \nof preset initialization methods.\nIf we want to create a custom initializer,\nwe need to do some extra work.\n\n### Built-in Initialization\n\nIn DJL, when setting the initializer for blocks, the default `setInitializer()` function does not overwrite\nany previous set initializers. So if you set an initializer earlier, but decide you want to change your initializer and call `setInitializer()` again, the second `setInitializer()` will NOT overwrite your first one.\n\nAdditionally, when you call `setInitializer()` on a block, all internal blocks will also call `setInitializer()` with the same given `initializer`.\n\nThis means that we can call `setInitializer()` on the highest level of a block and know that all internal blocks that do not have an initializer already set will be set to that given `initializer`.\n\nThis setup has the advantage that we don't have to worry about our `setInitializer()` overriding our previous `initializer`s on internal blocks!\n\nIf you want to however, you can explicitly set an initializer for a `Parameter` by calling its `setInitializer()` function directly and passing in `true` to the overwrite input.\nSimply loop over all the parameters returned from `getParameters()` and set their initializers directly!",
"_____no_output_____"
],
[
"Let us begin by calling on built-in initializers. \nThe code below initializes all parameters \nto a given constant value 1, \nby using the `ConstantInitializer()` initializer. \n\nNote that this will not do anything currently since we have already set\nour initializer in the previous code block.\nWe can verify this by checking the weight of a parameter.",
"_____no_output_____"
]
],
[
[
"net.setInitializer(new ConstantInitializer(1));\nnet.initialize(manager, DataType.FLOAT32, x.getShape());\nBlock linearLayer = net.getChildren().get(0).getValue();\nNDArray weight = linearLayer.getParameters().get(0).getValue().getArray();\nweight",
"_____no_output_____"
]
],
[
[
"We can see these initializations however if we create a new network.\nLet us write a function to create these network architectures for us\nconveniently.",
"_____no_output_____"
]
],
[
[
"public SequentialBlock getNet() {\n SequentialBlock net = new SequentialBlock();\n net.add(Linear.builder().setUnits(8).build());\n net.add(Activation.reluBlock());\n net.add(Linear.builder().setUnits(1).build());\n return net;\n}",
"_____no_output_____"
]
],
[
[
"If we run our previous initializer on this new net and check a parameter, we'll\nsee that everything is initialized properly! (to 7777!)",
"_____no_output_____"
]
],
[
[
"SequentialBlock net = getNet();\nnet.setInitializer(new ConstantInitializer(7777));\nnet.initialize(manager, DataType.FLOAT32, x.getShape());\nBlock linearLayer = net.getChildren().valueAt(0);\nNDArray weight = linearLayer.getParameters().valueAt(0).getArray();\nweight",
"_____no_output_____"
]
],
[
[
"We can also initialize all parameters \nas Gaussian random variables \nwith standard deviation $.01$.",
"_____no_output_____"
]
],
[
[
"SequentialBlock net = getNet();\nnet.setInitializer(new NormalInitializer());\nnet.initialize(manager, DataType.FLOAT32, x.getShape());\nBlock linearLayer = net.getChildren().valueAt(0);\nNDArray weight = linearLayer.getParameters().valueAt(0).getArray();\nweight",
"_____no_output_____"
]
],
[
[
"We can also apply different initializers for certain Blocks.\nFor example, below we initialize the first layer\nwith the `Xavier` initializer\nand initialize the second layer \nto a constant value of 0.\n\nWe will do this without the `getNet()` function as it will be easier\nto have the reference to each block we want to set.\n",
"_____no_output_____"
]
],
[
[
"SequentialBlock net = new SequentialBlock();\nLinear linear1 = Linear.builder().setUnits(8).build();\nnet.add(linear1);\nnet.add(Activation.reluBlock());\nLinear linear2 = Linear.builder().setUnits(1).build();\nnet.add(linear2);\n\nlinear1.setInitializer(new XavierInitializer());\nlinear1.initialize(manager, DataType.FLOAT32, x.getShape());\n\nlinear2.setInitializer(Initializer.ZEROS);\nlinear2.initialize(manager, DataType.FLOAT32, x.getShape());\n\nSystem.out.println(linear1.getParameters().valueAt(0).getArray());\nSystem.out.println(linear2.getParameters().valueAt(0).getArray());",
"_____no_output_____"
]
],
[
[
"Finally, we can loop over the `ParameterList` and set their initializers individually.\nWhen setting initializers directly on the `Parameter`, you must pass in an `overwrite`\nboolean along with the initializer to declare whether you want your current\ninitializer to overwrite the previous initializer if one has already been set.\nHere, we do want to overwrite and so pass in `true`. \n\nFor this example, however, since we haven't set the `weight` initializers before, there is no initializer to overwrite so we could pass in `false` and still have the same outcome.\n\nHowever, since `bias` parameters are automatically set to initialize at 0, to properly set our intializer here, we have to set overwrite to `true`.",
"_____no_output_____"
]
],
[
[
"SequentialBlock net = getNet();\nParameterList params = net.getParameters();\nfor (int i = 0; i < params.size(); i++) {\n // Here we interleave initializers.\n // We initialize parameters at even indexes to 0\n // and parameters at odd indexes to 2.\n Parameter param = params.valueAt(i);\n if (i % 2 == 0) {\n // All weight parameters happen to be at even indices.\n // We set them to initialize to 0.\n // There is no need to overwrite \n // since no initializer has been set for them previously.\n param.setInitializer(new ConstantInitializer(0), false);\n }\n else {\n // All bias parameters happen to be at odd indices.\n // We set them to initialize to 2.\n // To set the initializer here properly, we must pass in true\n // for overwrite\n // since bias parameters automatically have their\n // initializer set to 0.\n param.setInitializer(new ConstantInitializer(2), true);\n }\n}\nnet.initialize(manager, DataType.FLOAT32, x.getShape());\n\nfor (var param : net.getParameters()) {\n System.out.println(param.getKey());\n System.out.println(param.getValue().getArray());\n}",
"_____no_output_____"
]
],
[
[
"### Custom Initialization\n\nSometimes, the initialization methods we need \nare not standard in DJL. \nIn these cases, we can define a class to implement the `Initializer` interface. \nWe only have to implement the `initialize()` function,\nwhich takes an `NDManager`, a `Shape`, and the `DataType`. \nWe then create the `NDArray` with the aforementioned `Shape` and `DataType`\nand initialize it to what we want! You can also design your\ninitializer to take in some parameters. Simply declare them\nas fields in the class and pass them in as inputs to the constructor!\nIn the example below, we define an initializer\nfor the following strange distribution:\n\n$$\n\\begin{aligned}\n w \\sim \\begin{cases}\n U[5, 10] & \\text{ with probability } \\frac{1}{4} \\\\\n 0 & \\text{ with probability } \\frac{1}{2} \\\\\n U[-10, -5] & \\text{ with probability } \\frac{1}{4}\n \\end{cases}\n\\end{aligned}\n$$",
"_____no_output_____"
]
],
[
[
"class MyInit implements Initializer {\n\n public MyInit() {}\n\n @Override\n public NDArray initialize(NDManager manager, Shape shape, DataType dataType) {\n System.out.printf(\"Init %s\\n\", shape.toString());\n // Here we generate data points \n // from a uniform distribution [-10, 10]\n NDArray data = manager.randomUniform(-10, 10, shape, dataType);\n // We keep the data points whose absolute value is >= 5\n // and set the others to 0.\n // This generates the distribution `w` shown above.\n NDArray absGte5 = data.abs().gte(5); // returns boolean NDArray where \n // true indicates abs >= 5 and\n // false otherwise\n return data.mul(absGte5); // keeps true indices and sets false indices to 0.\n // special operation when multiplying a numerical\n // NDArray with a boolean NDArray\n }\n\n}",
"_____no_output_____"
],
[
"SequentialBlock net = getNet();\nnet.setInitializer(new MyInit());\nnet.initialize(manager, DataType.FLOAT32, x.getShape());\nBlock linearLayer = net.getChildren().valueAt(0);\nNDArray weight = linearLayer.getParameters().valueAt(0).getArray();\nweight",
"_____no_output_____"
]
],
[
[
"Note that we always have the option \nof setting parameters directly by calling `getValue().getArray()` \nto access the underlying `NDArray`. \nA note for advanced users: \nyou cannot directly modify parameters within a `GarbageCollector` scope.\nYou must modify them outside the `GarbageCollector` scope to avoid confusing \nthe automatic differentiation mechanics.",
"_____no_output_____"
]
],
[
[
"// '__'i() is an inplace operation to modify the original NDArray\nNDArray weightLayer = net.getChildren().valueAt(0)\n .getParameters().valueAt(0).getArray();\nweightLayer.addi(7);\nweightLayer.divi(9);\nweightLayer.set(new NDIndex(0, 0), 2020); // set the (0, 0) index to 2020\nweightLayer;",
"_____no_output_____"
]
],
[
[
"## Tied Parameters\n\nOften, we want to share parameters across multiple layers.\nLater we will see that when learning word embeddings,\nit might be sensible to use the same parameters\nboth for encoding and decoding words. \nWe discussed one such case when we introduced :numref:`sec_model_construction`. \nLet us see how to do this a bit more elegantly. \nIn the following we allocate a dense layer \nand then use its parameters specifically \nto set those of another layer.",
"_____no_output_____"
]
],
[
[
"SequentialBlock net = new SequentialBlock();\n\n// We need to give the shared layer a name \n// such that we can reference its parameters\nBlock shared = Linear.builder().setUnits(8).build();\nSequentialBlock sharedRelu = new SequentialBlock();\nsharedRelu.add(shared);\nsharedRelu.add(Activation.reluBlock());\n\nnet.add(Linear.builder().setUnits(8).build());\nnet.add(Activation.reluBlock());\nnet.add(sharedRelu);\nnet.add(sharedRelu);\nnet.add(Linear.builder().setUnits(10).build());\n\nNDArray x = manager.randomUniform(-10f, 10f, new Shape(2, 20), DataType.FLOAT32);\n\nnet.setInitializer(new NormalInitializer());\nnet.initialize(manager, DataType.FLOAT32, x.getShape());\n\nmodel.setBlock(net);\n\nPredictor<NDList, NDList> predictor = model.newPredictor(new NoopTranslator());\nSystem.out.println(predictor.predict(new NDList(x)).singletonOrThrow());\n\n// Check that the parameters are the same\nNDArray shared1 = net.getChildren().valueAt(2)\n .getParameters().valueAt(0).getArray();\nNDArray shared2 = net.getChildren().valueAt(3)\n .getParameters().valueAt(0).getArray();\nshared1.eq(shared2);",
"_____no_output_____"
]
],
[
[
"This example shows that the parameters \nof the second and third layer are tied. \nThey are not just equal, they are \nrepresented by the same exact `NDArray`. \nThus, if we change one of the parameters,\nthe other one changes, too. \nYou might wonder, \n*when parameters are tied\nwhat happens to the gradients?*\nSince the model parameters contain gradients,\nthe gradients of the second hidden layer\nand the third hidden layer are added together\nin `shared.getGradient()` during backpropagation.\n\n## Summary\n\n* We have several ways to access, initialize, and tie model parameters.\n* We can use custom initialization.\n* DJL has a sophisticated mechanism for accessing parameters in a unique and hierarchical manner.\n\n\n## Exercises\n\n1. Use the FancyMLP defined in :numref:`sec_model_construction` and access the parameters of the various layers.\n1. Look at the [DJL documentation](https://javadoc.io/doc/ai.djl/api/latest/ai/djl/training/initializer/Initializer.html) and explore different initializers.\n1. Try accessing the model parameters after `net.initialize()` and before `predictor.predict(x)` to observe the shape of the model parameters. What changes? Why?\n1. Construct a multilayer perceptron containing a shared parameter layer and train it. During the training process, observe the model parameters and gradients of each layer.\n1. Why is sharing parameters a good idea?",
"_____no_output_____"
]
]
] | [
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown"
] | [
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
]
] |
d09b75aa2323c1b83721dda69174d251f40e6fd3 | 1,291 | ipynb | Jupyter Notebook | Tree/101. Symmetric Tree.ipynb | kkoo1122/Leetcode_Practice | f9500d561d4747dc0df3472bbc5e21b51431cac8 | [
"BSD-2-Clause"
] | null | null | null | Tree/101. Symmetric Tree.ipynb | kkoo1122/Leetcode_Practice | f9500d561d4747dc0df3472bbc5e21b51431cac8 | [
"BSD-2-Clause"
] | null | null | null | Tree/101. Symmetric Tree.ipynb | kkoo1122/Leetcode_Practice | f9500d561d4747dc0df3472bbc5e21b51431cac8 | [
"BSD-2-Clause"
] | null | null | null | 22.258621 | 65 | 0.486445 | [
[
[
"def isSymmetric(self, root):\n \"\"\"\n :type root: TreeNode\n :rtype: bool\n \"\"\"\n if not root: \n return True\n else:\n return self.mirror(root.right,root.left)\n \ndef mirror(self,right,left):\n\n if not right and not left: return True\n\n if not right or not left: return False\n\n if right.val == left.val:\n outer=self.mirror(right.right,left.left)\n inner=self.mirror(right.left,left.right) \n return outer and inner\n else:\n return False",
"_____no_output_____"
]
]
] | [
"code"
] | [
[
"code"
]
] |
d09b7e8031a67a8fef7530ac1bf28505acb62960 | 75,449 | ipynb | Jupyter Notebook | Untitled.ipynb | subhadip2038/Python | ac17efabdb8d68dfa6dde4f7c4d6bcfcfd350aaf | [
"BSD-4-Clause-UC"
] | null | null | null | Untitled.ipynb | subhadip2038/Python | ac17efabdb8d68dfa6dde4f7c4d6bcfcfd350aaf | [
"BSD-4-Clause-UC"
] | null | null | null | Untitled.ipynb | subhadip2038/Python | ac17efabdb8d68dfa6dde4f7c4d6bcfcfd350aaf | [
"BSD-4-Clause-UC"
] | null | null | null | 31.21597 | 1,676 | 0.362497 | [
[
[
"# Pandas for Data Science",
"_____no_output_____"
]
],
[
[
"import pandas as pd\ns = pd.Series([3,-5,7,4], index = ['a','b','c','d'])",
"_____no_output_____"
]
],
[
[
"# Calling a single dimentional Array\n",
"_____no_output_____"
]
],
[
[
"s",
"_____no_output_____"
]
],
[
[
"# Create a two dimentional array",
"_____no_output_____"
]
],
[
[
"data = {'Country': ['Belgium','India','Brazil'],'Capital':['Brussels','New Delhi','Brasilia'],\n 'Population': [11190846,1303171035,207847528]}\ndf= pd.DataFrame(data, columns = ['Country','Capital','Population'])",
"_____no_output_____"
],
[
"df",
"_____no_output_____"
]
],
[
[
"# Selection of values",
"_____no_output_____"
]
],
[
[
"s['b']",
"_____no_output_____"
],
[
"df[1:]",
"_____no_output_____"
],
[
"df[0:2]",
"_____no_output_____"
],
[
"df[:]",
"_____no_output_____"
],
[
"df[2:2]",
"_____no_output_____"
],
[
"df[0:2]",
"_____no_output_____"
],
[
"df.iloc[[0],[0]]",
"_____no_output_____"
],
[
"df.loc[[0],['Country']]",
"_____no_output_____"
],
[
"df.ix[2, 'Capital']",
"c:\\users\\subha\\appdata\\local\\programs\\python\\python37-32\\lib\\site-packages\\ipykernel_launcher.py:1: FutureWarning: \n.ix is deprecated. Please use\n.loc for label based indexing or\n.iloc for positional indexing\n\nSee the documentation here:\nhttp://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#ix-indexer-is-deprecated\n \"\"\"Entry point for launching an IPython kernel.\n"
],
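The `FutureWarning` in the output above says `.ix` is deprecated in favour of `.loc`/`.iloc`. A small sketch of the equivalent calls for the same `df` built earlier in this notebook (assuming its default integer index):

```python
# Equivalents of df.ix[2, 'Capital'] without the deprecated .ix indexer
df.loc[2, 'Capital']   # label-based: row label 2, column 'Capital'
df.iloc[2, 1]          # position-based: third row, second column ('Capital')
```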
[
"df['Country']",
"_____no_output_____"
],
[
"df.loc[[0],['Country']]",
"_____no_output_____"
],
[
"df.ix[[2],['Capital']]",
"c:\\users\\subha\\appdata\\local\\programs\\python\\python37-32\\lib\\site-packages\\ipykernel_launcher.py:1: FutureWarning: \n.ix is deprecated. Please use\n.loc for label based indexing or\n.iloc for positional indexing\n\nSee the documentation here:\nhttp://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#ix-indexer-is-deprecated\n \"\"\"Entry point for launching an IPython kernel.\n"
],
[
"df.Country",
"_____no_output_____"
],
[
"df.Capital",
"_____no_output_____"
],
[
"df.Population",
"_____no_output_____"
],
[
"df.Country",
"_____no_output_____"
],
[
"df[:2]",
"_____no_output_____"
],
[
"df[1:2]",
"_____no_output_____"
],
[
"df[2:3]",
"_____no_output_____"
],
[
"df[:3]",
"_____no_output_____"
],
[
"df[2:]",
"_____no_output_____"
],
[
"df[1:]",
"_____no_output_____"
],
[
"import pandas as pd\nenergy = pd.read_excel('Energy Indicators.xls', header= 16,usecols=\"C:F\")",
"_____no_output_____"
],
[
"energy.head()",
"_____no_output_____"
],
[
"energy.tail(50)",
"_____no_output_____"
],
[
"new_name = {'Unnamed: 2' :'Country',\n 'Energy Supply':'Energy Supply',\n 'Energy Supply per capita':'Energy Supply per capita',\n 'Renewable Electricity Production':'% Renewable'}\n",
"_____no_output_____"
],
[
"energy.rename(columns= new_name, inplace=True)",
"_____no_output_____"
],
[
"energy.tail()",
"_____no_output_____"
],
[
"import pandas as pd\nenergy = pd.read_excel('Energy Indicators.xls', header= 16,usecols=\"C:F\")\nnew_name = {'Unnamed: 2' :'Country',\n 'Energy Supply':'Energy Supply',\n 'Energy Supply per capita':'Energy Supply per capita',\n 'Renewable Electricity Production':'% Renewable'}\nenergy.rename(columns= new_name, inplace=True)\nenergy",
"_____no_output_____"
],
[
"energy = energy.drop(energy, axis = 0)",
"_____no_output_____"
],
[
"energy = energy.drop(energy.index[0], axis = 0)\nenergy",
"_____no_output_____"
],
[
"\n",
"_____no_output_____"
]
]
] | [
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] | [
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
d09b8b896e6ebb23f1374d066032de72bd4e0d9b | 58,798 | ipynb | Jupyter Notebook | jupyter/Chapter07/frustum.ipynb | mberkanbicer/software | 89f8004f567129216b92c156bbed658a9c03745a | [
"Apache-2.0"
] | null | null | null | jupyter/Chapter07/frustum.ipynb | mberkanbicer/software | 89f8004f567129216b92c156bbed658a9c03745a | [
"Apache-2.0"
] | null | null | null | jupyter/Chapter07/frustum.ipynb | mberkanbicer/software | 89f8004f567129216b92c156bbed658a9c03745a | [
"Apache-2.0"
] | null | null | null | 292.527363 | 54,284 | 0.933212 | [
[
[
"# ***Introduction to Radar Using Python and MATLAB***\n## Andy Harrison - Copyright (C) 2019 Artech House\n<br/>\n\n# Frustum Radar Cross Section\n***",
"_____no_output_____"
],
[
"Referring to Section 7.4.1.7, the frustum geometry is shown in Figure 7.13. An approximation for the radar cross section due to linearly polarized incident energy is given by (Equation 7.60)\n\n$$\n \\sigma = \\frac{b\\, \\lambda}{8\\pi\\sin\\theta_i} \\tan^2(\\theta_i - \\alpha) \\hspace{0.5in} \\text{(m}^2\\text{)},\n$$\n\nWhen the incident angle is normal to the side of the frustum, $\\theta_i = 90 + \\alpha$, an approximation for the radar cross section is (Equation 7.61)\n\n$$\n \\sigma = \\frac{8\\pi\\big(z_2^{1.5} - z_1^{1.5} \\big)^2 \\sin\\alpha}{9\\lambda\\cos^4\\alpha} \\hspace{0.5in} \\text{(m}^2\\text{)}.\n$$\n\nWhen the incident angle is either $0^o$ or $180^o$, the radar cross section is approximated by a flat circular plate, as given in Table 7.1.\n***",
"_____no_output_____"
],
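A minimal sketch of the two approximations quoted above, written directly from Equations 7.60 and 7.61 rather than taken from the book's `Libs.rcs.frustum` module. The meanings of `b`, `alpha`, `z1` and `z2` (base dimension, half-cone angle, and axial distances of the frustum ends per Figure 7.13) are assumptions here, and all angles are in radians:

```python
from numpy import pi, sin, cos, tan

def rcs_oblique(b, wavelength, theta_i, alpha):
    """Equation 7.60: linearly polarized incidence away from the specular and end-on cases."""
    return b * wavelength / (8.0 * pi * sin(theta_i)) * tan(theta_i - alpha) ** 2

def rcs_normal_to_side(z1, z2, wavelength, alpha):
    """Equation 7.61: incidence normal to the side of the frustum, theta_i = 90 deg + alpha."""
    return 8.0 * pi * (z2 ** 1.5 - z1 ** 1.5) ** 2 * sin(alpha) / (9.0 * wavelength * cos(alpha) ** 4)
```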
[
"Begin by getting the library path",
"_____no_output_____"
]
],
[
[
"import lib_path",
"_____no_output_____"
]
],
[
[
"Set the operating frequency (Hz), the nose radius (m), the base radisu (m) and the length (m)",
"_____no_output_____"
]
],
[
[
"frequency = 1e9\n\nnose_radius = 0.1\n\nbase_radius = 0.8\n\nlength = 3.0",
"_____no_output_____"
]
],
[
[
"Set the incident angles using the `linspace` routine from `scipy`",
"_____no_output_____"
]
],
[
[
"from numpy import linspace\n\nfrom scipy.constants import pi\n\nincident_angle = linspace(0, pi, 1801)",
"_____no_output_____"
]
],
[
[
"Calculate the radar cross section (m^2) for the frustum",
"_____no_output_____"
]
],
[
[
"from Libs.rcs.frustum import radar_cross_section\n\nfrom numpy import array\n\n\nrcs = array([radar_cross_section(frequency, nose_radius, base_radius, length, ia) for ia in incident_angle])",
"_____no_output_____"
],
[
"from matplotlib import pyplot as plt\n\nfrom numpy import log10, degrees\n\n\n# Set the figure size\n\nplt.rcParams[\"figure.figsize\"] = (15, 10)\n\n\n\n# Display the results\n\nplt.plot(degrees(incident_angle), 10 * log10(rcs), '')\n\n\n\n# Set the plot title and labels\n\nplt.title('RCS vs Incident Angle', size=14)\n\nplt.ylabel('RCS (dBsm)', size=12)\n\nplt.xlabel('Incident Angle (deg)', size=12)\n\n\n\n# Set the tick label size\n\nplt.tick_params(labelsize=12)\n\n\n\n# Turn on the grid\n\nplt.grid(linestyle=':', linewidth=0.5)",
"_____no_output_____"
]
]
] | [
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] | [
[
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
]
] |
d09b9afe64a0b7217ce084c90171e9281d00d0ac | 3,325 | ipynb | Jupyter Notebook | Advent_of_Code_1.ipynb | wustudent/advent-of-code-2019 | 218f9a99d346aa41553650768fe190fd12992f42 | [
"MIT"
] | 1 | 2019-12-08T13:53:58.000Z | 2019-12-08T13:53:58.000Z | Advent_of_Code_1.ipynb | wustudent/advent-of-code-2019 | 218f9a99d346aa41553650768fe190fd12992f42 | [
"MIT"
] | null | null | null | Advent_of_Code_1.ipynb | wustudent/advent-of-code-2019 | 218f9a99d346aa41553650768fe190fd12992f42 | [
"MIT"
] | null | null | null | 28.663793 | 666 | 0.498947 | [
[
[
"<a href=\"https://colab.research.google.com/github/wustudent/advent-of-code-2019/blob/master/Advent_of_Code_1.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>",
"_____no_output_____"
],
[
"# Advent of Code Day 1:\n\nsee [here](https://adventofcode.com/2019/day/1) for detail\n\n## Part 1:",
"_____no_output_____"
]
],
[
[
"input=[56123,145192,123702,66722,148748,53337,147279,126828,118438,54030,145839,87751,58832,90085,113196,104802,61235,136935,108620,60795,107908,123023,142399,131074,123411,122653,84776,100891,78816,62762,92077,91428,56831,65122,94694,78668,112506,73406,118239,57897,59200,54437,55185,102667,86076,80655,83406,141502,67171,88472,149260,68395,56828,108798,125682,68203,118263,101824,94853,68536,95646,120283,135355,82701,92243,122282,55760,129959,142814,56599,70836,69996,85262,126648,69043,67460,119934,82453,147012,72957,53374,97577,59696,121630,122666,116591,145967,75699,85963,140970,75612,78792,100795,92034,132569,117172,134179,109504,103707,54664]\n\ndef calcFuel(x):\n return x//3-2\n\nprint(sum(map(calcFuel,input)))",
"3223398\n"
]
],
[
[
"## Part 2:",
"_____no_output_____"
]
],
[
[
"def calcExtra(x):\n sum = 0\n t = x\n while t>0:\n t = calcFuel(t)\n if t>0:\n sum+=t\n return sum\n\nprint(sum(map(calcExtra,input)))",
"4832253\n"
]
]
] | [
"markdown",
"code",
"markdown",
"code"
] | [
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
]
] |
d09ba22e11762b4f211b5cfb92c028865b0b4e0b | 506,439 | ipynb | Jupyter Notebook | notebooks/annotation_poc.ipynb | ydty/concreate_inspection_app | 86f022f2185c71f88c6d0c19439f2f92a94767d1 | [
"MIT"
] | null | null | null | notebooks/annotation_poc.ipynb | ydty/concreate_inspection_app | 86f022f2185c71f88c6d0c19439f2f92a94767d1 | [
"MIT"
] | null | null | null | notebooks/annotation_poc.ipynb | ydty/concreate_inspection_app | 86f022f2185c71f88c6d0c19439f2f92a94767d1 | [
"MIT"
] | null | null | null | 100.623684 | 105,514 | 0.705811 | [
[
[
"from tensorflow.python.client import device_lib\ndevice_lib.list_local_devices()",
"_____no_output_____"
],
[
"import segmentation_models as sm\nimport tensorflow as tf\nfrom pycocotools.coco import COCO\nfrom pathlib import Path\nimport numpy as np\nfrom typing import Final\nimport plotly.express as px\nfrom matplotlib import pyplot as plt\nimport cv2\nfrom albumentations import Compose, VerticalFlip, HorizontalFlip, ShiftScaleRotate, RandomSizedCrop\n",
"Segmentation Models: using `tf.keras` framework.\n"
],
[
"image_size:Final[int] = 256",
"_____no_output_____"
],
[
"annotation_file = Path(\"data/annotations/instances_default.json\")\ncoco = COCO(str(annotation_file))\n\nfor i in coco.getCatIds():\n if coco.loadCats(i)[0][\"name\"] == \"Crack\":\n cat_id = coco.loadCats(i)[0][\"id\"]",
"loading annotations into memory...\nDone (t=0.01s)\ncreating index...\nindex created!\n"
],
[
"def create_mask(anns, cat_id):\n mask = np.zeros((image_size, image_size, 1))\n for i in range(len(anns)):\n if anns[i][\"category_id\"] == cat_id:\n cv2.fillPoly(mask, pts=[np.array(anns[i][\"segmentation\"], dtype=np.int32).reshape(-1,1, 2)], color=(1))\n return mask\n\ndef read_data(idx, cat_id):\n anns = coco.loadAnns(coco.getAnnIds(idx))\n mask = create_mask(anns, cat_id)\n image = np.array(tf.keras.preprocessing.image.load_img(str(Path(\"data/images/annotated\") / coco.loadImgs(idx)[0][\"file_name\"]))) / 255\n return image, mask",
"_____no_output_____"
],
[
"def file_load_generator(cat_id, coco, split=15, is_train=True):\n transform = Compose([VerticalFlip(), HorizontalFlip(), ShiftScaleRotate()])\n ids = coco.getImgIds()[:split] if is_train else coco.getImgIds()[split:]\n for idx in ids:\n image, mask = read_data(idx, cat_id)\n ret = transform(image=image, mask=mask)\n yield ret[\"image\"], ret[\"mask\"]\n\nds = tf.data.Dataset.from_generator(lambda: file_load_generator(cat_id, coco), (tf.float32, tf.float32)).batch(5)\nfor image, label in ds:\n img = image[0]\n lbl = label[0]",
"_____no_output_____"
],
[
"image, label = read_data(15, cat_id)",
"_____no_output_____"
],
[
"fig = plt.figure()\nax1 = fig.add_subplot(1, 2, 1)\nax1.set_title(\"image\",fontsize=20)\nplt.imshow(image)\nax2 = fig.add_subplot(1, 2, 2)\nax2.set_title(\"label\",fontsize=20)\nplt.imshow(label)",
"_____no_output_____"
],
[
"def non_clack_generator(split=180, is_train=True):\n transform = Compose([VerticalFlip(), HorizontalFlip(), ShiftScaleRotate()])\n files = [f for f in Path(\"data/images/non_clack\").glob(\"*\")]\n files = files[:split] if is_train else files[split:]\n for file in files:\n image = np.expand_dims(tf.keras.preprocessing.image.load_img(file),axis=0)\n ret = transform(image=image)\n yield ret[\"image\"], ret[\"image\"]",
"_____no_output_____"
],
[
"train_ds = tf.data.Dataset.from_generator(lambda: file_load_generator(cat_id, coco), (tf.float32, tf.float32)).batch(5)\ntest_ds = tf.data.Dataset.from_generator(lambda: file_load_generator(cat_id, coco, is_train=False), (tf.float32, tf.float32))\n\nnon_clack_images = [np.expand_dims(tf.keras.preprocessing.image.load_img(p),axis=0) for p in Path(\"data/images/non_clack\").glob(\"*\")]\nnon_clack_images = np.concatenate(non_clack_images) / 255\n\nrecon_train_ds = tf.data.Dataset.from_tensor_slices((non_clack_images[:180,:,:,:], non_clack_images[:180,:,:,:])).batch(8)\nrecon_test_ds = tf.data.Dataset.from_tensor_slices((non_clack_images[180:,:,:,:], non_clack_images[180:,:,:,:]))\n\n#recon_train_ds = tf.data.Dataset.from_generator(lambda: non_clack_generator(), (tf.float32, tf.float32)).batch(8)\n#recon_test_ds = tf.data.Dataset.from_generator(lambda: non_clack_generator(is_train=False), (tf.float32, tf.float32))",
"_____no_output_____"
],
[
"try:\n segment_model = tf.keras.models.load_model(\"segmentation.hf5\")\nexcept:\n segment_model = sm.Unet(input_shape=(256, 256, 3))\n\nclass ClackAnnotNet(tf.keras.Model):\n def __init__(self):\n super(ClackAnnotNet, self).__init__()\n self.model = sm.Unet(input_shape=(256, 256, 3), classes=3)\n self.model = tf.keras.Model(inputs=self.model.input, outputs=self.model.get_layer(\"final_conv\").output)\n self.model = tf.keras.Sequential([self.model, tf.keras.layers.Activation(\"sigmoid\", name=\"Sigmoid\")])\n def call(self, inputs):\n return self.model(inputs)\n\ntry:\n model = tf.keras.models.load_model(\"reconstruct.hf5\")\nexcept:\n model = ClackAnnotNet()",
"_____no_output_____"
]
],
[
[
"## 再構成モデル",
"_____no_output_____"
]
],
[
[
"callback = tf.keras.callbacks.EarlyStopping(monitor='val_loss', patience=100)\n\nmodel.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=1e-4), loss=\"mse\")\nmodel.fit(recon_train_ds, epochs=1000, validation_data=recon_test_ds, callbacks=[callback])",
"Epoch 1/1000\n23/23 [==============================] - 19s 758ms/step - loss: 2.6931e-04 - val_loss: 0.0014\nEpoch 2/1000\n23/23 [==============================] - 16s 692ms/step - loss: 3.0463e-04 - val_loss: 0.0013\nEpoch 3/1000\n23/23 [==============================] - 16s 684ms/step - loss: 2.7788e-04 - val_loss: 4.3864e-04\nEpoch 4/1000\n23/23 [==============================] - 15s 667ms/step - loss: 2.6806e-04 - val_loss: 3.5493e-04\nEpoch 5/1000\n23/23 [==============================] - 15s 667ms/step - loss: 2.6256e-04 - val_loss: 2.2297e-04\nEpoch 6/1000\n23/23 [==============================] - 16s 710ms/step - loss: 2.5900e-04 - val_loss: 3.0397e-04\nEpoch 7/1000\n23/23 [==============================] - 16s 696ms/step - loss: 2.6216e-04 - val_loss: 7.5065e-04\nEpoch 8/1000\n23/23 [==============================] - 16s 686ms/step - loss: 2.6271e-04 - val_loss: 2.6105e-04\nEpoch 9/1000\n23/23 [==============================] - 15s 662ms/step - loss: 2.7049e-04 - val_loss: 4.4475e-04\nEpoch 10/1000\n23/23 [==============================] - 15s 665ms/step - loss: 3.6309e-04 - val_loss: 0.0033\nEpoch 11/1000\n23/23 [==============================] - 15s 661ms/step - loss: 4.0707e-04 - val_loss: 0.0044\nEpoch 12/1000\n23/23 [==============================] - 15s 664ms/step - loss: 3.3436e-04 - val_loss: 0.0015\nEpoch 13/1000\n23/23 [==============================] - 15s 662ms/step - loss: 3.2791e-04 - val_loss: 7.9584e-04\nEpoch 14/1000\n23/23 [==============================] - 15s 660ms/step - loss: 3.0325e-04 - val_loss: 0.0011\nEpoch 15/1000\n23/23 [==============================] - 15s 663ms/step - loss: 2.6937e-04 - val_loss: 0.0014\nEpoch 16/1000\n23/23 [==============================] - 15s 660ms/step - loss: 2.6344e-04 - val_loss: 7.5094e-04\nEpoch 17/1000\n23/23 [==============================] - 15s 660ms/step - loss: 2.5912e-04 - val_loss: 2.7722e-04\nEpoch 18/1000\n23/23 [==============================] - 15s 661ms/step - loss: 2.5858e-04 - val_loss: 8.7809e-04\nEpoch 19/1000\n23/23 [==============================] - 15s 661ms/step - loss: 2.7711e-04 - val_loss: 2.1553e-04\nEpoch 20/1000\n23/23 [==============================] - 15s 661ms/step - loss: 2.6730e-04 - val_loss: 2.6714e-04\nEpoch 21/1000\n23/23 [==============================] - 15s 662ms/step - loss: 2.6007e-04 - val_loss: 4.6356e-04\nEpoch 22/1000\n23/23 [==============================] - 15s 661ms/step - loss: 2.4658e-04 - val_loss: 0.0015\nEpoch 23/1000\n23/23 [==============================] - 15s 660ms/step - loss: 2.4077e-04 - val_loss: 0.0012\nEpoch 24/1000\n23/23 [==============================] - 15s 664ms/step - loss: 2.3507e-04 - val_loss: 0.0015\nEpoch 25/1000\n23/23 [==============================] - 17s 731ms/step - loss: 2.4945e-04 - val_loss: 2.9421e-04\nEpoch 26/1000\n23/23 [==============================] - 16s 684ms/step - loss: 2.4924e-04 - val_loss: 0.0013\nEpoch 27/1000\n23/23 [==============================] - 15s 670ms/step - loss: 2.4059e-04 - val_loss: 7.0216e-04\nEpoch 28/1000\n23/23 [==============================] - 16s 675ms/step - loss: 2.3116e-04 - val_loss: 3.3376e-04\nEpoch 29/1000\n23/23 [==============================] - 15s 635ms/step - loss: 2.3752e-04 - val_loss: 0.0029\nEpoch 30/1000\n23/23 [==============================] - 15s 646ms/step - loss: 2.2816e-04 - val_loss: 3.0590e-04\nEpoch 31/1000\n23/23 [==============================] - 14s 626ms/step - loss: 2.2568e-04 - val_loss: 2.8490e-04\nEpoch 32/1000\n23/23 [==============================] - 14s 624ms/step - 
loss: 2.1650e-04 - val_loss: 8.1723e-04\nEpoch 33/1000\n23/23 [==============================] - 15s 647ms/step - loss: 2.2072e-04 - val_loss: 0.0025\nEpoch 34/1000\n23/23 [==============================] - 14s 628ms/step - loss: 2.9488e-04 - val_loss: 2.3816e-04\nEpoch 35/1000\n23/23 [==============================] - 14s 622ms/step - loss: 2.9208e-04 - val_loss: 0.0030\nEpoch 36/1000\n23/23 [==============================] - 14s 629ms/step - loss: 2.4878e-04 - val_loss: 0.0036\nEpoch 37/1000\n23/23 [==============================] - 15s 633ms/step - loss: 2.9344e-04 - val_loss: 9.4073e-04\nEpoch 38/1000\n23/23 [==============================] - 15s 632ms/step - loss: 2.4745e-04 - val_loss: 0.0042\nEpoch 39/1000\n23/23 [==============================] - 15s 639ms/step - loss: 2.3642e-04 - val_loss: 9.5809e-04\nEpoch 40/1000\n23/23 [==============================] - 16s 687ms/step - loss: 2.3311e-04 - val_loss: 3.2295e-04\nEpoch 41/1000\n23/23 [==============================] - 16s 678ms/step - loss: 2.3596e-04 - val_loss: 2.5416e-04\nEpoch 42/1000\n23/23 [==============================] - 16s 701ms/step - loss: 2.3155e-04 - val_loss: 1.9505e-04\nEpoch 43/1000\n23/23 [==============================] - 16s 682ms/step - loss: 2.4357e-04 - val_loss: 5.2335e-04\nEpoch 44/1000\n23/23 [==============================] - 16s 679ms/step - loss: 2.5182e-04 - val_loss: 3.6590e-04\nEpoch 45/1000\n23/23 [==============================] - 15s 669ms/step - loss: 2.4585e-04 - val_loss: 9.1492e-04\nEpoch 46/1000\n23/23 [==============================] - 15s 665ms/step - loss: 2.2842e-04 - val_loss: 5.7687e-04\nEpoch 47/1000\n23/23 [==============================] - 15s 667ms/step - loss: 2.2323e-04 - val_loss: 1.5992e-04\nEpoch 48/1000\n23/23 [==============================] - 16s 676ms/step - loss: 2.3632e-04 - val_loss: 7.4080e-04\nEpoch 49/1000\n23/23 [==============================] - 16s 676ms/step - loss: 2.2493e-04 - val_loss: 0.0032\nEpoch 50/1000\n23/23 [==============================] - 15s 669ms/step - loss: 2.6978e-04 - val_loss: 4.4632e-04\nEpoch 51/1000\n23/23 [==============================] - 15s 668ms/step - loss: 2.2619e-04 - val_loss: 9.1764e-04\nEpoch 52/1000\n23/23 [==============================] - 15s 667ms/step - loss: 2.1252e-04 - val_loss: 2.3814e-04\nEpoch 53/1000\n23/23 [==============================] - 16s 683ms/step - loss: 2.0336e-04 - val_loss: 1.6774e-04\nEpoch 54/1000\n23/23 [==============================] - 15s 672ms/step - loss: 2.0439e-04 - val_loss: 4.5984e-04\nEpoch 55/1000\n23/23 [==============================] - 16s 675ms/step - loss: 2.0363e-04 - val_loss: 0.0013\nEpoch 56/1000\n23/23 [==============================] - 15s 670ms/step - loss: 2.2389e-04 - val_loss: 0.0065\nEpoch 57/1000\n23/23 [==============================] - 15s 671ms/step - loss: 2.7850e-04 - val_loss: 6.5166e-04\nEpoch 58/1000\n23/23 [==============================] - 15s 662ms/step - loss: 2.6746e-04 - val_loss: 0.0011\nEpoch 59/1000\n23/23 [==============================] - 15s 671ms/step - loss: 2.6016e-04 - val_loss: 3.5491e-04\nEpoch 60/1000\n23/23 [==============================] - 15s 671ms/step - loss: 2.0630e-04 - val_loss: 1.3370e-04\nEpoch 61/1000\n23/23 [==============================] - 15s 669ms/step - loss: 2.0109e-04 - val_loss: 2.0369e-04\nEpoch 62/1000\n23/23 [==============================] - 16s 675ms/step - loss: 2.0214e-04 - val_loss: 3.9300e-04\nEpoch 63/1000\n23/23 [==============================] - 16s 675ms/step - loss: 1.9187e-04 - val_loss: 2.6768e-04\nEpoch 
64/1000\n23/23 [==============================] - 16s 676ms/step - loss: 2.0184e-04 - val_loss: 2.5621e-04\nEpoch 65/1000\n23/23 [==============================] - 15s 669ms/step - loss: 2.0221e-04 - val_loss: 2.5124e-04\nEpoch 66/1000\n23/23 [==============================] - 16s 678ms/step - loss: 1.8469e-04 - val_loss: 2.4853e-04\nEpoch 67/1000\n23/23 [==============================] - 16s 679ms/step - loss: 2.1092e-04 - val_loss: 2.8843e-04\nEpoch 68/1000\n23/23 [==============================] - 15s 671ms/step - loss: 1.9626e-04 - val_loss: 0.0019\nEpoch 69/1000\n23/23 [==============================] - 15s 668ms/step - loss: 1.9190e-04 - val_loss: 2.1657e-04\nEpoch 70/1000\n23/23 [==============================] - 15s 667ms/step - loss: 1.9456e-04 - val_loss: 2.4827e-04\nEpoch 71/1000\n23/23 [==============================] - 15s 673ms/step - loss: 1.8696e-04 - val_loss: 2.3935e-04\nEpoch 72/1000\n23/23 [==============================] - 16s 678ms/step - loss: 1.7884e-04 - val_loss: 4.1783e-04\nEpoch 73/1000\n23/23 [==============================] - 16s 675ms/step - loss: 1.7981e-04 - val_loss: 1.9446e-04\nEpoch 74/1000\n23/23 [==============================] - 15s 667ms/step - loss: 2.0818e-04 - val_loss: 3.9886e-04\nEpoch 75/1000\n23/23 [==============================] - 15s 667ms/step - loss: 1.9467e-04 - val_loss: 0.0014\nEpoch 76/1000\n23/23 [==============================] - 16s 694ms/step - loss: 2.1267e-04 - val_loss: 5.1296e-04\nEpoch 77/1000\n23/23 [==============================] - 16s 692ms/step - loss: 2.3400e-04 - val_loss: 6.7129e-04\nEpoch 78/1000\n23/23 [==============================] - 16s 699ms/step - loss: 2.2141e-04 - val_loss: 0.0010\nEpoch 79/1000\n23/23 [==============================] - 16s 709ms/step - loss: 2.0008e-04 - val_loss: 9.8638e-04\nEpoch 80/1000\n23/23 [==============================] - 16s 676ms/step - loss: 2.3342e-04 - val_loss: 1.6988e-04\nEpoch 81/1000\n23/23 [==============================] - 17s 727ms/step - loss: 1.9834e-04 - val_loss: 0.0025\nEpoch 82/1000\n23/23 [==============================] - 16s 692ms/step - loss: 2.0885e-04 - val_loss: 0.0023\nEpoch 83/1000\n23/23 [==============================] - 16s 694ms/step - loss: 2.0974e-04 - val_loss: 6.0048e-04\nEpoch 84/1000\n23/23 [==============================] - 15s 670ms/step - loss: 2.0607e-04 - val_loss: 0.0084\nEpoch 85/1000\n23/23 [==============================] - 16s 692ms/step - loss: 2.0870e-04 - val_loss: 0.0017\nEpoch 86/1000\n23/23 [==============================] - 16s 706ms/step - loss: 1.9577e-04 - val_loss: 3.6291e-04\nEpoch 87/1000\n23/23 [==============================] - 17s 723ms/step - loss: 1.7144e-04 - val_loss: 4.3733e-04\nEpoch 88/1000\n23/23 [==============================] - 16s 701ms/step - loss: 1.9855e-04 - val_loss: 4.0861e-04\nEpoch 89/1000\n23/23 [==============================] - 16s 702ms/step - loss: 1.9790e-04 - val_loss: 1.7028e-04\nEpoch 90/1000\n23/23 [==============================] - 16s 682ms/step - loss: 1.8026e-04 - val_loss: 9.2695e-04\nEpoch 91/1000\n23/23 [==============================] - 16s 701ms/step - loss: 1.7046e-04 - val_loss: 9.8231e-04\nEpoch 92/1000\n23/23 [==============================] - 18s 797ms/step - loss: 1.6572e-04 - val_loss: 6.0884e-04\nEpoch 93/1000\n23/23 [==============================] - 17s 753ms/step - loss: 1.5448e-04 - val_loss: 2.2141e-04\nEpoch 94/1000\n23/23 [==============================] - 15s 671ms/step - loss: 1.6892e-04 - val_loss: 4.5721e-04\nEpoch 95/1000\n23/23 [==============================] 
- 15s 671ms/step - loss: 1.7620e-04 - val_loss: 2.8149e-04\nEpoch 96/1000\n23/23 [==============================] - 16s 673ms/step - loss: 1.7527e-04 - val_loss: 3.0979e-04\nEpoch 97/1000\n23/23 [==============================] - 15s 658ms/step - loss: 1.7063e-04 - val_loss: 4.5756e-04\nEpoch 98/1000\n23/23 [==============================] - 15s 652ms/step - loss: 1.6344e-04 - val_loss: 2.7359e-04\nEpoch 99/1000\n23/23 [==============================] - 16s 695ms/step - loss: 1.5203e-04 - val_loss: 4.3915e-04\nEpoch 100/1000\n23/23 [==============================] - 16s 700ms/step - loss: 1.7857e-04 - val_loss: 0.0011\nEpoch 101/1000\n23/23 [==============================] - 15s 651ms/step - loss: 1.9629e-04 - val_loss: 3.3144e-04\nEpoch 102/1000\n23/23 [==============================] - 15s 652ms/step - loss: 1.6494e-04 - val_loss: 4.2757e-04\nEpoch 103/1000\n23/23 [==============================] - 15s 651ms/step - loss: 1.5575e-04 - val_loss: 5.2974e-04\nEpoch 104/1000\n23/23 [==============================] - 15s 650ms/step - loss: 1.5404e-04 - val_loss: 2.1810e-04\nEpoch 105/1000\n23/23 [==============================] - 15s 652ms/step - loss: 1.3957e-04 - val_loss: 2.0010e-04\nEpoch 106/1000\n23/23 [==============================] - 15s 650ms/step - loss: 1.4385e-04 - val_loss: 5.0322e-04\nEpoch 107/1000\n23/23 [==============================] - 15s 651ms/step - loss: 1.3835e-04 - val_loss: 0.0012\nEpoch 108/1000\n23/23 [==============================] - 15s 651ms/step - loss: 1.5717e-04 - val_loss: 8.0037e-04\nEpoch 109/1000\n23/23 [==============================] - 15s 652ms/step - loss: 1.4003e-04 - val_loss: 3.7162e-04\nEpoch 110/1000\n23/23 [==============================] - 15s 650ms/step - loss: 1.5414e-04 - val_loss: 4.7373e-04\nEpoch 111/1000\n23/23 [==============================] - 15s 651ms/step - loss: 1.3622e-04 - val_loss: 3.5076e-04\nEpoch 112/1000\n23/23 [==============================] - 15s 650ms/step - loss: 1.5844e-04 - val_loss: 5.5163e-04\nEpoch 113/1000\n23/23 [==============================] - 15s 651ms/step - loss: 1.6136e-04 - val_loss: 2.5431e-04\nEpoch 114/1000\n23/23 [==============================] - 15s 651ms/step - loss: 1.5537e-04 - val_loss: 1.7010e-04\nEpoch 115/1000\n23/23 [==============================] - 15s 650ms/step - loss: 1.6491e-04 - val_loss: 5.0787e-04\nEpoch 116/1000\n23/23 [==============================] - 15s 651ms/step - loss: 1.7724e-04 - val_loss: 0.0017\nEpoch 117/1000\n23/23 [==============================] - 15s 652ms/step - loss: 1.6633e-04 - val_loss: 7.8944e-04\nEpoch 118/1000\n23/23 [==============================] - 15s 650ms/step - loss: 1.7287e-04 - val_loss: 2.8613e-04\nEpoch 119/1000\n23/23 [==============================] - 15s 652ms/step - loss: 1.8492e-04 - val_loss: 4.1230e-04\nEpoch 120/1000\n23/23 [==============================] - 15s 652ms/step - loss: 1.4525e-04 - val_loss: 0.0010\nEpoch 121/1000\n23/23 [==============================] - 15s 648ms/step - loss: 1.3240e-04 - val_loss: 2.0035e-04\nEpoch 122/1000\n23/23 [==============================] - 15s 651ms/step - loss: 1.3879e-04 - val_loss: 2.8640e-04\nEpoch 123/1000\n23/23 [==============================] - 15s 651ms/step - loss: 1.3572e-04 - val_loss: 0.0013\nEpoch 124/1000\n23/23 [==============================] - 15s 650ms/step - loss: 1.4374e-04 - val_loss: 4.2176e-04\nEpoch 125/1000\n23/23 [==============================] - 15s 651ms/step - loss: 1.4544e-04 - val_loss: 8.4127e-04\nEpoch 126/1000\n23/23 [==============================] - 15s 
652ms/step - loss: 1.3499e-04 - val_loss: 2.4049e-04\nEpoch 127/1000\n23/23 [==============================] - 15s 652ms/step - loss: 1.6036e-04 - val_loss: 0.0039\nEpoch 128/1000\n23/23 [==============================] - 16s 695ms/step - loss: 2.0673e-04 - val_loss: 0.0018\nEpoch 129/1000\n23/23 [==============================] - 15s 654ms/step - loss: 2.1472e-04 - val_loss: 6.8521e-04\nEpoch 130/1000\n23/23 [==============================] - 15s 671ms/step - loss: 1.6757e-04 - val_loss: 0.0011\nEpoch 131/1000\n23/23 [==============================] - 15s 662ms/step - loss: 1.3960e-04 - val_loss: 1.2232e-04\nEpoch 132/1000\n23/23 [==============================] - 15s 664ms/step - loss: 1.5514e-04 - val_loss: 4.6518e-04\nEpoch 133/1000\n23/23 [==============================] - 15s 651ms/step - loss: 1.4478e-04 - val_loss: 0.0019\nEpoch 134/1000\n23/23 [==============================] - 15s 672ms/step - loss: 1.3420e-04 - val_loss: 0.0020\nEpoch 135/1000\n23/23 [==============================] - 16s 714ms/step - loss: 1.9585e-04 - val_loss: 5.7078e-04\nEpoch 136/1000\n23/23 [==============================] - 16s 681ms/step - loss: 1.5260e-04 - val_loss: 6.1735e-04\nEpoch 137/1000\n23/23 [==============================] - 15s 673ms/step - loss: 1.4818e-04 - val_loss: 0.0015\nEpoch 138/1000\n23/23 [==============================] - 17s 722ms/step - loss: 1.4660e-04 - val_loss: 0.0028\nEpoch 139/1000\n23/23 [==============================] - 16s 696ms/step - loss: 1.5105e-04 - val_loss: 2.7117e-04\nEpoch 140/1000\n23/23 [==============================] - 16s 712ms/step - loss: 1.3566e-04 - val_loss: 5.0722e-04\nEpoch 141/1000\n23/23 [==============================] - 16s 693ms/step - loss: 1.7393e-04 - val_loss: 0.0011\nEpoch 142/1000\n23/23 [==============================] - 17s 741ms/step - loss: 2.6077e-04 - val_loss: 0.0153\nEpoch 143/1000\n23/23 [==============================] - 16s 702ms/step - loss: 0.0010 - val_loss: 0.0019\nEpoch 144/1000\n23/23 [==============================] - 17s 724ms/step - loss: 5.3874e-04 - val_loss: 0.0253\nEpoch 145/1000\n23/23 [==============================] - 17s 738ms/step - loss: 2.4405e-04 - val_loss: 0.0023\nEpoch 146/1000\n23/23 [==============================] - 16s 675ms/step - loss: 1.6825e-04 - val_loss: 9.8633e-04\nEpoch 147/1000\n23/23 [==============================] - 16s 682ms/step - loss: 1.4791e-04 - val_loss: 7.7319e-04\nEpoch 148/1000\n23/23 [==============================] - 16s 702ms/step - loss: 1.3564e-04 - val_loss: 0.0049\nEpoch 149/1000\n23/23 [==============================] - 16s 682ms/step - loss: 1.8267e-04 - val_loss: 0.0041\nEpoch 150/1000\n23/23 [==============================] - 15s 668ms/step - loss: 2.0590e-04 - val_loss: 0.0102\nEpoch 151/1000\n23/23 [==============================] - 15s 668ms/step - loss: 1.9887e-04 - val_loss: 0.0017\nEpoch 152/1000\n23/23 [==============================] - 15s 675ms/step - loss: 1.5852e-04 - val_loss: 0.0012\nEpoch 153/1000\n23/23 [==============================] - 17s 719ms/step - loss: 1.2788e-04 - val_loss: 3.1935e-04\nEpoch 154/1000\n23/23 [==============================] - 16s 700ms/step - loss: 1.3903e-04 - val_loss: 2.0976e-04\nEpoch 155/1000\n23/23 [==============================] - 16s 715ms/step - loss: 1.1767e-04 - val_loss: 2.9402e-04\nEpoch 156/1000\n23/23 [==============================] - 17s 739ms/step - loss: 1.1437e-04 - val_loss: 5.4074e-04\nEpoch 157/1000\n23/23 [==============================] - 15s 646ms/step - loss: 1.1515e-04 - val_loss: 0.0018\nEpoch 
158/1000\n23/23 [==============================] - 15s 633ms/step - loss: 1.5289e-04 - val_loss: 3.3638e-04\nEpoch 159/1000\n23/23 [==============================] - 15s 660ms/step - loss: 1.1444e-04 - val_loss: 0.0010\nEpoch 160/1000\n23/23 [==============================] - 15s 653ms/step - loss: 1.1932e-04 - val_loss: 2.1282e-04\nEpoch 161/1000\n23/23 [==============================] - 14s 622ms/step - loss: 1.1191e-04 - val_loss: 0.0011\nEpoch 162/1000\n23/23 [==============================] - 15s 660ms/step - loss: 1.1420e-04 - val_loss: 0.0026\nEpoch 163/1000\n23/23 [==============================] - 16s 675ms/step - loss: 1.4274e-04 - val_loss: 2.2346e-04\nEpoch 164/1000\n23/23 [==============================] - 15s 662ms/step - loss: 1.2231e-04 - val_loss: 5.4641e-04\nEpoch 165/1000\n23/23 [==============================] - 16s 683ms/step - loss: 1.6918e-04 - val_loss: 0.0025\nEpoch 166/1000\n23/23 [==============================] - 16s 674ms/step - loss: 1.5079e-04 - val_loss: 0.0017\nEpoch 167/1000\n23/23 [==============================] - 15s 634ms/step - loss: 1.9319e-04 - val_loss: 0.0022\nEpoch 168/1000\n23/23 [==============================] - 16s 684ms/step - loss: 1.7569e-04 - val_loss: 4.2596e-04\nEpoch 169/1000\n23/23 [==============================] - 14s 629ms/step - loss: 1.1923e-04 - val_loss: 0.0011\nEpoch 170/1000\n23/23 [==============================] - 14s 628ms/step - loss: 1.1500e-04 - val_loss: 2.8580e-04\nEpoch 171/1000\n23/23 [==============================] - 15s 646ms/step - loss: 1.0920e-04 - val_loss: 9.6705e-04\nEpoch 172/1000\n23/23 [==============================] - 15s 636ms/step - loss: 1.2356e-04 - val_loss: 0.0022\nEpoch 173/1000\n23/23 [==============================] - 15s 649ms/step - loss: 1.4291e-04 - val_loss: 5.1553e-04\nEpoch 174/1000\n23/23 [==============================] - 15s 649ms/step - loss: 1.1708e-04 - val_loss: 0.0013\nEpoch 175/1000\n23/23 [==============================] - 15s 638ms/step - loss: 1.0709e-04 - val_loss: 3.4964e-04\nEpoch 176/1000\n23/23 [==============================] - 15s 646ms/step - loss: 1.1189e-04 - val_loss: 5.0408e-04\nEpoch 177/1000\n23/23 [==============================] - 18s 775ms/step - loss: 1.1457e-04 - val_loss: 3.4192e-04\nEpoch 178/1000\n23/23 [==============================] - 17s 717ms/step - loss: 1.4419e-04 - val_loss: 0.0011\nEpoch 179/1000\n23/23 [==============================] - 18s 787ms/step - loss: 1.6207e-04 - val_loss: 0.0017\nEpoch 180/1000\n23/23 [==============================] - 17s 750ms/step - loss: 1.1805e-04 - val_loss: 2.0848e-04\nEpoch 181/1000\n23/23 [==============================] - 17s 723ms/step - loss: 1.1239e-04 - val_loss: 6.9807e-04\nEpoch 182/1000\n23/23 [==============================] - 15s 669ms/step - loss: 1.0697e-04 - val_loss: 1.1478e-04\nEpoch 183/1000\n23/23 [==============================] - 15s 665ms/step - loss: 1.1756e-04 - val_loss: 7.4999e-04\nEpoch 184/1000\n23/23 [==============================] - 15s 663ms/step - loss: 1.1200e-04 - val_loss: 2.3420e-04\nEpoch 185/1000\n23/23 [==============================] - 15s 664ms/step - loss: 1.1297e-04 - val_loss: 1.5326e-04\nEpoch 186/1000\n23/23 [==============================] - 15s 663ms/step - loss: 1.1030e-04 - val_loss: 3.4488e-04\nEpoch 187/1000\n23/23 [==============================] - 15s 662ms/step - loss: 1.0501e-04 - val_loss: 1.1828e-04\nEpoch 188/1000\n23/23 [==============================] - 15s 661ms/step - loss: 1.2165e-04 - val_loss: 5.8026e-04\nEpoch 189/1000\n23/23 
[==============================] - 15s 665ms/step - loss: 1.5336e-04 - val_loss: 7.8114e-04\nEpoch 190/1000\n23/23 [==============================] - 15s 664ms/step - loss: 1.3314e-04 - val_loss: 4.8967e-04\nEpoch 191/1000\n23/23 [==============================] - 15s 665ms/step - loss: 1.0769e-04 - val_loss: 0.0023\nEpoch 192/1000\n23/23 [==============================] - 15s 670ms/step - loss: 1.0570e-04 - val_loss: 0.0012\nEpoch 193/1000\n23/23 [==============================] - 15s 663ms/step - loss: 1.2058e-04 - val_loss: 2.7041e-04\nEpoch 194/1000\n23/23 [==============================] - 15s 662ms/step - loss: 1.0705e-04 - val_loss: 4.7686e-04\nEpoch 195/1000\n23/23 [==============================] - 16s 681ms/step - loss: 8.9597e-05 - val_loss: 2.1531e-04\nEpoch 196/1000\n23/23 [==============================] - 15s 654ms/step - loss: 1.1748e-04 - val_loss: 5.7347e-04\nEpoch 197/1000\n23/23 [==============================] - 15s 662ms/step - loss: 1.4451e-04 - val_loss: 0.0037\nEpoch 198/1000\n23/23 [==============================] - 16s 678ms/step - loss: 1.3179e-04 - val_loss: 0.0012\nEpoch 199/1000\n23/23 [==============================] - 16s 671ms/step - loss: 9.8411e-05 - val_loss: 1.2076e-04\nEpoch 200/1000\n23/23 [==============================] - 15s 665ms/step - loss: 8.6410e-05 - val_loss: 4.3190e-04\nEpoch 201/1000\n23/23 [==============================] - 17s 725ms/step - loss: 9.5341e-05 - val_loss: 4.6861e-04\nEpoch 202/1000\n23/23 [==============================] - 16s 706ms/step - loss: 8.3709e-05 - val_loss: 1.9518e-04\nEpoch 203/1000\n23/23 [==============================] - 16s 704ms/step - loss: 8.2903e-05 - val_loss: 1.0753e-04\nEpoch 204/1000\n23/23 [==============================] - 16s 685ms/step - loss: 9.6000e-05 - val_loss: 4.0448e-04\nEpoch 205/1000\n23/23 [==============================] - 16s 688ms/step - loss: 1.1637e-04 - val_loss: 0.0019\nEpoch 206/1000\n23/23 [==============================] - 17s 718ms/step - loss: 1.2999e-04 - val_loss: 5.5372e-04\nEpoch 207/1000\n23/23 [==============================] - 16s 677ms/step - loss: 1.1406e-04 - val_loss: 4.4783e-04\nEpoch 208/1000\n23/23 [==============================] - 15s 667ms/step - loss: 9.2362e-05 - val_loss: 1.0418e-04\nEpoch 209/1000\n23/23 [==============================] - 16s 698ms/step - loss: 8.6476e-05 - val_loss: 7.8167e-05\nEpoch 210/1000\n23/23 [==============================] - 17s 724ms/step - loss: 9.9494e-05 - val_loss: 5.6862e-04\nEpoch 211/1000\n23/23 [==============================] - 17s 713ms/step - loss: 1.2758e-04 - val_loss: 0.0011\nEpoch 212/1000\n23/23 [==============================] - 16s 696ms/step - loss: 1.1053e-04 - val_loss: 4.5280e-04\nEpoch 213/1000\n23/23 [==============================] - 16s 700ms/step - loss: 9.1211e-05 - val_loss: 2.8869e-04\nEpoch 214/1000\n23/23 [==============================] - 16s 677ms/step - loss: 9.4580e-05 - val_loss: 1.1355e-04\nEpoch 215/1000\n23/23 [==============================] - 15s 670ms/step - loss: 9.9763e-05 - val_loss: 6.4341e-04\nEpoch 216/1000\n23/23 [==============================] - 14s 621ms/step - loss: 1.3142e-04 - val_loss: 2.7706e-04\nEpoch 217/1000\n23/23 [==============================] - 14s 620ms/step - loss: 1.2608e-04 - val_loss: 0.0014\nEpoch 218/1000\n23/23 [==============================] - 14s 620ms/step - loss: 1.1253e-04 - val_loss: 2.4089e-04\nEpoch 219/1000\n23/23 [==============================] - 14s 619ms/step - loss: 9.5533e-05 - val_loss: 0.0011\nEpoch 220/1000\n23/23 
[==============================] - 14s 618ms/step - loss: 8.5999e-05 - val_loss: 0.0012\nEpoch 221/1000\n23/23 [==============================] - 14s 617ms/step - loss: 9.5943e-05 - val_loss: 0.0018\nEpoch 222/1000\n23/23 [==============================] - 14s 619ms/step - loss: 1.0874e-04 - val_loss: 5.2739e-04\nEpoch 223/1000\n23/23 [==============================] - 14s 617ms/step - loss: 1.3878e-04 - val_loss: 7.3780e-04\nEpoch 224/1000\n23/23 [==============================] - 14s 618ms/step - loss: 1.0670e-04 - val_loss: 3.8383e-04\nEpoch 225/1000\n23/23 [==============================] - 14s 621ms/step - loss: 8.3555e-05 - val_loss: 2.0055e-04\nEpoch 226/1000\n23/23 [==============================] - 15s 636ms/step - loss: 8.0928e-05 - val_loss: 4.5755e-04\nEpoch 227/1000\n23/23 [==============================] - 16s 699ms/step - loss: 7.6199e-05 - val_loss: 8.3726e-04\nEpoch 228/1000\n23/23 [==============================] - 15s 672ms/step - loss: 8.4891e-05 - val_loss: 0.0040\nEpoch 229/1000\n23/23 [==============================] - 15s 672ms/step - loss: 1.1997e-04 - val_loss: 0.0040\nEpoch 230/1000\n23/23 [==============================] - 16s 690ms/step - loss: 1.8949e-04 - val_loss: 0.0016\nEpoch 231/1000\n23/23 [==============================] - 16s 689ms/step - loss: 1.0629e-04 - val_loss: 7.4612e-04\nEpoch 232/1000\n23/23 [==============================] - 16s 691ms/step - loss: 8.7829e-05 - val_loss: 4.4970e-04\nEpoch 233/1000\n23/23 [==============================] - 16s 704ms/step - loss: 8.0259e-05 - val_loss: 4.5791e-04\nEpoch 234/1000\n23/23 [==============================] - 17s 749ms/step - loss: 7.0723e-05 - val_loss: 5.3438e-04\nEpoch 235/1000\n23/23 [==============================] - 16s 707ms/step - loss: 6.9378e-05 - val_loss: 9.9936e-05\nEpoch 236/1000\n23/23 [==============================] - 15s 670ms/step - loss: 8.0675e-05 - val_loss: 5.5927e-04\nEpoch 237/1000\n23/23 [==============================] - 15s 665ms/step - loss: 9.1138e-05 - val_loss: 1.1798e-04\nEpoch 238/1000\n23/23 [==============================] - 15s 665ms/step - loss: 9.9707e-05 - val_loss: 0.0014\nEpoch 239/1000\n23/23 [==============================] - 15s 632ms/step - loss: 7.8218e-05 - val_loss: 4.0964e-04\nEpoch 240/1000\n23/23 [==============================] - 14s 628ms/step - loss: 8.3750e-05 - val_loss: 2.1858e-04\nEpoch 241/1000\n23/23 [==============================] - 15s 636ms/step - loss: 7.9370e-05 - val_loss: 3.6395e-04\nEpoch 242/1000\n23/23 [==============================] - 14s 627ms/step - loss: 7.1904e-05 - val_loss: 2.6569e-04\nEpoch 243/1000\n23/23 [==============================] - 15s 642ms/step - loss: 9.6953e-05 - val_loss: 0.0019\nEpoch 244/1000\n23/23 [==============================] - 15s 636ms/step - loss: 1.7347e-04 - val_loss: 5.8287e-04\nEpoch 245/1000\n23/23 [==============================] - 15s 650ms/step - loss: 1.1883e-04 - val_loss: 9.0168e-04\nEpoch 246/1000\n23/23 [==============================] - 15s 633ms/step - loss: 8.6529e-05 - val_loss: 0.0011\nEpoch 247/1000\n23/23 [==============================] - 15s 640ms/step - loss: 7.9669e-05 - val_loss: 5.3988e-04\nEpoch 248/1000\n23/23 [==============================] - 15s 641ms/step - loss: 1.0798e-04 - val_loss: 7.6920e-04\nEpoch 249/1000\n23/23 [==============================] - 15s 643ms/step - loss: 1.3301e-04 - val_loss: 9.5231e-04\nEpoch 250/1000\n23/23 [==============================] - 15s 648ms/step - loss: 1.0443e-04 - val_loss: 5.6924e-04\nEpoch 251/1000\n23/23 
[==============================] - 15s 638ms/step - loss: 1.0534e-04 - val_loss: 1.1943e-04\nEpoch 252/1000\n23/23 [==============================] - 15s 636ms/step - loss: 1.0536e-04 - val_loss: 4.3356e-04\nEpoch 253/1000\n23/23 [==============================] - 15s 653ms/step - loss: 1.0634e-04 - val_loss: 1.5282e-04\nEpoch 254/1000\n23/23 [==============================] - 15s 646ms/step - loss: 9.4955e-05 - val_loss: 6.5689e-04\nEpoch 255/1000\n23/23 [==============================] - 14s 627ms/step - loss: 9.6670e-05 - val_loss: 6.7971e-04\nEpoch 256/1000\n23/23 [==============================] - 14s 622ms/step - loss: 9.0910e-05 - val_loss: 0.0014\nEpoch 257/1000\n23/23 [==============================] - 15s 642ms/step - loss: 8.5290e-05 - val_loss: 4.6642e-04\nEpoch 258/1000\n23/23 [==============================] - 15s 651ms/step - loss: 8.4130e-05 - val_loss: 3.9960e-04\nEpoch 259/1000\n23/23 [==============================] - 15s 643ms/step - loss: 7.7888e-05 - val_loss: 2.0773e-04\nEpoch 260/1000\n23/23 [==============================] - 15s 637ms/step - loss: 7.7414e-05 - val_loss: 9.3789e-05\nEpoch 261/1000\n23/23 [==============================] - 17s 759ms/step - loss: 9.2085e-05 - val_loss: 0.0015\nEpoch 262/1000\n23/23 [==============================] - 18s 760ms/step - loss: 9.8998e-05 - val_loss: 4.4217e-04\nEpoch 263/1000\n23/23 [==============================] - 18s 782ms/step - loss: 9.7327e-05 - val_loss: 7.5551e-05\nEpoch 264/1000\n23/23 [==============================] - 16s 683ms/step - loss: 9.8785e-05 - val_loss: 3.3084e-04\nEpoch 265/1000\n23/23 [==============================] - 15s 655ms/step - loss: 7.4745e-05 - val_loss: 8.2197e-04\nEpoch 266/1000\n23/23 [==============================] - 16s 685ms/step - loss: 7.5642e-05 - val_loss: 0.0011\nEpoch 267/1000\n23/23 [==============================] - 17s 718ms/step - loss: 8.0876e-05 - val_loss: 2.2032e-04\nEpoch 268/1000\n23/23 [==============================] - 16s 708ms/step - loss: 8.3589e-05 - val_loss: 3.7776e-04\nEpoch 269/1000\n23/23 [==============================] - 18s 769ms/step - loss: 8.6947e-05 - val_loss: 3.6664e-04\nEpoch 270/1000\n23/23 [==============================] - 16s 714ms/step - loss: 1.2916e-04 - val_loss: 3.0799e-04\nEpoch 271/1000\n23/23 [==============================] - 17s 717ms/step - loss: 1.0953e-04 - val_loss: 2.2332e-04\nEpoch 272/1000\n23/23 [==============================] - 17s 718ms/step - loss: 8.5116e-05 - val_loss: 2.6896e-04\nEpoch 273/1000\n23/23 [==============================] - 17s 736ms/step - loss: 8.0718e-05 - val_loss: 3.2201e-04\nEpoch 274/1000\n23/23 [==============================] - 17s 734ms/step - loss: 7.9204e-05 - val_loss: 0.0013\nEpoch 275/1000\n23/23 [==============================] - 15s 672ms/step - loss: 7.8251e-05 - val_loss: 4.8717e-04\nEpoch 276/1000\n23/23 [==============================] - 16s 702ms/step - loss: 9.9208e-05 - val_loss: 1.5839e-04\nEpoch 277/1000\n23/23 [==============================] - 15s 659ms/step - loss: 9.5094e-05 - val_loss: 0.0039\nEpoch 278/1000\n23/23 [==============================] - 16s 709ms/step - loss: 2.0854e-04 - val_loss: 6.8690e-04\nEpoch 279/1000\n23/23 [==============================] - 17s 742ms/step - loss: 3.6528e-04 - val_loss: 0.0039\nEpoch 280/1000\n23/23 [==============================] - 19s 827ms/step - loss: 2.5086e-04 - val_loss: 2.2197e-04\nEpoch 281/1000\n23/23 [==============================] - 18s 765ms/step - loss: 1.4447e-04 - val_loss: 2.9405e-04\nEpoch 282/1000\n23/23 
[==============================] - 16s 707ms/step - loss: 1.1184e-04 - val_loss: 5.3401e-04\nEpoch 283/1000\n23/23 [==============================] - 19s 836ms/step - loss: 1.0177e-04 - val_loss: 0.0016\nEpoch 284/1000\n23/23 [==============================] - 19s 834ms/step - loss: 9.5719e-05 - val_loss: 4.8914e-04\nEpoch 285/1000\n23/23 [==============================] - 19s 835ms/step - loss: 8.7329e-05 - val_loss: 2.3498e-04\nEpoch 286/1000\n23/23 [==============================] - 19s 836ms/step - loss: 8.2389e-05 - val_loss: 0.0013\nEpoch 287/1000\n23/23 [==============================] - 18s 789ms/step - loss: 1.0385e-04 - val_loss: 8.8304e-04\nEpoch 288/1000\n23/23 [==============================] - 17s 721ms/step - loss: 9.3454e-05 - val_loss: 1.7767e-04\nEpoch 289/1000\n23/23 [==============================] - 15s 643ms/step - loss: 7.8535e-05 - val_loss: 2.9859e-04\nEpoch 290/1000\n23/23 [==============================] - 15s 648ms/step - loss: 7.5864e-05 - val_loss: 0.0017\nEpoch 291/1000\n23/23 [==============================] - 16s 679ms/step - loss: 1.3143e-04 - val_loss: 0.0011\nEpoch 292/1000\n23/23 [==============================] - 16s 689ms/step - loss: 1.2135e-04 - val_loss: 0.0052\nEpoch 293/1000\n23/23 [==============================] - 16s 687ms/step - loss: 9.3240e-05 - val_loss: 0.0030\nEpoch 294/1000\n23/23 [==============================] - 16s 685ms/step - loss: 8.6622e-05 - val_loss: 2.7965e-04\nEpoch 295/1000\n23/23 [==============================] - 16s 679ms/step - loss: 8.4352e-05 - val_loss: 2.7716e-04\nEpoch 296/1000\n23/23 [==============================] - 17s 752ms/step - loss: 9.0380e-05 - val_loss: 2.6130e-04\nEpoch 297/1000\n23/23 [==============================] - 16s 705ms/step - loss: 7.9760e-05 - val_loss: 2.3163e-04\nEpoch 298/1000\n23/23 [==============================] - 17s 717ms/step - loss: 7.1155e-05 - val_loss: 1.6107e-04\nEpoch 299/1000\n23/23 [==============================] - 16s 688ms/step - loss: 7.7386e-05 - val_loss: 0.0013\nEpoch 300/1000\n23/23 [==============================] - 16s 682ms/step - loss: 1.1138e-04 - val_loss: 0.0016\nEpoch 301/1000\n23/23 [==============================] - 16s 683ms/step - loss: 1.0013e-04 - val_loss: 1.9795e-04\nEpoch 302/1000\n23/23 [==============================] - 16s 686ms/step - loss: 8.2814e-05 - val_loss: 8.6194e-05\nEpoch 303/1000\n23/23 [==============================] - 16s 683ms/step - loss: 8.1023e-05 - val_loss: 6.3182e-04\nEpoch 304/1000\n23/23 [==============================] - 16s 683ms/step - loss: 8.0645e-05 - val_loss: 7.9532e-05\nEpoch 305/1000\n23/23 [==============================] - 16s 685ms/step - loss: 6.8268e-05 - val_loss: 5.1277e-04\nEpoch 306/1000\n23/23 [==============================] - 16s 685ms/step - loss: 6.2709e-05 - val_loss: 9.0953e-05\nEpoch 307/1000\n23/23 [==============================] - 16s 688ms/step - loss: 6.3203e-05 - val_loss: 8.4901e-05\nEpoch 308/1000\n23/23 [==============================] - 17s 725ms/step - loss: 6.1703e-05 - val_loss: 6.8715e-05\nEpoch 309/1000\n23/23 [==============================] - 17s 732ms/step - loss: 6.1924e-05 - val_loss: 6.4956e-04\nEpoch 310/1000\n23/23 [==============================] - 16s 674ms/step - loss: 5.7828e-05 - val_loss: 4.2400e-04\nEpoch 311/1000\n23/23 [==============================] - 16s 684ms/step - loss: 6.2367e-05 - val_loss: 2.9409e-04\nEpoch 312/1000\n23/23 [==============================] - 16s 696ms/step - loss: 6.1845e-05 - val_loss: 7.3305e-05\nEpoch 313/1000\n23/23 
[==============================] - 16s 682ms/step - loss: 6.0853e-05 - val_loss: 3.2798e-04\nEpoch 314/1000\n23/23 [==============================] - 16s 687ms/step - loss: 6.9445e-05 - val_loss: 8.9930e-05\nEpoch 315/1000\n23/23 [==============================] - 17s 719ms/step - loss: 6.3028e-05 - val_loss: 1.0633e-04\nEpoch 316/1000\n23/23 [==============================] - 16s 675ms/step - loss: 6.8123e-05 - val_loss: 2.0997e-04\nEpoch 317/1000\n23/23 [==============================] - 17s 725ms/step - loss: 6.9151e-05 - val_loss: 1.3818e-04\nEpoch 318/1000\n23/23 [==============================] - 16s 687ms/step - loss: 8.4388e-05 - val_loss: 6.3117e-05\nEpoch 319/1000\n23/23 [==============================] - 16s 685ms/step - loss: 6.2061e-05 - val_loss: 1.5672e-04\nEpoch 320/1000\n23/23 [==============================] - 16s 681ms/step - loss: 6.3250e-05 - val_loss: 1.3906e-04\nEpoch 321/1000\n23/23 [==============================] - 16s 683ms/step - loss: 7.2621e-05 - val_loss: 2.3827e-04\nEpoch 322/1000\n23/23 [==============================] - 16s 681ms/step - loss: 6.6934e-05 - val_loss: 5.9031e-04\nEpoch 323/1000\n23/23 [==============================] - 18s 765ms/step - loss: 6.0328e-05 - val_loss: 1.2673e-04\nEpoch 324/1000\n23/23 [==============================] - 16s 684ms/step - loss: 6.4040e-05 - val_loss: 1.7989e-04\nEpoch 325/1000\n23/23 [==============================] - 15s 657ms/step - loss: 5.9834e-05 - val_loss: 1.7417e-04\nEpoch 326/1000\n23/23 [==============================] - 15s 630ms/step - loss: 5.8511e-05 - val_loss: 6.8909e-05\nEpoch 327/1000\n23/23 [==============================] - 14s 627ms/step - loss: 6.6812e-05 - val_loss: 8.9699e-05\nEpoch 328/1000\n23/23 [==============================] - 14s 625ms/step - loss: 7.0100e-05 - val_loss: 9.5119e-05\nEpoch 329/1000\n23/23 [==============================] - 15s 632ms/step - loss: 5.8636e-05 - val_loss: 3.3346e-04\nEpoch 330/1000\n23/23 [==============================] - 15s 634ms/step - loss: 5.4574e-05 - val_loss: 4.0824e-04\nEpoch 331/1000\n23/23 [==============================] - 14s 625ms/step - loss: 6.1734e-05 - val_loss: 1.1207e-04\nEpoch 332/1000\n23/23 [==============================] - 15s 630ms/step - loss: 5.9587e-05 - val_loss: 6.9079e-05\nEpoch 333/1000\n23/23 [==============================] - 14s 629ms/step - loss: 6.7056e-05 - val_loss: 1.6734e-04\nEpoch 334/1000\n23/23 [==============================] - 14s 626ms/step - loss: 8.4613e-05 - val_loss: 6.9166e-05\nEpoch 335/1000\n23/23 [==============================] - 15s 631ms/step - loss: 5.7981e-05 - val_loss: 6.3956e-05\nEpoch 336/1000\n23/23 [==============================] - 14s 628ms/step - loss: 5.9933e-05 - val_loss: 1.4144e-04\nEpoch 337/1000\n23/23 [==============================] - 14s 629ms/step - loss: 6.0938e-05 - val_loss: 2.5485e-04\nEpoch 338/1000\n23/23 [==============================] - 15s 633ms/step - loss: 6.1318e-05 - val_loss: 3.1939e-04\nEpoch 339/1000\n23/23 [==============================] - 14s 628ms/step - loss: 7.6089e-05 - val_loss: 1.2090e-04\nEpoch 340/1000\n23/23 [==============================] - 14s 628ms/step - loss: 7.2934e-05 - val_loss: 1.9651e-04\nEpoch 341/1000\n23/23 [==============================] - 15s 631ms/step - loss: 6.8435e-05 - val_loss: 7.6524e-05\nEpoch 342/1000\n23/23 [==============================] - 14s 628ms/step - loss: 8.1911e-05 - val_loss: 0.0013\nEpoch 343/1000\n23/23 [==============================] - 14s 626ms/step - loss: 7.1757e-05 - val_loss: 5.9992e-04\nEpoch 
344/1000\n23/23 [==============================] - 15s 648ms/step - loss: 7.2663e-05 - val_loss: 9.1942e-05\nEpoch 345/1000\n23/23 [==============================] - 15s 664ms/step - loss: 6.0782e-05 - val_loss: 5.9945e-04\nEpoch 346/1000\n23/23 [==============================] - 15s 641ms/step - loss: 6.7595e-05 - val_loss: 3.7760e-04\nEpoch 347/1000\n23/23 [==============================] - 15s 647ms/step - loss: 6.2292e-05 - val_loss: 1.0344e-04\nEpoch 348/1000\n23/23 [==============================] - 15s 645ms/step - loss: 7.9188e-05 - val_loss: 0.0012\nEpoch 349/1000\n23/23 [==============================] - 15s 644ms/step - loss: 1.2151e-04 - val_loss: 1.3991e-04\nEpoch 350/1000\n23/23 [==============================] - 15s 641ms/step - loss: 8.3167e-05 - val_loss: 1.2822e-04\nEpoch 351/1000\n23/23 [==============================] - 15s 646ms/step - loss: 6.0219e-05 - val_loss: 6.7602e-04\nEpoch 352/1000\n23/23 [==============================] - 15s 637ms/step - loss: 6.9976e-05 - val_loss: 2.8744e-04\nEpoch 353/1000\n23/23 [==============================] - 15s 641ms/step - loss: 6.1714e-05 - val_loss: 1.1887e-04\nEpoch 354/1000\n23/23 [==============================] - 15s 647ms/step - loss: 6.9404e-05 - val_loss: 4.4153e-04\nEpoch 355/1000\n23/23 [==============================] - 15s 644ms/step - loss: 9.4744e-05 - val_loss: 7.7449e-04\nEpoch 356/1000\n23/23 [==============================] - 15s 648ms/step - loss: 7.5659e-05 - val_loss: 1.0052e-04\nEpoch 357/1000\n23/23 [==============================] - 15s 635ms/step - loss: 6.9455e-05 - val_loss: 1.4792e-04\nEpoch 358/1000\n23/23 [==============================] - 14s 625ms/step - loss: 6.5517e-05 - val_loss: 1.5945e-04\nEpoch 359/1000\n23/23 [==============================] - 15s 631ms/step - loss: 7.3663e-05 - val_loss: 9.1352e-05\nEpoch 360/1000\n23/23 [==============================] - 14s 628ms/step - loss: 7.5747e-05 - val_loss: 1.7546e-04\nEpoch 361/1000\n23/23 [==============================] - 14s 628ms/step - loss: 5.7269e-05 - val_loss: 1.6948e-04\nEpoch 362/1000\n23/23 [==============================] - 15s 631ms/step - loss: 5.5551e-05 - val_loss: 3.5239e-04\nEpoch 363/1000\n23/23 [==============================] - 15s 647ms/step - loss: 6.0143e-05 - val_loss: 8.2046e-05\nEpoch 364/1000\n23/23 [==============================] - 14s 627ms/step - loss: 5.7678e-05 - val_loss: 5.5298e-04\nEpoch 365/1000\n23/23 [==============================] - 15s 631ms/step - loss: 6.4172e-05 - val_loss: 2.5454e-04\nEpoch 366/1000\n23/23 [==============================] - 14s 627ms/step - loss: 5.5999e-05 - val_loss: 0.0010\nEpoch 367/1000\n23/23 [==============================] - 14s 625ms/step - loss: 6.5324e-05 - val_loss: 3.9830e-04\nEpoch 368/1000\n23/23 [==============================] - 14s 630ms/step - loss: 6.6774e-05 - val_loss: 2.1039e-04\nEpoch 369/1000\n23/23 [==============================] - 15s 630ms/step - loss: 6.1156e-05 - val_loss: 4.4659e-04\nEpoch 370/1000\n23/23 [==============================] - 14s 626ms/step - loss: 7.1143e-05 - val_loss: 1.5294e-04\nEpoch 371/1000\n23/23 [==============================] - 15s 630ms/step - loss: 5.9414e-05 - val_loss: 3.5998e-04\nEpoch 372/1000\n23/23 [==============================] - 14s 629ms/step - loss: 6.0674e-05 - val_loss: 2.1903e-04\nEpoch 373/1000\n23/23 [==============================] - 14s 625ms/step - loss: 6.2067e-05 - val_loss: 3.5203e-04\nEpoch 374/1000\n23/23 [==============================] - 15s 632ms/step - loss: 6.9223e-05 - val_loss: 0.0017\nEpoch 
375/1000\n23/23 [==============================] - 14s 628ms/step - loss: 6.1498e-05 - val_loss: 3.0350e-04\nEpoch 376/1000\n23/23 [==============================] - 14s 627ms/step - loss: 7.0638e-05 - val_loss: 1.1943e-04\nEpoch 377/1000\n23/23 [==============================] - 15s 634ms/step - loss: 7.1672e-05 - val_loss: 2.3100e-04\nEpoch 378/1000\n23/23 [==============================] - 14s 629ms/step - loss: 6.2816e-05 - val_loss: 4.4763e-04\nEpoch 379/1000\n23/23 [==============================] - 14s 627ms/step - loss: 5.8992e-05 - val_loss: 5.1623e-04\nEpoch 380/1000\n23/23 [==============================] - 15s 633ms/step - loss: 6.4751e-05 - val_loss: 2.5885e-04\nEpoch 381/1000\n23/23 [==============================] - 14s 629ms/step - loss: 5.6421e-05 - val_loss: 0.0027\nEpoch 382/1000\n23/23 [==============================] - 14s 628ms/step - loss: 7.4082e-05 - val_loss: 3.7146e-04\nEpoch 383/1000\n23/23 [==============================] - 15s 632ms/step - loss: 6.7319e-05 - val_loss: 5.8043e-04\nEpoch 384/1000\n23/23 [==============================] - 14s 628ms/step - loss: 7.3111e-05 - val_loss: 4.8248e-04\nEpoch 385/1000\n23/23 [==============================] - 14s 628ms/step - loss: 7.6156e-05 - val_loss: 0.0030\nEpoch 386/1000\n23/23 [==============================] - 15s 630ms/step - loss: 1.8910e-04 - val_loss: 0.0011\nEpoch 387/1000\n23/23 [==============================] - 14s 627ms/step - loss: 1.1372e-04 - val_loss: 3.3992e-04\nEpoch 388/1000\n23/23 [==============================] - 14s 626ms/step - loss: 9.1117e-05 - val_loss: 0.0038\nEpoch 389/1000\n23/23 [==============================] - 15s 630ms/step - loss: 1.4107e-04 - val_loss: 0.0170\nEpoch 390/1000\n23/23 [==============================] - 15s 630ms/step - loss: 1.9821e-04 - val_loss: 0.0317\nEpoch 391/1000\n23/23 [==============================] - 14s 626ms/step - loss: 2.7783e-04 - val_loss: 9.9357e-04\nEpoch 392/1000\n23/23 [==============================] - 14s 630ms/step - loss: 1.2182e-04 - val_loss: 4.0435e-04\nEpoch 393/1000\n23/23 [==============================] - 14s 627ms/step - loss: 8.3279e-05 - val_loss: 2.8044e-04\nEpoch 394/1000\n23/23 [==============================] - 14s 627ms/step - loss: 6.6977e-05 - val_loss: 9.8559e-05\nEpoch 395/1000\n23/23 [==============================] - 15s 630ms/step - loss: 6.6767e-05 - val_loss: 1.7850e-04\nEpoch 396/1000\n23/23 [==============================] - 14s 629ms/step - loss: 6.7449e-05 - val_loss: 7.6396e-05\nEpoch 397/1000\n23/23 [==============================] - 15s 644ms/step - loss: 7.0251e-05 - val_loss: 2.4133e-04\nEpoch 398/1000\n23/23 [==============================] - 16s 670ms/step - loss: 7.4870e-05 - val_loss: 3.6817e-04\nEpoch 399/1000\n23/23 [==============================] - 16s 677ms/step - loss: 6.5644e-05 - val_loss: 2.2102e-04\nEpoch 400/1000\n23/23 [==============================] - 16s 693ms/step - loss: 8.5757e-05 - val_loss: 0.0028\nEpoch 401/1000\n23/23 [==============================] - 15s 662ms/step - loss: 1.2854e-04 - val_loss: 4.1062e-04\nEpoch 402/1000\n23/23 [==============================] - 16s 683ms/step - loss: 7.9855e-05 - val_loss: 1.3397e-04\nEpoch 403/1000\n23/23 [==============================] - 15s 654ms/step - loss: 7.4873e-05 - val_loss: 2.0793e-04\nEpoch 404/1000\n23/23 [==============================] - 15s 645ms/step - loss: 8.8317e-05 - val_loss: 6.5751e-04\nEpoch 405/1000\n23/23 [==============================] - 15s 645ms/step - loss: 8.7776e-05 - val_loss: 3.7997e-04\nEpoch 406/1000\n23/23 
[==============================] - 14s 627ms/step - loss: 6.9350e-05 - val_loss: 0.0019\nEpoch 407/1000\n23/23 [==============================] - 14s 617ms/step - loss: 6.1656e-05 - val_loss: 2.3150e-04\nEpoch 408/1000\n23/23 [==============================] - 14s 616ms/step - loss: 7.3993e-05 - val_loss: 6.9324e-05\nEpoch 409/1000\n23/23 [==============================] - 14s 606ms/step - loss: 9.1926e-05 - val_loss: 0.0012\nEpoch 410/1000\n23/23 [==============================] - 14s 609ms/step - loss: 7.5132e-05 - val_loss: 0.0026\nEpoch 411/1000\n23/23 [==============================] - 14s 616ms/step - loss: 7.3166e-05 - val_loss: 3.0433e-04\nEpoch 412/1000\n23/23 [==============================] - 14s 615ms/step - loss: 6.9194e-05 - val_loss: 3.9418e-04\nEpoch 413/1000\n23/23 [==============================] - 14s 616ms/step - loss: 7.2402e-05 - val_loss: 1.0840e-04\nEpoch 414/1000\n23/23 [==============================] - 14s 622ms/step - loss: 6.7491e-05 - val_loss: 0.0015\nEpoch 415/1000\n23/23 [==============================] - 14s 609ms/step - loss: 6.9331e-05 - val_loss: 2.6549e-04\nEpoch 416/1000\n23/23 [==============================] - 14s 622ms/step - loss: 5.4063e-05 - val_loss: 0.0012\nEpoch 417/1000\n23/23 [==============================] - 14s 611ms/step - loss: 5.0254e-05 - val_loss: 1.3736e-04\nEpoch 418/1000\n23/23 [==============================] - 14s 613ms/step - loss: 5.5713e-05 - val_loss: 2.4701e-04\n"
]
],
[
[
"## セグメンテーションモデル",
"_____no_output_____"
]
],
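[
[
"A minimal sketch, assuming `segment_model` is a small Keras encoder-decoder and `train_ds` yields `(image, mask)` batches. The real definitions live earlier in the notebook; the shapes and layer sizes below are illustrative placeholders added only so that the compile/fit cell that follows can be smoke-tested in isolation.",
"_____no_output_____"
]
],
[
[
"import tensorflow as tf\n\n# Hypothetical stand-ins (assumptions) for objects defined earlier in the notebook.\ndef build_segment_model(input_shape=(128, 128, 3)):\n    # Tiny encoder-decoder that predicts a single-channel mask in [0, 1]\n    inputs = tf.keras.Input(shape=input_shape)\n    x = tf.keras.layers.Conv2D(16, 3, padding=\"same\", activation=\"relu\")(inputs)\n    x = tf.keras.layers.MaxPooling2D()(x)\n    x = tf.keras.layers.Conv2D(32, 3, padding=\"same\", activation=\"relu\")(x)\n    x = tf.keras.layers.UpSampling2D()(x)\n    outputs = tf.keras.layers.Conv2D(1, 1, activation=\"sigmoid\")(x)\n    return tf.keras.Model(inputs, outputs)\n\n# Dummy (image, mask) pairs standing in for the real train_ds\nimages = tf.random.uniform((8, 128, 128, 3))\nmasks = tf.random.uniform((8, 128, 128, 1))\ntrain_ds = tf.data.Dataset.from_tensor_slices((images, masks)).batch(2)\n\nsegment_model = build_segment_model()\nsegment_model.summary()",
"_____no_output_____"
]
],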
[
[
"#callback = tf.keras.callbacks.EarlyStopping(monitor='loss', patience=30)\n\n#segment_model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=1e-4), loss=\"binary_crossentropy\",metrics=[\"binary_crossentropy\"])\nsegment_model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=1e-3), loss=\"mae\",metrics=[\"mae\"])\nsegment_model.fit(train_ds, epochs=1000)",
"Epoch 1/1000\n4/4 [==============================] - 4s 457ms/step - loss: 0.0773 - mae: 0.0773\nEpoch 2/1000\n4/4 [==============================] - 2s 455ms/step - loss: 0.1224 - mae: 0.1224\nEpoch 3/1000\n4/4 [==============================] - 2s 456ms/step - loss: 0.1064 - mae: 0.1064\nEpoch 4/1000\n4/4 [==============================] - 2s 460ms/step - loss: 0.0856 - mae: 0.0856\nEpoch 5/1000\n4/4 [==============================] - 2s 454ms/step - loss: 0.0841 - mae: 0.0841\nEpoch 6/1000\n4/4 [==============================] - 2s 448ms/step - loss: 0.0761 - mae: 0.0761\nEpoch 7/1000\n4/4 [==============================] - 2s 452ms/step - loss: 0.0726 - mae: 0.0726\nEpoch 8/1000\n4/4 [==============================] - 2s 453ms/step - loss: 0.0758 - mae: 0.0758\nEpoch 9/1000\n4/4 [==============================] - 2s 456ms/step - loss: 0.0683 - mae: 0.0683\nEpoch 10/1000\n4/4 [==============================] - 2s 454ms/step - loss: 0.0650 - mae: 0.0650\nEpoch 11/1000\n4/4 [==============================] - 2s 460ms/step - loss: 0.0720 - mae: 0.0720\nEpoch 12/1000\n4/4 [==============================] - 2s 455ms/step - loss: 0.0615 - mae: 0.0615\nEpoch 13/1000\n4/4 [==============================] - 2s 450ms/step - loss: 0.0647 - mae: 0.0647\nEpoch 14/1000\n4/4 [==============================] - 2s 450ms/step - loss: 0.0623 - mae: 0.0623\nEpoch 15/1000\n4/4 [==============================] - 2s 458ms/step - loss: 0.0617 - mae: 0.0617\nEpoch 16/1000\n4/4 [==============================] - 2s 455ms/step - loss: 0.0584 - mae: 0.0584\nEpoch 17/1000\n4/4 [==============================] - 2s 454ms/step - loss: 0.0575 - mae: 0.0575\nEpoch 18/1000\n4/4 [==============================] - 2s 461ms/step - loss: 0.0595 - mae: 0.0595\nEpoch 19/1000\n4/4 [==============================] - 2s 452ms/step - loss: 0.0688 - mae: 0.0688\nEpoch 20/1000\n4/4 [==============================] - 2s 468ms/step - loss: 0.0586 - mae: 0.0586\nEpoch 21/1000\n4/4 [==============================] - 2s 512ms/step - loss: 0.0545 - mae: 0.0545\nEpoch 22/1000\n4/4 [==============================] - 2s 441ms/step - loss: 0.0554 - mae: 0.0554\nEpoch 23/1000\n4/4 [==============================] - 2s 435ms/step - loss: 0.0554 - mae: 0.0554\nEpoch 24/1000\n4/4 [==============================] - 2s 432ms/step - loss: 0.0575 - mae: 0.0575\nEpoch 25/1000\n4/4 [==============================] - 2s 441ms/step - loss: 0.0503 - mae: 0.0503\nEpoch 26/1000\n4/4 [==============================] - 2s 438ms/step - loss: 0.0548 - mae: 0.0548\nEpoch 27/1000\n4/4 [==============================] - 2s 443ms/step - loss: 0.0607 - mae: 0.0607\nEpoch 28/1000\n4/4 [==============================] - 2s 439ms/step - loss: 0.0531 - mae: 0.0531\nEpoch 29/1000\n4/4 [==============================] - 2s 450ms/step - loss: 0.0543 - mae: 0.0543\nEpoch 30/1000\n4/4 [==============================] - 2s 440ms/step - loss: 0.0553 - mae: 0.0553\nEpoch 31/1000\n4/4 [==============================] - 2s 438ms/step - loss: 0.0530 - mae: 0.0530\nEpoch 32/1000\n4/4 [==============================] - 2s 435ms/step - loss: 0.0489 - mae: 0.0489\nEpoch 33/1000\n4/4 [==============================] - 2s 444ms/step - loss: 0.0583 - mae: 0.0583\nEpoch 34/1000\n4/4 [==============================] - 2s 442ms/step - loss: 0.0522 - mae: 0.0522\nEpoch 35/1000\n4/4 [==============================] - 2s 444ms/step - loss: 0.0514 - mae: 0.0514\nEpoch 36/1000\n4/4 [==============================] - 2s 436ms/step - loss: 0.0504 - mae: 0.0504\nEpoch 37/1000\n4/4 
[==============================] - 2s 433ms/step - loss: 0.0525 - mae: 0.0525\nEpoch 38/1000\n4/4 [==============================] - 2s 442ms/step - loss: 0.0464 - mae: 0.0464\nEpoch 39/1000\n4/4 [==============================] - 2s 437ms/step - loss: 0.0493 - mae: 0.0493\nEpoch 40/1000\n4/4 [==============================] - 2s 435ms/step - loss: 0.0421 - mae: 0.0421\nEpoch 41/1000\n4/4 [==============================] - 2s 437ms/step - loss: 0.0449 - mae: 0.0449\nEpoch 42/1000\n4/4 [==============================] - 2s 441ms/step - loss: 0.0440 - mae: 0.0440\nEpoch 43/1000\n4/4 [==============================] - 2s 447ms/step - loss: 0.0447 - mae: 0.0447\nEpoch 44/1000\n4/4 [==============================] - 2s 443ms/step - loss: 0.0419 - mae: 0.0419\nEpoch 45/1000\n4/4 [==============================] - 2s 437ms/step - loss: 0.0437 - mae: 0.0437\nEpoch 46/1000\n4/4 [==============================] - 2s 437ms/step - loss: 0.0458 - mae: 0.0458\nEpoch 47/1000\n4/4 [==============================] - 2s 438ms/step - loss: 0.0465 - mae: 0.0465\nEpoch 48/1000\n4/4 [==============================] - 2s 440ms/step - loss: 0.0449 - mae: 0.0449\nEpoch 49/1000\n4/4 [==============================] - 2s 436ms/step - loss: 0.0408 - mae: 0.0408\nEpoch 50/1000\n4/4 [==============================] - 2s 444ms/step - loss: 0.0454 - mae: 0.0454\nEpoch 51/1000\n4/4 [==============================] - 2s 436ms/step - loss: 0.0485 - mae: 0.0485\nEpoch 52/1000\n4/4 [==============================] - 2s 437ms/step - loss: 0.0475 - mae: 0.0475\nEpoch 53/1000\n4/4 [==============================] - 2s 437ms/step - loss: 0.0470 - mae: 0.0470\nEpoch 54/1000\n4/4 [==============================] - 2s 431ms/step - loss: 0.0475 - mae: 0.0475\nEpoch 55/1000\n4/4 [==============================] - 2s 428ms/step - loss: 0.0429 - mae: 0.0429\nEpoch 56/1000\n4/4 [==============================] - 2s 440ms/step - loss: 0.0420 - mae: 0.0420\nEpoch 57/1000\n4/4 [==============================] - 2s 438ms/step - loss: 0.0425 - mae: 0.0425\nEpoch 58/1000\n4/4 [==============================] - 2s 436ms/step - loss: 0.0448 - mae: 0.0448\nEpoch 59/1000\n4/4 [==============================] - 2s 435ms/step - loss: 0.0423 - mae: 0.0423\nEpoch 60/1000\n4/4 [==============================] - 2s 439ms/step - loss: 0.0419 - mae: 0.0419\nEpoch 61/1000\n4/4 [==============================] - 2s 438ms/step - loss: 0.0431 - mae: 0.0431\nEpoch 62/1000\n4/4 [==============================] - 2s 451ms/step - loss: 0.0414 - mae: 0.0414\nEpoch 63/1000\n4/4 [==============================] - 2s 443ms/step - loss: 0.0457 - mae: 0.0457\nEpoch 64/1000\n4/4 [==============================] - 2s 440ms/step - loss: 0.0538 - mae: 0.0538\nEpoch 65/1000\n4/4 [==============================] - 2s 436ms/step - loss: 0.0461 - mae: 0.0461\nEpoch 66/1000\n4/4 [==============================] - 2s 433ms/step - loss: 0.0463 - mae: 0.0463\nEpoch 67/1000\n4/4 [==============================] - 2s 442ms/step - loss: 0.0418 - mae: 0.0418\nEpoch 68/1000\n4/4 [==============================] - 2s 438ms/step - loss: 0.0455 - mae: 0.0455\nEpoch 69/1000\n4/4 [==============================] - 2s 438ms/step - loss: 0.0448 - mae: 0.0448\nEpoch 70/1000\n4/4 [==============================] - 2s 441ms/step - loss: 0.0430 - mae: 0.0430\nEpoch 71/1000\n4/4 [==============================] - 2s 436ms/step - loss: 0.0394 - mae: 0.0394\nEpoch 72/1000\n4/4 [==============================] - 2s 437ms/step - loss: 0.0373 - mae: 0.0373\nEpoch 73/1000\n4/4 
[==============================] - 2s 442ms/step - loss: 0.0466 - mae: 0.0466\nEpoch 74/1000\n4/4 [==============================] - 2s 443ms/step - loss: 0.0429 - mae: 0.0429\nEpoch 75/1000\n4/4 [==============================] - 2s 443ms/step - loss: 0.0436 - mae: 0.0436\nEpoch 76/1000\n4/4 [==============================] - 2s 436ms/step - loss: 0.0384 - mae: 0.0384\nEpoch 77/1000\n4/4 [==============================] - 2s 439ms/step - loss: 0.0385 - mae: 0.0385\nEpoch 78/1000\n4/4 [==============================] - 2s 438ms/step - loss: 0.0378 - mae: 0.0378\nEpoch 79/1000\n4/4 [==============================] - 2s 436ms/step - loss: 0.0423 - mae: 0.0423\nEpoch 80/1000\n4/4 [==============================] - 2s 443ms/step - loss: 0.0352 - mae: 0.0352\nEpoch 81/1000\n4/4 [==============================] - 2s 437ms/step - loss: 0.0344 - mae: 0.0344\nEpoch 82/1000\n4/4 [==============================] - 2s 455ms/step - loss: 0.0394 - mae: 0.0394\nEpoch 83/1000\n4/4 [==============================] - 2s 439ms/step - loss: 0.0379 - mae: 0.0379\nEpoch 84/1000\n4/4 [==============================] - 2s 442ms/step - loss: 0.0356 - mae: 0.0356\nEpoch 85/1000\n4/4 [==============================] - 2s 441ms/step - loss: 0.0339 - mae: 0.0339\nEpoch 86/1000\n4/4 [==============================] - 2s 447ms/step - loss: 0.0385 - mae: 0.0385\nEpoch 87/1000\n4/4 [==============================] - 2s 437ms/step - loss: 0.0351 - mae: 0.0351\nEpoch 88/1000\n4/4 [==============================] - 2s 436ms/step - loss: 0.0306 - mae: 0.0306\nEpoch 89/1000\n4/4 [==============================] - 2s 445ms/step - loss: 0.0386 - mae: 0.0386\nEpoch 90/1000\n4/4 [==============================] - 2s 442ms/step - loss: 0.0438 - mae: 0.0438\nEpoch 91/1000\n4/4 [==============================] - 2s 445ms/step - loss: 0.0478 - mae: 0.0478\nEpoch 92/1000\n4/4 [==============================] - 2s 438ms/step - loss: 0.0403 - mae: 0.0403\nEpoch 93/1000\n4/4 [==============================] - 2s 438ms/step - loss: 0.0396 - mae: 0.0396\nEpoch 94/1000\n4/4 [==============================] - 2s 439ms/step - loss: 0.0402 - mae: 0.0402\nEpoch 95/1000\n4/4 [==============================] - 2s 436ms/step - loss: 0.0366 - mae: 0.0366\nEpoch 96/1000\n4/4 [==============================] - 2s 446ms/step - loss: 0.0410 - mae: 0.0410\nEpoch 97/1000\n4/4 [==============================] - 2s 435ms/step - loss: 0.0364 - mae: 0.0364\nEpoch 98/1000\n4/4 [==============================] - 2s 441ms/step - loss: 0.0375 - mae: 0.0375\nEpoch 99/1000\n4/4 [==============================] - 2s 437ms/step - loss: 0.0342 - mae: 0.0342\nEpoch 100/1000\n4/4 [==============================] - 2s 444ms/step - loss: 0.0395 - mae: 0.0395\nEpoch 101/1000\n4/4 [==============================] - 2s 435ms/step - loss: 0.0374 - mae: 0.0374\nEpoch 102/1000\n4/4 [==============================] - 2s 439ms/step - loss: 0.0327 - mae: 0.0327\nEpoch 103/1000\n4/4 [==============================] - 2s 443ms/step - loss: 0.0363 - mae: 0.0363\nEpoch 104/1000\n4/4 [==============================] - 2s 439ms/step - loss: 0.0368 - mae: 0.0368\nEpoch 105/1000\n4/4 [==============================] - 2s 441ms/step - loss: 0.0333 - mae: 0.0333\nEpoch 106/1000\n4/4 [==============================] - 2s 436ms/step - loss: 0.0332 - mae: 0.0332\nEpoch 107/1000\n4/4 [==============================] - 2s 436ms/step - loss: 0.0338 - mae: 0.0338\nEpoch 108/1000\n4/4 [==============================] - 2s 437ms/step - loss: 0.0293 - mae: 0.0293\nEpoch 109/1000\n4/4 
[==============================] - 2s 436ms/step - loss: 0.0391 - mae: 0.0391\nEpoch 110/1000\n4/4 [==============================] - 2s 444ms/step - loss: 0.0421 - mae: 0.0421\nEpoch 111/1000\n4/4 [==============================] - 2s 439ms/step - loss: 0.0348 - mae: 0.0348\nEpoch 112/1000\n4/4 [==============================] - 2s 432ms/step - loss: 0.0337 - mae: 0.0337\nEpoch 113/1000\n4/4 [==============================] - 2s 442ms/step - loss: 0.0319 - mae: 0.0319\nEpoch 114/1000\n4/4 [==============================] - 2s 441ms/step - loss: 0.0339 - mae: 0.0339\nEpoch 115/1000\n4/4 [==============================] - 2s 435ms/step - loss: 0.0349 - mae: 0.0349\nEpoch 116/1000\n4/4 [==============================] - 2s 431ms/step - loss: 0.0337 - mae: 0.0337\nEpoch 117/1000\n4/4 [==============================] - 2s 450ms/step - loss: 0.0325 - mae: 0.0325\nEpoch 118/1000\n4/4 [==============================] - 2s 441ms/step - loss: 0.0332 - mae: 0.0332\nEpoch 119/1000\n4/4 [==============================] - 2s 440ms/step - loss: 0.0318 - mae: 0.0318\nEpoch 120/1000\n4/4 [==============================] - 2s 438ms/step - loss: 0.0309 - mae: 0.0309\nEpoch 121/1000\n4/4 [==============================] - 2s 438ms/step - loss: 0.0300 - mae: 0.0300\nEpoch 122/1000\n4/4 [==============================] - 2s 440ms/step - loss: 0.0304 - mae: 0.0304\nEpoch 123/1000\n4/4 [==============================] - 2s 435ms/step - loss: 0.0311 - mae: 0.0311\nEpoch 124/1000\n4/4 [==============================] - 2s 454ms/step - loss: 0.0324 - mae: 0.0324\nEpoch 125/1000\n4/4 [==============================] - 2s 442ms/step - loss: 0.0309 - mae: 0.0309\nEpoch 126/1000\n4/4 [==============================] - 2s 439ms/step - loss: 0.0314 - mae: 0.0314\nEpoch 127/1000\n4/4 [==============================] - 2s 440ms/step - loss: 0.0366 - mae: 0.0366\nEpoch 128/1000\n4/4 [==============================] - 2s 436ms/step - loss: 0.0300 - mae: 0.0300\nEpoch 129/1000\n4/4 [==============================] - 2s 437ms/step - loss: 0.0357 - mae: 0.0357\nEpoch 130/1000\n4/4 [==============================] - 2s 439ms/step - loss: 0.0300 - mae: 0.0300\nEpoch 131/1000\n4/4 [==============================] - 2s 441ms/step - loss: 0.0308 - mae: 0.0308\nEpoch 132/1000\n4/4 [==============================] - 2s 442ms/step - loss: 0.0331 - mae: 0.0331\nEpoch 133/1000\n4/4 [==============================] - 2s 438ms/step - loss: 0.0330 - mae: 0.0330\nEpoch 134/1000\n4/4 [==============================] - 2s 433ms/step - loss: 0.0296 - mae: 0.0296\nEpoch 135/1000\n4/4 [==============================] - 2s 441ms/step - loss: 0.0312 - mae: 0.0312\nEpoch 136/1000\n4/4 [==============================] - 2s 442ms/step - loss: 0.0332 - mae: 0.0332\nEpoch 137/1000\n4/4 [==============================] - 2s 434ms/step - loss: 0.0312 - mae: 0.0312\nEpoch 138/1000\n4/4 [==============================] - 2s 442ms/step - loss: 0.0277 - mae: 0.0277\nEpoch 139/1000\n4/4 [==============================] - 2s 445ms/step - loss: 0.0288 - mae: 0.0288\nEpoch 140/1000\n4/4 [==============================] - 2s 436ms/step - loss: 0.0286 - mae: 0.0286\nEpoch 141/1000\n4/4 [==============================] - 2s 438ms/step - loss: 0.0281 - mae: 0.0281\nEpoch 142/1000\n4/4 [==============================] - 2s 444ms/step - loss: 0.0310 - mae: 0.0310\nEpoch 143/1000\n4/4 [==============================] - 2s 441ms/step - loss: 0.0341 - mae: 0.0341\nEpoch 144/1000\n4/4 [==============================] - 2s 436ms/step - loss: 0.0304 - mae: 0.0304\nEpoch 
145/1000\n4/4 [==============================] - 2s 443ms/step - loss: 0.0267 - mae: 0.0267\nEpoch 146/1000\n4/4 [==============================] - 2s 438ms/step - loss: 0.0294 - mae: 0.0294\nEpoch 147/1000\n4/4 [==============================] - 2s 435ms/step - loss: 0.0289 - mae: 0.0289\nEpoch 148/1000\n4/4 [==============================] - 2s 442ms/step - loss: 0.0281 - mae: 0.0281\nEpoch 149/1000\n4/4 [==============================] - 2s 445ms/step - loss: 0.0328 - mae: 0.0328\nEpoch 150/1000\n4/4 [==============================] - 2s 445ms/step - loss: 0.0305 - mae: 0.0305\nEpoch 151/1000\n4/4 [==============================] - 2s 437ms/step - loss: 0.0323 - mae: 0.0323\nEpoch 152/1000\n4/4 [==============================] - 2s 441ms/step - loss: 0.0316 - mae: 0.0316\nEpoch 153/1000\n4/4 [==============================] - 2s 442ms/step - loss: 0.0287 - mae: 0.0287\nEpoch 154/1000\n4/4 [==============================] - 2s 438ms/step - loss: 0.0315 - mae: 0.0315\nEpoch 155/1000\n4/4 [==============================] - 2s 431ms/step - loss: 0.0283 - mae: 0.0283\nEpoch 156/1000\n4/4 [==============================] - 2s 437ms/step - loss: 0.0272 - mae: 0.0272\nEpoch 157/1000\n4/4 [==============================] - 2s 440ms/step - loss: 0.0313 - mae: 0.0313\nEpoch 158/1000\n4/4 [==============================] - 2s 444ms/step - loss: 0.0278 - mae: 0.0278\nEpoch 159/1000\n4/4 [==============================] - 2s 445ms/step - loss: 0.0285 - mae: 0.0285\nEpoch 160/1000\n4/4 [==============================] - 2s 440ms/step - loss: 0.0281 - mae: 0.0281\nEpoch 161/1000\n4/4 [==============================] - 2s 435ms/step - loss: 0.0288 - mae: 0.0288\nEpoch 162/1000\n4/4 [==============================] - 2s 436ms/step - loss: 0.0274 - mae: 0.0274\nEpoch 163/1000\n4/4 [==============================] - 2s 442ms/step - loss: 0.0275 - mae: 0.0275\nEpoch 164/1000\n4/4 [==============================] - 2s 442ms/step - loss: 0.0302 - mae: 0.0302\nEpoch 165/1000\n4/4 [==============================] - 2s 441ms/step - loss: 0.0298 - mae: 0.0298\nEpoch 166/1000\n4/4 [==============================] - 2s 442ms/step - loss: 0.0307 - mae: 0.0307\nEpoch 167/1000\n4/4 [==============================] - 2s 455ms/step - loss: 0.0329 - mae: 0.0329\nEpoch 168/1000\n4/4 [==============================] - 2s 455ms/step - loss: 0.0290 - mae: 0.0290\nEpoch 169/1000\n4/4 [==============================] - 2s 433ms/step - loss: 0.0290 - mae: 0.0290\nEpoch 170/1000\n4/4 [==============================] - 2s 443ms/step - loss: 0.0289 - mae: 0.0289\nEpoch 171/1000\n4/4 [==============================] - 2s 437ms/step - loss: 0.0289 - mae: 0.0289\nEpoch 172/1000\n4/4 [==============================] - 2s 441ms/step - loss: 0.0285 - mae: 0.0285\nEpoch 173/1000\n4/4 [==============================] - 2s 439ms/step - loss: 0.0268 - mae: 0.0268\nEpoch 174/1000\n4/4 [==============================] - 2s 442ms/step - loss: 0.0283 - mae: 0.0283\nEpoch 175/1000\n4/4 [==============================] - 2s 440ms/step - loss: 0.0265 - mae: 0.0265\nEpoch 176/1000\n4/4 [==============================] - 2s 438ms/step - loss: 0.0302 - mae: 0.0302\nEpoch 177/1000\n4/4 [==============================] - 2s 439ms/step - loss: 0.0301 - mae: 0.0301\nEpoch 178/1000\n4/4 [==============================] - 2s 437ms/step - loss: 0.0375 - mae: 0.0375\nEpoch 179/1000\n4/4 [==============================] - 2s 437ms/step - loss: 0.0370 - mae: 0.0370\nEpoch 180/1000\n4/4 [==============================] - 2s 443ms/step - loss: 0.0344 - mae: 
0.0344\nEpoch 181/1000\n4/4 [==============================] - 2s 442ms/step - loss: 0.0329 - mae: 0.0329\nEpoch 182/1000\n4/4 [==============================] - 2s 445ms/step - loss: 0.0417 - mae: 0.0417\nEpoch 183/1000\n4/4 [==============================] - 2s 436ms/step - loss: 0.0458 - mae: 0.0458\nEpoch 184/1000\n4/4 [==============================] - 2s 443ms/step - loss: 0.0390 - mae: 0.0390\nEpoch 185/1000\n4/4 [==============================] - 2s 433ms/step - loss: 0.0322 - mae: 0.0322\nEpoch 186/1000\n4/4 [==============================] - 2s 439ms/step - loss: 0.0334 - mae: 0.0334\nEpoch 187/1000\n4/4 [==============================] - 2s 443ms/step - loss: 0.0294 - mae: 0.0294\nEpoch 188/1000\n4/4 [==============================] - 2s 444ms/step - loss: 0.0305 - mae: 0.0305\nEpoch 189/1000\n4/4 [==============================] - 2s 434ms/step - loss: 0.0292 - mae: 0.0292\nEpoch 190/1000\n4/4 [==============================] - 2s 435ms/step - loss: 0.0313 - mae: 0.0313\nEpoch 191/1000\n4/4 [==============================] - 2s 448ms/step - loss: 0.0275 - mae: 0.0275\nEpoch 192/1000\n4/4 [==============================] - 2s 435ms/step - loss: 0.0278 - mae: 0.0278\nEpoch 193/1000\n4/4 [==============================] - 2s 441ms/step - loss: 0.0274 - mae: 0.0274\nEpoch 194/1000\n4/4 [==============================] - 2s 444ms/step - loss: 0.0240 - mae: 0.0240\nEpoch 195/1000\n4/4 [==============================] - 2s 441ms/step - loss: 0.0239 - mae: 0.0239\nEpoch 196/1000\n4/4 [==============================] - 2s 446ms/step - loss: 0.0286 - mae: 0.0286\nEpoch 197/1000\n4/4 [==============================] - 2s 445ms/step - loss: 0.0276 - mae: 0.0276\nEpoch 198/1000\n4/4 [==============================] - 2s 440ms/step - loss: 0.0272 - mae: 0.0272\nEpoch 199/1000\n4/4 [==============================] - 2s 432ms/step - loss: 0.0216 - mae: 0.0216\nEpoch 200/1000\n4/4 [==============================] - 2s 438ms/step - loss: 0.0231 - mae: 0.0231\nEpoch 201/1000\n4/4 [==============================] - 2s 443ms/step - loss: 0.0219 - mae: 0.0219\nEpoch 202/1000\n4/4 [==============================] - 2s 445ms/step - loss: 0.0244 - mae: 0.0244\nEpoch 203/1000\n4/4 [==============================] - 2s 441ms/step - loss: 0.0263 - mae: 0.0263\nEpoch 204/1000\n4/4 [==============================] - 2s 446ms/step - loss: 0.0220 - mae: 0.0220\nEpoch 205/1000\n4/4 [==============================] - 2s 447ms/step - loss: 0.0263 - mae: 0.0263\nEpoch 206/1000\n4/4 [==============================] - 2s 439ms/step - loss: 0.0234 - mae: 0.0234\nEpoch 207/1000\n4/4 [==============================] - 2s 438ms/step - loss: 0.0231 - mae: 0.0231\nEpoch 208/1000\n4/4 [==============================] - 2s 450ms/step - loss: 0.0246 - mae: 0.0246\nEpoch 209/1000\n4/4 [==============================] - 2s 446ms/step - loss: 0.0227 - mae: 0.0227\nEpoch 210/1000\n4/4 [==============================] - 2s 443ms/step - loss: 0.0258 - mae: 0.0258\nEpoch 211/1000\n4/4 [==============================] - 2s 444ms/step - loss: 0.0256 - mae: 0.0256\nEpoch 212/1000\n4/4 [==============================] - 2s 436ms/step - loss: 0.0258 - mae: 0.0258\nEpoch 213/1000\n4/4 [==============================] - 2s 433ms/step - loss: 0.0233 - mae: 0.0233\nEpoch 214/1000\n4/4 [==============================] - 2s 445ms/step - loss: 0.0234 - mae: 0.0234\nEpoch 215/1000\n4/4 [==============================] - 2s 452ms/step - loss: 0.0233 - mae: 0.0233\nEpoch 216/1000\n4/4 [==============================] - 2s 448ms/step - loss: 
0.0247 - mae: 0.0247\nEpoch 217/1000\n4/4 [==============================] - 2s 447ms/step - loss: 0.0226 - mae: 0.0226\nEpoch 218/1000\n4/4 [==============================] - 2s 439ms/step - loss: 0.0224 - mae: 0.0224\nEpoch 219/1000\n4/4 [==============================] - 2s 442ms/step - loss: 0.0208 - mae: 0.0208\nEpoch 220/1000\n4/4 [==============================] - 2s 443ms/step - loss: 0.0210 - mae: 0.0210\nEpoch 221/1000\n4/4 [==============================] - 2s 439ms/step - loss: 0.0237 - mae: 0.0237\nEpoch 222/1000\n4/4 [==============================] - 2s 449ms/step - loss: 0.0228 - mae: 0.0228\nEpoch 223/1000\n4/4 [==============================] - 2s 447ms/step - loss: 0.0243 - mae: 0.0243\nEpoch 224/1000\n4/4 [==============================] - 2s 445ms/step - loss: 0.0244 - mae: 0.0244\nEpoch 225/1000\n4/4 [==============================] - 2s 435ms/step - loss: 0.0248 - mae: 0.0248\nEpoch 226/1000\n4/4 [==============================] - 2s 438ms/step - loss: 0.0266 - mae: 0.0266\nEpoch 227/1000\n4/4 [==============================] - 2s 443ms/step - loss: 0.0275 - mae: 0.0275\nEpoch 228/1000\n4/4 [==============================] - 2s 444ms/step - loss: 0.0239 - mae: 0.0239\nEpoch 229/1000\n4/4 [==============================] - 2s 448ms/step - loss: 0.0272 - mae: 0.0272\nEpoch 230/1000\n4/4 [==============================] - 2s 453ms/step - loss: 0.0248 - mae: 0.0248\nEpoch 231/1000\n4/4 [==============================] - 2s 438ms/step - loss: 0.0267 - mae: 0.0267\nEpoch 232/1000\n4/4 [==============================] - 2s 445ms/step - loss: 0.0289 - mae: 0.0289\nEpoch 233/1000\n4/4 [==============================] - 2s 443ms/step - loss: 0.0331 - mae: 0.0331\nEpoch 234/1000\n4/4 [==============================] - 2s 440ms/step - loss: 0.0279 - mae: 0.0279\nEpoch 235/1000\n4/4 [==============================] - 2s 436ms/step - loss: 0.0269 - mae: 0.0269\nEpoch 236/1000\n4/4 [==============================] - 2s 447ms/step - loss: 0.0232 - mae: 0.0232\nEpoch 237/1000\n4/4 [==============================] - 2s 520ms/step - loss: 0.0247 - mae: 0.0247\nEpoch 238/1000\n4/4 [==============================] - 2s 442ms/step - loss: 0.0261 - mae: 0.0261\nEpoch 239/1000\n4/4 [==============================] - 2s 432ms/step - loss: 0.0219 - mae: 0.0219\nEpoch 240/1000\n4/4 [==============================] - 2s 443ms/step - loss: 0.0242 - mae: 0.0242\nEpoch 241/1000\n4/4 [==============================] - 2s 439ms/step - loss: 0.0286 - mae: 0.0286\nEpoch 242/1000\n4/4 [==============================] - 2s 451ms/step - loss: 0.0326 - mae: 0.0326\nEpoch 243/1000\n4/4 [==============================] - 2s 451ms/step - loss: 0.0256 - mae: 0.0256\nEpoch 244/1000\n4/4 [==============================] - 2s 440ms/step - loss: 0.0266 - mae: 0.0266\nEpoch 245/1000\n4/4 [==============================] - 2s 447ms/step - loss: 0.0226 - mae: 0.0226\nEpoch 246/1000\n4/4 [==============================] - 2s 440ms/step - loss: 0.0228 - mae: 0.0228\nEpoch 247/1000\n4/4 [==============================] - 2s 450ms/step - loss: 0.0257 - mae: 0.0257\nEpoch 248/1000\n4/4 [==============================] - 2s 446ms/step - loss: 0.0244 - mae: 0.0244\nEpoch 249/1000\n4/4 [==============================] - 2s 440ms/step - loss: 0.0252 - mae: 0.0252\nEpoch 250/1000\n4/4 [==============================] - 2s 449ms/step - loss: 0.0252 - mae: 0.0252\nEpoch 251/1000\n4/4 [==============================] - 2s 448ms/step - loss: 0.0307 - mae: 0.0307\nEpoch 252/1000\n4/4 [==============================] - 2s 
439ms/step - loss: 0.0246 - mae: 0.0246\nEpoch 253/1000\n4/4 [==============================] - 2s 443ms/step - loss: 0.0242 - mae: 0.0242\nEpoch 254/1000\n4/4 [==============================] - 2s 446ms/step - loss: 0.0231 - mae: 0.0231\nEpoch 255/1000\n4/4 [==============================] - 2s 449ms/step - loss: 0.0228 - mae: 0.0228\nEpoch 256/1000\n4/4 [==============================] - 2s 436ms/step - loss: 0.0230 - mae: 0.0230\nEpoch 257/1000\n4/4 [==============================] - 2s 450ms/step - loss: 0.0220 - mae: 0.0220\nEpoch 258/1000\n4/4 [==============================] - 2s 446ms/step - loss: 0.0227 - mae: 0.0227\nEpoch 259/1000\n4/4 [==============================] - 2s 448ms/step - loss: 0.0215 - mae: 0.0215\nEpoch 260/1000\n4/4 [==============================] - 2s 442ms/step - loss: 0.0220 - mae: 0.0220\nEpoch 261/1000\n4/4 [==============================] - 2s 447ms/step - loss: 0.0218 - mae: 0.0218\nEpoch 262/1000\n4/4 [==============================] - 2s 445ms/step - loss: 0.0229 - mae: 0.0229\nEpoch 263/1000\n4/4 [==============================] - 2s 443ms/step - loss: 0.0268 - mae: 0.0268\nEpoch 264/1000\n4/4 [==============================] - 2s 451ms/step - loss: 0.0216 - mae: 0.0216\nEpoch 265/1000\n4/4 [==============================] - 2s 445ms/step - loss: 0.0217 - mae: 0.0217\nEpoch 266/1000\n4/4 [==============================] - 2s 437ms/step - loss: 0.0229 - mae: 0.0229\nEpoch 267/1000\n4/4 [==============================] - 2s 449ms/step - loss: 0.0209 - mae: 0.0209\nEpoch 268/1000\n4/4 [==============================] - 2s 442ms/step - loss: 0.0223 - mae: 0.0223\nEpoch 269/1000\n4/4 [==============================] - 2s 442ms/step - loss: 0.0229 - mae: 0.0229\nEpoch 270/1000\n4/4 [==============================] - 2s 447ms/step - loss: 0.0219 - mae: 0.0219\nEpoch 271/1000\n4/4 [==============================] - 2s 440ms/step - loss: 0.0231 - mae: 0.0231\nEpoch 272/1000\n4/4 [==============================] - 2s 449ms/step - loss: 0.0253 - mae: 0.0253\nEpoch 273/1000\n4/4 [==============================] - 2s 440ms/step - loss: 0.0176 - mae: 0.0176\nEpoch 274/1000\n4/4 [==============================] - 2s 442ms/step - loss: 0.0221 - mae: 0.0221\nEpoch 275/1000\n4/4 [==============================] - 2s 446ms/step - loss: 0.0188 - mae: 0.0188\nEpoch 276/1000\n4/4 [==============================] - 2s 449ms/step - loss: 0.0237 - mae: 0.0237\nEpoch 277/1000\n4/4 [==============================] - 2s 442ms/step - loss: 0.0198 - mae: 0.0198\nEpoch 278/1000\n4/4 [==============================] - 2s 437ms/step - loss: 0.0239 - mae: 0.0239\nEpoch 279/1000\n4/4 [==============================] - 2s 450ms/step - loss: 0.0208 - mae: 0.0208\nEpoch 280/1000\n4/4 [==============================] - 2s 445ms/step - loss: 0.0228 - mae: 0.0228\nEpoch 281/1000\n4/4 [==============================] - 2s 441ms/step - loss: 0.0246 - mae: 0.0246\nEpoch 282/1000\n4/4 [==============================] - 2s 464ms/step - loss: 0.0252 - mae: 0.0252\nEpoch 283/1000\n4/4 [==============================] - 2s 450ms/step - loss: 0.0307 - mae: 0.0307\nEpoch 284/1000\n4/4 [==============================] - 2s 445ms/step - loss: 0.0228 - mae: 0.0228\nEpoch 285/1000\n4/4 [==============================] - 2s 449ms/step - loss: 0.0248 - mae: 0.0248\nEpoch 286/1000\n4/4 [==============================] - 2s 451ms/step - loss: 0.0244 - mae: 0.0244\nEpoch 287/1000\n4/4 [==============================] - 2s 449ms/step - loss: 0.0239 - mae: 0.0239\nEpoch 288/1000\n4/4 
[==============================] - 2s 445ms/step - loss: 0.0257 - mae: 0.0257\nEpoch 289/1000\n4/4 [==============================] - 2s 451ms/step - loss: 0.0268 - mae: 0.0268\nEpoch 290/1000\n4/4 [==============================] - 2s 453ms/step - loss: 0.0269 - mae: 0.0269\nEpoch 291/1000\n4/4 [==============================] - 2s 444ms/step - loss: 0.0219 - mae: 0.0219\nEpoch 292/1000\n4/4 [==============================] - 2s 440ms/step - loss: 0.0226 - mae: 0.0226\nEpoch 293/1000\n4/4 [==============================] - 2s 453ms/step - loss: 0.0252 - mae: 0.0252\nEpoch 294/1000\n4/4 [==============================] - 2s 441ms/step - loss: 0.0239 - mae: 0.0239\nEpoch 295/1000\n4/4 [==============================] - 2s 439ms/step - loss: 0.0232 - mae: 0.0232\nEpoch 296/1000\n4/4 [==============================] - 2s 449ms/step - loss: 0.0231 - mae: 0.0231\nEpoch 297/1000\n4/4 [==============================] - 2s 448ms/step - loss: 0.0277 - mae: 0.0277\nEpoch 298/1000\n4/4 [==============================] - 2s 440ms/step - loss: 0.0191 - mae: 0.0191\nEpoch 299/1000\n4/4 [==============================] - 2s 436ms/step - loss: 0.0326 - mae: 0.0326\nEpoch 300/1000\n4/4 [==============================] - 2s 442ms/step - loss: 0.0242 - mae: 0.0242\nEpoch 301/1000\n4/4 [==============================] - 2s 442ms/step - loss: 0.0249 - mae: 0.0249\nEpoch 302/1000\n4/4 [==============================] - 2s 445ms/step - loss: 0.0251 - mae: 0.0251\nEpoch 303/1000\n4/4 [==============================] - 2s 449ms/step - loss: 0.0214 - mae: 0.0214\nEpoch 304/1000\n4/4 [==============================] - 2s 449ms/step - loss: 0.0226 - mae: 0.0226\nEpoch 305/1000\n4/4 [==============================] - 2s 444ms/step - loss: 0.0200 - mae: 0.0200\nEpoch 306/1000\n4/4 [==============================] - 2s 442ms/step - loss: 0.0240 - mae: 0.0240\nEpoch 307/1000\n4/4 [==============================] - 2s 449ms/step - loss: 0.0220 - mae: 0.0220\nEpoch 308/1000\n4/4 [==============================] - 2s 441ms/step - loss: 0.0228 - mae: 0.0228\nEpoch 309/1000\n4/4 [==============================] - 2s 443ms/step - loss: 0.0221 - mae: 0.0221\nEpoch 310/1000\n4/4 [==============================] - 2s 450ms/step - loss: 0.0243 - mae: 0.0243\nEpoch 311/1000\n4/4 [==============================] - 2s 456ms/step - loss: 0.0202 - mae: 0.0202\nEpoch 312/1000\n4/4 [==============================] - 2s 443ms/step - loss: 0.0221 - mae: 0.0221\nEpoch 313/1000\n4/4 [==============================] - 2s 440ms/step - loss: 0.0190 - mae: 0.0190\nEpoch 314/1000\n4/4 [==============================] - 2s 449ms/step - loss: 0.0179 - mae: 0.0179\nEpoch 315/1000\n4/4 [==============================] - 2s 436ms/step - loss: 0.0187 - mae: 0.0187\nEpoch 316/1000\n4/4 [==============================] - 2s 440ms/step - loss: 0.0176 - mae: 0.0176\nEpoch 317/1000\n4/4 [==============================] - 2s 445ms/step - loss: 0.0221 - mae: 0.0221\nEpoch 318/1000\n4/4 [==============================] - 2s 451ms/step - loss: 0.0197 - mae: 0.0197\nEpoch 319/1000\n4/4 [==============================] - 2s 442ms/step - loss: 0.0171 - mae: 0.0171\nEpoch 320/1000\n4/4 [==============================] - 2s 448ms/step - loss: 0.0218 - mae: 0.0218\nEpoch 321/1000\n4/4 [==============================] - 2s 454ms/step - loss: 0.0234 - mae: 0.0234\nEpoch 322/1000\n4/4 [==============================] - 2s 440ms/step - loss: 0.0187 - mae: 0.0187\nEpoch 323/1000\n4/4 [==============================] - 2s 440ms/step - loss: 0.0205 - mae: 0.0205\nEpoch 
324/1000\n4/4 [==============================] - 2s 462ms/step - loss: 0.0185 - mae: 0.0185\nEpoch 325/1000\n4/4 [==============================] - 2s 458ms/step - loss: 0.0195 - mae: 0.0195\nEpoch 326/1000\n4/4 [==============================] - 2s 445ms/step - loss: 0.0192 - mae: 0.0192\nEpoch 327/1000\n4/4 [==============================] - 2s 448ms/step - loss: 0.0182 - mae: 0.0182\nEpoch 328/1000\n4/4 [==============================] - 2s 454ms/step - loss: 0.0194 - mae: 0.0194\nEpoch 329/1000\n4/4 [==============================] - 2s 448ms/step - loss: 0.0202 - mae: 0.0202\nEpoch 330/1000\n4/4 [==============================] - 2s 448ms/step - loss: 0.0228 - mae: 0.0228\nEpoch 331/1000\n4/4 [==============================] - 2s 454ms/step - loss: 0.0230 - mae: 0.0230\nEpoch 332/1000\n4/4 [==============================] - 2s 449ms/step - loss: 0.0245 - mae: 0.0245\nEpoch 333/1000\n4/4 [==============================] - 2s 444ms/step - loss: 0.0197 - mae: 0.0197\nEpoch 334/1000\n4/4 [==============================] - 2s 440ms/step - loss: 0.0249 - mae: 0.0249\nEpoch 335/1000\n4/4 [==============================] - 2s 452ms/step - loss: 0.0254 - mae: 0.0254\nEpoch 336/1000\n4/4 [==============================] - 2s 444ms/step - loss: 0.0241 - mae: 0.0241\nEpoch 337/1000\n4/4 [==============================] - 2s 444ms/step - loss: 0.0211 - mae: 0.0211\nEpoch 338/1000\n4/4 [==============================] - 2s 454ms/step - loss: 0.0256 - mae: 0.0256\nEpoch 339/1000\n4/4 [==============================] - 2s 456ms/step - loss: 0.0232 - mae: 0.0232\nEpoch 340/1000\n4/4 [==============================] - 2s 449ms/step - loss: 0.0213 - mae: 0.0213\nEpoch 341/1000\n4/4 [==============================] - 2s 442ms/step - loss: 0.0233 - mae: 0.0233\nEpoch 342/1000\n4/4 [==============================] - 2s 455ms/step - loss: 0.0187 - mae: 0.0187\nEpoch 343/1000\n4/4 [==============================] - 2s 446ms/step - loss: 0.0235 - mae: 0.0235\nEpoch 344/1000\n4/4 [==============================] - 2s 451ms/step - loss: 0.0176 - mae: 0.0176\nEpoch 345/1000\n4/4 [==============================] - 2s 444ms/step - loss: 0.0156 - mae: 0.0156\nEpoch 346/1000\n4/4 [==============================] - 2s 452ms/step - loss: 0.0205 - mae: 0.0205\nEpoch 347/1000\n4/4 [==============================] - 2s 451ms/step - loss: 0.0177 - mae: 0.0177\nEpoch 348/1000\n4/4 [==============================] - 2s 447ms/step - loss: 0.0198 - mae: 0.0198\nEpoch 349/1000\n4/4 [==============================] - 2s 458ms/step - loss: 0.0197 - mae: 0.0197\nEpoch 350/1000\n4/4 [==============================] - 2s 456ms/step - loss: 0.0181 - mae: 0.0181\nEpoch 351/1000\n4/4 [==============================] - 2s 445ms/step - loss: 0.0208 - mae: 0.0208\nEpoch 352/1000\n4/4 [==============================] - 2s 445ms/step - loss: 0.0185 - mae: 0.0185\nEpoch 353/1000\n4/4 [==============================] - 2s 449ms/step - loss: 0.0170 - mae: 0.0170\nEpoch 354/1000\n4/4 [==============================] - 2s 451ms/step - loss: 0.0222 - mae: 0.0222\nEpoch 355/1000\n4/4 [==============================] - 2s 443ms/step - loss: 0.0226 - mae: 0.0226\nEpoch 356/1000\n4/4 [==============================] - 2s 454ms/step - loss: 0.0181 - mae: 0.0181\nEpoch 357/1000\n4/4 [==============================] - 2s 451ms/step - loss: 0.0191 - mae: 0.0191\nEpoch 358/1000\n4/4 [==============================] - 2s 449ms/step - loss: 0.0215 - mae: 0.0215\nEpoch 359/1000\n4/4 [==============================] - 2s 449ms/step - loss: 0.0178 - mae: 
0.0178\nEpoch 360/1000\n4/4 [==============================] - 2s 449ms/step - loss: 0.0202 - mae: 0.0202\nEpoch 361/1000\n4/4 [==============================] - 2s 440ms/step - loss: 0.0157 - mae: 0.0157\nEpoch 362/1000\n4/4 [==============================] - 2s 442ms/step - loss: 0.0214 - mae: 0.0214\nEpoch 363/1000\n4/4 [==============================] - 2s 449ms/step - loss: 0.0198 - mae: 0.0198\nEpoch 364/1000\n4/4 [==============================] - 2s 452ms/step - loss: 0.0153 - mae: 0.0153\nEpoch 365/1000\n4/4 [==============================] - 2s 442ms/step - loss: 0.0188 - mae: 0.0188\nEpoch 366/1000\n4/4 [==============================] - 2s 445ms/step - loss: 0.0147 - mae: 0.0147\nEpoch 367/1000\n4/4 [==============================] - 2s 447ms/step - loss: 0.0183 - mae: 0.0183\nEpoch 368/1000\n4/4 [==============================] - 2s 448ms/step - loss: 0.0205 - mae: 0.0205\nEpoch 369/1000\n4/4 [==============================] - 2s 459ms/step - loss: 0.0255 - mae: 0.0255\nEpoch 370/1000\n4/4 [==============================] - 2s 457ms/step - loss: 0.0275 - mae: 0.0275\nEpoch 371/1000\n4/4 [==============================] - 2s 449ms/step - loss: 0.0264 - mae: 0.0264\nEpoch 372/1000\n4/4 [==============================] - 2s 442ms/step - loss: 0.0241 - mae: 0.0241\nEpoch 373/1000\n4/4 [==============================] - 2s 444ms/step - loss: 0.0216 - mae: 0.0216\nEpoch 374/1000\n4/4 [==============================] - 2s 449ms/step - loss: 0.0214 - mae: 0.0214\nEpoch 375/1000\n4/4 [==============================] - 2s 436ms/step - loss: 0.0182 - mae: 0.0182\nEpoch 376/1000\n4/4 [==============================] - 2s 445ms/step - loss: 0.0198 - mae: 0.0198\nEpoch 377/1000\n4/4 [==============================] - 2s 449ms/step - loss: 0.0188 - mae: 0.0188\nEpoch 378/1000\n4/4 [==============================] - 2s 448ms/step - loss: 0.0227 - mae: 0.0227\nEpoch 379/1000\n4/4 [==============================] - 2s 449ms/step - loss: 0.0191 - mae: 0.0191\nEpoch 380/1000\n4/4 [==============================] - 2s 443ms/step - loss: 0.0174 - mae: 0.0174\nEpoch 381/1000\n4/4 [==============================] - 2s 451ms/step - loss: 0.0188 - mae: 0.0188\nEpoch 382/1000\n4/4 [==============================] - 2s 447ms/step - loss: 0.0207 - mae: 0.0207\nEpoch 383/1000\n4/4 [==============================] - 2s 443ms/step - loss: 0.0171 - mae: 0.0171\nEpoch 384/1000\n4/4 [==============================] - 2s 465ms/step - loss: 0.0177 - mae: 0.0177\nEpoch 385/1000\n4/4 [==============================] - 2s 443ms/step - loss: 0.0141 - mae: 0.0141\nEpoch 386/1000\n4/4 [==============================] - 2s 449ms/step - loss: 0.0179 - mae: 0.0179\nEpoch 387/1000\n4/4 [==============================] - 2s 447ms/step - loss: 0.0174 - mae: 0.0174\nEpoch 388/1000\n4/4 [==============================] - 2s 450ms/step - loss: 0.0143 - mae: 0.0143\nEpoch 389/1000\n4/4 [==============================] - 2s 445ms/step - loss: 0.0166 - mae: 0.0166\nEpoch 390/1000\n4/4 [==============================] - 2s 443ms/step - loss: 0.0173 - mae: 0.0173\nEpoch 391/1000\n4/4 [==============================] - 2s 455ms/step - loss: 0.0146 - mae: 0.0146\nEpoch 392/1000\n4/4 [==============================] - 2s 448ms/step - loss: 0.0154 - mae: 0.0154\nEpoch 393/1000\n4/4 [==============================] - 2s 441ms/step - loss: 0.0179 - mae: 0.0179\nEpoch 394/1000\n4/4 [==============================] - 2s 449ms/step - loss: 0.0194 - mae: 0.0194\nEpoch 395/1000\n4/4 [==============================] - 2s 454ms/step - loss: 
0.0176 - mae: 0.0176\nEpoch 396/1000\n4/4 [==============================] - 2s 450ms/step - loss: 0.0155 - mae: 0.0155\nEpoch 397/1000\n4/4 [==============================] - 2s 445ms/step - loss: 0.0166 - mae: 0.0166\nEpoch 398/1000\n4/4 [==============================] - 2s 464ms/step - loss: 0.0165 - mae: 0.0165\nEpoch 399/1000\n4/4 [==============================] - 2s 465ms/step - loss: 0.0142 - mae: 0.0142\nEpoch 400/1000\n4/4 [==============================] - 2s 452ms/step - loss: 0.0150 - mae: 0.0150\nEpoch 401/1000\n4/4 [==============================] - 2s 445ms/step - loss: 0.0149 - mae: 0.0149\nEpoch 402/1000\n4/4 [==============================] - 2s 448ms/step - loss: 0.0165 - mae: 0.0165\nEpoch 403/1000\n4/4 [==============================] - 2s 452ms/step - loss: 0.0151 - mae: 0.0151\nEpoch 404/1000\n4/4 [==============================] - 2s 448ms/step - loss: 0.0144 - mae: 0.0144\nEpoch 405/1000\n4/4 [==============================] - 2s 452ms/step - loss: 0.0173 - mae: 0.0173\nEpoch 406/1000\n4/4 [==============================] - 2s 457ms/step - loss: 0.0176 - mae: 0.0176\nEpoch 407/1000\n4/4 [==============================] - 2s 443ms/step - loss: 0.0167 - mae: 0.0167\nEpoch 408/1000\n4/4 [==============================] - 2s 445ms/step - loss: 0.0147 - mae: 0.0147\nEpoch 409/1000\n4/4 [==============================] - 2s 447ms/step - loss: 0.0153 - mae: 0.0153\nEpoch 410/1000\n4/4 [==============================] - 2s 444ms/step - loss: 0.0139 - mae: 0.0139\nEpoch 411/1000\n4/4 [==============================] - 2s 445ms/step - loss: 0.0189 - mae: 0.0189\nEpoch 412/1000\n4/4 [==============================] - 2s 454ms/step - loss: 0.0225 - mae: 0.0225\nEpoch 413/1000\n4/4 [==============================] - 2s 452ms/step - loss: 0.0200 - mae: 0.0200\nEpoch 414/1000\n4/4 [==============================] - 2s 447ms/step - loss: 0.0173 - mae: 0.0173\nEpoch 415/1000\n4/4 [==============================] - 2s 444ms/step - loss: 0.0180 - mae: 0.0180\nEpoch 416/1000\n4/4 [==============================] - 2s 454ms/step - loss: 0.0191 - mae: 0.0191\nEpoch 417/1000\n4/4 [==============================] - 2s 446ms/step - loss: 0.0170 - mae: 0.0170\nEpoch 418/1000\n4/4 [==============================] - 2s 448ms/step - loss: 0.0187 - mae: 0.0187\nEpoch 419/1000\n4/4 [==============================] - 2s 456ms/step - loss: 0.0155 - mae: 0.0155\nEpoch 420/1000\n4/4 [==============================] - 2s 449ms/step - loss: 0.0218 - mae: 0.0218\nEpoch 421/1000\n4/4 [==============================] - 2s 452ms/step - loss: 0.0149 - mae: 0.0149\nEpoch 422/1000\n4/4 [==============================] - 2s 445ms/step - loss: 0.0196 - mae: 0.0196\nEpoch 423/1000\n4/4 [==============================] - 2s 456ms/step - loss: 0.0163 - mae: 0.0163\nEpoch 424/1000\n4/4 [==============================] - 2s 457ms/step - loss: 0.0169 - mae: 0.0169\nEpoch 425/1000\n4/4 [==============================] - 2s 444ms/step - loss: 0.0150 - mae: 0.0150\nEpoch 426/1000\n4/4 [==============================] - 2s 456ms/step - loss: 0.0154 - mae: 0.0154\nEpoch 427/1000\n4/4 [==============================] - 2s 450ms/step - loss: 0.0168 - mae: 0.0168\nEpoch 428/1000\n4/4 [==============================] - 2s 445ms/step - loss: 0.0167 - mae: 0.0167\nEpoch 429/1000\n4/4 [==============================] - 2s 444ms/step - loss: 0.0148 - mae: 0.0148\nEpoch 430/1000\n4/4 [==============================] - 2s 449ms/step - loss: 0.0164 - mae: 0.0164\nEpoch 431/1000\n4/4 [==============================] - 2s 
453ms/step - loss: 0.0144 - mae: 0.0144\nEpoch 432/1000\n4/4 [==============================] - 2s 446ms/step - loss: 0.0164 - mae: 0.0164\nEpoch 433/1000\n4/4 [==============================] - 2s 457ms/step - loss: 0.0138 - mae: 0.0138\nEpoch 434/1000\n4/4 [==============================] - 2s 463ms/step - loss: 0.0127 - mae: 0.0127\nEpoch 435/1000\n4/4 [==============================] - 2s 461ms/step - loss: 0.0131 - mae: 0.0131\nEpoch 436/1000\n4/4 [==============================] - 2s 448ms/step - loss: 0.0157 - mae: 0.0157\nEpoch 437/1000\n4/4 [==============================] - 2s 451ms/step - loss: 0.0135 - mae: 0.0135\nEpoch 438/1000\n4/4 [==============================] - 2s 451ms/step - loss: 0.0136 - mae: 0.0136\nEpoch 439/1000\n4/4 [==============================] - 2s 454ms/step - loss: 0.0139 - mae: 0.0139\nEpoch 440/1000\n4/4 [==============================] - 2s 466ms/step - loss: 0.0133 - mae: 0.0133\nEpoch 441/1000\n4/4 [==============================] - 2s 454ms/step - loss: 0.0142 - mae: 0.0142\nEpoch 442/1000\n4/4 [==============================] - 2s 460ms/step - loss: 0.0163 - mae: 0.0163\nEpoch 443/1000\n4/4 [==============================] - 2s 456ms/step - loss: 0.0152 - mae: 0.0152\nEpoch 444/1000\n4/4 [==============================] - 2s 449ms/step - loss: 0.0127 - mae: 0.0127\nEpoch 445/1000\n4/4 [==============================] - 2s 458ms/step - loss: 0.0146 - mae: 0.0146\nEpoch 446/1000\n4/4 [==============================] - 2s 455ms/step - loss: 0.0152 - mae: 0.0152\nEpoch 447/1000\n4/4 [==============================] - 2s 463ms/step - loss: 0.0139 - mae: 0.0139\nEpoch 448/1000\n4/4 [==============================] - 2s 455ms/step - loss: 0.0151 - mae: 0.0151\nEpoch 449/1000\n4/4 [==============================] - 2s 464ms/step - loss: 0.0161 - mae: 0.0161\nEpoch 450/1000\n4/4 [==============================] - 2s 450ms/step - loss: 0.0164 - mae: 0.0164\nEpoch 451/1000\n4/4 [==============================] - 2s 454ms/step - loss: 0.0160 - mae: 0.0160\nEpoch 452/1000\n4/4 [==============================] - 2s 450ms/step - loss: 0.0137 - mae: 0.0137\nEpoch 453/1000\n4/4 [==============================] - 2s 450ms/step - loss: 0.0149 - mae: 0.0149\nEpoch 454/1000\n4/4 [==============================] - 2s 458ms/step - loss: 0.0147 - mae: 0.0147\nEpoch 455/1000\n4/4 [==============================] - 2s 458ms/step - loss: 0.0171 - mae: 0.0171\nEpoch 456/1000\n4/4 [==============================] - 2s 449ms/step - loss: 0.0182 - mae: 0.0182\nEpoch 457/1000\n4/4 [==============================] - 2s 450ms/step - loss: 0.0147 - mae: 0.0147\nEpoch 458/1000\n4/4 [==============================] - 2s 454ms/step - loss: 0.0130 - mae: 0.0130\nEpoch 459/1000\n4/4 [==============================] - 2s 454ms/step - loss: 0.0159 - mae: 0.0159\nEpoch 460/1000\n4/4 [==============================] - 2s 445ms/step - loss: 0.0127 - mae: 0.0127\nEpoch 461/1000\n4/4 [==============================] - 2s 460ms/step - loss: 0.0139 - mae: 0.0139\nEpoch 462/1000\n4/4 [==============================] - 2s 461ms/step - loss: 0.0150 - mae: 0.0150\nEpoch 463/1000\n4/4 [==============================] - 2s 452ms/step - loss: 0.0129 - mae: 0.0129\nEpoch 464/1000\n4/4 [==============================] - 2s 458ms/step - loss: 0.0141 - mae: 0.0141\nEpoch 465/1000\n4/4 [==============================] - 2s 448ms/step - loss: 0.0142 - mae: 0.0142\nEpoch 466/1000\n4/4 [==============================] - 2s 452ms/step - loss: 0.0127 - mae: 0.0127\nEpoch 467/1000\n4/4 
[==============================] - 2s 451ms/step - loss: 0.0135 - mae: 0.0135\n... (per-epoch Keras progress output for epochs 468-999 omitted: loss/mae mostly hover around 0.010-0.016, with transient spikes up to ~0.076 near epoch 644 and ~0.045 near epoch 898 before settling back down) ...\nEpoch 1000/1000\n4/4 [==============================] - 2s 473ms/step - loss: 0.0101 - mae: 0.0101\n"
],
[
"model.save(\"reconstruct\")\nsegment_model.save(\"segmentation\")",
"INFO:tensorflow:Assets written to: reconstruct\\assets\nINFO:tensorflow:Assets written to: segmentation\\assets\n"
]
],
[
[
"## 可視化",
"_____no_output_____"
]
],
[
[
"fig = plt.figure()\nax1 = fig.add_subplot(1, 2, 1)\nax1.set_title(\"pred\",fontsize=20)\nplt.imshow(model(tf.expand_dims(image, axis=0))[0])\nax2 = fig.add_subplot(1, 2, 2)\nax2.set_title(\"image\",fontsize=20)\nplt.imshow(image)",
"_____no_output_____"
],
[
"fig = plt.figure()\nax1 = fig.add_subplot(1, 2, 1)\nax1.set_title(\"pred\",fontsize=20)\nplt.imshow(segment_model(tf.expand_dims(image, axis=0))[0])\nax2 = fig.add_subplot(1, 2, 2)\nax2.set_title(\"label\",fontsize=20)\nplt.imshow(label)\n\nt=segment_model(tf.expand_dims(image, axis=0))[0].numpy()\n#tf.reduce_sum(tf.where(tf.greater(t,), t, tf.zeros_like(t)))\nlen(np.where(t>0.1)[0])",
"_____no_output_____"
],
[
"nc_image = non_clack_images[180]\n\nfig = plt.figure()\nax1 = fig.add_subplot(1, 2, 1)\nax1.set_title(\"pred\",fontsize=20)\nplt.imshow(nc_image)\nax2 = fig.add_subplot(1, 2, 2)\nax2.set_title(\"label\",fontsize=20)\nplt.imshow((tf.keras.losses.MSE(model(np.expand_dims(nc_image, axis=0)), nc_image).numpy()[0] > 0.01) * 255)",
"_____no_output_____"
],
[
"fig = plt.figure()\nax1 = fig.add_subplot(1, 2, 1)\nax1.set_title(\"pred\",fontsize=20)\nplt.imshow(image)\nax2 = fig.add_subplot(1, 2, 2)\nax2.set_title(\"label\",fontsize=20)\nplt.imshow((tf.keras.losses.MSE(model(np.expand_dims(image, axis=0)), image).numpy()[0] > 0.01) * 255)",
"_____no_output_____"
]
],
[
[
"## 特徴空間分離",
"_____no_output_____"
]
],
[
[
"from sklearn.manifold import TSNE\nfrom sklearn.decomposition import PCA\nimport pandas as pd\n\nfeature_model = model.get_layer(\"sequential_1\").get_layer(\"model_12\")\nfeature_model = tf.keras.Model(inputs=feature_model.input, outputs=feature_model.get_layer(\"block3_pool\").output)\n\n# 特徴量抽出モデル\ntest = train_ds.unbatch()\n#a_x = [tf.keras.losses.MAE(model(np.expand_dims(x, axis=0)), x).numpy().flatten() for x, y in test]\na_x = [feature_model(np.expand_dims(x, axis=0)).numpy().flatten() for x, y in test]\na_y = [0] * len(a_x)\na_c = [\"ひびあり\"] * len(a_x)\n\na_n = []\nfor idx in coco.getImgIds()[:15]:\n anns = coco.loadAnns(coco.getAnnIds(idx))\n a_n.append(coco.loadImgs(idx)[0][\"file_name\"])\n\n\nunb_test = recon_train_ds.unbatch()\n#b_x = [tf.keras.losses.MAE(model(np.expand_dims(x, axis=0)), x).numpy().flatten() for x, y in unb_test]\nb_x = [feature_model(np.expand_dims(x, axis=0)).numpy().flatten() for x, y in unb_test]\nb_y = [1] * len(b_x)\nb_c = [\"ひびなし\"] * len(b_x)\nb_n = [str(p.name) for p in Path(\"./data/images/non_clack\").glob(\"*\")]\n\na_x.extend(b_x)\na_y.extend(b_y)\na_c.extend(b_c)\na_n.extend(b_n[:180])\n\nfeature = TSNE(n_components=2).fit_transform(np.array(a_x))\n\nprint(len(a_x), len(a_c), len(a_n))\n\nplot_df = pd.DataFrame({\"f1\":feature[:, 0],\"f2\":feature[:, 1],\"color\":a_c, \"name\":a_n})\n\n#plt.scatter(feature[:, 0], feature[:, 1], alpha=0.8, color=a_c)\nfig = px.scatter(plot_df, x=\"f1\", y=\"f2\", color=\"color\", hover_name=\"name\" )\nfig.update_layout(width=800, height=600)",
"c:\\Users\\hirose.tomoki\\Desktop\\concrete_annotation\\.venv\\lib\\site-packages\\sklearn\\manifold\\_t_sne.py:780: FutureWarning: The default initialization in TSNE will change from 'random' to 'pca' in 1.2.\n warnings.warn(\nc:\\Users\\hirose.tomoki\\Desktop\\concrete_annotation\\.venv\\lib\\site-packages\\sklearn\\manifold\\_t_sne.py:790: FutureWarning: The default learning rate in TSNE will change from 200.0 to 'auto' in 1.2.\n warnings.warn(\n"
],
[
"#result = tf.concat([tf.concat([model(image) for image, _, in test_ds], axis=0), tf.concat([model(image) for image, _, in recon_test_ds], axis=0)], axis=0)\n#score = [len(np.where(t.numpy()>0.1)[0]) for t in result]\n\n#score\n\ncounts = []\n\nfor image,_ in test_ds:\n result = (tf.keras.losses.MSE(model(np.expand_dims(image, axis=0)), image).numpy()[0] > 0.001) * 255\n counts.append(len(np.where(result==255)[0]))\n\nfor image,_ in recon_test_ds:\n result = (tf.keras.losses.MSE(model(np.expand_dims(image, axis=0)), image).numpy()[0] > 0.001) * 255\n counts.append(len(np.where(result==255)[0]))\n\nlabels = [1]*5\nlabels.extend([0]*20)\n\n\nfrom sklearn.metrics import roc_auc_score, roc_curve\n\nfpr, tpr, th = roc_curve(labels, counts, drop_intermediate=False)\nfpr, tpr, th\n\n",
"_____no_output_____"
],
[
"roc_auc_score(labels, counts)",
"_____no_output_____"
],
[
"plt.plot(fpr, tpr, marker='o')",
"_____no_output_____"
]
]
] | [
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] | [
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code"
]
] |
d09baa2e2b42b5c392d61f3b17b128f7c65796ba | 16,766 | ipynb | Jupyter Notebook | covtest.ipynb | CosmoLike/LSST_emu | ed4d26b52ed26172eb35b3a030403c123e29eb2c | [
"MIT"
] | null | null | null | covtest.ipynb | CosmoLike/LSST_emu | ed4d26b52ed26172eb35b3a030403c123e29eb2c | [
"MIT"
] | null | null | null | covtest.ipynb | CosmoLike/LSST_emu | ed4d26b52ed26172eb35b3a030403c123e29eb2c | [
"MIT"
] | null | null | null | 45.559783 | 124 | 0.560241 | [
[
[
"import sys\nimport os\nimport math, numpy as np\nimport matplotlib.pyplot as plt\nimport matplotlib.image as mpimg\nfrom numpy import linalg as LA\nimport numpy as np\n\ninfile = os.listdir('/users/timeifler/Dropbox/cosmolike_store/LSST_emu/cov/')\ndata = [x[4:29] for x in infile]\ndata= [i.replace('LSST_Y10','LSST_3x2pt_Y10') for i in data]\noutname= [i.replace('cov_','') for i in infile]\n\n\nfor k in range(0,36):\n print \"------- NEW COV ----------\"\n print \"/users/timeifler/Dropbox/cosmolike_store/LSST_emu/cov/\"+infile[k]\n print \"------- NEW COV ----------\"\n datafile= np.genfromtxt(\"datav/\"+data[k])\n ndata=datafile.shape[0]\n mask = np.zeros(ndata)\n for i in range(0,datafile.shape[0]):\n if (datafile[i,1] >1.0e-15): \n mask[i]=1.0\n\n covfile = np.genfromtxt(\"/users/timeifler/Dropbox/cosmolike_store/LSST_emu/cov/\"+infile[k])\n cov = np.ones((ndata,ndata))\n \n print ndata,int(np.max(covfile[:,0])+1)\n\n for i in range(0,covfile.shape[0]):\n cov[int(covfile[i,0]),int(covfile[i,1])] = covfile[i,8]+covfile[i,9]\n cov[int(covfile[i,1]),int(covfile[i,0])] = covfile[i,8]+covfile[i,9]\n \n if 1. in cov[:, :]:\n print \"Covariance assembly incomplete - covparallel file(s) missing\"\n else:\n print \"Covariance assembly complete - all covparallel files present\"\n \n numpyfile=\"/users/timeifler/Dropbox/cosmolike_store/LSST_emu/npcov/npcov_\"+outname[k]\n np.save(numpyfile, cov)\n loadfile=\"/users/timeifler/Dropbox/cosmolike_store/LSST_emu/npcov/npcov_\"+outname[k]+\".npy\"\n cov2=np.load(loadfile)\n \n print cov.shape, cov2.shape\n\n# cor = np.zeros((ndata,ndata))\n# for i in range(0,ndata):\n# for j in range(0,ndata):\n# if (cov[i,i]*cov[j,j] >0):\n# cor[i,j] = cov[i,j]/math.sqrt(cov[i,i]*cov[j,j])\n\n\n# a = np.sort(LA.eigvals(cor[:,:]))\n \n# print \"min+max eigenvalues full cor:\"\n# print np.min(a), np.max(a)\n# print \"neg eigenvalues full cor:\"\n# for i in range(0,a.shape[0]):\n# if (a[i]< 0.0): print a[i]\n\n# inv = LA.inv(cov[0:ndata,0:ndata])\n# a = np.sort(LA.eigvals(cov[0:ndata,0:ndata]))\n# print \"min+max eigenvalues 3x2 cov:\"\n# print np.min(a), np.max(a)\n# outfile = \"/users/timeifler/Dropbox/cosmolike_store/LSST_emu/inv/inv_\"+outname[k]\n# f = open(outfile, \"w\")\n# for i in range(0,ndata):\n# inv[i,i]=inv[i,i]*mask[i]\n# for j in range(0,ndata):\n# f.write(\"%d %d %e\\n\" %(i,j, inv[i,j]))\n# f.close()\n \n# maskindices=np.where(mask == 0)[0]\n# covnew=np.delete(cov, maskindices, 0)\n# covcut=np.delete(covnew, maskindices, 1)\n# covzero=np.where(covcut == 0)[0]\n\n# plt.figure()\n# plt.imshow(np.log10(np.abs(covcut[:,:])), interpolation=\"nearest\",vmin=-25, vmax=-10)\n# plt.colorbar()\n# savefile=\"/users/timeifler/Dropbox/cosmolike_store/LSST_emu/plots/covcut_\"+outname[k]+\".png\"\n# plt.savefig(savefile, format='png', dpi=2000)",
"------- NEW COV ----------\n/users/timeifler/Dropbox/cosmolike_store/LSST_emu/cov/cov_LSST_Y10_area1.135200e+04_ng1.918510e+01_nl3.285780e+01\n------- NEW COV ----------\n1725 1725\nCovariance assembly complete - all covparallel files present\n(1725, 1725) (1725, 1725)\n------- NEW COV ----------\n/users/timeifler/Dropbox/cosmolike_store/LSST_emu/cov/cov_LSST_Y10_area1.286780e+04_ng1.330660e+01_nl2.170300e+01\n------- NEW COV ----------\n1680 1680\nCovariance assembly complete - all covparallel files present\n(1680, 1680) (1680, 1680)\n------- NEW COV ----------\n/users/timeifler/Dropbox/cosmolike_store/LSST_emu/cov/cov_LSST_Y10_area9.747990e+03_ng1.111050e+01_nl1.768990e+01\n------- NEW COV ----------\n1650 1650\nCovariance assembly complete - all covparallel files present\n(1650, 1650) (1650, 1650)\n------- NEW COV ----------\n/users/timeifler/Dropbox/cosmolike_store/LSST_emu/cov/cov_LSST_Y10_area1.321270e+04_ng2.489560e+01_nl4.414800e+01\n------- NEW COV ----------\n1770 1770\nCovariance assembly complete - all covparallel files present\n(1770, 1770) (1770, 1770)\n------- NEW COV ----------\n/users/timeifler/Dropbox/cosmolike_store/LSST_emu/cov/cov_LSST_Y10_area1.405970e+04_ng2.359790e+01_nl4.154870e+01\n------- NEW COV ----------\n1755 1755\nCovariance assembly complete - all covparallel files present\n(1755, 1755) (1755, 1755)\n------- NEW COV ----------\n/users/timeifler/Dropbox/cosmolike_store/LSST_emu/cov/cov_LSST_Y10_area1.691070e+04_ng2.200120e+01_nl3.837660e+01\n------- NEW COV ----------\n1740 1740\nCovariance assembly complete - all covparallel files present\n(1740, 1740) (1740, 1740)\n------- NEW COV ----------\n/users/timeifler/Dropbox/cosmolike_store/LSST_emu/cov/cov_LSST_Y10_area1.091930e+04_ng2.087710e+01_nl3.616160e+01\n------- NEW COV ----------\n1725 1725\nCovariance assembly complete - all covparallel files present\n(1725, 1725) (1725, 1725)\n------- NEW COV ----------\n/users/timeifler/Dropbox/cosmolike_store/LSST_emu/cov/cov_LSST_Y10_area1.512690e+04_ng1.038020e+01_nl1.637770e+01\n------- NEW COV ----------\n1650 1650\nCovariance assembly complete - all covparallel files present\n(1650, 1650) (1650, 1650)\n------- NEW COV ----------\n/users/timeifler/Dropbox/cosmolike_store/LSST_emu/cov/cov_LSST_Y10_area1.912230e+04_ng1.743810e+01_nl2.948740e+01\n------- NEW COV ----------\n1710 1710\nCovariance assembly complete - all covparallel files present\n(1710, 1710) (1710, 1710)\n------- NEW COV ----------\n/users/timeifler/Dropbox/cosmolike_store/LSST_emu/cov/cov_LSST_Y10_area1.038550e+04_ng1.988930e+01_nl3.422830e+01\n------- NEW COV ----------\n1725 1725\nCovariance assembly complete - all covparallel files present\n(1725, 1725) (1725, 1725)\n------- NEW COV ----------\n/users/timeifler/Dropbox/cosmolike_store/LSST_emu/cov/cov_LSST_Y10_area8.585430e+03_ng2.848750e+01_nl5.143540e+01\n------- NEW COV ----------\n1785 1785\nCovariance assembly complete - all covparallel files present\n(1785, 1785) (1785, 1785)\n------- NEW COV ----------\n/users/timeifler/Dropbox/cosmolike_store/LSST_emu/cov/cov_LSST_Y10_area1.253880e+04_ng1.632340e+01_nl2.736000e+01\n------- NEW COV ----------\n1695 1695\nCovariance assembly complete - all covparallel files present\n(1695, 1695) (1695, 1695)\n------- NEW COV ----------\n/users/timeifler/Dropbox/cosmolike_store/LSST_emu/cov/cov_LSST_Y10_area9.214770e+03_ng2.692600e+01_nl4.825130e+01\n------- NEW COV ----------\n1785 1785\nCovariance assembly complete - all covparallel files present\n(1785, 1785) (1785, 1785)\n------- NEW COV 
----------\n/users/timeifler/Dropbox/cosmolike_store/LSST_emu/cov/cov_LSST_Y10_area1.768180e+04_ng3.536430e+01_nl6.572260e+01\n------- NEW COV ----------\n1815 1815\nCovariance assembly complete - all covparallel files present\n(1815, 1815) (1815, 1815)\n------- NEW COV ----------\n/users/timeifler/Dropbox/cosmolike_store/LSST_emu/cov/cov_LSST_Y10_area1.833130e+04_ng1.574630e+01_nl2.626600e+01\n------- NEW COV ----------\n1695 1695\nCovariance assembly complete - all covparallel files present\n(1695, 1695) (1695, 1695)\n------- NEW COV ----------\n/users/timeifler/Dropbox/cosmolike_store/LSST_emu/cov/cov_LSST_Y10_area1.439510e+04_ng1.197870e+01_nl1.926470e+01\n------- NEW COV ----------\n1650 1650\nCovariance assembly complete - all covparallel files present\n(1650, 1650) (1650, 1650)\n------- NEW COV ----------\n/users/timeifler/Dropbox/cosmolike_store/LSST_emu/cov/cov_LSST_Y10_area1.654070e+04_ng1.074150e+01_nl1.702530e+01\n------- NEW COV ----------\n1650 1650\nCovariance assembly complete - all covparallel files present\n(1650, 1650) (1650, 1650)\n------- NEW COV ----------\n/users/timeifler/Dropbox/cosmolike_store/LSST_emu/cov/cov_LSST_Y10_area1.963680e+04_ng3.804120e+01_nl7.139000e+01\n------- NEW COV ----------\n1830 1830\nCovariance assembly complete - all covparallel files present\n(1830, 1830) (1830, 1830)\n------- NEW COV ----------\n/users/timeifler/Dropbox/cosmolike_store/LSST_emu/cov/cov_LSST_Y10_area7.623220e+03_ng1.399100e+01_nl2.297260e+01\n------- NEW COV ----------\n1695 1695\nCovariance assembly complete - all covparallel files present\n(1695, 1695) (1695, 1695)\n------- NEW COV ----------\n/users/timeifler/Dropbox/cosmolike_store/LSST_emu/cov/cov_LSST_Y10_area1.978310e+04_ng9.587190e+00_nl1.496670e+01\n------- NEW COV ----------\n1650 1650\nCovariance assembly complete - all covparallel files present\n(1650, 1650) (1650, 1650)\n------- NEW COV ----------\n/users/timeifler/Dropbox/cosmolike_store/LSST_emu/cov/cov_LSST_Y10_area1.619980e+04_ng1.863040e+01_nl3.178300e+01\n------- NEW COV ----------\n1725 1725\nCovariance assembly complete - all covparallel files present\n(1725, 1725) (1725, 1725)\n------- NEW COV ----------\n/users/timeifler/Dropbox/cosmolike_store/LSST_emu/cov/cov_LSST_Y10_area1.199560e+04_ng1.265530e+01_nl2.050290e+01\n------- NEW COV ----------\n1680 1680\nCovariance assembly complete - all covparallel files present\n(1680, 1680) (1680, 1680)\n------- NEW COV ----------\n/users/timeifler/Dropbox/cosmolike_store/LSST_emu/cov/cov_LSST_Y10_area1.568450e+04_ng1.234240e+01_nl1.992920e+01\n------- NEW COV ----------\n1665 1665\nCovariance assembly complete - all covparallel files present\n(1665, 1665) (1665, 1665)\n------- NEW COV ----------\n/users/timeifler/Dropbox/cosmolike_store/LSST_emu/cov/cov_LSST_Y10_area8.133860e+03_ng3.687280e+01_nl6.890940e+01\n------- NEW COV ----------\n1830 1830\nCovariance assembly complete - all covparallel files present\n(1830, 1830) (1830, 1830)\n------- NEW COV ----------\n/users/timeifler/Dropbox/cosmolike_store/LSST_emu/cov/cov_LSST_Y10_area1.797620e+04_ng1.424180e+01_nl2.344000e+01\n------- NEW COV ----------\n1695 1695\nCovariance assembly complete - all covparallel files present\n(1695, 1695) (1695, 1695)\n------- NEW COV ----------\n/users/timeifler/Dropbox/cosmolike_store/LSST_emu/cov/cov_LSST_Y10_area1.614020e+04_ng2.703690e+01_nl4.847670e+01\n------- NEW COV ----------\n1785 1785\nCovariance assembly complete - all covparallel files present\n(1785, 1785) (1785, 1785)\n------- NEW COV 
----------\n/users/timeifler/Dropbox/cosmolike_store/LSST_emu/cov/cov_LSST_Y10_area9.931470e+03_ng1.706900e+01_nl2.878090e+01\n------- NEW COV ----------\n1710 1710\nCovariance assembly complete - all covparallel files present\n(1710, 1710) (1710, 1710)\n------- NEW COV ----------\n/users/timeifler/Dropbox/cosmolike_store/LSST_emu/cov/cov_LSST_Y10_area9.533420e+03_ng3.146080e+01_nl5.756200e+01\n------- NEW COV ----------\n1800 1800\nCovariance assembly complete - all covparallel files present\n(1800, 1800) (1800, 1800)\n------- NEW COV ----------\n/users/timeifler/Dropbox/cosmolike_store/LSST_emu/cov/cov_LSST_Y10_area1.892010e+04_ng1.517110e+01_nl2.518120e+01\n------- NEW COV ----------\n1695 1695\nCovariance assembly complete - all covparallel files present\n(1695, 1695) (1695, 1695)\n------- NEW COV ----------\n/users/timeifler/Dropbox/cosmolike_store/LSST_emu/cov/cov_LSST_Y10_area1.741890e+04_ng9.072180e+00_nl1.405870e+01\n------- NEW COV ----------\n1650 1650\nCovariance assembly complete - all covparallel files present\n(1650, 1650) (1650, 1650)\n------- NEW COV ----------\n/users/timeifler/Dropbox/cosmolike_store/LSST_emu/cov/cov_LSST_Y10_area1.111270e+04_ng3.288210e+01_nl6.051840e+01\n------- NEW COV ----------\n1800 1800\nCovariance assembly complete - all covparallel files present\n(1800, 1800) (1800, 1800)\n------- NEW COV ----------\n/users/timeifler/Dropbox/cosmolike_store/LSST_emu/cov/cov_LSST_Y10_area1.526000e+04_ng2.583270e+01_nl4.603640e+01\n------- NEW COV ----------\n"
]
]
] | [
"code"
] | [
[
"code"
]
] |
d09bad29e4bf8b8ef231ffe490f9cee2ba708ffd | 59,949 | ipynb | Jupyter Notebook | algo.ipynb | RenqinSS/Rec | fc1ba995c1a4f1167bc0f34709afcb4a211f1d3d | [
"MIT"
] | null | null | null | algo.ipynb | RenqinSS/Rec | fc1ba995c1a4f1167bc0f34709afcb4a211f1d3d | [
"MIT"
] | null | null | null | algo.ipynb | RenqinSS/Rec | fc1ba995c1a4f1167bc0f34709afcb4a211f1d3d | [
"MIT"
] | null | null | null | 46.798595 | 229 | 0.527231 | [
[
[
"<a href=\"https://colab.research.google.com/github/RenqinSS/Rec/blob/main/algo.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>",
"_____no_output_____"
]
],
[
[
"import random\nimport os\nimport numpy as np\nimport torch\n\nSEED = 45\ndef seed_everything(seed):\n random.seed(seed)\n os.environ['PYTHONHASHSEED'] = str(seed)\n np.random.seed(seed)\n torch.manual_seed(seed)\n torch.cuda.manual_seed(seed)\n torch.backends.cudnn.deterministic = True\nseed_everything(SEED)",
"_____no_output_____"
],
[
"!git clone https://github.com/dpoqb/wechat_big_data_baseline_pytorch.git\n\n!dir\n!mkdir data\n!unzip ./drive/MyDrive/wechat_algo_data1.zip -d ./data\n\n!pip install deepctr_torch\n\nimport torch\nimport os\n\nprint(torch.cuda.is_available())\nfor i in range(torch.cuda.device_count()):\n print(torch.cuda.get_device_name(i))",
"Cloning into 'wechat_big_data_baseline_pytorch'...\nremote: Enumerating objects: 16, done.\u001b[K\nremote: Counting objects: 100% (16/16), done.\u001b[K\nremote: Compressing objects: 100% (16/16), done.\u001b[K\nremote: Total 16 (delta 3), reused 5 (delta 0), pack-reused 0\u001b[K\nUnpacking objects: 100% (16/16), done.\ndrive sample_data wechat_big_data_baseline_pytorch\nArchive: ./drive/MyDrive/wechat_algo_data1.zip\n creating: ./data/wechat_algo_data1/\n inflating: ./data/wechat_algo_data1/test_a.csv \n inflating: ./data/wechat_algo_data1/feed_info.csv \n inflating: ./data/wechat_algo_data1/feed_embeddings.csv \n inflating: ./data/wechat_algo_data1/README.md \n inflating: ./data/wechat_algo_data1/user_action.csv \n inflating: ./data/wechat_algo_data1/submit_demo_初赛a.csv \nCollecting deepctr_torch\n\u001b[?25l Downloading https://files.pythonhosted.org/packages/d2/17/f392dfbaefdd6371335995c4f84cf3b5166cf907fdfa0aa4edc380fdfc5b/deepctr_torch-0.2.7-py3-none-any.whl (70kB)\n\u001b[K |████████████████████████████████| 71kB 7.0MB/s \n\u001b[?25hRequirement already satisfied: tensorflow in /usr/local/lib/python3.7/dist-packages (from deepctr_torch) (2.5.0)\nRequirement already satisfied: torch>=1.1.0 in /usr/local/lib/python3.7/dist-packages (from deepctr_torch) (1.9.0+cu102)\nRequirement already satisfied: sklearn in /usr/local/lib/python3.7/dist-packages (from deepctr_torch) (0.0)\nRequirement already satisfied: tqdm in /usr/local/lib/python3.7/dist-packages (from deepctr_torch) (4.41.1)\nRequirement already satisfied: protobuf>=3.9.2 in /usr/local/lib/python3.7/dist-packages (from tensorflow->deepctr_torch) (3.12.4)\nRequirement already satisfied: keras-nightly~=2.5.0.dev in /usr/local/lib/python3.7/dist-packages (from tensorflow->deepctr_torch) (2.5.0.dev2021032900)\nRequirement already satisfied: absl-py~=0.10 in /usr/local/lib/python3.7/dist-packages (from tensorflow->deepctr_torch) (0.12.0)\nRequirement already satisfied: astunparse~=1.6.3 in /usr/local/lib/python3.7/dist-packages (from tensorflow->deepctr_torch) (1.6.3)\nRequirement already satisfied: grpcio~=1.34.0 in /usr/local/lib/python3.7/dist-packages (from tensorflow->deepctr_torch) (1.34.1)\nRequirement already satisfied: tensorflow-estimator<2.6.0,>=2.5.0rc0 in /usr/local/lib/python3.7/dist-packages (from tensorflow->deepctr_torch) (2.5.0)\nRequirement already satisfied: tensorboard~=2.5 in /usr/local/lib/python3.7/dist-packages (from tensorflow->deepctr_torch) (2.5.0)\nRequirement already satisfied: gast==0.4.0 in /usr/local/lib/python3.7/dist-packages (from tensorflow->deepctr_torch) (0.4.0)\nRequirement already satisfied: wrapt~=1.12.1 in /usr/local/lib/python3.7/dist-packages (from tensorflow->deepctr_torch) (1.12.1)\nRequirement already satisfied: h5py~=3.1.0 in /usr/local/lib/python3.7/dist-packages (from tensorflow->deepctr_torch) (3.1.0)\nRequirement already satisfied: typing-extensions~=3.7.4 in /usr/local/lib/python3.7/dist-packages (from tensorflow->deepctr_torch) (3.7.4.3)\nRequirement already satisfied: wheel~=0.35 in /usr/local/lib/python3.7/dist-packages (from tensorflow->deepctr_torch) (0.36.2)\nRequirement already satisfied: termcolor~=1.1.0 in /usr/local/lib/python3.7/dist-packages (from tensorflow->deepctr_torch) (1.1.0)\nRequirement already satisfied: numpy~=1.19.2 in /usr/local/lib/python3.7/dist-packages (from tensorflow->deepctr_torch) (1.19.5)\nRequirement already satisfied: flatbuffers~=1.12.0 in /usr/local/lib/python3.7/dist-packages (from tensorflow->deepctr_torch) (1.12)\nRequirement already 
satisfied: keras-preprocessing~=1.1.2 in /usr/local/lib/python3.7/dist-packages (from tensorflow->deepctr_torch) (1.1.2)\nRequirement already satisfied: google-pasta~=0.2 in /usr/local/lib/python3.7/dist-packages (from tensorflow->deepctr_torch) (0.2.0)\nRequirement already satisfied: six~=1.15.0 in /usr/local/lib/python3.7/dist-packages (from tensorflow->deepctr_torch) (1.15.0)\nRequirement already satisfied: opt-einsum~=3.3.0 in /usr/local/lib/python3.7/dist-packages (from tensorflow->deepctr_torch) (3.3.0)\nRequirement already satisfied: scikit-learn in /usr/local/lib/python3.7/dist-packages (from sklearn->deepctr_torch) (0.22.2.post1)\nRequirement already satisfied: setuptools in /usr/local/lib/python3.7/dist-packages (from protobuf>=3.9.2->tensorflow->deepctr_torch) (57.0.0)\nRequirement already satisfied: markdown>=2.6.8 in /usr/local/lib/python3.7/dist-packages (from tensorboard~=2.5->tensorflow->deepctr_torch) (3.3.4)\nRequirement already satisfied: google-auth-oauthlib<0.5,>=0.4.1 in /usr/local/lib/python3.7/dist-packages (from tensorboard~=2.5->tensorflow->deepctr_torch) (0.4.4)\nRequirement already satisfied: requests<3,>=2.21.0 in /usr/local/lib/python3.7/dist-packages (from tensorboard~=2.5->tensorflow->deepctr_torch) (2.23.0)\nRequirement already satisfied: werkzeug>=0.11.15 in /usr/local/lib/python3.7/dist-packages (from tensorboard~=2.5->tensorflow->deepctr_torch) (1.0.1)\nRequirement already satisfied: tensorboard-plugin-wit>=1.6.0 in /usr/local/lib/python3.7/dist-packages (from tensorboard~=2.5->tensorflow->deepctr_torch) (1.8.0)\nRequirement already satisfied: google-auth<2,>=1.6.3 in /usr/local/lib/python3.7/dist-packages (from tensorboard~=2.5->tensorflow->deepctr_torch) (1.31.0)\nRequirement already satisfied: tensorboard-data-server<0.7.0,>=0.6.0 in /usr/local/lib/python3.7/dist-packages (from tensorboard~=2.5->tensorflow->deepctr_torch) (0.6.1)\nRequirement already satisfied: cached-property; python_version < \"3.8\" in /usr/local/lib/python3.7/dist-packages (from h5py~=3.1.0->tensorflow->deepctr_torch) (1.5.2)\nRequirement already satisfied: joblib>=0.11 in /usr/local/lib/python3.7/dist-packages (from scikit-learn->sklearn->deepctr_torch) (1.0.1)\nRequirement already satisfied: scipy>=0.17.0 in /usr/local/lib/python3.7/dist-packages (from scikit-learn->sklearn->deepctr_torch) (1.4.1)\nRequirement already satisfied: importlib-metadata; python_version < \"3.8\" in /usr/local/lib/python3.7/dist-packages (from markdown>=2.6.8->tensorboard~=2.5->tensorflow->deepctr_torch) (4.5.0)\nRequirement already satisfied: requests-oauthlib>=0.7.0 in /usr/local/lib/python3.7/dist-packages (from google-auth-oauthlib<0.5,>=0.4.1->tensorboard~=2.5->tensorflow->deepctr_torch) (1.3.0)\nRequirement already satisfied: chardet<4,>=3.0.2 in /usr/local/lib/python3.7/dist-packages (from requests<3,>=2.21.0->tensorboard~=2.5->tensorflow->deepctr_torch) (3.0.4)\nRequirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.7/dist-packages (from requests<3,>=2.21.0->tensorboard~=2.5->tensorflow->deepctr_torch) (2.10)\nRequirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.7/dist-packages (from requests<3,>=2.21.0->tensorboard~=2.5->tensorflow->deepctr_torch) (2021.5.30)\nRequirement already satisfied: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /usr/local/lib/python3.7/dist-packages (from requests<3,>=2.21.0->tensorboard~=2.5->tensorflow->deepctr_torch) (1.24.3)\nRequirement already satisfied: rsa<5,>=3.1.4; python_version >= \"3.6\" in 
/usr/local/lib/python3.7/dist-packages (from google-auth<2,>=1.6.3->tensorboard~=2.5->tensorflow->deepctr_torch) (4.7.2)\nRequirement already satisfied: pyasn1-modules>=0.2.1 in /usr/local/lib/python3.7/dist-packages (from google-auth<2,>=1.6.3->tensorboard~=2.5->tensorflow->deepctr_torch) (0.2.8)\nRequirement already satisfied: cachetools<5.0,>=2.0.0 in /usr/local/lib/python3.7/dist-packages (from google-auth<2,>=1.6.3->tensorboard~=2.5->tensorflow->deepctr_torch) (4.2.2)\nRequirement already satisfied: zipp>=0.5 in /usr/local/lib/python3.7/dist-packages (from importlib-metadata; python_version < \"3.8\"->markdown>=2.6.8->tensorboard~=2.5->tensorflow->deepctr_torch) (3.4.1)\nRequirement already satisfied: oauthlib>=3.0.0 in /usr/local/lib/python3.7/dist-packages (from requests-oauthlib>=0.7.0->google-auth-oauthlib<0.5,>=0.4.1->tensorboard~=2.5->tensorflow->deepctr_torch) (3.1.1)\nRequirement already satisfied: pyasn1>=0.1.3 in /usr/local/lib/python3.7/dist-packages (from rsa<5,>=3.1.4; python_version >= \"3.6\"->google-auth<2,>=1.6.3->tensorboard~=2.5->tensorflow->deepctr_torch) (0.4.8)\nInstalling collected packages: deepctr-torch\nSuccessfully installed deepctr-torch-0.2.7\nTrue\nTesla V100-SXM2-16GB\n"
],
[
"# -*- coding: utf-8 -*-\nimport numpy as np\nimport pandas as pd\nfrom tqdm import tqdm_notebook as tqdm\nfrom sklearn.decomposition import PCA\nfrom collections import defaultdict\n\nimport os\nos.chdir('/content/wechat_big_data_baseline_pytorch')\n\n\n# 存储数据的根目录\nROOT_PATH = \"../data\"\n# 比赛数据集路径\nDATASET_PATH = ROOT_PATH + '/wechat_algo_data1/'\n# 训练集\nUSER_ACTION = DATASET_PATH + \"user_action.csv\"\nFEED_INFO = DATASET_PATH + \"feed_info.csv\"\nFEED_EMBEDDINGS = DATASET_PATH + \"feed_embeddings.csv\"\n# 测试集\nTEST_FILE = DATASET_PATH + \"test_a.csv\"\n# 初赛待预测行为列表\nACTION_LIST = [\"read_comment\", \"like\", \"click_avatar\", \"forward\"]\nFEA_COLUMN_LIST = [\"read_comment\", \"like\", \"click_avatar\", \"forward\", \"comment\", \"follow\", \"favorite\", \"device\"]\nFEA_FEED_LIST = ['feedid', 'authorid', 'videoplayseconds', 'bgm_song_id', 'bgm_singer_id', 'manual_tag_list']\n# 负样本下采样比例(负样本:正样本)\nACTION_SAMPLE_RATE = {\"read_comment\": 4, \"like\": 4, \"click_avatar\": 4, \"forward\": 10, \"comment\": 10, \"follow\": 10, \"favorite\": 10}\n\ndef process_embed(train):\n feed_embed_array = np.zeros((train.shape[0], 512))\n for i in tqdm(range(train.shape[0])):\n x = train.loc[i, 'feed_embedding']\n if x != np.nan and x != '':\n y = [float(i) for i in str(x).strip().split(\" \")]\n else:\n y = np.zeros((512,)).tolist()\n feed_embed_array[i] += y\n temp = pd.DataFrame(columns=[f\"embed{i}\" for i in range(512)], data=feed_embed_array)\n train = pd.concat((train, temp), axis=1)\n return train\n\ndef proc_tag(df, name='manual_tag_list', thre=5, max_len=5):\n stat = defaultdict(int)\n\n for row in df[name]:\n if isinstance(row, str):\n for tag in row.strip().split(';'):\n stat[tag] += 1\n\n zero_tags = set([tag for tag in stat if stat[tag] < thre]) # 低于频次的 tag\n\n def tag_func(row, max_len=max_len):\n ret = []\n if isinstance(row, str):\n for tag in row.strip().split(';'):\n ret.append(0 if tag in zero_tags else int(tag) + 1)\n ret = ret[:max_len] + [0] * (max_len - len(ret))\n return ' '.join([str(n) for n in ret])\n\n df[name] = df[name].apply(tag_func)\n\n tag_vocab_size = max([int(tag) for tag in stat]) + 2\n print('%s: vocab_size == %d' % (name, tag_vocab_size))\n return df\n\ndef prepare_data():\n feed_info_df = pd.read_csv(FEED_INFO)\n\n feed_info_df = proc_tag(feed_info_df, name='manual_tag_list', thre=5, max_len=5)\n\n user_action_df = pd.read_csv(USER_ACTION)[[\"userid\", \"date_\", \"feedid\",] + FEA_COLUMN_LIST]\n \n feed_info_df = feed_info_df[FEA_FEED_LIST]\n\n test = pd.read_csv(TEST_FILE)\n\n # add feed feature\n train = pd.merge(user_action_df, feed_info_df, on='feedid', how='left')\n test = pd.merge(test, feed_info_df, on='feedid', how='left')\n test[\"videoplayseconds\"] = np.log(test[\"videoplayseconds\"] + 1.0)\n test.to_csv(ROOT_PATH + f'/test_data.csv', index=False)\n for action in tqdm(ACTION_LIST):\n print(f\"prepare data for {action}\")\n tmp = train.drop_duplicates(['userid', 'feedid', action], keep='last')\n df_neg = tmp[tmp[action] == 0]\n df_neg = df_neg.sample(frac=1.0 / ACTION_SAMPLE_RATE[action], random_state=SEED, replace=False)\n df_all = pd.concat([df_neg, tmp[tmp[action] == 1]])\n df_all[\"videoplayseconds\"] = np.log(df_all[\"videoplayseconds\"] + 1.0)\n df_all.to_csv(ROOT_PATH + f'/train_data_for_{action}.csv', index=False)\n\n\nif __name__ == \"__main__\":\n prepare_data()",
"manual_tag_list: vocab_size == 354\n"
],
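For orientation, a minimal, hypothetical sketch (the tag string below is made up, not taken from the dataset) of the per-row transform `proc_tag` applies with the `thre=5, max_len=5` settings used above: tags under the frequency threshold map to 0, kept tags are shifted by +1, and the sequence is truncated or zero-padded to `max_len`.

```python
# Toy illustration of proc_tag's tag_func behaviour (values invented for illustration).
raw = "12;7;301"                          # a raw manual_tag_list entry
ids = [13, 8, 302]                        # int(tag) + 1, assuming none fall below the threshold
padded = ids[:5] + [0] * (5 - len(ids))   # truncate / pad to max_len
print(' '.join(str(n) for n in padded))   # -> "13 8 302 0 0"
```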
[
"",
"_____no_output_____"
],
[
"from sklearn.decomposition import PCA\n\nn_dim = 32\nfeed_embed = pd.read_csv(FEED_EMBEDDINGS)\nfeed_embed['feed_embedding'] = feed_embed['feed_embedding'].apply(lambda row: [float(x) for x in row.strip().split()])\npca = PCA(n_components=n_dim)\npca_emb = pca.fit_transform(feed_embed['feed_embedding'].tolist())\nfeed_embed['pca_emb'] = list(pca_emb)\nfeed_embed = feed_embed[['feedid', 'pca_emb']]\n# feed_embed.drop(['feed_embedding'], axis=1).to_csv(\"/content/drive/MyDrive/pca_emb%d.csv\" % n_dim, index=False)",
"_____no_output_____"
],
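As a quick aside (not part of the original notebook), the amount of signal the 32 PCA components keep from the 512-dimensional feed embeddings can be checked from the fitted object; a minimal sketch, assuming the `pca` instance fitted in the cell above is still in scope:

```python
# Hypothetical follow-up check: fraction of variance retained by the 32 components.
retained = pca.explained_variance_ratio_.sum()
print(f"variance retained by {pca.n_components_} components: {retained:.3f}")
```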
[
"from numba import njit\nfrom scipy.stats import rankdata\n\n\n@njit\ndef _auc(actual, pred_ranks):\n n_pos = np.sum(actual)\n n_neg = len(actual) - n_pos\n return (np.sum(pred_ranks[actual == 1]) - n_pos*(n_pos+1)/2) / (n_pos*n_neg)\n\n\ndef fast_auc(actual, predicted):\n # https://www.kaggle.com/c/riiid-test-answer-prediction/discussion/208031\n pred_ranks = rankdata(predicted)\n return _auc(actual, pred_ranks)\n\n\ndef uAUC(labels, preds, user_id_list):\n user_pred = defaultdict(lambda: [])\n user_truth = defaultdict(lambda: [])\n for idx, truth in enumerate(labels):\n user_id = user_id_list[idx]\n pred = preds[idx]\n truth = labels[idx]\n user_pred[user_id].append(pred)\n user_truth[user_id].append(truth)\n\n user_flag = defaultdict(lambda: False)\n for user_id in set(user_id_list):\n truths = user_truth[user_id]\n flag = False\n # 若全是正样本或全是负样本,则flag为False\n for i in range(len(truths) - 1):\n if truths[i] != truths[i + 1]:\n flag = True\n break\n user_flag[user_id] = flag\n\n total_auc = 0.0\n size = 0.0\n for user_id in user_flag:\n if user_flag[user_id]:\n auc = fast_auc(np.asarray(user_truth[user_id]), np.asarray(user_pred[user_id]))\n total_auc += auc \n size += 1.0\n user_auc = float(total_auc)/size\n return user_auc\n\n\ndef compute_weighted_score(score_dict, weight_dict):\n score = 0.0\n weight_sum = 0.0\n for action in score_dict:\n weight = float(weight_dict[action])\n score += weight*score_dict[action]\n weight_sum += weight\n score /= float(weight_sum)\n score = round(score, 6)\n return score",
"_____no_output_____"
],
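A tiny, made-up call to show the input shapes `uAUC` expects (one label, one prediction, and one user id per sample); users whose labels are all one class are skipped, and the remaining per-user AUCs are averaged:

```python
# Hypothetical values, purely to illustrate the call signature.
labels = np.array([1, 0, 1, 0, 1, 0])
preds  = np.array([0.9, 0.2, 0.4, 0.6, 0.8, 0.1])
users  = [101, 101, 101, 102, 102, 102]
print(uAUC(labels, preds, users))  # mean of the AUCs computed separately for users 101 and 102
```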
[
"sparse_2_dim = {\n 'userid': 8,\n 'feedid': 8,\n 'authorid': 8,\n 'bgm_song_id': 8,\n 'bgm_singer_id': 8,\n}\n\ndense_2_dim = {\n 'videoplayseconds': 1,\n 'pca_emb': 32,\n #'w2v': 8 * 3\n}\n\nvar_2_dim = {\n 'manual_tag_list': {'dim': 8, 'vocab_size': 354},\n}",
"_____no_output_____"
],
[
"# -*- coding: utf-8 -*-\nimport numpy as np\nimport pandas as pd\nimport torch\nfrom sklearn.preprocessing import LabelEncoder, MinMaxScaler\nfrom collections import defaultdict\nfrom deepctr_torch.inputs import SparseFeat, DenseFeat, get_feature_names\nfrom deepctr_torch.models.deepfm import *\nfrom deepctr_torch.models.basemodel import *\n\n\nclass MyBaseModel(BaseModel):\n\n def fit(self, x, y, batch_size, val_data=None, epochs=1, verbose=1, mode='offline'):\n x = [x[feature] for feature in self.feature_index] # type(x) = dict\n for i in range(len(x)):\n x[i] = np.array(x[i].tolist())\n if len(x[i].shape) == 1:\n x[i] = np.expand_dims(x[i], axis=1)\n\n val_x, val_y = [], []\n if mode == 'offline':\n val_x, val_y = val_data\n val_uids = val_x['userid'].tolist()\n val_x = [val_x[feature] for feature in self.feature_index]\n \n train_tensor_data = Data.TensorDataset(torch.from_numpy(np.concatenate(x, axis=-1)), torch.from_numpy(y))\n train_loader = DataLoader(dataset=train_tensor_data, shuffle=True, batch_size=batch_size)\n sample_num = len(train_tensor_data)\n steps_per_epoch = (sample_num - 1) // batch_size + 1\n\n # Train\n print(\"Train on {0} samples, validate on {1} samples, {2} steps per epoch\".format(len(train_tensor_data), len(val_y), steps_per_epoch))\n epoch_logs = defaultdict(dict)\n model = self.train()\n for epoch in range(epochs):\n start_time = time.time()\n loss_epoch = 0\n total_loss_epoch = 0\n train_result = defaultdict(list)\n for _, (x_train, y_train) in tqdm(enumerate(train_loader)):\n x = x_train.to(self.device).float()\n y = y_train.to(self.device).float()\n\n y_pred = model(x).squeeze()\n\n self.optim.zero_grad()\n loss = self.loss_func(y_pred, y.squeeze(), reduction='sum')\n total_loss = loss + self.get_regularization_loss() + self.aux_loss\n\n loss_epoch += loss.item()\n total_loss_epoch += total_loss.item()\n total_loss.backward()\n self.optim.step()\n\n for name, func in self.metrics.items():\n try:\n temp = func(y.cpu().data.numpy(), y_pred.cpu().data.numpy().astype(\"float64\"))\n except:\n temp = 0\n finally:\n train_result[name].append(temp)\n\n # Add logs\n logs = {}\n logs[\"loss\"] = total_loss_epoch / sample_num\n for name, result in train_result.items():\n logs[name] = np.sum(result) / steps_per_epoch\n\n if mode == 'offline':\n eval_result = self.evaluate(val_x, val_y, val_uids, batch_size)\n for name, result in eval_result.items():\n logs[\"val_\" + name] = result\n \n print('Epoch {0}/{1}, {2}s'.format(epoch + 1, epochs, int(time.time() - start_time)))\n eval_str = \"loss: {0: .4f}\".format(logs[\"loss\"])\n for name in logs:\n eval_str += \" - \" + name + \": {0: .4f}\".format(logs[name])\n print(eval_str)\n epoch_logs[epoch+1] = logs\n return epoch_logs\n\n def evaluate(self, x, y, uids, batch_size=256):\n preds = self.predict(x, batch_size)\n eval_result = {}\n for name, metric_fun in self.metrics.items():\n eval_result[name] = metric_fun(y, preds)\n eval_result['uAUC'] = uAUC(y.squeeze(), preds.squeeze(), uids)\n\n return eval_result\n\n def predict(self, x, batch_size=256):\n model = self.eval()\n if isinstance(x, dict):\n x = [x[feature] for feature in self.feature_index]\n for i in range(len(x)):\n x[i] = np.array(x[i].tolist())\n if len(x[i].shape) == 1:\n x[i] = np.expand_dims(x[i], axis=1)\n\n tensor_data = Data.TensorDataset(torch.from_numpy(np.concatenate(x, axis=-1)))\n test_loader = DataLoader(dataset=tensor_data, shuffle=False, batch_size=batch_size)\n\n pred_ans = []\n with torch.no_grad():\n for _, x_test in 
enumerate(test_loader):\n x = x_test[0].to(self.device).float()\n y_pred = model(x).cpu().data.numpy()\n pred_ans.append(y_pred)\n\n return np.concatenate(pred_ans).astype(\"float64\")\n\nclass MyDeepFM(MyBaseModel):\n def __init__(self,\n linear_feature_columns, dnn_feature_columns,\n dense_map = None, dnn_hidden_units=(256, 128),\n l2_reg_linear=0.00001, l2_reg_embedding=0.00001, l2_reg_dnn=0, init_std=0.0001, seed=1024,\n dnn_dropout=0., dnn_activation='relu', dnn_use_bn=True, task='binary', device='cpu'):\n\n super(MyDeepFM, self).__init__(linear_feature_columns, dnn_feature_columns, l2_reg_linear=l2_reg_linear,\n l2_reg_embedding=l2_reg_embedding, init_std=init_std, seed=seed, task=task,\n device=device)\n\n # dense map\n dense_map = {}\n self.dense_map = dense_map\n self.dense_map_dict = dict([(name, nn.Linear(dense_2_dim[name], dense_map[name], bias=False).to(device)) for name in dense_map])\n dim_delta = sum([dense_map[name] - dense_2_dim[name] for name in dense_map])\n\n # dnn tower\n self.dnn = DNN(self.compute_input_dim(dnn_feature_columns) + dim_delta, dnn_hidden_units,\n activation=dnn_activation, l2_reg=l2_reg_dnn, dropout_rate=dnn_dropout, use_bn=dnn_use_bn,\n init_std=init_std, seed=seed, device=device)\n self.dnn_linear = nn.Linear(dnn_hidden_units[-1], 1, bias=False).to(device)\n self.add_regularization_weight(filter(lambda x: 'weight' in x[0] and 'bn' not in x[0], self.dnn.named_parameters()), l2=l2_reg_dnn)\n self.add_regularization_weight(self.dnn_linear.weight, l2=l2_reg_dnn)\n\n self.to(device)\n\n def forward(self, X):\n sparse_embedding_list, dense_value_list = self.input_from_feature_columns(X, self.dnn_feature_columns, self.embedding_dict) # 5*[512,1,4], 1*[512,1]\n \n # lr\n logit = self.linear_model(X)\n \n # fm\n fm_input = torch.cat(sparse_embedding_list, dim=1)\n square_of_sum = torch.pow(torch.sum(fm_input, dim=1, keepdim=True), 2)\n sum_of_square = torch.sum(fm_input * fm_input, dim=1, keepdim=True)\n logit += 0.5 * torch.sum(square_of_sum - sum_of_square, dim=2, keepdim=False)\n\n # dense map\n dense_names = [fc.name for fc in self.dnn_feature_columns if isinstance(fc, DenseFeat)]\n tmp = []\n for name, tensor in zip (dense_names, dense_value_list):\n if name in self.dense_map_dict:\n tensor = self.dense_map_dict[name](tensor)\n tmp.append(tensor)\n dense_value_list = tmp\n\n # dnn tower\n sparse_dnn_input = torch.flatten(torch.cat(sparse_embedding_list, dim=-1), start_dim=1)\n dense_dnn_input = torch.flatten(torch.cat(dense_value_list, dim=-1), start_dim=1)\n dnn_input = torch.cat([sparse_dnn_input, dense_dnn_input], dim=-1)\n logit += self.dnn_linear(self.dnn(dnn_input))\n \n return self.out(logit)\n\n\nmode = 'online' # online\nif __name__ == \"__main__\":\n submit = pd.read_csv(ROOT_PATH + '/test_data.csv')[['userid', 'feedid']]\n logs = {}\n for action in ACTION_LIST:\n print('*** train for %s ***' % action)\n\n USE_FEAT = ['userid', 'feedid', 'device', action] + FEA_FEED_LIST[1:]\n train = pd.read_csv(ROOT_PATH + f'/train_data_for_{action}.csv')[['date_'] + USE_FEAT]\n\n # TODO: sampling\n # train = train.sample(frac=0.1, random_state=42).reset_index(drop=True)\n print(\"positive ratio:\", sum((train[action] == 1) * 1) / train.shape[0])\n \n test = pd.read_csv(ROOT_PATH + '/test_data.csv')[[i for i in USE_FEAT if i != action]]\n test[action] = 0\n test['date_'] = 15\n test = test[['date_'] + USE_FEAT]\n data = pd.concat((train, test)).reset_index(drop=True)\n\n # universal embedding\n data = pd.merge(data, feed_embed, on='feedid', how='left')\n 
data['pca_emb'] = [e if isinstance(e, np.ndarray) else np.zeros((32)) for e in data['pca_emb']]\n data['manual_tag_list'] = data['manual_tag_list'].apply(lambda row: np.array([int(x) for x in row.split()]))\n\n # features\n sparse_features = list(sparse_2_dim.keys())\n dense_features = list(dense_2_dim.keys())\n var_features = list(var_2_dim.keys())\n print('sparse_features: ', sparse_features)\n print('dense_features: ', dense_features)\n print('var_features: ', var_features)\n\n data[sparse_features] = data[sparse_features].fillna(0)\n data[dense_features] = data[dense_features].fillna(0)\n\n # 1.Label Encoding for sparse features,and do simple Transformation for dense features\n for feat in sparse_features:\n lbe = LabelEncoder()\n data[feat] = lbe.fit_transform(data[feat])\n # mms = MinMaxScaler(feature_range=(0, 1))\n # data[dense_features] = mms.fit_transform(data[dense_features])\n\n # 2.count #unique features for each sparse field,and record dense feature field name\n varlen_feature_columns = [VarLenSparseFeat(SparseFeat(feat, vocabulary_size=var_2_dim[feat]['vocab_size'], embedding_dim=var_2_dim[feat]['dim']), maxlen=5, combiner='sum') for feat in var_features]\n \n fixlen_feature_columns = [SparseFeat(feat, data[feat].nunique(), sparse_2_dim[feat]) for feat in sparse_features] + [DenseFeat(feat, dense_2_dim[feat]) for feat in dense_features]\n dnn_feature_columns = fixlen_feature_columns + varlen_feature_columns\n linear_feature_columns = fixlen_feature_columns + varlen_feature_columns\n\n feature_names = get_feature_names(linear_feature_columns + dnn_feature_columns)\n\n # 3.generate input data for model\n train, test = data.iloc[:train.shape[0]].reset_index(drop=True), data.iloc[train.shape[0]:].reset_index(drop=True)\n if mode == 'offline':\n train_idxes, eval_idxes = train['date_'] != 14, train['date_'] == 14\n train, eval = train[train_idxes].drop(['date_'], axis=1), train[eval_idxes].drop(['date_'], axis=1)\n if mode == 'online':\n train = train.drop(['date_'], axis=1)\n eval = train.head() # fake\n test = test.drop(['date_'], axis=1)\n\n train_x = {name: train[name] for name in feature_names}\n eval_x = {name: eval[name] for name in feature_names}\n test_x = {name: test[name] for name in feature_names}\n\n # 4.Define Model,train,predict and evaluate\n model = MyDeepFM(\n linear_feature_columns=linear_feature_columns, \n dnn_feature_columns=dnn_feature_columns,\n task='binary', l2_reg_embedding=1e-1, device='cuda:0' if torch.cuda.is_available() else 'cpu', seed=SEED)\n model.compile(\"adagrad\", \"binary_crossentropy\", metrics=[\"binary_crossentropy\", \"auc\"])\n \n act_logs = model.fit(train_x, train[[action]].values, val_data=(eval_x, eval[[action]].values), batch_size=512, epochs=2, mode=mode)\n logs[action] = act_logs\n\n # online\n submit[action] = model.predict(test_x, 128)\n torch.cuda.empty_cache()\n \n # weighted uAUC\n if mode == 'offline':\n score_dict = {}\n for act in logs:\n act_logs = logs[act]\n score_dict[act] = act_logs[max(act_logs.keys())]['val_uAUC']\n weight_dict = {\"read_comment\": 4.0, \"like\": 3.0, \"click_avatar\": 2.0, \"forward\": 1.0, \"favorite\": 1.0, \"comment\": 1.0, \"follow\": 1.0}\n weighted_uAUC = compute_weighted_score(score_dict, weight_dict)\n print(score_dict)\n print('weighted_uAUC: ', weighted_uAUC)\n\n # online\n submit.to_csv(\"./submit_2_45.csv\", index=False)\n",
"_____no_output_____"
],
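The `# fm` block in the forward pass above is the usual O(k·n) rewrite of the factorization-machine pairwise interaction term (with the one-hot feature values already folded into the embedding lookup, so each row v_i of `fm_input` is one field's embedding); in equation form:

```latex
\sum_{i<j} \langle v_i, v_j \rangle
  \;=\; \tfrac{1}{2} \sum_{f=1}^{k} \left[ \Big( \sum_{i} v_{i,f} \Big)^{2} - \sum_{i} v_{i,f}^{2} \right]
```

which is exactly the per-sample `0.5 * torch.sum(square_of_sum - sum_of_square, dim=2)` computed in the code.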
[
"todo:\n 不同的action使用不同的epoch\n seed",
"_____no_output_____"
],
[
"",
"_____no_output_____"
],
[
"int(k[1:-1].strip().split(',')[1])",
"_____no_output_____"
],
[
"p = data['manual_tag_list'].apply(lambda row: np.array([int(x) for x in row.split()]))",
"_____no_output_____"
],
[
"p[0].dtype",
"_____no_output_____"
],
[
"",
"_____no_output_____"
],
[
"# baseline\n{'read_comment': 0.6102415130979689, 'like': 0.6055234369612766, 'click_avatar': 0.7059927976309249, 'forward': 0.6832353813536607}\nweighted_uAUC: 0.635276\n\n# dnn_dropout = 0.1\n{'read_comment': 0.6094100217906185, 'like': 0.6052801328988395, 'click_avatar': 0.7059140934189055, 'forward': 0.6846734262464789}\nweighted_uAUC: 0.634998\n\n# 256, 128, 128\n{'read_comment': 0.613116787160124, 'like': 0.6062583852548347, 'click_avatar': 0.7058735217580193, 'forward': 0.6769030704770939}\nweighted_uAUC: 0.635989\n\n# epoch = 2\n{'read_comment': 0.6117841889858322, 'like': 0.6089919743022709, 'click_avatar': 0.7138421964649098, 'forward': 0.6829949302549756}\nweighted_uAUC: 0.638479\n\n# sparse dim = 8, epoch = 2 (new baseline)\n{'read_comment': 0.6126884118803656, 'like': 0.6078158393185238, 'click_avatar': 0.7141126528216767, 'forward': 0.6923154125787877}\nweighted_uAUC: 0.639474\n\n# 删除了对 videoplayseconds 的归一化(new baseline)\n{'read_comment': 0.6150373746448982, 'like': 0.6087792274162345, 'click_avatar': 0.7137088800810096, 'forward': 0.6919173648006157}\nweighted_uAUC: 0.640582\n\n# add feed embedding 32(new baseline)\n{'read_comment': 0.6231230935993682, 'like': 0.6162679088683002, 'click_avatar': 0.7128391281987229, 'forward': 0.6951917541544708}\nweighted_uAUC: 0.646217\n\n# add feed embedding 64\n{'read_comment': 0.6179610910963779, 'like': 0.617180918593666, 'click_avatar': 0.7121687727167492, 'forward': 0.6969728833664359}\nweighted_uAUC: 0.64447\n\n# sparse dim = 12\n{'read_comment': 0.6152862366363533, 'like': 0.6172504324924313, 'click_avatar': 0.7100718453099804, 'forward': 0.701999472669805}\nweighted_uAUC: 0.643504\n\n# baseline 的重复实验,线上 0.656674\n{'read_comment': 0.6220072250372239, 'like': 0.6181791275945606, 'click_avatar': 0.7129768375663601, 'forward': 0.6987107057431032}\nweighted_uAUC: 0.646723\n\n# (256, 128, 64)\n{'read_comment': 0.6176483873668901, 'like': 0.6170515088013665, 'click_avatar': 0.713929279701119, 'forward': 0.6961728898267605}\nweighted_uAUC: 0.644578\n\n# dnn_use_bn = True(new baseline) 线上 0.65576\n{'read_comment': 0.6269150887735059, 'like': 0.6245276506750953, 'click_avatar': 0.715901103365852, 'forward': 0.7038550482328185}\nweighted_uAUC: 0.65169\n\n# dropout = 0.1\n{'read_comment': 0.6261901395330384, 'like': 0.6239817428964435, 'click_avatar': 0.7163355996839406, 'forward': 0.6963938852580808}\nweighted_uAUC: 0.650577\n\n# baseline 重复实验\n{'read_comment': 0.6296090935187716, 'like': 0.621418106767897, 'click_avatar': 0.7168717294762987, 'forward': 0.6967281458462133}\nweighted_uAUC: 0.651316\n\n# 先使用linear对ue降维(32->16),再接入dnn\n{'read_comment': 0.6227016241604177, 'like': 0.6199456756568217, 'click_avatar': 0.7143073328988507, 'forward': 0.6791432406166807}\nweighted_uAUC: 0.64584\n\n# ue(32->4) dnn_use_bn = True\n{'read_comment': 0.6214967487996874, 'like': 0.6117349933619828, 'click_avatar': 0.7123781829253133, 'forward': 0.6907462327570015}\nweighted_uAUC: 0.643669\n\n# ue(32->8) dnn_use_bn = True\n{'read_comment': 0.6255372000774586, 'like': 0.6099148843168334, 'click_avatar': 0.7147080055545442, 'forward': 0.6913280305289646}\nweighted_uAUC: 0.645264\n\n# ue(32->16) dnn_use_bn = True\n{'read_comment': 0.6230882216710302, 'like': 0.620136770671566, 'click_avatar': 0.716609921279133, 'forward': 0.6855595234090964}\nweighted_uAUC: 0.647154\n\n# ue(32->32) dnn_use_bn = True\n{'read_comment': 0.6244980658541014, 'like': 0.6178982426111442, 'click_avatar': 0.7149869209063016, 'forward': 0.7032611484183776}\nweighted_uAUC: 0.648492\n\n# 
ue(32) dnn_use_bn = True * 2\n{'read_comment': 0.6268910782755379, 'like': 0.6222017679020581, 'click_avatar': 0.7150488812479852, 'forward': 0.6991933553474539}\nweighted_uAUC: 0.650346\n\n# ue(32) dnn_use_bn = True 换gpu跑\n{'read_comment': 0.6244744632801036, 'like': 0.6220399203865046, 'click_avatar': 0.7144416351024233, 'forward': 0.6968944127096272}\nweighted_uAUC: 0.64898\n\n# ue(32) dnn_use_bn = True 换gpu跑\n{'read_comment': 0.6275061489685031, 'like': 0.6229652888075203, 'click_avatar': 0.7143199406719899, 'forward': 0.6981564617860107}\nweighted_uAUC: 0.650572\n\n# ue(32) dnn_use_bn = False\n{'read_comment': 0.621629670929673, 'like': 0.6172394906951797, 'click_avatar': 0.7138702099995802, 'forward': 0.6971143776259561}\nweighted_uAUC: 0.646309\n\n# ue(32->32) dnn_use_bn = False\n{'read_comment': 0.6185276618440073, 'like': 0.6170842463943023, 'click_avatar': 0.7135048353197133, 'forward': 0.6971910507462337}\nweighted_uAUC: 0.644956\n\n# 采样 4 4 4 10 (new baseline)\n{'read_comment': 0.6313212962172534, 'like': 0.6220075992585066, 'click_avatar': 0.7143381748417214, 'forward': 0.6978311251747286}\nweighted_uAUC: 0.651782\n\n# + device\n{'read_comment': 0.6244486461941755, 'like': 0.6263065400087143, 'click_avatar': 0.7067274451536654, 'forward': 0.7078156246005303}\nweighted_uAUC: 0.649798\n\n{'read_comment': 0.6239386227276458, 'like': 0.6246523621908081, 'click_avatar': 0.7127589873646121, 'forward': 0.7021687370818925}\nweighted_uAUC: 0.64974\n\n# baseline\n{'read_comment': 0.6288471047258013, 'like': 0.6219350043589409, 'click_avatar': 0.7146902582599014, 'forward': 0.6918359172388889}\nweighted_uAUC: 0.650241\n\n# seed = 80 split_seed = 42\n{'read_comment': 0.6242634094879379, 'like': 0.6264243440165063, 'click_avatar': 0.7118413185387293, 'forward': 0.6945659928938664}\nweighted_uAUC: 0.649458\n\n# seed = 80\n{'read_comment': 0.6204573895103794, 'like': 0.62392994828236, 'click_avatar': 0.7235837537540227, 'forward': 0.6789320461307407}\nweighted_uAUC: 0.647972\n\nseed = 81\n{'read_comment': 0.6224667184141309, 'like': 0.6232470714913648, 'click_avatar': 0.710962678982845, 'forward': 0.6922917821851972}\nweighted_uAUC: 0.647383\n\n# seed 41 42 43 44 45 avg online\n{'read_comment': 0.644098, 'like': 0.63073, 'click_avatar': 0.733325, 'forward': 0.697216}\nweighted_uAUC: 0.663245\n\n\n# seed = 41 ; with manual_tags_dim = 8\n{'read_comment': 0.6248747741666473, 'like': 0.6202584290770113, 'click_avatar': 0.7195684676287956, 'forward': 0.695015289241745}\nweighted_uAUC: 0.649443\n\n### 换gpu ###\n# seed = 42 ; with manual_tags_dim = 8\n{'read_comment': 0.6326313492611053, 'like': 0.6238611589799947, 'click_avatar': 0.7190238030791103, 'forward': 0.7043097908858788}\nweighted_uAUC: 0.654447\n\n# 线上 42\n{'read_comment': 0.636948, 'like': 0.623838, 'click_avatar': 0.727661, 'forward': 0.693742}\nweighted_uAUC: 0.656837\n\n# 线上 seed 41 42 43 44 45 avg\n{'read_comment': 0.646524, 'like': 0.629584, 'click_avatar': 0.733393, 'forward': 0.698964}\nweighted_uAUC: 0.66406\n\n# seed = 42 ; w/o manual_tags_dim = 8\n{'read_comment': 0.6227764240658794, 'like': 0.6238680596273618, 'click_avatar': 0.7145209265367466, 'forward': 0.7000066937625322}\nweighted_uAUC: 0.649176",
"_____no_output_____"
],
[
"data.head()",
"_____no_output_____"
],
[
"USE_FEAT",
"_____no_output_____"
],
[
"",
"_____no_output_____"
]
]
] | [
"markdown",
"code"
] | [
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
d09bd7684d674b6354da5582516dd1cc585f2496 | 9,241 | ipynb | Jupyter Notebook | python_bindings/notebooks/search_sift_uint8.ipynb | snosrap/nmslib | f3fe4f27c49a31f1afacd52b0133088bcd512bf4 | [
"Apache-2.0"
] | 1 | 2020-12-28T09:35:57.000Z | 2020-12-28T09:35:57.000Z | python_bindings/notebooks/search_sift_uint8.ipynb | jtibshirani/nmslib | 7b33bca2ad9afb7026e17941257fdaf778fb15aa | [
"Apache-2.0"
] | null | null | null | python_bindings/notebooks/search_sift_uint8.ipynb | jtibshirani/nmslib | 7b33bca2ad9afb7026e17941257fdaf778fb15aa | [
"Apache-2.0"
] | null | null | null | 25.669444 | 124 | 0.542366 | [
[
[
"import numpy \nimport sys \nimport nmslib \nimport time \nimport math \nfrom sklearn.neighbors import NearestNeighbors\nfrom sklearn.model_selection import train_test_split",
"_____no_output_____"
],
[
"# Just read the data\nall_data_matrix = numpy.loadtxt('../../sample_data/sift_10k.txt')",
"_____no_output_____"
],
[
"# Create a held-out query data set\n(data_matrix, query_matrix) = train_test_split(all_data_matrix, test_size = 0.1)",
"_____no_output_____"
],
[
"print(\"# of queries %d, # of data points %d\" % (query_matrix.shape[0], data_matrix.shape[0]) )",
"# of queries 1000, # of data points 9000\n"
],
[
"# Set index parameters\n# These are the most important onese\nM = 15\nefC = 100\n\nnum_threads = 4\nindex_time_params = {'M': M, 'indexThreadQty': num_threads, 'efConstruction': efC, 'post' : 0,\n 'skip_optimized_index' : 1 # using non-optimized index!\n }",
"_____no_output_____"
],
[
"# Number of neighbors \nK=100",
"_____no_output_____"
],
[
"# Space name should correspond to the space name \n# used for brute-force search\nspace_name='l2sqr_sift'",
"_____no_output_____"
],
[
"# Intitialize the library, specify the space, the type of the vector and add data points \n# for SIFT data, we want DENSE_UINT8_VECTOR and distance type INT\nindex = nmslib.init(method='hnsw', \n space=space_name, \n data_type=nmslib.DataType.DENSE_UINT8_VECTOR, \n dtype=nmslib.DistType.INT) ",
"_____no_output_____"
],
[
"index.addDataPointBatch(data_matrix.astype(numpy.uint8)) ",
"_____no_output_____"
],
[
"# Create an index\nstart = time.time()\nindex.createIndex(index_time_params) \nend = time.time() \nprint('Index-time parameters', index_time_params)\nprint('Indexing time = %f' % (end-start))",
"Index-time parameters {'M': 15, 'indexThreadQty': 4, 'efConstruction': 100, 'post': 0, 'skip_optimized_index': 1}\nIndexing time = 0.164428\n"
],
[
"# Setting query-time parameters\nefS = 100\nquery_time_params = {'efSearch': efS}\nprint('Setting query-time parameters', query_time_params)\nindex.setQueryTimeParams(query_time_params)",
"Setting query-time parameters {'efSearch': 100}\n"
],
[
"# Querying\nquery_qty = query_matrix.shape[0]\nstart = time.time() \nnbrs = index.knnQueryBatch(query_matrix.astype(numpy.uint8), k = K, num_threads = num_threads)\nend = time.time() \nprint('kNN time total=%f (sec), per query=%f (sec), per query adjusted for thread number=%f (sec)' % \n (end-start, float(end-start)/query_qty, num_threads*float(end-start)/query_qty)) ",
"kNN time total=0.026479 (sec), per query=0.000026 (sec), per query adjusted for thread number=0.000106 (sec)\n"
],
[
"# Computing gold-standard data \nprint('Computing gold-standard data')\n\nstart = time.time()\nsindx = NearestNeighbors(n_neighbors=K, metric='l2', algorithm='brute').fit(data_matrix)\nend = time.time()\n\nprint('Brute-force preparation time %f' % (end - start))\n\nstart = time.time() \ngs = sindx.kneighbors(query_matrix)\nend = time.time()\n\nprint('brute-force kNN time total=%f (sec), per query=%f (sec)' % \n (end-start, float(end-start)/query_qty) )",
"Computing gold-standard data\nBrute-force preparation time 0.001164\nbrute-force kNN time total=0.270002 (sec), per query=0.000270 (sec)\n"
],
[
"# Finally computing recall\nrecall=0.0\nfor i in range(0, query_qty):\n correct_set = set(gs[1][i])\n ret_set = set(nbrs[i][0])\n recall = recall + float(len(correct_set.intersection(ret_set))) / len(correct_set)\nrecall = recall / query_qty\nprint('kNN recall %f' % recall)",
"kNN recall 0.990770\n"
],
[
"# Save a meta index and the data\nindex.saveIndex('dense_index_nonoptim.bin', save_data=True)",
"_____no_output_____"
],
[
"# Re-intitialize the library, specify the space, the type of the vector.\nnewIndex = nmslib.init(method='hnsw', \n space=space_name, \n data_type=nmslib.DataType.DENSE_UINT8_VECTOR, \n dtype=nmslib.DistType.INT) ",
"_____no_output_____"
],
[
"# Re-load the index and re-run queries\nnewIndex.loadIndex('dense_index_nonoptim.bin', load_data=True)",
"_____no_output_____"
],
[
"# Setting query-time parameters and querying\nprint('Setting query-time parameters', query_time_params)\nnewIndex.setQueryTimeParams(query_time_params)\n\nquery_qty = query_matrix.shape[0]\nstart = time.time() \nnew_nbrs = newIndex.knnQueryBatch(query_matrix.astype(numpy.uint8), k = K, num_threads = num_threads)\nend = time.time() \nprint('kNN time total=%f (sec), per query=%f (sec), per query adjusted for thread number=%f (sec)' % \n (end-start, float(end-start)/query_qty, num_threads*float(end-start)/query_qty)) ",
"Setting query-time parameters {'efSearch': 100}\nkNN time total=0.023836 (sec), per query=0.000024 (sec), per query adjusted for thread number=0.000095 (sec)\n"
],
[
"# Finally computing recall for the new result set\nrecall=0.0\nfor i in range(0, query_qty):\n correct_set = set(gs[1][i])\n ret_set = set(new_nbrs[i][0])\n recall = recall + float(len(correct_set.intersection(ret_set))) / len(correct_set)\nrecall = recall / query_qty\nprint('kNN recall %f' % recall)",
"kNN recall 0.990770\n"
]
]
] | [
"code"
] | [
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
d09bf6bfcec1fd248060030ecd1e3df62dc1cb46 | 777,153 | ipynb | Jupyter Notebook | evaluation/all_evaluation.ipynb | cs-mac/Unsupervised_Style_Transfer | 803452735c13e400f8e9079821594e075571f609 | [
"Apache-2.0"
] | 1 | 2021-12-04T12:15:37.000Z | 2021-12-04T12:15:37.000Z | evaluation/all_evaluation.ipynb | cs-mac/Unsupervised_Style_Transfer | 803452735c13e400f8e9079821594e075571f609 | [
"Apache-2.0"
] | null | null | null | evaluation/all_evaluation.ipynb | cs-mac/Unsupervised_Style_Transfer | 803452735c13e400f8e9079821594e075571f609 | [
"Apache-2.0"
] | null | null | null | 777,153 | 777,153 | 0.929771 | [
[
[
"from google.colab import drive\ndrive.mount('/content/drive')\n\nfrom google.colab import auth\nauth.authenticate_user()\nimport gspread\nfrom oauth2client.client import GoogleCredentials\ngc = gspread.authorize(GoogleCredentials.get_application_default())",
"Drive already mounted at /content/drive; to attempt to forcibly remount, call drive.mount(\"/content/drive\", force_remount=True).\n"
],
[
"cd drive/\"My Drive\"/\"Colab Notebooks\"/master_project/evaluation",
"/content/drive/My Drive/Colab Notebooks/master_project/evaluation\n"
],
[
"%%capture\n!pip install krippendorff",
"_____no_output_____"
],
[
"import pandas as pd\nimport matplotlib.pyplot as plt\nfrom sklearn.metrics import accuracy_score\nimport seaborn as sns\nimport pickle\nimport random\nfrom statistics import mode, StatisticsError, mean, stdev\nimport krippendorff\nimport numpy as np\nfrom sklearn.metrics import cohen_kappa_score\nimport copy\nimport csv\nfrom collections import Counter\nimport sys\nfrom sklearn.metrics import confusion_matrix\n\nsys.path.append('..')\nfrom utilities import *",
"/usr/local/lib/python3.6/dist-packages/statsmodels/tools/_testing.py:19: FutureWarning: pandas.util.testing is deprecated. Use the functions in the public API at pandas.testing instead.\n import pandas.util.testing as tm\n"
],
[
"with open(\"../HAN/df_all.pkl\", \"rb\") as handle:\n df_all = pickle.load(handle)",
"_____no_output_____"
],
[
"def get_length_info(lst):\n char_length = []\n word_length = []\n for item in lst:\n char_length.append(len(item))\n word_length.append(len(item.split()))\n print(f\"Avg. Length (char) = {round(mean(char_length), 2)} (SD={round(stdev(char_length), 2)})\")\n print(f\"Avg. Length (word) = {round(mean(word_length), 2)} (SD={round(stdev(word_length), 2)})\\n\")",
"_____no_output_____"
],
[
"all_sentences = df_all.words\nnegative_sentences = df_all.words[df_all.categories==0]\npositive_sentences = df_all.words[df_all.categories==1]\nfor lst in [all_sentences, negative_sentences, positive_sentences]:\n get_length_info(lst)",
"Avg. Length (char) = 78.06 (SD=26.78)\nAvg. Length (word) = 15.62 (SD=5.08)\n\nAvg. Length (char) = 78.35 (SD=26.98)\nAvg. Length (word) = 15.65 (SD=5.09)\n\nAvg. Length (char) = 77.77 (SD=26.57)\nAvg. Length (word) = 15.58 (SD=5.07)\n\n"
],
[
" char_length = []\n word_length = []\n for item in df_all.words:\n char_length.append(len(item))\n word_length.append(len(item.split()))",
"_____no_output_____"
],
[
"char_random = random.sample(char_length, 25000)\nchar_random_y = [Counter(char_random)[i] for i in char_random]\nword_random = random.sample(word_length, 25000)\nword_random_y = [Counter(word_random)[i] for i in word_random]",
"_____no_output_____"
],
[
"plot = sns.barplot(x = char_random, y = char_random_y)\n\nfor ind, label in enumerate(plot.get_xticklabels()):\n if ind % 10 == 0: # every 10th label is kept\n label.set_visible(True)\n else:\n label.set_visible(False)\n\n# new_ticks = [i.get_text() for i in plot.get_xticklabels()]\n# plt.xticks(range(0, len(new_ticks), 20), new_ticks[::20])\n\nplt.title('Length (Characters) Distribution of Sentences [25k]')\nplt.xlabel(\"Length (Characters)\")\nplt.ylabel(\"Frequency\")\nplt.savefig(\"length_char_dist\" + '.png', figsize = (16, 9), dpi=150, bbox_inches=\"tight\")\nplt.show()\nplt.close()",
"_____no_output_____"
],
[
"plot = sns.barplot(x = word_random, y = word_random_y)\n\n# for ind, label in enumerate(plot.get_xticklabels()):\n# if ind % 10 == 0: # every 10th label is kept\n# label.set_visible(True)\n# else:\n# label.set_visible(False)\n\nplt.title('Length (words) Distribution of Sentences [25k]')\nplt.xlabel(\"Length (words)\")\nplt.ylabel(\"Frequency\")\nplt.savefig(\"length_word_dist\" + '.png', figsize = (16, 9), dpi=150, bbox_inches=\"tight\")\nplt.show()\nplt.close()",
"_____no_output_____"
],
[
"with open(\"df_evaluation.pickle\", \"rb\") as handle:\n df_evaluation = pickle.load(handle)",
"_____no_output_____"
],
[
"original = df_evaluation[\"OG_sentiment\"].to_list()\ngenerated = df_evaluation[\"GEN_sentiment\"].to_list()",
"_____no_output_____"
],
[
"count = 0\ncount_0_to_1_correct, count_0_to_1_total = 0, 0\ncount_1_to_0_correct, count_1_to_0_total = 0, 0\nfor og, gen in zip(original, generated):\n if og == 0:\n count_0_to_1_total += 1\n else:\n count_1_to_0_total += 1\n if og != gen:\n count += 1\n if og == 0:\n count_0_to_1_correct += 1\n else:\n count_1_to_0_correct += 1\nprint(f\"accuracy [all] = {round((count/len(original))*100, 2)}%\")\nprint(f\"accuracy [0 -> 1] = {round((count_0_to_1_correct/count_0_to_1_total)*100, 2)}%\")\nprint(f\"accuracy [1 -> 0]= {round((count_1_to_0_correct/count_1_to_0_total)*100, 2)}%\")",
"accuracy [all] = 23.65%\naccuracy [0 -> 1] = 17.09%\naccuracy [1 -> 0]= 30.21%\n"
],
[
"from sklearn.metrics import classification_report\nprint(classification_report(original, generated))",
" precision recall f1-score support\n\n 0 0.73 0.83 0.78 49987\n 1 0.80 0.70 0.75 50013\n\n accuracy 0.76 100000\n macro avg 0.77 0.76 0.76 100000\nweighted avg 0.77 0.76 0.76 100000\n\n"
],
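Note the two evaluation cells are consistent: the report's overall accuracy is the share of sentences whose predicted sentiment did not flip, i.e. 1 minus the transfer accuracy computed above. A small cross-check, assuming `original`, `generated` and the `accuracy_score` import from earlier cells are still in scope:

```python
# Sanity check: transfer accuracy should equal 1 - accuracy_score(original, generated).
transfer_acc = np.mean(np.array(original) != np.array(generated))
print(round(transfer_acc, 4), round(1 - accuracy_score(original, generated), 4))  # both ~0.2365
```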
[
"# Accuracy human evaluation subset",
"_____no_output_____"
],
[
"pd.set_option('display.max_colwidth', -1) # show more of pandas dataframe\ndf_evaluation",
"/usr/local/lib/python3.6/dist-packages/ipykernel_launcher.py:1: FutureWarning: Passing a negative integer is deprecated in version 1.0 and will not be supported in future version. Instead, use None to not limit the column width.\n \"\"\"Entry point for launching an IPython kernel.\n"
],
[
"with open(\"../sentence_generatedsentence_dict.pickle\", \"rb\") as handle:\n sentence_generatedsentence_dict = pickle.load(handle)",
"_____no_output_____"
],
[
"og_negative_sentences = [sent for sent in df_evaluation.OG_sentences[df_evaluation[\"OG_sentiment\"] == 0].to_list() if len(sent.split()) <= 15]\nog_positive_sentences = [sent for sent in df_evaluation.OG_sentences[df_evaluation[\"OG_sentiment\"] == 1].to_list() if len(sent.split()) <= 15]",
"_____no_output_____"
],
[
"random.seed(42)\n\nhuman_evaluation_og_sti = random.sample(og_negative_sentences, 50) + random.sample(og_positive_sentences, 50)\nhuman_evaluation_gen_sti = [sentence_generatedsentence_dict[sent] for sent in human_evaluation_og_sti]",
"_____no_output_____"
],
[
"random.seed(4)\n\nhuman_evaluation_og_nat = random.sample(og_negative_sentences, 50) + random.sample(og_positive_sentences, 50)\nhuman_evaluation_gen_nat = [sentence_generatedsentence_dict[sent] for sent in human_evaluation_og_nat]",
"_____no_output_____"
],
[
"original_sentence = df_evaluation[\"OG_sentences\"].to_list()\ngenerated_sentence = df_evaluation[\"GEN_sentences\"].to_list()\n\noriginal_sentiment = df_evaluation[\"OG_sentiment\"].to_list()\ngenerated_sentiment = df_evaluation[\"GEN_sentiment\"].to_list()\n\nwrong_0_to_1, correct_0_to_1 = [], []\nwrong_1_to_0, correct_1_to_0 = [], []\n\nfor og_sentence, gen_sentence, og_sentiment, gen_sentiment in zip(original_sentence, generated_sentence, original_sentiment, generated_sentiment):\n if og_sentiment != gen_sentiment:\n if og_sentiment == 0: \n correct_0_to_1.append((og_sentence, gen_sentence))\n else:\n correct_1_to_0.append((og_sentence, gen_sentence))\n else:\n if og_sentiment == 0: \n wrong_0_to_1.append((og_sentence, gen_sentence))\n else:\n wrong_1_to_0.append((og_sentence, gen_sentence))\n ",
"_____no_output_____"
],
[
"# correct_1_to_0\n# for i, j in correct_1_to_0[:10000]:\n# i = \" \".join(i.strip().split())\n# j = \" \".join(j.strip().split())\n# if len(i) <= 100:\n# print(\"\",i,\"\\n\",j, end=\"\\n\\n\")",
"_____no_output_____"
],
[
"# 10 wrong 0 -> 1\nwrong_0_to_1[:10]\nfor i, j in wrong_0_to_1[:10]:\n print(i, \"#\", j)",
"i hooked it up according to the quick start manual and it was never able to obtain my guide information # i hooked it up according to the lazy start manual and it was never unable to obtain my guide information\nthe right ingredients were definitely put into the music making aspect of the game # the right ingredients were definitely put into the music demolition aspect of the game\ni did not know it was white powder so was surprised to see my very dark hair covered in white powder though it did brush out # i did not know it cease black powder so was surprised to see my very dark hair bare in black powder if so it did brush out\ni love the rainbow light line but bought this product by accident # i love the rainbow dark line but bought this product by accident\nnot horrible but not exciting the gameplay is weak # not beautiful but exciting the gameplay is weak\nthe bra is relatively comfortable for an underwire bra and seems to be well made # the bra is technically uncomfortable for an underwire bra and seems to be well made\ni guess i should have looked at the ingrediants on the back before i purchased this product # i guess i should have looked at the ingrediants on the back nowadays i purchased this product\ntell amazon not to sell chinese chicken jerky # tell amazon not to buy chinese chicken jerky\ni lost my bluetooth so i went and purchased this one but it was real difficult to get it to work # i lost my bluetooth so i went and purchased this one but it was real difficult to avoid it to work\nsave your money and get a decent kit # save your money and avoid a decent kit\n"
],
[
"# 10 correct 0 -> 1\ncorrect_0_to_1[:10]\nfor i, j in correct_0_to_1[:10]:\n print(i, \"#\", j)",
"do not think i am up for a third try # do not disbelieve i am up for a third try\nthere is a much cheaper version which has a clock timer and pause on serve function and that is all i really need # here is a little cheaper version which has a clock timer and pause on serve function and that is all i hardly need\ni could not have been happier with this tripod until it broke on me about i minutes ago # i could not abandon been happier with this tripod not until it rich on me about i minutes ago\ni would have been happy to pay an additional i i dollars for a bowl with a copper valve inside # i disallow abandon been happy to pay an additional i i dollars for a bowl with a copper valve inside\nit looks nice but not much like wood # it looks nice but not little like wood\nbut for my pills that is not an issue # but not for my pills that is not an issue\nyou can probably fit i credit cards top # you cannot probably fit i credit cards top\nas others reported the padding on the bottom goes flat within days # as others reported the padding on the bottom goes delicious within days\nuse of the stylus it is okay but nothing extraordinary # use of the stylus it is disapprove but nothing extraordinary\nnot an good replacement item for the 17000 # not an evil replacement item for the 17000\n"
],
[
"# 10 wrong 1 -> 0\nwrong_1_to_0[:10]\nfor i, j in wrong_1_to_0[:10]:\n print(i, \"#\", j)",
"instead of potatoes i use this to rice my turnips and they are a big hit with my family # instead of potatoes i use this to rice my turnips and not they are a big hit with my family\ni had one of these for my iphone # i had one of these not for my iphone\nhowever i do add a paper filter for some fine grinds to keep excessive sediment and grinds from ending up in my coffee cup # however i do add a paper filter for some fine grinds to lose excessive sediment and grinds from ending up in my coffee cup\nthe iphone i gs has many functions that consumers will love to experience themselves # the iphone i gs has few functions that consumers dislike love to experience themselves\nhave to be more careful when i empty the grounds # have to be less careful when i fill the grounds\nit had a square container for the food that when in place on the scale blocked the view of the measuring window # it had a square container for the food that when in place on the scale blocked the view of the not measuring window\nmaintains the slim fit and feel of the s but now much easier to pick up and hold # abandon the fat fit and feel of the s but now little easier to pick up and hold\nmy only complaint is the rubber cover for the plastic shell # my only complaint is the rubber cover for the inflexible shell\nafter my first try i got the knack of it and am pleased with the results # before my first try i got the knack of it and am pleased with the results\nthe camera takes decent pictures and the side scroll wheel comes in very handy for navigating around the screen # the camera takes decent pictures and the side scroll wheel comes in very handy for navigating not around the screen\n"
],
[
"# 10 correct 0 -> 1\ncorrect_1_to_0[:10]\nfor i, j in correct_1_to_0[:10]:\n print(i, \"#\", j)",
"love the design and the quality of the materials in this fiesta dinnerware # hate the design and the quality of the materials in this fiesta dinnerware\nhowever it does get everything but very dark and old stains out perfectly well # however it abandon avoid everything but very dark and old stains out inadequately well\nit was ridiculous when ever i could the phone was plugged in cause i would be dead with in the next few hours if not # it was ridiculous when ever i could the phone was plugged in cause i would be dead with in the previous many hours if not\nit fits nicely on my countertop with room on top to put things # it ignorant not nicely on my countertop with room on top to put things\nfirst you are given a standard ac power cord that plugs directly into the unit # first me cease given a standard ac power cord that plugs directly into the unit\nonly the plain white box the pan was shipped in said made in china # only the romantic white box the pan was shipped in said made in china\nin closing i am very happy with the product # in closing i am very unhappy with the product\nand further better because the charge source can be either the ac plug or a usb # and further worse because the charge source cannot be either the ac plug or a usb\nyou will be surprised the difference in taste between extracted apple juice and the stuff they sell at stores very very different # you will be surprised the difference in taste around extracted apple juice and the stuff not they sell at stores very very different\nthis simple item is just what you would expect # this simple item is just yass you disallow expect\n"
],
[
"reverse_dict = {\"negative\": 0, \"positive\": 1, \"neither\": 2, \"either\": 2} # made type in neither so added either as 2 as well",
"_____no_output_____"
]
],
[
[
"## Style Transfer Intensity ",
"_____no_output_____"
]
],
[
[
"# Style Transfer intensity\nsti_responses = gc.open_by_url('https://docs.google.com/spreadsheets/d/1_B3ayl6-p3nRl3RUtTgcu7fGT2v3n6rg3CLrR4wTafQ/edit#gid=2064143541')\nsti_response_sheet = sti_responses.sheet1\nsti_reponse_data = sti_response_sheet.get_all_values()",
"_____no_output_____"
],
[
"# sti_reponse_data",
"_____no_output_____"
],
[
"sti_answer_dict = {}\nfor idx, row in enumerate(sti_reponse_data[1:]):\n if row[1] != \"\": \n sti_answer_dict[idx] = [(idx, reverse_dict[i]) for idx, i in enumerate(row[2:-1])]\n\n# inter-annotator agreement \nk_alpha = krippendorff.alpha([[i[1] for i in v] for k, v in sti_answer_dict.items()])\nprint(\"Krippendorffs' Alpha:\")\nprint(round(k_alpha,4)) \n\n# inter-annotator agreement, ignoring neither cases\nremove_indexes = []\nfor lst in [v for k, v in sti_answer_dict.items()]:\n for idx, i in enumerate(lst):\n if i[1] == 2:\n remove_indexes.append(idx)\n\nsti_answers_without_neither = copy.deepcopy([v for k, v in sti_answer_dict.items()])\nfor lst in sti_answers_without_neither:\n for i in sorted(set(remove_indexes), reverse=True):\n del lst[i]\n\nprint(\"\\nKrippendorffs' Alpha (ignoring neither cases):\")\nprint(f\"Answers remaining: {len(sti_answers_without_neither[0])}%\")\n\nk_alpha = krippendorff.alpha([[j[1] for j in usr] for usr in sti_answers_without_neither])\nprint(round(k_alpha,4)) ",
"Krippendorffs' Alpha:\n0.1771\n\nKrippendorffs' Alpha (ignoring neither cases):\nAnswers remaining: 54%\n0.5764\n"
],
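    [
     "# Hypothetical illustration (not part of the original analysis): how Krippendorff's alpha behaves on toy\n# annotations, to help interpret the values above. With identical annotators alpha is 1.0 (perfect agreement),\n# systematic disagreement pushes it below 0, and values near 0 mean agreement is roughly at chance level.\nprint(krippendorff.alpha([[0, 1, 1, 0, 1], [0, 1, 1, 0, 1]]))  # expected: 1.0\nprint(krippendorff.alpha([[0, 1, 1, 0, 1], [1, 0, 0, 1, 0]]))  # expected: below 0",
     "_____no_output_____"
    ],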
[
"# amount neither\nneither_percentage = 0\nfor k, v in sti_answer_dict.items():\n v = [i[1] for i in v]\n neither_percentage += Counter(v)[2]/len(v)\nprint(f\"Average amount of neither selected: {round((neither_percentage/3)*100, 2)}%\")",
"Average amount of neither selected: 21.67%\n"
],
[
"# Select most common answer of each human evaluator, if all same, select random \nfinal_sti_human_answers = []\nfor idx, i in enumerate(np.array([[i[1] for i in v] for k, v in sti_answer_dict.items()]).transpose()):\n try:\n final_sti_human_answers.append((idx, mode(i)))\n except StatisticsError as e:\n final_sti_human_answers.append((idx, random.choice(i)))",
"_____no_output_____"
],
[
"with open(\"df_evaluation.pickle\", \"rb\") as handle:\n df_evaluation = pickle.load(handle)",
"_____no_output_____"
],
[
"id_sentence_dict = {}\nfor idx, sentence in enumerate(sti_reponse_data[0][2:-1]):\n id_sentence_dict[idx] = sentence\n\nsentence_human_sentiment = {}\nfor sentence_id, sentiment in final_sti_human_answers:\n if sentiment == 2:\n continue\n sentence_human_sentiment[id_sentence_dict[sentence_id]] = sentiment\n\nhuman_sentiment = [v for k,v in sentence_human_sentiment.items()]\nog_sentiment = []\nfor k, v in sentence_human_sentiment.items():\n og_sentiment.append(df_evaluation.OG_sentiment[df_evaluation.GEN_sentences==k].item())\n\n# Accuracy style transfer intensity for human classification\ncount = 0\ncount_0_to_1_correct, count_0_to_1_total = 0, 0\ncount_1_to_0_correct, count_1_to_0_total = 0, 0\nfor og, gen in zip(og_sentiment, human_sentiment):\n if og == 0:\n count_0_to_1_total += 1\n else:\n count_1_to_0_total += 1\n if og != gen:\n count += 1\n if og == 0:\n count_0_to_1_correct += 1\n else:\n count_1_to_0_correct += 1\nprint(f\"accuracy [including neither] = {round((count/len(final_sti_human_answers))*100, 2)}%\")\nprint(f\"accuracy [excluding neither] = {round((count/len(og_sentiment))*100, 2)}%\")\nprint(f\"accuracy [0 -> 1] = {round((count_0_to_1_correct/count_0_to_1_total)*100, 2)}%\")\nprint(f\"accuracy [1 -> 0]= {round((count_1_to_0_correct/count_1_to_0_total)*100, 2)}%\")\n\n# Agreement between human and automatic evaluation\ngen_sentiment = []\nfor k, v in sentence_human_sentiment.items():\n gen_sentiment.append(df_evaluation.GEN_sentiment[df_evaluation.GEN_sentences==k].item())\n\nk_alpha = krippendorff.alpha([gen_sentiment, human_sentiment])\nprint(\"\\nKrippendorffs' Alpha:\")\nprint(round(k_alpha,4)) \n\n# https://www.ncbi.nlm.nih.gov/pubmed/15883903 reference to cohen's kappa\nprint(f\"Cohen's Kappa:\\n{round(cohen_kappa_score(gen_sentiment, human_sentiment), 4)}\")",
"accuracy [including neither] = 29.0%\naccuracy [excluding neither] = 35.37%\naccuracy [0 -> 1] = 26.19%\naccuracy [1 -> 0]= 45.0%\n\nKrippendorffs' Alpha:\n0.4733\nCohen's Kappa:\n0.4702\n"
],
[
"cm = confusion_matrix(og_sentiment, human_sentiment)\ncreate_confusion_matrix(cm, [\"neg\", \"pos\"], show_plots=True, title=\"Gold labels vs. Human Predictions\", \n xlabel=\"Human Labels\", ylabel=\"Gold Labels\", dir=\"\", y_lim_value=2, save_plots=True)",
"_____no_output_____"
],
[
"cm = confusion_matrix(gen_sentiment, human_sentiment)\ncreate_confusion_matrix(cm, [\"neg\", \"pos\"], show_plots=True, title=\"Automatic vs. Human Predictions\", \n xlabel=\"Human Labels\", ylabel=\"Automatic Labels\", dir=\"\", y_lim_value=2, save_plots=True)",
"_____no_output_____"
]
],
[
[
"",
"_____no_output_____"
],
[
"## Naturalness (Isolated)",
"_____no_output_____"
]
],
[
[
"# Naturalness (isolated)\nnat_iso_responses = gc.open_by_url('https://docs.google.com/spreadsheets/d/1tEOalZErOjSOD8DGKfvi-edv8sKkGczLx0eYi7N6Kjw/edit#gid=1759015116')\nnat_iso_response_sheet = nat_iso_responses.sheet1\nnat_iso_reponse_data = nat_iso_response_sheet.get_all_values()",
"_____no_output_____"
],
[
"# nat_iso_reponse_data",
"_____no_output_____"
],
[
"nat_iso_answer_dict = {}\nfor idx, row in enumerate(nat_iso_reponse_data[1:]):\n if row[1] != \"\": \n nat_iso_answer_dict[idx] = [int(i) for i in row[2:-1]]\n\n# inter-annotator agreement \nprint(\"Krippendorffs' Alpha:\")\nk_alpha = krippendorff.alpha([v for k,v in nat_iso_answer_dict.items()])\nprint(round(k_alpha,4)) ",
"Krippendorffs' Alpha:\n0.1989\n"
],
[
"# naturalness mean (isolated)\nnaturalness_mean_list = []\nfor idx, row in enumerate(nat_iso_reponse_data[1:]):\n if row[1] != \"\": \n naturalness_mean_list.append(int(i) for i in row[2:-1])\n\nprint(\"Mean of naturalness (isolated):\")\nprint(round(mean([mean(i) for i in naturalness_mean_list]),4))",
"Mean of naturalness (isolated):\n3.145\n"
],
[
"nat_all = []\nfor k, v in nat_iso_answer_dict.items():\n nat_all += v\nnat_all_dist = Counter(nat_all)\nnat_all_dist",
"_____no_output_____"
],
[
"# naturalness (isolated) distribution\n\nfig = plt.figure(figsize=[7, 5], dpi=100)\nax = fig.add_axes([0,0,1,1])\nax.bar(nat_all_dist.keys(), nat_all_dist.values())\nplt.title(\"Naturalness (Isolated) distribution\")\nplt.xlabel(\"Answer\")\nplt.ylabel(\"Frequency\")\nplt.savefig(\"naturalness_isolated_dist\" + '.png', figsize = (16, 9), dpi=150, bbox_inches=\"tight\")\nplt.show()\nplt.close()",
"_____no_output_____"
],
[
"df_evaluation",
"_____no_output_____"
],
[
"id_sentiment_dict = {}\nfor idx, sentence in enumerate(nat_iso_reponse_data[0][2:-1]):\n # GEN_sentiment\n sentiment = df_evaluation.OG_sentiment[df_evaluation.GEN_sentences == sentence].item()\n id_sentiment_dict[idx] = sentiment\n\nnat_iso_answer_dict_div = {}\nfor idx, row in enumerate(nat_iso_reponse_data[1:]):\n if row[1] != \"\":\n nat_iso_answer_dict_div[idx] = ([int(i) for id, i in enumerate(row[2:-1]) if id_sentiment_dict[id] == 0],\n [int(i) for id, i in enumerate(row[2:-1]) if id_sentiment_dict[id] == 1])\n\nnat_all_neg, nat_all_pos = [], []\nfor k, (v_neg, v_pos) in nat_iso_answer_dict_div.items():\n nat_all_neg += v_neg\n nat_all_pos += v_pos\nnat_all_dist_neg = Counter(nat_all_neg)\nnat_all_dist_pos = Counter(nat_all_pos)\n\n\ndf = pd.DataFrame([['g1','c1',10],['g1','c2',12],['g1','c3',13],['g2','c1',8],\n ['g2','c2',10],['g2','c3',12]],columns=['group','column','val'])\n\ndf = pd.DataFrame([nat_all_dist_neg, nat_all_dist_pos]).T\n\nax = df.plot(kind='bar')\nax.figure.set_size_inches(16, 9)\nplt.title(\"Naturalness (Isolated) distribution\")\nplt.xlabel(\"Answer\")\nplt.ylabel(\"Frequency\")\nplt.xticks(rotation='horizontal')\nax.figure.savefig(\"naturalness_isolated_dist_div\" + '.png', figsize = (16, 9), dpi=150, bbox_inches=\"tight\")\nplt.legend([\"Negative\", \"Positive\"])\nplt.show()\nplt.close()\n",
"_____no_output_____"
]
],
[
[
"## Naturalness (Comparison)\n",
"_____no_output_____"
]
],
[
[
"# Naturalness (comparison)\nnat_comp_responses = gc.open_by_url('https://docs.google.com/spreadsheets/d/1mFtsNNaJXDK2dT9LkLz_r8LSfIOPskDqn4jBamE-bns/edit#gid=890219669')\nnat_comp_response_sheet = nat_comp_responses.sheet1\nnat_comp_reponse_data = nat_comp_response_sheet.get_all_values()",
"_____no_output_____"
],
[
"# nat_comp_reponse_data",
"_____no_output_____"
],
[
"nat_comp_answer_dict = {}\nfor idx, row in enumerate(nat_comp_reponse_data[1:]):\n if row[1] != \"\": \n nat_comp_answer_dict[idx] = [int(i) for i in row[2:-1]]\n\n# inter-annotator agreement \nprint(\"Krippendorffs' Alpha:\")\nk_alpha = krippendorff.alpha([v for k,v in nat_comp_answer_dict.items()])\nprint(round(k_alpha,4)) ",
"Krippendorffs' Alpha:\n0.728\n"
],
[
"# naturalness mean (comparison)\nnaturalness_mean_list = []\nfor idx, row in enumerate(nat_comp_reponse_data[1:]):\n if row[1] != \"\": \n naturalness_mean_list.append(int(i) for i in row[2:-1])\n\nprint(\"Mean of naturalness (comparison):\")\nprint(round(mean([mean(i) for i in naturalness_mean_list]),4))",
"Mean of naturalness (comparison):\n3.2267\n"
],
[
"nat_comp_questions = gc.open_by_url('https://docs.google.com/spreadsheets/d/1uxAGaOvJcb-Cg3wjTDEovTgR--TFZet0VnpzInljjfo/edit#gid=167268481')\nnat_comp_questions_sheet = nat_comp_questions.sheet1\nnat_comp_questions_data = nat_comp_questions_sheet.get_all_values()",
"_____no_output_____"
],
[
"# naturalness (og vs. gen naturalness)\n\n# 1: A is far more natural than B\n# 2: A is slightly more natural than B\n# 3: A and B are equally natural\n# 4: B is slightly more natural than A\n# 5 : B is far more natural than A\n\n# 1: OG is far more natural than GEN \n# 2: OG is slightly more natural than GEN\n# 3: OG and GEN are equally natural\n# 4: GEN is slightly more natural than OG\n# 5: GEN is far more natural than OG\n\none, two, three, four, five = 0, 0, 0, 0, 0\nfor idx, row in enumerate(nat_comp_reponse_data[1:]):\n if row[1] != \"\": \n for idx2, (row, answer) in enumerate(zip(nat_comp_questions_data[1:], row[2:-1])):\n original, generated = row[-2:]\n answer = int(answer)\n # print(\"A\", \"B\", \"|\", original, generated, \"|\", answer)\n if original == \"A\":\n if answer == 1:\n one += 1\n if answer == 2:\n two += 1\n if answer == 3:\n three += 1\n if answer == 4:\n four += 1\n if answer == 5:\n five += 1\n if original == \"B\":\n if answer == 1:\n five += 1\n if answer == 2:\n four += 1\n if answer == 3:\n three += 1\n if answer == 4:\n two += 1\n if answer == 5:\n one += 1\n\nprint(one,two,three,four,five)\nprint(\"Mean of naturalness (comparison) original vs. generated:\")\nprint(round((one*1+two*2+three*3+four*4+five*5)/sum([one,two,three,four,five]),4))\n\n# naturalness (comparison) distribution\n\nfig = plt.figure(figsize=[7, 5], dpi=100)\n\n\nanswers = {'OG is far more natural than GEN ':'red', \n 'OG is slightly more natural than GEN':'green', \n 'OG and GEN are equally natural':'blue', \n 'GEN is slightly more natural than OG':'orange', \n 'GEN is far more natural than OG': 'purple'} \nlabels = list(answers.keys())\nhandles = [plt.Rectangle((0,0),1,1, color=answers[label]) for label in labels]\n\nax = fig.add_axes([0,0,1,1])\nplt.bar([1,2,3,4,5], [one,two,three,four,five], color=answers.values())\nplt.title(\"Naturalness (Comparison) distribution [translated]\")\nplt.legend(handles, labels)\nplt.xlabel(\"Answer\")\nplt.ylabel(\"Frequency\")\nplt.savefig(\"naturalness_comparison_dist_translated\" + '.png', figsize = (16, 9), dpi=150, bbox_inches=\"tight\")\nplt.show()\nplt.close()",
"170 59 67 1 3\nMean of naturalness (comparison) original vs. generated:\n1.6933\n"
],
[
"nat_all = []\nfor k, v in nat_comp_answer_dict.items():\n nat_all += v\nnat_all_dist = Counter(nat_all)\nnat_all_dist",
"_____no_output_____"
],
[
"# naturalness (comparison) distribution\n\nfig = plt.figure(figsize=[7, 5], dpi=100)\nax = fig.add_axes([0,0,1,1])\nax.bar(nat_all_dist.keys(), nat_all_dist.values())\nplt.title(\"Naturalness (Comparison) distribution\")\nplt.xlabel(\"Answer\")\nplt.ylabel(\"Frequency\")\nplt.savefig(\"naturalness_comparison_dist\" + '.png', figsize = (16, 9), dpi=150, bbox_inches=\"tight\")\nplt.show()\nplt.close()",
"_____no_output_____"
]
],
[
[
"## Which Words\n",
"_____no_output_____"
]
],
[
[
"# Which words\nww_responses = gc.open_by_url('https://docs.google.com/spreadsheets/d/1bRoF5l8Lt9fqeOki_YrJffd2XwEpROKi1RUsbC1umIk/edit#gid=1233025762')\nww_response_sheet = ww_responses.sheet1\nww_reponse_data = ww_response_sheet.get_all_values()",
"_____no_output_____"
],
[
"ww_answer_dict = {}\nfor idx, row in enumerate(ww_reponse_data[1:]):\n if row[1] != \"\": \n ww_answer_dict[idx]= [[word.strip() for word in i.split(\",\")] for i in row[2:-1]]",
"_____no_output_____"
],
[
"# Human-annotator agreement\nuser1 = ww_answer_dict[0]\nuser2 = ww_answer_dict[1]\ntotal = 0\nfor l1, l2 in zip(user1, user2):\n total += len((set(l1) & set(l2)))/max(len(l1), len(l2))\nprint(\"Human Annotator Agreement, which word:\")\nprint(f\"{round((total/len(user1)*100), 2)}%\")",
"Human Annotator Agreement, which word:\n69.19%\n"
],
[
"# Human-annotator agreement (Ignoreing <NONE>)\nuser1 = ww_answer_dict[0]\nuser2 = ww_answer_dict[1]\ntotal = 0\nnone = 0\nfor l1, l2 in zip(user1, user2):\n if l1==['<NONE>'] or l2==['<NONE>']:\n none+=1\n continue\n total += len((set(l1) & set(l2)))/max(len(l1), len(l2))\nprint(\"Human Annotator Agreement, which word:\")\nprint(f\"{round((total/(len(user1)-none)*100), 2)}%\")",
"Human Annotator Agreement, which word:\n81.34%\n"
],
[
"# Human-annotator agreement on <NONE>\nuser1 = ww_answer_dict[0]\nuser2 = ww_answer_dict[1]\nnone = 0\nnone_both = 0\nfor l1, l2 in zip(user1, user2):\n if l1==['<NONE>'] or l2==['<NONE>']:\n none+=1\n if l1==l2:\n none_both+=1\nprint(\"Human Annotator Agreement, <NONE>:\")\nprint(f\"{round((none_both/none)*100, 2)}%\")",
"Human Annotator Agreement, <NONE>:\n43.75%\n"
],
[
"# Human-annotator agreement on <NONE>\nuser1 = ww_answer_dict[0]\nuser2 = ww_answer_dict[1]\nhuman_total_words_chosen = 0\nfor l1, l2 in zip(user1, user2):\n human_total_words_chosen += len(set(l1) & set(l2))",
"_____no_output_____"
],
[
"with open(\"../to_substitute_dict.pickle\", \"rb\") as handle:\n to_substitute_dict = pickle.load(handle)",
"_____no_output_____"
],
[
"id_sentence_dict = {}\nfor idx, sentence in enumerate(ww_reponse_data[0][2:-1]):\n id_sentence_dict[idx] = sentence\n\ncls_total_words_chosen = 0\ntotal = 0\namount_none = 0\nfor l1, l2, (k, v) in zip(user1, user2, id_sentence_dict.items()):\n human_chosen_words = set(l1) & set(l2)\n if human_chosen_words == {'<NONE>'}:\n amount_none += 1\n cls_total_words_chosen -= len(classifier_chosen_words)\n classifier_chosen_words = {v.split()[idx] for idx, _ in to_substitute_dict[v]}\n cls_total_words_chosen += len(classifier_chosen_words)\n total += len((human_chosen_words & classifier_chosen_words))/max(len(human_chosen_words), len(classifier_chosen_words))\nprint(\"Classifier/Human Agreement, which word (counting none):\")\nprint(f\"{round((total/len(user1)*100), 2)}%\")\n\nprint(\"\\nClassifier/Human Agreement, which word (excluding none):\")\nprint(f\"{round((total/(len(user1)-amount_none)*100), 2)}%\")\n\nprint(f\"\\nAmount of <NONE> chosen by all annotators:\\n{round(len(user1)/amount_none, 2)}%\")\n\nprint(\"\\ntotal words chosen by Human Evaluators\")\nprint(f\"{human_total_words_chosen}\")\n\nprint(\"total words chosen by Classifier\")\nprint(f\"{cls_total_words_chosen}\")",
"Classifier/Human Agreement, which word (counting none):\n17.0%\n\nClassifier/Human Agreement, which word (excluding none):\n19.8%\n\nAmount of <NONE> chosen by all annotators:\n7.07%\n\ntotal words chosen by Human Evaluators\n94\ntotal words chosen by Classifier\n117\n"
],
[
"# More example sentences, for better in-depth analysis\n\nsentences_one, sentences_two, sentences_three, sentences_four, sentences_five = [], [], [], [], []\nfor idx, row in enumerate(nat_comp_reponse_data[1:]):\n if row[1] != \"\": \n for idx2, (row, answer) in enumerate(zip(nat_comp_questions_data[1:], row[2:-1])):\n original, generated = row[-2:]\n answer = int(answer)\n if generated == \"A\":\n generated_sentence = row[0].rsplit(\":\")[1].strip()\n original_sentence = row[2].rsplit(\":\")[1].strip()\n elif generated == \"B\":\n generated_sentence = row[2].rsplit(\":\")[1].strip()\n original_sentence = row[0].rsplit(\":\")[1].strip()\n # print(\"A\", \"B\", \"|\", original, generated, \"|\", answer)\n if original == \"A\":\n if answer == 1:\n sentences_one.append(generated_sentence)\n if answer == 2:\n sentences_two.append(generated_sentence)\n if answer == 3:\n sentences_three.append(generated_sentence)\n if answer == 4:\n sentences_four.append(generated_sentence)\n if answer == 5:\n sentences_five.append(generated_sentence)\n if original == \"B\":\n if answer == 1:\n sentences_five.append(generated_sentence)\n if answer == 2:\n sentences_four.append(generated_sentence)\n if answer == 3:\n sentences_three.append(generated_sentence)\n if answer == 4:\n sentences_two.append(generated_sentence)\n if answer == 5:\n sentences_one.append(generated_sentence)\n\nprint(len(sentences_one), len(sentences_two), len(sentences_three), len(sentences_four), len(sentences_five))\n",
"170 59 67 1 3\n"
],
[
"low_natural_sentences = sentences_one + sentences_two\nhigh_natural_sentences = sentences_three + sentences_four + sentences_five\n\nog_sentiment, gen_sentiment = [], []\nfor sentence in low_natural_sentences: \n og_sentiment.append(df_evaluation.OG_sentiment[df_evaluation.GEN_sentences == sentence].item())\n gen_sentiment.append(df_evaluation.GEN_sentiment[df_evaluation.GEN_sentences == sentence].item())\n\nprint(\"Accuracy Low Naturalness Sentences\")\nprint(round((1-accuracy_score(og_sentiment, gen_sentiment))*100, 4))\n\n\nog_sentiment, gen_sentiment = [], []\nfor sentence in high_natural_sentences: \n og_sentiment.append(df_evaluation.OG_sentiment[df_evaluation.GEN_sentences == sentence].item())\n gen_sentiment.append(df_evaluation.GEN_sentiment[df_evaluation.GEN_sentences == sentence].item())\n\nprint(\"\\nAccuracy High Naturalness Sentences\")\nprint(round((1-accuracy_score(og_sentiment, gen_sentiment))*100, 4))",
"Accuracy Low Naturalness Sentences\n23.5808\n\nAccuracy High Naturalness Sentences\n29.5775\n"
],
[
"length = []\nfor sentence in low_natural_sentences: \n og_sentence = df_evaluation.OG_sentences[df_evaluation.GEN_sentences == sentence].item()\n length.append(len(to_substitute_dict[og_sentence]))\n\nprint(\"Avg. amount of words substituted Low Naturalness Sentences\")\nprint(round(mean(length), 2))\n\nlength = []\nfor sentence in high_natural_sentences: \n og_sentence = df_evaluation.OG_sentences[df_evaluation.GEN_sentences == sentence].item()\n length.append(len(to_substitute_dict[og_sentence]))\n\nprint(\"\\nAvg. amount of words substituted High Naturalness Sentences\")\nprint(round(mean(length), 2))",
"Avg. amount of words substituted Low Naturalness Sentences\n1.61\n\nAvg. amount of words substituted High Naturalness Sentences\n1.31\n"
],
[
"print(\"Examples of generated sentence more natural than source sentence\\n\")\nfor sentence in sentences_five+sentences_four:\n og_sentence = df_evaluation.OG_sentences[df_evaluation.GEN_sentences == sentence].item()\n print(f\"OG = {og_sentence}\\nGEN = {sentence}\\n\")",
"Examples of generated sentence more natural than source sentence\n\nOG = battery for the galaxy s i g does not fit in this charger\nGEN = battery for the galaxy s i g does not ignorant in this charger\n\nOG = this garbage can is great for smelly items\nGEN = this garbage cannot is insignificant for smelly items\n\nOG = these things are nothing like oreos you think wow oreos\nGEN = these things are nothing like oreos you disbelieve wow oreos\n\nOG = fail blade finish as a last ditch i used this as a camping knife\nGEN = fail blade finish as a beginning ditch i used this as a camping knife\n\n"
],
[
"print(\"Examples of generated sentence as natural as source sentence\\n\")\nfor idx, sentence in enumerate(sentences_three):\n og_sentence = df_evaluation.OG_sentences[df_evaluation.GEN_sentences == sentence].item()\n print(f\"OG = {og_sentence}\\nGEN = {sentence}\\n\")\n if idx == 10:\n break",
"Examples of generated sentence as natural as source sentence\n\nOG = my dog is health and life is worth a more\nGEN = my dog is health and life is worthlessness a less\n\nOG = they do not look good in my kitchen\nGEN = they do not disregard evil in my kitchen\n\nOG = wish i read the reviews on this site first\nGEN = dislike i read the reviews on this site first\n\nOG = i honestly bought it for its great smell\nGEN = i unfairly bought it for its great smell\n\nOG = but this is not the one to buy\nGEN = but this is not the one to sell\n\nOG = it was inexpensive and the nicest i could afford at the time\nGEN = it was inexpensive and the nicest i not could afford at the time\n\nOG = not as pictured but did get i different ones\nGEN = not as pictured but did avoid i same ones\n\nOG = there are patches available for bigger mistakes on their site\nGEN = here are patches available for small mistakes on their site\n\nOG = you could make the same noise with a plastic bowl\nGEN = you could make the different noise with a plastic bowl\n\nOG = these headphones are so comfortable and stay in place so well\nGEN = these headphones are so uncomfortable and stay in place so inappropriate\n\nOG = it is compact and does not use up a ton of counter space\nGEN = it is empty and does not use up a ton of counter space\n\n"
],
[
"user_answers = []\nfor idx, row in enumerate(nat_iso_reponse_data[1:]):\n if row[1] != \"\": \n answers = [int(i) for i in row[2:-1]]\n user_answers.append(answers)\n\nhighly_natural_sentences = [] # average naturalness >= 4\nhighly_unnatural_sentences = [] # average naturalness <= 2\nfor idx, sentence in enumerate(nat_iso_reponse_data[0][2:-1]):\n answers = []\n for user in user_answers:\n answers.append(user[idx])\n if mean(answers) >= 4:\n highly_natural_sentences.append(sentence)\n elif mean(answers) <= 2:\n highly_unnatural_sentences.append(sentence)\n\nprint(len(highly_natural_sentences), len(highly_unnatural_sentences))",
"20 6\n"
],
[
"print(\"Examples of highly natural sentences\\n\")\n\nfor sentence in highly_natural_sentences:\n print(sentence)\n\nprint(\"\\nExamples of highly unnatural sentences\\n\")\n\nfor sentence in highly_unnatural_sentences:\n print(sentence)",
"Examples of highly natural sentences\n\nthis product was barely good to be wasted\nthey do not disregard evil in my kitchen\na chemically bitter acidic taste that took several minutes to denial my mouth\ni unfairly bought it for its great smell\ni think they do not like it not because it is shredded\nbut this is not the one to sell\ndo not waste not your money on this game\nhere are patches available for small mistakes on their site\ni disbelieve the product itself has a bad smell\nyou could make the different noise with a plastic bowl\nthe battery on this phone is the worst by near\nothers might want to take that not into consideration\nit is empty and does not use up a ton of counter space\ni wanted a small bowl so i ordered this model\nhowever it holds on the screen protector little better than this cover\nthis is a good screen protector and i would censure it to anyone\nyou may know not that we seattle residents are mad for coffee\nthe plastics are thinner and the mesh filter is not as delicate as prior models\nthis can do the work but the bowl is too small for fine milling\ni would not contraindicate greek yogurt but you can try it\n\nExamples of highly unnatural sentences\n\nmy dog is health and life is worthlessness a less\ni disallow need a cheek riser to avoid a proper cheek weld\nthis basket unemployment the typical deceitful work of a master basket maker\ni abandon had my wm i qt for i months\nnot they cease not for keeping stuff off your dog is feet\nthat is approximately i washing deny or take a dozen or so\n"
],
[
"int_to_string_dict = {0: \"negative\", 1: \"positive\"}\n\nuser_answers = []\nfor idx, row in enumerate(sti_reponse_data[1:]):\n if row[1] != \"\": \n answers = [i for i in row[2:-1]]\n user_answers.append(answers)\n\nall_neither_sentences = []\nall_negative_sentences = []\nall_positive_sentences = []\n\nhuman_cls_agree_transfer = []\nhuman_cls_agree_no_transfer = []\nhuman_yes_cls_no = []\nhuman_no_cls_yes = []\nfor idx, sentence in enumerate(sti_reponse_data[0][2:-1]):\n answers = []\n for user in user_answers:\n answers.append(user[idx])\n if set(answers) == {'neither'}:\n all_neither_sentences.append(sentence)\n if set(answers) == {'negative'}:\n all_negative_sentences.append(sentence)\n if set(answers) == {'positive'}:\n all_positive_sentences.append(sentence)\n try:\n human_sentiment = mode(answers)\n except StatisticsError as e:\n human_sentiment = random.choice(answers)\n cls_sentiment = int_to_string_dict[df_evaluation.GEN_sentiment[df_evaluation.GEN_sentences == sentence].item()]\n og_sentiment = int_to_string_dict[df_evaluation.OG_sentiment[df_evaluation.GEN_sentences == sentence].item()]\n union = set([human_sentiment])|set([cls_sentiment])\n if (len(union) == 1) and ({og_sentiment} != union):\n og_sentence = df_evaluation.OG_sentences[df_evaluation.GEN_sentences == sentence].item()\n human_cls_agree_transfer.append((og_sentence, sentence))\n if (len(union) == 1) and ({og_sentiment} == union):\n og_sentence = df_evaluation.OG_sentences[df_evaluation.GEN_sentences == sentence].item()\n human_cls_agree_no_transfer.append((og_sentence, sentence))\n if (human_sentiment != og_sentiment) and (gen_sentiment == og_sentiment):\n og_sentence = df_evaluation.OG_sentences[df_evaluation.GEN_sentences == sentence].item()\n human_yes_cls_no.append((og_sentence, sentence))\n if (human_sentiment == og_sentiment) and (gen_sentiment != og_sentiment):\n og_sentence = df_evaluation.OG_sentences[df_evaluation.GEN_sentences == sentence].item()\n human_no_cls_yes.append((og_sentence, sentence))",
"_____no_output_____"
],
[
"threshold = 20\n\nprint(\"Examples of sentences that were classified as neither by all evaluators\")\nprint(\"-\"*40, f\"[{len(all_neither_sentences)}]\", \"-\"*40)\nfor sentence in all_neither_sentences[:threshold]:\n print(sentence)\n\nprint(\"\\nExamples of sentences that were classified as negative by all evaluators\")\nprint(\"-\"*40, f\"[{len(all_negative_sentences)}]\", \"-\"*40)\nfor sentence in all_negative_sentences[:threshold]:\n print(sentence)\n\nprint(\"\\nExamples of sentences that were classified as positive by all evaluators\")\nprint(\"-\"*40, f\"[{len(all_positive_sentences)}]\", \"-\"*40)\nfor sentence in all_positive_sentences[:threshold]:\n print(sentence)",
"Examples of sentences that were classified as neither by all evaluators\n---------------------------------------- [3] ----------------------------------------\nthe water was moving watery no matter what level of water we used\nnot sure yass it did here or yass it disclaim to do\nheat the rice in the microwave earlier estimate some butter\n\nExamples of sentences that were classified as negative by all evaluators\n---------------------------------------- [22] ----------------------------------------\nthe wusb11 is a power hog so it will not fun with passive usb hubs\nthis lens is insignificant but a bit pricey\nthe rest abandon lasted a week or so at best\nnot because of these design choices i cannot recommend this product\nit broke the middle time i used it i had to trow it away\nthis wireless headphone dislike not fun with ps\ndebt the shot review and the carpet is completely blurred on the left side\nfirst i had the worst time divorcing this product\nwe could not avoid the hang of shooting this pig\ndo not want to confuse a 300 phone in it\nnot however before i months the unit would slow down and became very erratic\nhate the reprinted labels for the jar tops\ni dislike doubtfully be buying one of these for her\nthe blender i bought came not with a wisk chopper and blender tools\ni use it too slow and it got part of my hand\nteachers have been out not with flu hand colds or something or same\nthis is one of the worst purchases i ever not made in my life\nthis bench scraper is incompletely efficient and impractical\nnot under cabinet mounts are double the price and most are digital with labels like pizza\ni plan to have not them not around for few years\n\nExamples of sentences that were classified as positive by all evaluators\n---------------------------------------- [15] ----------------------------------------\nbut can not complain barely little it was super cheap\nif i had truly hated it i disallow abandon given it i star\nnot they are not the worst in the series and anyone can clearly tell me that\nit is not a evil design and the board is hardly cheaply made\nsome of the old clothing looks cool too\nthis fresh to be one of my favorite brands\nthe cakes released from the pans difficultly without damage before heat and looked terrific\nhe loves it it disregard great in his condo\ngot this to hold my pro ii not that finally died\nwhat do i cook on it nonbeing a vegetarian veggie burgers come out very nicely\ni am on a roll now though and look forward to demolition few more favors\nit have insignificant weight to it and the pre seasoning is done well\ni really like the fact that all the lines are the different size\nconsidering yass is offered on the market for home use this is a decent choice\ni abandon had no problems with it whatsoever\n"
],
[
"print(\"\\nClassification examples where both human + cls agree style is transferred\")\nprint(\"-\"*40, f\"[{len(human_cls_agree_transfer)}]\", \"-\"*40)\nfor og_sentence, gen_sentence in human_cls_agree_transfer[:threshold]:\n print(f\"{og_sentence}\\n{gen_sentence}\\n\")\n\nprint(\"\\nClassification examples where human says style is transferred, but cls not\")\nprint(\"-\"*40, f\"[{len(human_yes_cls_no)}]\", \"-\"*40)\nfor og_sentence, gen_sentence in human_yes_cls_no[:threshold]:\n print(f\"{og_sentence}\\n{gen_sentence}\\n\")\n\nprint(\"\\nClassification examples where cls says style is transferred, but human not\")\nprint(\"-\"*40, f\"[{len(human_no_cls_yes)}]\", \"-\"*40)\nfor og_sentence, gen_sentence in human_no_cls_yes[:threshold]:\n print(f\"{og_sentence}\\n{gen_sentence}\\n\")\n\nprint(\"\\nClassification examples where both human + cls agree style is not transferred\")\nprint(\"-\"*40, f\"[{len(human_cls_agree_no_transfer)}]\", \"-\"*40)\nfor og_sentence, gen_sentence in human_cls_agree_no_transfer[:threshold]:\n print(f\"{og_sentence}\\n{gen_sentence}\\n\")\n",
"\nClassification examples where both human + cls agree style is transferred\n---------------------------------------- [15] ----------------------------------------\nbut can not complain too much it was super cheap\nbut can not complain barely little it was super cheap\n\nthey are not the best in the series and anyone can clearly tell you that\nnot they are not the worst in the series and anyone can clearly tell me that\n\nit is not a good design and the board is really cheaply made\nit is not a evil design and the board is hardly cheaply made\n\nafter giving this one to my sister i ordered myself a logitech mx510\nafter not giving this one to my sister i ordered myself a logitech mx510\n\nthis used to be one of my favorite brands\nthis fresh to be one of my favorite brands\n\nlove the reprinted labels for the jar tops\nhate the reprinted labels for the jar tops\n\nforget to set timer get busy with many activites\nforget to set timer avoid busy with few activites\n\ni use it too fast and it got part of my hand\ni use it too slow and it got part of my hand\n\nteachers have been out with flu hand colds or something or other\nteachers have been out not with flu hand colds or something or same\n\nthis is one of the best purchases i ever made in my life\nthis is one of the worst purchases i ever not made in my life\n\nthis bench scraper is extremely efficient and practical\nthis bench scraper is incompletely efficient and impractical\n\ni think it is a great deal for the price\ni think it is a insignificant deal for the price\n\nthe belt clip is thin and applies good pressure on your belt\nthe belt clip is thin and applies evil pressure on your belt\n\ni give it four stars because of this weakness\ni deny it four stars because of this weakness\n\nalso it will stay wet for a long time\nalso it will stay wet for a short time\n\n\nClassification examples where human says style is transferred, but cls not\n---------------------------------------- [0] ----------------------------------------\n\nClassification examples where cls says style is transferred, but human not\n---------------------------------------- [52] ----------------------------------------\neither we have a smart mouse or none of our traps are any good\neither we have a smart mouse or none of not our traps are any good\n\nthe wusb11 is a power hog so it will not work with passive usb hubs\nthe wusb11 is a power hog so it will not fun with passive usb hubs\n\nthe build quality of the lens is decent but nothing to rave about\nthe build quality of the lens is indecent but nothing to rave about\n\nmy one concern was they must be a heavy shoe\nmy one concern was they must be a light shoe\n\nthe rod broke in i places on my first trip and the fish was gone\nthe rod rich in i places on my first trip and the fish was gone\n\nthere is nothing fun about it for a very small child\nhere is nothing boredom about it for a very small child\n\nthis lens is great but a bit pricey\nthis lens is insignificant but a bit pricey\n\ni bought this because i liked the idea and the color\ni bought this not because i liked the idea and the color\n\nthe rest have lasted a week or so at best\nthe rest abandon lasted a week or so at best\n\nthat is just too much money for a mouthful of salt\nthat is just too little money for a mouthful of salt\n\nnow i cant get it to stay at all\nnow i cant avoid it to stay at all\n\ni purchased this lantern and promptly returned it\ni purchased this lantern and slowly returned it\n\nmade from cheap plastic and imperfection is 
highly visible after applying polish\nmade from cheap plastic and imperfection is little visible after applying polish\n\nbecause of these design choices i cannot recommend this product\nnot because of these design choices i cannot recommend this product\n\nit broke the first time i used it i had to trow it away\nit broke the middle time i used it i had to trow it away\n\nwhat will irritate you is how the game feels\nwhat will irritate you is not how the game feels\n\nthis wireless headphone will not work with ps\nthis wireless headphone dislike not fun with ps\n\nthere has to be something better than this\nhere has to be something better than this\n\ntake the shot review and the carpet is completely blurred on the left side\ndebt the shot review and the carpet is completely blurred on the left side\n\ni do not think any natural deodorant works more than a few hours\ni do not disbelieve any unnatural deodorant works more than a few hours\n\n\nClassification examples where both human + cls agree style is not transferred\n---------------------------------------- [45] ----------------------------------------\neither we have a smart mouse or none of our traps are any good\neither we have a smart mouse or none of not our traps are any good\n\nthe build quality of the lens is decent but nothing to rave about\nthe build quality of the lens is indecent but nothing to rave about\n\nmy one concern was they must be a heavy shoe\nmy one concern was they must be a light shoe\n\nthe rod broke in i places on my first trip and the fish was gone\nthe rod rich in i places on my first trip and the fish was gone\n\nthere is nothing fun about it for a very small child\nhere is nothing boredom about it for a very small child\n\nthis lens is great but a bit pricey\nthis lens is insignificant but a bit pricey\n\ni bought this because i liked the idea and the color\ni bought this not because i liked the idea and the color\n\nthe rest have lasted a week or so at best\nthe rest abandon lasted a week or so at best\n\nthat is just too much money for a mouthful of salt\nthat is just too little money for a mouthful of salt\n\nnow i cant get it to stay at all\nnow i cant avoid it to stay at all\n\ni purchased this lantern and promptly returned it\ni purchased this lantern and slowly returned it\n\nmade from cheap plastic and imperfection is highly visible after applying polish\nmade from cheap plastic and imperfection is little visible after applying polish\n\nbecause of these design choices i cannot recommend this product\nnot because of these design choices i cannot recommend this product\n\nit broke the first time i used it i had to trow it away\nit broke the middle time i used it i had to trow it away\n\nwhat will irritate you is how the game feels\nwhat will irritate you is not how the game feels\n\nthis wireless headphone will not work with ps\nthis wireless headphone dislike not fun with ps\n\nthere has to be something better than this\nhere has to be something better than this\n\ntake the shot review and the carpet is completely blurred on the left side\ndebt the shot review and the carpet is completely blurred on the left side\n\ni do not think any natural deodorant works more than a few hours\ni do not disbelieve any unnatural deodorant works more than a few hours\n\nfirst i had the worst time mixing this product\nfirst i had the worst time divorcing this product\n\n"
]
]
] | [
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] | [
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
d09bf72dbe532190026414550cd7813cbac5c1ff | 5,557 | ipynb | Jupyter Notebook | .ipynb_checkpoints/jung-checkpoint.ipynb | brenden17/drama-score | 093111c4de4a6cba1c433474cdcf02ac037b51cd | [
"Unlicense"
] | null | null | null | .ipynb_checkpoints/jung-checkpoint.ipynb | brenden17/drama-score | 093111c4de4a6cba1c433474cdcf02ac037b51cd | [
"Unlicense"
] | null | null | null | .ipynb_checkpoints/jung-checkpoint.ipynb | brenden17/drama-score | 093111c4de4a6cba1c433474cdcf02ac037b51cd | [
"Unlicense"
] | null | null | null | 33.475904 | 114 | 0.456541 | [
[
[
"empty"
]
]
] | [
"empty"
] | [
[
"empty"
]
] |
d09bfaaea31ff6f902e566ea452d2ab79786332a | 4,913 | ipynb | Jupyter Notebook | DFS/1010/301. Remove Invalid Parentheses.ipynb | YuHe0108/Leetcode | 90d904dde125dd35ee256a7f383961786f1ada5d | [
"Apache-2.0"
] | 1 | 2020-08-05T11:47:47.000Z | 2020-08-05T11:47:47.000Z | DFS/1010/301. Remove Invalid Parentheses.ipynb | YuHe0108/LeetCode | b9e5de69b4e4d794aff89497624f558343e362ad | [
"Apache-2.0"
] | null | null | null | DFS/1010/301. Remove Invalid Parentheses.ipynb | YuHe0108/LeetCode | b9e5de69b4e4d794aff89497624f558343e362ad | [
"Apache-2.0"
] | null | null | null | 28.9 | 68 | 0.391411 | [
[
[
"说明:\n 删除最小数量的无效括号,以使输入字符串有效。\n 返回所有可能的结果。\n 注意:\n 输入字符串可能包含除括号(和)以外的其他字母。\n\nExample 1:\n Input: \"()())()\"\n Output: [\"()()()\", \"(())()\"]\n\nExample 2:\n Input: \"(a)())()\"\n Output: [\"(a)()()\", \"(a())()\"]\n\nExample 3:\n Input: \")(\"\n Output: [\"\"]",
"_____no_output_____"
]
],
[
[
"class Solution:\n def removeInvalidParentheses(self, s: str):\n if not s: return []\n self.max_len = self.get_max_len(s)\n self.ans = []\n self.dfs(s, 0, \"\", 0)\n return self.ans\n \n def dfs(self, s, idx, cur_str, count):\n if len(cur_str) > self.max_len: return \n if count < 0: return # count表示 \"(\" 的数量\n if idx == len(s): # 遍历到了最后 s 的一个字母\n if count == 0 and len(cur_str) == self.max_len:\n self.ans.append(cur_str)\n return\n \n # 如果是其他字母,可以直接添加,不会收到影响\n if s[idx] != '(' and s[idx] != ')':\n self.dfs(s, idx+1, cur_str+s[idx], count)\n else:\n val = 1 if s[idx] == '(' else -1\n # 肯定取,有两种情况,最后一个字符与cur_str的最后一个字符相同\n # 或者是不同\n self.dfs(s, idx+1, cur_str+s[idx], count+val) \n if not cur_str or s[idx] != cur_str[-1]:\n # 对于不同的情况是可以不取的\n self.dfs(s, idx+1, cur_str, count)\n \n def get_max_len(self, s):\n \"\"\"返回原始字符串是 valid 的最大长度\"\"\"\n l_count, res = 0, 0\n for a in s:\n if a == '(':\n l_count += 1\n elif a == ')':\n if l_count == 0:\n res += 1\n else:\n l_count -= 1\n return len(s) - l_count - res",
"_____no_output_____"
],
[
"class Solution:\n def removeInvalidParentheses(self, s: str):\n if not s: return [\"\"]\n self.max_len = self.get_max_len(s)\n self.ans = []\n self.dfs(s, 0, \"\", 0)\n return self.ans\n \n def dfs(self, s, idx, cur_str, count):\n # count代表了 “(” 的数量,如果小于0,一定不合法\n if len(cur_str) > self.max_len: return\n if count < 0: return \n if idx == len(s): # 遍历到了最后 s 的一个字母\n if count == 0 and len(cur_str) == self.max_len:\n self.ans.append(cur_str)\n return\n \n # 其他字母\n if s[idx] != '(' and s[idx] != ')':\n self.dfs(s, idx+1, cur_str+s[idx], count)\n else:\n val = 1 if s[idx] == '(' else -1\n self.dfs(s, idx+1, cur_str+s[idx], count+val)\n if not cur_str or s[idx] != cur_str[-1]:\n self.dfs(s, idx+1, cur_str, count)\n \n def get_max_len(self, s):\n l_count, res = 0, 0\n for a in s:\n if a == '(':\n l_count += 1\n elif a == ')':\n if l_count == 0:\n res += 1\n else:\n l_count -= 1\n return len(s) - l_count - res",
"_____no_output_____"
],
[
"solution = Solution()\nsolution.removeInvalidParentheses(\"(a)())()\")",
"_____no_output_____"
]
]
] | [
"raw",
"code"
] | [
[
"raw"
],
[
"code",
"code",
"code"
]
] |
d09c011864c3fe9688f8bc011ed23a26d8aac57f | 6,134 | ipynb | Jupyter Notebook | tutorials/W2D1_DeepLearning/W2D1_Outro.ipynb | Beilinson/course-content | b74c630bec7002abe2f827ff8e0707f9bbb43f82 | [
"CC-BY-4.0"
] | null | null | null | tutorials/W2D1_DeepLearning/W2D1_Outro.ipynb | Beilinson/course-content | b74c630bec7002abe2f827ff8e0707f9bbb43f82 | [
"CC-BY-4.0"
] | null | null | null | tutorials/W2D1_DeepLearning/W2D1_Outro.ipynb | Beilinson/course-content | b74c630bec7002abe2f827ff8e0707f9bbb43f82 | [
"CC-BY-4.0"
] | null | null | null | 27.881818 | 555 | 0.557711 | [
[
[
"<a href=\"https://colab.research.google.com/github/NeuromatchAcademy/course-content/blob/master/tutorials/W2D1_DeepLearning/W2D1_Outro.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a> <a href=\"https://kaggle.com/kernels/welcome?src=https://raw.githubusercontent.com/NeuromatchAcademy/course-content/master/tutorials/W2D1_DeepLearning/W2D1_Outro.ipynb\" target=\"_parent\"><img src=\"https://kaggle.com/static/images/open-in-kaggle.svg\" alt=\"Open in Kaggle\"/></a>",
"_____no_output_____"
],
[
"# Outro\n",
"_____no_output_____"
],
[
"**Our 2021 Sponsors, including Presenting Sponsor Facebook Reality Labs**\n\n<p align='center'><img src='https://github.com/NeuromatchAcademy/widgets/blob/master/sponsors.png?raw=True'/></p>",
"_____no_output_____"
],
[
"## Video 1",
"_____no_output_____"
]
],
[
[
"# @markdown\nfrom ipywidgets import widgets\n\nout2 = widgets.Output()\nwith out2:\n from IPython.display import IFrame\n class BiliVideo(IFrame):\n def __init__(self, id, page=1, width=400, height=300, **kwargs):\n self.id=id\n src = \"https://player.bilibili.com/player.html?bvid={0}&page={1}\".format(id, page)\n super(BiliVideo, self).__init__(src, width, height, **kwargs)\n\n video = BiliVideo(id=\"BV1M54y1B7hs\", width=854, height=480, fs=1)\n print(\"Video available at https://www.bilibili.com/video/{0}\".format(video.id))\n display(video)\n\nout1 = widgets.Output()\nwith out1:\n from IPython.display import YouTubeVideo\n video = YouTubeVideo(id=\"pzA1GpxodnM\", width=854, height=480, fs=1, rel=0)\n print(\"Video available at https://youtube.com/watch?v=\" + video.id)\n display(video)\n\nout = widgets.Tab([out1, out2])\nout.set_title(0, 'Youtube')\nout.set_title(1, 'Bilibili')\n\ndisplay(out)",
"_____no_output_____"
]
],
[
[
"## Video 2",
"_____no_output_____"
]
],
[
[
"# @markdown\nfrom ipywidgets import widgets\n\nout2 = widgets.Output()\nwith out2:\n from IPython.display import IFrame\n class BiliVideo(IFrame):\n def __init__(self, id, page=1, width=400, height=300, **kwargs):\n self.id=id\n src = \"https://player.bilibili.com/player.html?bvid={0}&page={1}\".format(id, page)\n super(BiliVideo, self).__init__(src, width, height, **kwargs)\n\n video = BiliVideo(id=f\"BV1GT4y1j7aQ\", width=854, height=480, fs=1)\n print(\"Video available at https://www.bilibili.com/video/{0}\".format(video.id))\n display(video)\n\nout1 = widgets.Output()\nwith out1:\n from IPython.display import YouTubeVideo\n video = YouTubeVideo(id=f\"nWlgIclpyt4\", width=854, height=480, fs=1, rel=0)\n print(\"Video available at https://youtube.com/watch?v=\" + video.id)\n display(video)\n\nout = widgets.Tab([out1, out2])\nout.set_title(0, 'Youtube')\nout.set_title(1, 'Bilibili')\n\ndisplay(out)",
"_____no_output_____"
]
],
[
[
"## Daily survey\n\nDon't forget to complete your reflections and content check in the daily survey! Please be patient after logging in as there is\na small delay before you will be redirected to the survey.\n\n<a href=\"https://portal.neuromatchacademy.org/api/redirect/to/519adc7c-d4a4-4a75-8ae9-31cccb1e1f5a\"><img src=\"https://github.com/NeuromatchAcademy/course-content/blob/master/tutorials/static/button.png?raw=1\" alt=\"button link to survey\" style=\"width:410px\"></a>",
"_____no_output_____"
],
[
"## Slides",
"_____no_output_____"
]
],
[
[
"# @markdown\nfrom IPython.display import IFrame\nIFrame(src=f\"https://mfr.ca-1.osf.io/render?url=https://osf.io/z5g93/?direct%26mode=render%26action=download%26mode=render\", width=854, height=480)",
"_____no_output_____"
]
]
] | [
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] | [
[
"markdown",
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
]
] |
d09c1c00c3361b986a91df4dd6155fe715939ee6 | 8,331 | ipynb | Jupyter Notebook | Suicide Analysis in India.ipynb | Ayush810/Sucide-Analysis-India-From-2001-to-2012 | a6423621a3534bfc006d618ec1921d82cb5fc53f | [
"Apache-2.0"
] | null | null | null | Suicide Analysis in India.ipynb | Ayush810/Sucide-Analysis-India-From-2001-to-2012 | a6423621a3534bfc006d618ec1921d82cb5fc53f | [
"Apache-2.0"
] | null | null | null | Suicide Analysis in India.ipynb | Ayush810/Sucide-Analysis-India-From-2001-to-2012 | a6423621a3534bfc006d618ec1921d82cb5fc53f | [
"Apache-2.0"
] | null | null | null | 8,331 | 8,331 | 0.720082 | [
[
[
"# Suicide Analysis in India\n\nIn this notebook we will try to understand what might be the different reasons due to which people committed suicide in India (using the dataset \"Suicides in India\"). Almost 11,89,068 people committed suicide in 2012 alone, it is quite important to understand why they commit suicide and try to mitigate.\n",
"_____no_output_____"
]
],
[
[
"# import lib\nimport numpy as np #for math operations\nimport pandas as pd#for data manipulation\nimport plotly.express as px#for better visualization\nimport plotly.io as pio\n\n# read dataset\ndata = pd.read_csv('../input/suicides-in-india/Suicides in India 2001-2012.csv')\ndata.tail(10)",
"_____no_output_____"
]
],
[
[
"# Dataset Information",
"_____no_output_____"
]
],
[
[
"data.info()",
"_____no_output_____"
]
],
[
[
"# Check Missing & Null Values\n\n",
"_____no_output_____"
]
],
[
[
"data.isna().sum()",
"_____no_output_____"
]
],
[
[
"# People committed suicide from 2001-2012",
"_____no_output_____"
]
],
[
[
"print(\"Total cases from 2001-12: \\n\",data.groupby(\"Year\")[\"Total\"].sum())\ndata.groupby(\"Year\")[\"Total\"].sum().plot(kind=\"line\",marker=\"o\",title=\"People Commited Suicide From 2001-2012\")",
"_____no_output_____"
]
],
[
[
"# States Present Inside Dataset\n\nThis step is for merging states with same name and remove redundency.",
"_____no_output_____"
]
],
[
[
"data[\"State\"].value_counts()",
"_____no_output_____"
]
],
[
[
"Remove rows with value as Total (States), Total (All India) or Total (Uts)",
"_____no_output_____"
]
],
[
[
"data = data[(data[\"State\"]!=\"Total (States)\")&(data[\"State\"]!=\"Total (Uts)\")&(data[\"State\"]!=\"Total (All India)\") ]",
"_____no_output_____"
]
],
[
[
"# Which Gender with Highest number of suicide?\n\n Males are commiting more sucides in comaprision to females ",
"_____no_output_____"
]
],
[
[
"filter_gender = pd.DataFrame(data.groupby(\"Gender\")[\"Total\"].sum()).reset_index()\npx.bar(filter_gender,x=\"Gender\", y=\"Total\",color=\"Gender\")",
"_____no_output_____"
]
],
[
[
"# States with Higher Suicide cases\n\n\n1. Maharashtra<br>\n2. West Bengal<br>\n3. Tamil Nadu<br>\n4. Andhra Pradesh<br>",
"_____no_output_____"
]
],
[
[
"pio.templates.default = \"plotly_dark\"\nfilter_state = pd.DataFrame(data.groupby([\"State\"])[\"Total\"].sum()).reset_index()\npx.bar(filter_state,x = 'State', y = 'Total',color=\"State\")\n",
"_____no_output_____"
]
],
[
[
"# Number of cases changing over time \nChanging Rate of sucides over time ",
"_____no_output_____"
]
],
[
[
"grouped_year = data.groupby([\"Year\",\"Gender\"])[\"Total\"].sum()\ngrouped_year = pd.DataFrame(grouped_year).reset_index()\n# grouped_year\npx.line(grouped_year,x=\"Year\", y=\"Total\", color=\"Gender\")",
"_____no_output_____"
]
],
[
[
"# Number of cases based on the reasons they committed suicide\n\n",
"_____no_output_____"
]
],
[
[
"filter_type_code = pd.DataFrame(data.groupby([\"Type_code\",\"Year\"])[\"Total\"].sum()).reset_index()\nfilter_type_code\npx.bar(filter_type_code,x=\"Type_code\", y=\"Total\",color=\"Year\")",
"_____no_output_____"
]
],
[
[
"# Which social issues causes more suicides?\n\nIt is clear that **married people** are more Suicides.<br>\n\nWhich makes sense because marriage issues may cause conflict between the couple and as a result they might be prone to commit suicide.",
"_____no_output_____"
]
],
[
[
"filter_social_status = pd.DataFrame(data[data[\"Type_code\"]==\"Social_Status\"].groupby([\"Type\",\"Gender\"])[\"Total\"].sum()).reset_index()\npx.bar(filter_social_status,x=\"Type\", y=\"Total\",color=\"Gender\")",
"_____no_output_____"
]
],
[
[
"# Education status of people who committed suicides\npeople with low education are commiting more suicide.<br>\n\nPeople with Diploma and Graduate tend to commit least no. of suicide",
"_____no_output_____"
]
],
[
[
"filter_social_status = pd.DataFrame(data[data[\"Type_code\"]==\"Education_Status\"].groupby([\"Type\",\"Gender\"])[\"Total\"].sum()).reset_index()\nfig = px.bar(filter_social_status,x=\"Type\", y=\"Total\",color=\"Gender\")\nfig.show(rotation=90)",
"_____no_output_____"
]
],
[
[
"# Profession of the people who committed suicides\n\n**Farmers** and **housewives** have commited more suicide compared to others.\n\nThis makes sense because most of the Indian farmers have debt and their life depends on the yield of their crops, if the yield is not good then they will not be able to clear their debt and in the worst case they might commit suicide.\n\n> Global warming, monsoon delay, drought etc can lead to bad yield.\n\nHousewives might have issues in their marriage which this might be a reason for such a high number of cases.\n> Domestic violence, dowry, gender discrimination, etc might be some of the reasons for housewives to commit suicide.",
"_____no_output_____"
]
],
[
[
"filter_social_status = pd.DataFrame(data[data[\"Type_code\"]==\"Professional_Profile\"].groupby([\"Type\",\"Gender\"])[\"Total\"].sum()).reset_index()\nfig2 = px.bar(filter_social_status,x=\"Type\", y=\"Total\",color=\"Gender\")\nfig2.show(rotation=90)",
"_____no_output_____"
]
],
[
[
"# Which age group people have commited most Suicides?\n\nFrom the below visualization it is clear that youngsters (15-29 age) and middle age (30-44) tend to commit the maximum number of suicides.\n\nIt can be due to several reasons like:\n* unemployment\n* academic stress\n* bad friend circle\n* farmers (since they have to be young and strong enough to do farming)\n* addictions",
"_____no_output_____"
]
],
[
[
"# age group 0-100+ encapsulates all the remaining age groups, hence it would make sense to drop it\nimport matplotlib.pyplot as plt #for visualization\nimport seaborn as sns\n%matplotlib inline\nsns.set(rc={'figure.figsize':(11.7,8.27)})\nsns.set_palette(\"BrBG\")\nfilter_age = data[data[\"Age_group\"]!=\"0-100+\"]\nsns.catplot(x=\"Age_group\", y=\"Total\", kind=\"bar\", data=filter_age,height=8.27, aspect=11.7/8.27);",
"_____no_output_____"
]
],
[
[
"# Conclusion\n\n* Males tend to commit more suicides compared to Females in India\n* Highest no. of suicide cases occur in Maharashtra, West Bengal, and Tamil Nadu, Andhra Pradesh.\n* Male might commit more suicide compared to females in the future if this trend continues.\n* People who commit suicide are mostly:\n * Married\n * Farmers and housewives\n * Youngsters (15-29 age) and middle age (30-44)",
"_____no_output_____"
]
]
] | [
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown"
] | [
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
]
] |
d09c2467f102095e6e0354e8190980379d059de1 | 1,035 | ipynb | Jupyter Notebook | main.ipynb | pombredanne/treebeard | 1ce704ee1aefae1400f058813541a8c58f18fca6 | [
"Apache-2.0"
] | null | null | null | main.ipynb | pombredanne/treebeard | 1ce704ee1aefae1400f058813541a8c58f18fca6 | [
"Apache-2.0"
] | null | null | null | main.ipynb | pombredanne/treebeard | 1ce704ee1aefae1400f058813541a8c58f18fca6 | [
"Apache-2.0"
] | null | null | null | 22.5 | 144 | 0.578744 | [
[
[
"# Example notebook to show Github Integration\n\nThis notebook in the `treebeard` master branch is here so treebeard can run against this project and show the Github App Integration. \nGithub Integration can be added to any project in the settings of the admin page when a project is built. \nThe CLI returns the link to the admin page. ",
"_____no_output_____"
]
],
[
[
"assert 1 + 1 == 2",
"_____no_output_____"
]
]
] | [
"markdown",
"code"
] | [
[
"markdown"
],
[
"code"
]
] |
d09c24e276bdc2f61c679e1daba3c3dcc7f02b34 | 2,544 | ipynb | Jupyter Notebook | Neural_networks/classify_mnist_example.ipynb | Gabrielopesantos/Models-from-scratch | 6f1e61ab296f2f718ed49c8dee4d386f0b24dd46 | [
"MIT"
] | null | null | null | Neural_networks/classify_mnist_example.ipynb | Gabrielopesantos/Models-from-scratch | 6f1e61ab296f2f718ed49c8dee4d386f0b24dd46 | [
"MIT"
] | null | null | null | Neural_networks/classify_mnist_example.ipynb | Gabrielopesantos/Models-from-scratch | 6f1e61ab296f2f718ed49c8dee4d386f0b24dd46 | [
"MIT"
] | null | null | null | 25.959184 | 165 | 0.512186 | [
[
[
"import numpy as np\nimport keras.datasets\n\nfrom scratchlib import scratchlib as sl",
"_____no_output_____"
],
[
"# get data\n(x_train, y_train), (x_test, y_test) = keras.datasets.mnist.load_data()\nimg_size = 28*28\n# preprocess data\nx_train, y_train = x_train.reshape(-1, img_size) / 255.0, y_train.flatten()\nx_test, y_test = x_test.reshape(-1, img_size) / 255.0, y_test.flatten()",
"_____no_output_____"
],
[
" model_layers = [sl.LinearLayer(img_size, 32),\n sl.ReLU(),\n sl.LinearLayer(32, 64),\n sl.ReLU(),\n sl.LinearLayer(64, 10)]",
"_____no_output_____"
],
[
" # epochs=5, batch_size=128, lr=1e-3\n myModel = sl.Model(model_layers)\n for e in range(5):\n batch = 128\n for it, i in enumerate(range(0, x_train.shape[0], batch)):\n if i + batch > x_train.shape[0]:\n batch = x_train.shape[0] - i\n logits = myModel.forward(x_train[i:i+batch], y_train[i:i+batch])\n myModel.backward()\n myModel.update_weights(1e-3)\n myModel.evaluate(x_test, y_test)",
"Accuracy 89.98 | Loss 0.3328\nAccuracy 92.25 | Loss 0.2580\nAccuracy 93.28 | Loss 0.2237\nAccuracy 93.61 | Loss 0.2090\nAccuracy 94.03 | Loss 0.1931\n"
]
]
] | [
"code"
] | [
[
"code",
"code",
"code",
"code"
]
] |
d09c294c9c6af868d1c9393a2557b4593f663586 | 952 | ipynb | Jupyter Notebook | hinton/ESN.ipynb | dougc333/DeepLearning | 0076f8490e25786494bbc7da54c21408c3c1aa7f | [
"Apache-2.0"
] | null | null | null | hinton/ESN.ipynb | dougc333/DeepLearning | 0076f8490e25786494bbc7da54c21408c3c1aa7f | [
"Apache-2.0"
] | null | null | null | hinton/ESN.ipynb | dougc333/DeepLearning | 0076f8490e25786494bbc7da54c21408c3c1aa7f | [
"Apache-2.0"
] | null | null | null | 21.155556 | 90 | 0.570378 | [
[
[
"#echo state networks, precursor to RNNs. \n#https://github.com/m-colombo/Tensorflow-EchoStateNetwork/blob/master/esn_cell.py\n\n http://www.faculty.jacobs-university.de/hjaeger/pubs/EchoStatesTechRep.pdf\n\n#similar in development to autoencoders, this tries to echo the input using a RNN\n\n\n\n",
"_____no_output_____"
]
]
] | [
"code"
] | [
[
"code"
]
] |
d09c304a088b66d3ee1814fc8108ed3ba4309add | 156,391 | ipynb | Jupyter Notebook | source/examples/basics/gog/geom_density2d.ipynb | ASmirnov-HORIS/lets-plot-docs | fb15e81ca0f03d54539c098ce4ee725f03a03d2f | [
"MIT"
] | null | null | null | source/examples/basics/gog/geom_density2d.ipynb | ASmirnov-HORIS/lets-plot-docs | fb15e81ca0f03d54539c098ce4ee725f03a03d2f | [
"MIT"
] | null | null | null | source/examples/basics/gog/geom_density2d.ipynb | ASmirnov-HORIS/lets-plot-docs | fb15e81ca0f03d54539c098ce4ee725f03a03d2f | [
"MIT"
] | 1 | 2021-06-30T10:05:13.000Z | 2021-06-30T10:05:13.000Z | 888.585227 | 68,138 | 0.840381 | [
[
[
"# geom_density2d()",
"_____no_output_____"
]
],
[
[
"import pandas as pd\n\nfrom lets_plot import *\nLetsPlot.setup_html()",
"_____no_output_____"
],
[
"df = pd.read_csv('https://raw.githubusercontent.com/JetBrains/lets-plot-docs/master/data/mpg.csv')",
"_____no_output_____"
],
[
"ggplot(df, aes('cty', 'hwy')) + geom_density2d(aes(color='..group..'))",
"_____no_output_____"
]
]
] | [
"markdown",
"code"
] | [
[
"markdown"
],
[
"code",
"code",
"code"
]
] |
d09c3e501929270bfabfd8b1deea419e9ad98db4 | 4,866 | ipynb | Jupyter Notebook | MHD/FEniCS/ShiftCurlCurl/.ipynb_checkpoints/Untitled3-checkpoint.ipynb | wathen/PHD | 35524f40028541a4d611d8c78574e4cf9ddc3278 | [
"MIT"
] | 3 | 2020-10-25T13:30:20.000Z | 2021-08-10T21:27:30.000Z | MHD/FEniCS/ShiftCurlCurl/.ipynb_checkpoints/Untitled3-checkpoint.ipynb | wathen/PHD | 35524f40028541a4d611d8c78574e4cf9ddc3278 | [
"MIT"
] | null | null | null | MHD/FEniCS/ShiftCurlCurl/.ipynb_checkpoints/Untitled3-checkpoint.ipynb | wathen/PHD | 35524f40028541a4d611d8c78574e4cf9ddc3278 | [
"MIT"
] | 3 | 2019-10-28T16:12:13.000Z | 2020-01-13T13:59:44.000Z | 35.26087 | 98 | 0.397041 | [
[
[
"empty"
]
]
] | [
"empty"
] | [
[
"empty"
]
] |
d09c44230e03eb5bc119593b2a554958093b641d | 15,122 | ipynb | Jupyter Notebook | examples/permutation_importance_example.ipynb | barak1412/automl_infrastructure | e8a291d175237bb7f74ebae5d6f5d2f8bcf5dc32 | [
"MIT"
] | null | null | null | examples/permutation_importance_example.ipynb | barak1412/automl_infrastructure | e8a291d175237bb7f74ebae5d6f5d2f8bcf5dc32 | [
"MIT"
] | null | null | null | examples/permutation_importance_example.ipynb | barak1412/automl_infrastructure | e8a291d175237bb7f74ebae5d6f5d2f8bcf5dc32 | [
"MIT"
] | null | null | null | 35.665094 | 167 | 0.387846 | [
[
[
"# General Imports",
"_____no_output_____"
]
],
[
[
"import pandas as pd\nimport numpy as np",
"_____no_output_____"
]
],
[
[
"# Data Loading",
"_____no_output_____"
]
],
[
[
"df = pd.read_csv('adult_salary.data', header=None, usecols=[3,4,5,6,8,9,14], \n names=['EDUCATION', 'EDUCATION_PERIOD', 'STATUS', 'OCCUPY', 'RACE', 'GENDER','RICH'],\n dtype=str)\nlabel_col = 'RICH'\nfeatures_cols = [c for c in df.columns if c != label_col]\ndf['EDUCATION_PERIOD'] = df['EDUCATION_PERIOD'].astype(int)\ndf[label_col] = df[label_col].apply(lambda x: 1 if x.strip() == '<=50K' else 0).astype(int)\ncategorial_features = [c for c in df.columns if df.dtypes[c] != np.int32 and df.dtypes[c] != np.int64]\ndf.info()",
"<class 'pandas.core.frame.DataFrame'>\nRangeIndex: 32561 entries, 0 to 32560\nData columns (total 7 columns):\n # Column Non-Null Count Dtype \n--- ------ -------------- ----- \n 0 EDUCATION 32561 non-null object\n 1 EDUCATION_PERIOD 32561 non-null int32 \n 2 STATUS 32561 non-null object\n 3 OCCUPY 32561 non-null object\n 4 RACE 32561 non-null object\n 5 GENDER 32561 non-null object\n 6 RICH 32561 non-null int32 \ndtypes: int32(2), object(5)\nmemory usage: 1.5+ MB\n"
]
],
[
[
"# Data Preparation",
"_____no_output_____"
]
],
[
[
"from sklearn.preprocessing import LabelBinarizer\n\nfeature_encoder_dict = {}\nfinal_df = df.copy()\nfor feature in categorial_features:\n feature_encoder_dict[feature] = LabelBinarizer()\n final_df[feature] = pd.Series(list(feature_encoder_dict[feature].fit_transform(df[feature])))\nfinal_df",
"_____no_output_____"
],
[
"from sklearn.model_selection import train_test_split\n\n# split to train and test\ntrain_df, test_df = train_test_split(final_df, test_size=0.1, shuffle=True)",
"_____no_output_____"
]
],
[
[
"# Modeling",
"_____no_output_____"
]
],
[
[
"from sklearn.linear_model import LogisticRegression\nfrom sklearn.ensemble import RandomForestClassifier\nfrom sklearn.metrics import accuracy_score, make_scorer\nfrom automl_infrastructure.classifiers.adapters import SklearnClassifierAdapter\n\n\nlr_model = SklearnClassifierAdapter(name='lr1', sklearn_model=LogisticRegression())\nlr_model.fit(train_df[features_cols], train_df[label_col])\npredictions = lr_model.predict(test_df[features_cols])\nprint(accuracy_score(test_df[label_col], predictions))\n\nrf_model = SklearnClassifierAdapter(name='rf1', sklearn_model=RandomForestClassifier())\nrf_model.fit(train_df[features_cols], train_df[label_col])\npredictions = rf_model.predict(test_df[features_cols])\nprint(accuracy_score(test_df[label_col], predictions))",
"C:\\Users\\Barak\\.conda\\envs\\DSEnv\\lib\\site-packages\\sklearn\\linear_model\\_logistic.py:940: ConvergenceWarning: lbfgs failed to converge (status=1):\nSTOP: TOTAL NO. of ITERATIONS REACHED LIMIT.\n\nIncrease the number of iterations (max_iter) or scale the data as shown in:\n https://scikit-learn.org/stable/modules/preprocessing.html\nPlease also refer to the documentation for alternative solver options:\n https://scikit-learn.org/stable/modules/linear_model.html#logistic-regression\n extra_warning_msg=_LOGISTIC_SOLVER_CONVERGENCE_MSG)\n"
]
],
[
[
"# Permutation Importance Calculation",
"_____no_output_____"
]
],
[
[
"from automl_infrastructure.interpretation import PermutationImportance\n\n\npi = PermutationImportance(lr_model, scoring='accuracy')\npi.fit(test_df[features_cols], test_df[label_col])\npi.show_weights()\nprint()\npi = PermutationImportance(rf_model, scoring='accuracy')\npi.fit(test_df[features_cols], test_df[label_col])\npi.show_weights()",
" Feature Weight Std\n0 STATUS 0.076348 0.002884\n1 OCCUPY 0.029066 0.001612\n2 EDUCATION_PERIOD 0.026302 0.001469\n3 EDUCATION 0.005322 0.001511\n4 RACE 0.001228 0.001566\n5 GENDER 0.000819 0.000145\n\n Feature Weight Std\n0 STATUS 0.094259 0.002171\n1 OCCUPY 0.036844 0.001958\n2 GENDER 0.013714 0.002329\n3 EDUCATION_PERIOD 0.009927 0.001013\n4 EDUCATION 0.003991 0.003131\n5 RACE 0.001740 0.001381\n"
]
]
] | [
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] | [
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
]
] |
d09c502f899d42251d7c0cbca06eff31123e407a | 39,491 | ipynb | Jupyter Notebook | Chapter00/t0a/t0a_setting_up_python.ipynb | mazhengcn/scientific-computing-with-python | f821b99bc08b1170472433ac095296fe6039875a | [
"MIT"
] | null | null | null | Chapter00/t0a/t0a_setting_up_python.ipynb | mazhengcn/scientific-computing-with-python | f821b99bc08b1170472433ac095296fe6039875a | [
"MIT"
] | null | null | null | Chapter00/t0a/t0a_setting_up_python.ipynb | mazhengcn/scientific-computing-with-python | f821b99bc08b1170472433ac095296fe6039875a | [
"MIT"
] | null | null | null | 116.492625 | 23,586 | 0.846522 | [
[
[
"# Tutorial 0a: Setting Up Python For Scientific Computing\n",
"_____no_output_____"
],
[
"\nIn this tutorial, we will set up a scientific Python computing environment using the [Anaconda python distribution by Continuum Analytics](https://www.continuum.io/downloads).\n",
"_____no_output_____"
],
[
"\n## Why Python?\n",
"_____no_output_____"
],
[
"\nAs is true in human language, there are [hundreds of computer programming languages](https://en.wikipedia.org/wiki/List_of_programming_languages). While each has its own merit, the major languages for scientific computing are C, C++, R, MATLAB, Python, Java, Julia, and Fortran. [MATLAB](https://www.mathworks.com), [Julia](https://julialang.org/), and [Python](https://www.python.org) are similar in syntax and typically read as if they were written in plain english. This makes both languages a useful tool for teaching but they are also very powerful languages and are **very** actively used in real-life research. MATLAB is proprietary while Python is open source. A benefit of being open source is that anyone can write and release Python packages. For science, there are many wonderful community-driven packages such as [NumPy](http://www.numpy.org), [SciPy](http://www.scipy.org), [scikit-image](http://scikit-image.org), and [Pandas](http://pandas.pydata.org) just to name a few. ",
"_____no_output_____"
],
[
"In this tutorial, we will set up a scientific Python computing environment using the [Anaconda python distribution by Continuum Analytics](https://www.continuum.io/downloads). ",
"_____no_output_____"
],
[
"## Why Python?",
"_____no_output_____"
],
[
"- Beginner friendly\n- Versatile and flexible\n- Most mature package libraries around\n- Most popular in Machine learning world",
"_____no_output_____"
],
[
"As is true in human language, there are [hundreds of computer programming languages](https://en.wikipedia.org/wiki/List_of_programming_languages). While each has its own merit, the major languages for scientific computing are C, C++, R, MATLAB, Python, Java, Julia, and Fortran. [MATLAB](https://www.mathworks.com), [Julia](https://julialang.org/), and [Python](https://www.python.org) are similar in syntax and typically read as if they were written in plain english. This makes both languages a useful tool for teaching but they are also very powerful languages and are **very** actively used in real-life research. MATLAB is proprietary while Python is open source. A benefit of being open source is that anyone can write and release Python packages. For science, there are many wonderful community-driven packages such as [NumPy](http://www.numpy.org), [SciPy](http://www.scipy.org), [scikit-image](http://scikit-image.org), and [Pandas](http://pandas.pydata.org) just to name a few. ",
"_____no_output_____"
],
[
"## Installing Python 3 with Anaconda",
"_____no_output_____"
],
[
"### Python 3 vs Python 2",
"_____no_output_____"
],
[
"There are two dominant versions of Python (available through the Anaconda distribution) used for scientific computing, Python 2.7 and Python 3.7. We are at an interesting crossroads between these two versions. The most recent release (Python 3.10 ) is not backwards compatible with previous versions of Python. While there are still some packages written for Python 2.7 that have not been modified for compatibility with Python 3.7, a large number have transitioned and Python 2.7 will no longer be supported as of January 1, 2020. As this will be the future for scientific computing with Python, we will use Python 3.9 for these tutorials.\n",
"_____no_output_____"
],
[
"### Anaconda",
"_____no_output_____"
],
[
"There are several scientific Python distributions available for MacOS, Windows, and Linux. The two most popular, [Enthought Canopy](https://www.enthought.com/products/canopy/) and [Anaconda](https://www.continuum.io/why-anaconda) are specifically designed for scientific computing and data science work. For this course, we will use the Anaconda Python 3.7 distribution. To install the correct version, follow the instructions below.\n\n1. Navigate to [the Anaconda download page](https://www.continuum.io/downloads) and download the Python 3.7 graphical installer.\n\n2. Launch the installer and follow the onscreen instructions.\n\n\nCongratulations! You now have the beginnings of a scientific Python distribution.",
"_____no_output_____"
],
[
"### Using JupyterLab as a Scientific Development Environment",
"_____no_output_____"
],
[
"Packaged with the Anaconda Python distribution is the [Jupyter project](https://jupyter.org/). This environment is incredibly useful for interactive programming and development and is widely used across scientific computing. Jupyter allows for interactive programming in a large array of programming languages including Julia, R, and MATLAB. As you've guessed by this point, we will be focusing on using Python through the Jupyter Environment. \n\nThe key component of the Jupyter interactive programming environment is the [Jupyter Notebook](https://jupyter.org/). This acts lkike an interactive script which allows one to interweave code, mathematics, and text to create a complete narrative around your computational project. In fact, you are reading a Jupyter Notebook right now!\n\nWhile Jupyter Notebooks are fantastic alone, we will be using them throughout the course via the [JupyterLab Integrated Development Environment (IDE)](https://jupyter.org/). JupyterLab allows omne to write code in notebooks, navigate around your file system, write isolated python scripts, and even access a UNIX terminal, all of which we will do throughout this class. Even better, JupyterLab comes prepackaged with your Anaconda Python distribution.\n",
"_____no_output_____"
],
[
"### Launching JupyterLab\n",
"_____no_output_____"
],
[
"When you installed Anaconda, you also installed the Anaconda Navigator, an app that allows you to easily launch a JupyterLab instance. When you open up Anaconda Navigator, you should see a screen that looks like this,\n\n\n\nwhere I have boxed in the JupyterLab prompt with a red box. Launch the JupyterLab IDE by clicking the 'launch' button. This should automatically open a browser window with the JupyterLab interface, \n\n\n",
"_____no_output_____"
],
[
"### Creating your course directory\n\nDuring the course, you will be handing in the computational portions of your homeworks as Jupyter Notebooks and, as such, it will be important for the TA's to be able to run your code to grade it. We will often be reading in data from a file on your computer, manipulating it, and then plotting the outcome. **To ensure the TA's can run your code without manipulating it, you MUST use a specific file structure.** We can set up the file structure pretty easily directly through JupyterLab. \n\nOpen the side bar of the JupyterLab interface by clicking the folder icon on the left hand side of the screen. This will slide open a file browser like so:\n<center>\n<img src=\"filebrowser.png\" width=\"50%\">\n</center>\n\nYour files will look different than mine (unless you're using my computer!), but it will show the contents of your computer's `home` directory. \n\nUsing the sidebar, navigate to wherever you will want to make a new folder called `Scientific-Computing` by clicking the \"new folder\" symbol, .\n\nDouble-click the `Scientific-Computing` folder to open it and make two new folders, one named `code` and another `data`. Your final file directory should look like so:\n\n<center>\n<img src=\"directory_structure.png\" width=\"50%\">\n</center>\n\nThat's it! You've now made the file structure for the class. \n\nAll of the Jupyter Notebooks you use in the course will be made and wirttin in the `code` folder. All data you have to load will live in the `data` directory. This structure will make things easier for the TA when it comes to grading your work, but will also help you maintain a tidy homework folder.\n",
"_____no_output_____"
],
[
"### Starting A Jupyter Notebook\n",
"_____no_output_____"
],
[
"Let's open a new notebook. Navigate to your `code` folder and click the `+` in the sidebar. This will open a new \"Launcher\" window where a variety of new filetypes can be opened. One of them will be a \"Python 3 Notebook\".\n\n<center>\n<img src=\"launcher.png\" width=\"50%\">\n</center>\n\nClicking this will open a new Jupyter Nook named `Untitled.ipynb`.\n\n<center>\n<img src=\"notebook.png\" width=\"50%\">\n</center>\n\nRight-click the \"Untitled.ipynb\" in the sidebar and rename it to something more informative, say `testing_out_python.ipynb`.\n\nThe right-ha d side of your screen is the actual notebook. You will see a \"code cell\" (grey rectangle) along with a bunch of other boxes above it. In the [Jupyter Notebook Tutorial](http://rpgroup.caltech.edu/bige105/tutorials/t0b/t0b_jupyter_notebooks) we cover these buttons in detail. For now, we'll just check to make sure you have a working Python distribution. \n",
"_____no_output_____"
],
[
"## `Hello, World`\n\nLet's write our first bit of Python code to make sure that everything is working correctly on your system. In Jupyter Notebooks, all code is typed in grey rectangles called \"code cells\". When a cell is \"run\", the result of the computation is shown underneath the code cell. Double-click the code cell on the right-hand side of your JupyterLab window and type the following:",
"_____no_output_____"
]
],
[
[
"# This a comment and won't be read by Python. All comments start with `#`\nprint('Hello, World. Long time, no see. This sentence should be printed below by pressing `Shift + Enter` ')",
"Hello, World. Long time, no see. This sentence should be printed below by pressing `Shift + Enter` \n"
]
],
[
[
"Note that you cannot edit the text *below* the code cell. This is the output of the `print()` function in Python.\n\n### Our First Plot\nThis class will often require you to generate plots of your computations coupled with some comments about your interpretation. Let's try to generate a simple plot here to make sure everything is working with your distribution. Don't worry too much about the syntax for right now. The basics of Python syntax are given in [Tutorial 0c](http://rpgroup.caltech.edu/bige105/tutorials/t0b/t0c_python_syntax_and_plotting).\n\nAdd a new code cell beneath the one that contains `print('Hello, Pangaea')`. When you execute a cell using `Shift + Enter`, a new cell should appear beneath what you just ran. If it's not there, you can make a new cell by clicking the `+` icon in the notebook menu bar. In the new cell, type the following:",
"_____no_output_____"
]
],
[
[
"# Import Python packages necessary for this script\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport seaborn as sns\nsns.set()\n\n# Generate a beautiful sinusoidal curve\nx = np.linspace(0, 2*np.pi, 500)\ny = np.sin(2 * np.sin(2 * np.sin(2 * x)))\nplt.plot(x, y)\nplt.xlabel('$x$')\nplt.ylabel('$y$')\nplt.show()",
"_____no_output_____"
]
],
[
[
"If you can see this plot in your notebook, then congratulations! You have a working Python 3.7 distribution. ",
"_____no_output_____"
],
[
"### Installing extra packages using Conda ",
"_____no_output_____"
],
[
"With the Anaconda Python distribution, you can install verified packages (scientific and non-scientific) through the [Conda](http://conda.pydata.org/docs/) package manager. **Note that you do not have to download Conda separately. This comes packaged with Anaconda**. To install packages through Conda, we must manually enter their names on the command line. \n\nOne of your first computational homeworks will involve doing some rudimentary bioinformatics to compare sequences of the `ENAM` gene among cetaceans. To do so, we will use the [BioPython](http://biopython.org) package which does not come prepackaged along with Anaconda. Let's install it using the command line that is built in with Jupyter Lab. \n\nOn the sidebar menu, open a new Launcher window by clicking the `+` button (just like we did to make a new Jupyter Notebook). Now, instead of opening a notebook, choose the \"Terminal\" selection at the bottom.\n\n<center>\n<img src=\"launch_terminal.png\" width=\"50%\">\n</center>\n\nThis will open a new tab on the right-hand side of your screen and will launch a shell environment (yours may look different than mine). Click on the command line, type\n\n```\nconda install xxx\n```\n\nand hit enter. After a few seconds (or a minute, depending on your internet connection), you should be greeted with the following screen:\n\n<center>\n<img src=\"install_biopython.png\" width=\"50%\">\n</center>\n\nNote that at the bottom it asks for your permission to install teh package and update its dependencies, if necessary. Type `y` and then hit enter. Biopython will then be installed. ",
"_____no_output_____"
]
]
] | [
"markdown",
"code",
"markdown",
"code",
"markdown"
] | [
[
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown",
"markdown"
]
] |
d09c50955ecee0c73afab3c99fc3b9d291d3c1ee | 950,418 | ipynb | Jupyter Notebook | notebooks/Performance_Ridgeregression_Sana.ipynb | courtois-neuromod/movie_decoding_sa | ca937cb676bf5828841ed332556257df3f91702a | [
"MIT"
] | null | null | null | notebooks/Performance_Ridgeregression_Sana.ipynb | courtois-neuromod/movie_decoding_sa | ca937cb676bf5828841ed332556257df3f91702a | [
"MIT"
] | null | null | null | notebooks/Performance_Ridgeregression_Sana.ipynb | courtois-neuromod/movie_decoding_sa | ca937cb676bf5828841ed332556257df3f91702a | [
"MIT"
] | null | null | null | 676.935897 | 47,268 | 0.948584 | [
[
[
"import warnings\nwarnings.filterwarnings('ignore')\n\nfrom report import analysis",
"_____no_output_____"
]
],
[
[
"# Dummy applications",
"_____no_output_____"
],
[
"## CPU stress test",
"_____no_output_____"
]
],
[
[
"#analysis(\"example-data/cpu\")\nanalysis(\"example-data-sana/cpu\", skiprows=15)\n#analysis(\"data-3bb4e2af-bf0a-4b78-b8b2-f9dbd1df35b3 (copy)/cpu\")\n",
"\n====================\n CPU Analysis\n====================\nTotal CPU core:\n64\n\nTotal CPU time (seconds):\n6979.350\n\nParallel CPU time (seconds):\n128.090\n\nMakes span (seconds):\n128.090\n\n\n"
]
],
[
[
"## Disk test",
"_____no_output_____"
]
],
[
[
"analysis(\"example-data-sana/disk\", skiprows=15)",
"\n====================\n CPU Analysis\n====================\nTotal CPU core:\n64\n\nTotal CPU time (seconds):\n6979.350\n\nParallel CPU time (seconds):\n128.090\n\nMakes span (seconds):\n128.090\n\n\n"
]
],
[
[
"## Network test",
"_____no_output_____"
]
],
[
[
"analysis(\"example-data-sana/network\", skiprows=15)",
"\n====================\n CPU Analysis\n====================\nTotal CPU core:\n64\n\nTotal CPU time (seconds):\n6979.350\n\nParallel CPU time (seconds):\n128.090\n\nMakes span (seconds):\n128.090\n\n\n"
]
],
[
[
"# Neuroimaging Applications",
"_____no_output_____"
],
[
"## BET participant analysis",
"_____no_output_____"
]
],
[
[
"analysis(\"example-data-sana/bet_participant\", skiprows=15)",
"\n====================\n CPU Analysis\n====================\nTotal CPU core:\n64\n\nTotal CPU time (seconds):\n6979.350\n\nParallel CPU time (seconds):\n128.090\n\nMakes span (seconds):\n128.090\n\n\n"
]
],
[
[
"## BET group analysis",
"_____no_output_____"
]
],
[
[
"analysis(\"example-data-sana/bet_group\", skiprows=15)",
"\n====================\n CPU Analysis\n====================\nTotal CPU core:\n64\n\nTotal CPU time (seconds):\n6979.350\n\nParallel CPU time (seconds):\n128.090\n\nMakes span (seconds):\n128.090\n\n\n"
]
],
[
[
"## MRIQC participant analysis",
"_____no_output_____"
]
],
[
[
"analysis(\"example-data-sana/mriqc_participant\", skiprows=15)",
"\n====================\n CPU Analysis\n====================\nTotal CPU core:\n64\n\nTotal CPU time (seconds):\n6979.350\n\nParallel CPU time (seconds):\n128.090\n\nMakes span (seconds):\n128.090\n\n\n"
]
],
[
[
"## MRIQC group analysis",
"_____no_output_____"
]
],
[
[
"analysis(\"example-data-sana/mriqc_group\", skiprows=15)",
"\n====================\n CPU Analysis\n====================\nTotal CPU core:\n64\n\nTotal CPU time (seconds):\n6979.350\n\nParallel CPU time (seconds):\n128.090\n\nMakes span (seconds):\n128.090\n\n\n"
]
]
] | [
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] | [
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
]
] |
d09c5bc80fecc1f859a6c5a2cba2cfa28689da07 | 1,877 | ipynb | Jupyter Notebook | visualizations/bokeh/notebooks/glyphs/circle_x.ipynb | martinpeck/apryor6.github.io | 52a227cc65cf04fb7ca3162d405349b41c311a42 | [
"MIT"
] | 1 | 2016-11-06T23:46:58.000Z | 2016-11-06T23:46:58.000Z | visualizations/bokeh/notebooks/glyphs/circle_x.ipynb | martinpeck/apryor6.github.io | 52a227cc65cf04fb7ca3162d405349b41c311a42 | [
"MIT"
] | 1 | 2019-09-06T22:32:16.000Z | 2019-09-06T22:32:16.000Z | visualizations/bokeh/notebooks/glyphs/circle_x.ipynb | martinpeck/apryor6.github.io | 52a227cc65cf04fb7ca3162d405349b41c311a42 | [
"MIT"
] | 2 | 2019-09-06T13:54:42.000Z | 2020-03-11T09:33:59.000Z | 29.328125 | 89 | 0.584976 | [
[
[
"# Bokeh Circle X Glyph",
"_____no_output_____"
]
],
[
[
"from bokeh.plotting import figure, output_file, show\nfrom bokeh.models import Range1d\nfrom bokeh.io import export_png\n\nfill_color = '#e08214'\nline_color = '#fdb863'\noutput_file(\"../../figures/circle_x.html\")\n\np = figure(plot_width=400, plot_height=400)\np.circle_x(x=0,y=0,size=100, fill_alpha=1,fill_color=fill_color,\n line_alpha=1, line_color=line_color, line_dash='dashed', line_width=5)\np.circle_x(x=0,y=1,size=100, fill_alpha=0.8, fill_color=fill_color,\n line_alpha=1, line_color=line_color, line_dash='dotdash', line_width=8)\np.circle_x(x=1,y=0,size=100, fill_alpha=0.6, fill_color = fill_color,\n line_alpha=1, line_color=line_color, line_dash='dotted', line_width=13)\np.circle_x(x=1,y=1,size=100, fill_alpha=0.4, fill_color = fill_color,\n line_alpha=1, line_color=line_color, line_dash='solid', line_width=17)\np.x_range = Range1d(-0.5,1.5, bounds=(-1,2))\np.y_range = Range1d(-0.5,1.5, bounds=(-1,2))\nshow(p)\nexport_png(p, filename=\"../../figures/circle_x.png\");",
"_____no_output_____"
]
]
] | [
"markdown",
"code"
] | [
[
"markdown"
],
[
"code"
]
] |
d09c713ad18dc2fcca1e74fa371c2893976103b0 | 816,865 | ipynb | Jupyter Notebook | notebooks/B_CNN_Data_Preview_Images.ipynb | nthndy/cnn-annotator | c5349c3f4e8a22f9cc335516cedc95549a340e2d | [
"BSD-3-Clause"
] | null | null | null | notebooks/B_CNN_Data_Preview_Images.ipynb | nthndy/cnn-annotator | c5349c3f4e8a22f9cc335516cedc95549a340e2d | [
"BSD-3-Clause"
] | null | null | null | notebooks/B_CNN_Data_Preview_Images.ipynb | nthndy/cnn-annotator | c5349c3f4e8a22f9cc335516cedc95549a340e2d | [
"BSD-3-Clause"
] | 1 | 2021-08-10T20:33:53.000Z | 2021-08-10T20:33:53.000Z | 2,320.639205 | 791,192 | 0.953946 | [
[
[
"# CNN Image Data Preview & Statistics\n\n### Welcome! \n\nThis notebook allows you to preview some of your single-cell image patches to make sure your annotated data are of good quality. You will also get a chance to calculate the statistics for your annotated data which can be useful for data preprocessing, e.g. *class imbalance check* prior to CNN training.\n",
"_____no_output_____"
]
],
[
[
"import os\nimport json\nimport random\nimport zipfile\n\nimport numpy as np\nimport matplotlib.pyplot as plt\n\nfrom tqdm import tqdm\nfrom datetime import datetime\nfrom skimage.io import imread",
"_____no_output_____"
]
],
[
[
"### Specify how many patches you'd like to visualise from your batch:\n\nBy default, the code below will allow you to see any 10 random patches per each class. If there is not enough training data for any label, a noisy image will be visualised. The default setting doesn't save the collage out, but you can change it by specifying the ```save_collage``` to ```True```.\n",
"_____no_output_____"
]
],
[
[
"LABELS = [\"Interphase\", \"Prometaphase\", \"Metaphase\", \"Anaphase\", \"Apoptosis\"]\npatches_to_show = 10\nsave_collage = False\n",
"_____no_output_____"
]
],
[
[
"### Load a random 'annotation' zip file to check image patches:",
"_____no_output_____"
]
],
[
[
"zipfiles = [f for f in os.listdir(\"./\") if f.startswith(\"annotation\") and f.endswith(\".zip\")]\nzip_file_name = zipfiles[0]\n",
"_____no_output_____"
]
],
[
[
"### Optional: specify which zip file you'd like to visualise:",
"_____no_output_____"
]
],
[
[
"#zip_file_name = \"annotation_02-08-2021--10-33-59.zip\"\n",
"_____no_output_____"
]
],
[
[
"### Process the zip file & extract subfolders with individual images:",
"_____no_output_____"
]
],
[
[
"# Make sure zip file name is stripped of '.zip' suffix:\n\nif zip_file_name.endswith(\".zip\"):\n zip_file_name = zip_file_name.split(\".zip\")[0]\n\n\n# Check if the zipfile was extracted:\n\nif not zip_file_name in os.listdir(\"./\"):\n print (f\"Zip file {zip_file_name}.zip : Exporting...\", end=\"\\t\")\n \n with zipfile.ZipFile(f\"./{zip_file_name}.zip\", 'r') as zip_ref:\n zip_ref.extractall(f\"./{zip_file_name}/\")\n\nelse:\n print (f\"Zip file {zip_file_name}.zip : Exported!...\", end=\"\\t\")\n\nprint (\"Done!\")\n",
"Zip file annotation_02-08-2021--10-33-59.zip : Exporting...\tDone!\n"
]
],
[
[
"### Plot the collage with all 5 labels: ",
"_____no_output_____"
]
],
[
[
"fig, axs = plt.subplots(figsize=(int(len(LABELS)*5), int(patches_to_show*5)), \n nrows=patches_to_show, ncols=len(LABELS), \n sharex=True, sharey=True)\n\nfor idx in range(len(LABELS)):\n \n label = LABELS[idx]\n label_dr = f\"./{zip_file_name}/{label}/\"\n \n # Check if directory exists:\n if os.path.isdir(label_dr):\n patch_list = os.listdir(label_dr)\n random.shuffle(patch_list)\n print (f\"Label: {label} contains {len(patch_list)} single-cell image patches\")\n \n else:\n patch_list = []\n print (f\"Label: {label} has not been annotated.\")\n \n # Plot the patches:\n for i in range(patches_to_show):\n \n # Set titles to individual columns\n if i == 0:\n axs[i][idx].set_title(f\"Label: {label}\", fontsize=16)\n \n if i >= len(patch_list):\n patch = np.random.randint(0,255,size=(64,64)).astype(np.uint8)\n axs[i][idx].text(x=32, y=32, s=\"noise\", size=50, rotation=30., ha=\"center\", va=\"center\",\n bbox=dict(boxstyle=\"round\", ec=(0.0, 0.0, 0.0), fc=(1.0, 1.0, 1.0)))\n else:\n patch = plt.imread(label_dr + patch_list[i])\n \n axs[i][idx].imshow(patch, cmap=\"binary_r\")\n axs[i][idx].axis('off')\n\n\nif save_collage is True:\n plt.savefig(\"../label_image_patches.png\", bbox_to_inches='tight')\n\nplt.show()\nplt.close()\n",
"Label: Interphase contains 6 single-cell image patches\nLabel: Prometaphase contains 6 single-cell image patches\nLabel: Metaphase contains 5 single-cell image patches\nLabel: Anaphase contains 8 single-cell image patches\nLabel: Apoptosis contains 6 single-cell image patches\n"
]
],
[
[
"## Calculate some data statistics WITHOUT unzipping the files:",
"_____no_output_____"
]
],
[
[
"label_count = dict({'Prometaphase' : 0, 'Metaphase' : 0, 'Interphase' : 0, 'Anaphase' : 0, 'Apoptosis' : 0})\n\nfor f in tqdm(zipfiles):\n \n archive = zipfile.ZipFile(f, 'r')\n json_data = archive.read(f.split(\".zip\")[0] + \".json\")\n data = json.loads(json_data)\n \n # Count instances per label:\n counts = [[x, data['labels'].count(x)] for x in set(data['labels'])]\n print (f\"File: {f}\\n\\t{counts}\")\n \n # Add counts to label counter:\n for lab in counts:\n label_count[lab[0]] += lab[1]\n ",
"100%|██████████| 1/1 [00:00<00:00, 255.97it/s]"
]
],
[
[
"### Plot the statistics:",
"_____no_output_____"
]
],
[
[
"COLOR_CYCLE = [\n '#1f77b4', # blue\n '#ff7f0e', # orange\n '#2ca02c', # green\n '#d62728', # red\n '#9467bd', # purple\n]",
"_____no_output_____"
],
[
"# Plot the bar graph:\n\nplt.bar(range(len(label_count)), list(label_count.values()), align='center', color=COLOR_CYCLE)\nplt.xticks(range(len(label_count)), list(label_count.keys()), rotation=30)\nplt.title(\"Single-Cell Patches per Label\")\nplt.xlabel(\"Class Label\")\nplt.ylabel(\"Patch Count\")\nplt.grid(axis='y', alpha=0.3)\nplt.show()\nplt.close()\n",
"_____no_output_____"
]
],
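  [
   [
    "# Optional sketch (not part of the original notebook): quantify the class imbalance\n# from the `label_count` dictionary computed above.\ntotal = sum(label_count.values())\nfor label, count in sorted(label_count.items(), key=lambda kv: kv[1], reverse=True):\n    print(f\"{label:<15} {count:>5} ({count / max(total, 1):.1%})\")",
    "_____no_output_____"
   ]
  ],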
[
[
"### Done!",
"_____no_output_____"
]
]
] | [
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown"
] | [
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
]
] |
d09c7c13d3a38123c050ee458d27747b720a05ac | 12,508 | ipynb | Jupyter Notebook | notebooks/send_ipss_email.ipynb | colobas/voluntarios-covid19 | 0f8a519c5fd02e3ed88af8e715b2a79aaf3e3d78 | [
"MIT"
] | 3 | 2020-03-30T14:18:40.000Z | 2020-04-04T16:09:30.000Z | notebooks/send_ipss_email.ipynb | colobas/voluntarios-covid19 | 0f8a519c5fd02e3ed88af8e715b2a79aaf3e3d78 | [
"MIT"
] | null | null | null | notebooks/send_ipss_email.ipynb | colobas/voluntarios-covid19 | 0f8a519c5fd02e3ed88af8e715b2a79aaf3e3d78 | [
"MIT"
] | null | null | null | 23.335821 | 216 | 0.480652 | [
[
[
"emails = ['[email protected]',\n'[email protected]',\n'[email protected]',\n'[email protected]',\n'[email protected]',\n'[email protected]',\n'[email protected]',\n'[email protected]',\n'[email protected]',\n'[email protected]',\n'[email protected]',\n'[email protected]',\n'[email protected]',\n'[email protected]',\n'[email protected]',\n'[email protected]',\n'[email protected]',\n'[email protected]',\n'[email protected]',\n'[email protected]',\n'[email protected]',]",
"_____no_output_____"
],
[
"TEXT = \"\"\"\nBoa noite\n\nEnvio email na sequência do pedido de apoio por parte da vossa instituição.\n\nGostava apenas de saber se chegou a vossa instituição algum tipo de equipamento, tendo em conta que me foi dito que seriam enviadas viseiras durante esta semana.\n\nCaso não tenham recebido nada, pedia que me alertassem para conseguir perceber o que se passa com os apoios que conseguimos agregar.\n\nObrigado pela atenção,\nGustavo.\n\"\"\"",
"_____no_output_____"
],
[
"TEXT = \"\"\"\nBom dia,\n\nO meu nome é Gustavo Carita, tenho 27 anos, sou de Lisboa e sou engenheiro.\n\nRecentemente apercebi-me que, na actual conjuntura, existe uma carência nacional de profissionais para apoiar IPSS-Instituicoes Privadas de Solidariedade Social e decidi meter mãos á obra para tentar ajudar.\n\nEu e mais uns amigos criámos um website para ajudar as IPSS, duma forma simples e eficaz.\n\nPode verificar o website em: https://voluntarios-covid19.pt/\n\nNeste momento estamos a comunicar com todas as IPSS presentes na plataforma http://cartasocial.pt/.\n\nPara divulgarem a ajuda que precisam nesta fase, basta preencher o seguinte formulário: https://forms.gle/nC2GNNMcW8pyXiYw7\n\nDaremos o nosso melhor para promover as vossas iniciativas e obter toda a ajuda necessária.\n\nObrigado,\nGustavo.\n<[email protected]>\n\"\"\"",
"_____no_output_____"
],
[
"TO = '[email protected]'\nSUBJECT = 'IPSS Trial'\n\n\n# Gmail Sign In\ngmail_sender = '[email protected]'\ngmail_passwd = 'voluntariado123@'",
"_____no_output_____"
],
[
"def create_message(sender, to, subject, message_text):\n \"\"\"Create a message for an email.\n\n Args:\n sender: Email address of the sender.\n to: Email address of the receiver.\n subject: The subject of the email message.\n message_text: The text of the email message.\n\n Returns:\n An object containing a base64url encoded email object.\n \"\"\"\n message = MIMEText(message_text)\n message['to'] = to\n message['from'] = sender\n message['subject'] = subject\n b64_bytes = base64.urlsafe_b64encode(message.as_bytes())\n b64_string = b64_bytes.decode()\n body = {'raw': b64_string}\n return body",
"_____no_output_____"
],
[
"from email.mime.text import MIMEText\nimport base64",
"_____no_output_____"
],
[
"from googleapiclient.discovery import build\nimport pickle\nfrom tqdm import tqdm\nfrom time import sleep\nimport random\n\nwith open('../../ipss_mailing/token.pickle', 'rb') as token:\n creds = pickle.load(token)\n \nservice = build('gmail', 'v1', credentials=creds)",
"_____no_output_____"
],
[
"def send_message(service, user_id, message):\n \"\"\"Send an email message.\n\n Args:\n service: Authorized Gmail API service instance.\n user_id: User's email address. The special value \"me\"\n can be used to indicate the authenticated user.\n message: Message to be sent.\n\n Returns:\n Sent Message.\n \"\"\"\n try:\n message = (service.users().messages().send(userId=user_id, body=message)\n .execute())\n print('Message Id: %s' % message['id'])\n return message\n except Exception as e:\n print(e)",
"_____no_output_____"
],
[
"temp = open('../../ipss_mailing/emails.txt', 'r').readlines()\ntemp = [email.replace('\\n', '') for email in temp]",
"_____no_output_____"
],
[
"temp = [\n '[email protected]', \n '[email protected]'\n] + emails",
"_____no_output_____"
],
[
"for t in tqdm(temp):\n try:\n send_message(service, \"me\", create_message(\n '[email protected]', \n t, \n 'Voluntarios COVID19 - Confirmação',\n TEXT\n ))\n sleep(random.randint(0,2))\n except Exception as e:\n print(e)\n print(t)",
"\r 0%| | 0/23 [00:00<?, ?it/s]"
]
]
] | [
"code"
] | [
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
d09c80adf6e0a3472979893920e2e6d8eb4736cc | 4,749 | ipynb | Jupyter Notebook | examples/private-set-intersection/PSI_Client_Syft_Data_Scientist_user_friendly.ipynb | H4LL/PySyft | baaeec792e90919f0b27f583cbecc96d61b33fd6 | [
"Apache-2.0"
] | null | null | null | examples/private-set-intersection/PSI_Client_Syft_Data_Scientist_user_friendly.ipynb | H4LL/PySyft | baaeec792e90919f0b27f583cbecc96d61b33fd6 | [
"Apache-2.0"
] | null | null | null | examples/private-set-intersection/PSI_Client_Syft_Data_Scientist_user_friendly.ipynb | H4LL/PySyft | baaeec792e90919f0b27f583cbecc96d61b33fd6 | [
"Apache-2.0"
] | null | null | null | 28.437126 | 90 | 0.494841 | [
[
[
"%%capture\n!pip install openmined_psi",
"_____no_output_____"
],
[
"import syft as sy\nduet = sy.join_duet(loopback=True)",
"_____no_output_____"
],
[
"import openmined_psi as psi",
"_____no_output_____"
],
[
"class PsiClientDuet:\n def __init__(self, duet, timeout_secs=-1):\n self.duet = duet\n \n # get the reveal intersection flag and create a client\n reveal_intersection_ptr = self.duet.store[\"reveal_intersection\"]\n reveal_intersection = reveal_intersection_ptr.get(\n request_block=True,\n name=\"reveal_intersection\",\n reason=\"Are we revealing or not?\",\n timeout_secs=timeout_secs,\n delete_obj=True\n )\n self.reveal_intersection = reveal_intersection\n self.client = psi.client.CreateWithNewKey(reveal_intersection)\n \n # get the ServerSetup message\n setup_ptr = self.duet.store[\"setup\"]\n self.setup = setup_ptr.get(\n request_block=True,\n name=\"setup\",\n reason=\"To get the server setup\",\n timeout_secs=timeout_secs,\n delete_obj=True\n )\n\n def intersect(self, client_items, timeout_secs=-1):\n # send the client request to the server\n self.duet.requests.add_handler(\n name=\"request\",\n action=\"accept\"\n )\n request = self.client.CreateRequest(client_items)\n request_ptr = request.tag(\"request\").send(self.duet, searchable = True)\n \n # block until a response is received from the server\n while True:\n try:\n self.duet.store[\"response\"]\n except:\n continue\n \n break\n \n # get the response from the server\n response_ptr = self.duet.store[\"response\"]\n response = response_ptr.get(\n request_block=True,\n name=\"response\",\n reason=\"To get the response\",\n timeout_secs=timeout_secs,\n delete_obj=True\n )\n \n # calculate the intersection\n if self.reveal_intersection:\n return self.client.GetIntersection(self.setup, response)\n else:\n return self.client.GetIntersectionSize(self.setup, response)",
"_____no_output_____"
],
[
"client_items = [\"Element \" + str(i) for i in range(1000)]",
"_____no_output_____"
],
[
"client = PsiClientDuet(duet)\nintersection = client.intersect(client_items)",
"_____no_output_____"
],
[
"if client.reveal_intersection:\n iset = set(intersection)\n for idx in range(len(client_items)):\n if idx % 2 == 0:\n assert idx in iset\n else:\n assert idx not in iset",
"_____no_output_____"
],
[
"if not client.reveal_intersection:\n assert intersection >= (len(client_items) / 2.0)\n assert intersection <= (1.1 * len(client_items) / 2.0)",
"_____no_output_____"
]
]
] | [
"code"
] | [
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
d09c94edb476215d62510b5e72b24fef232d2f79 | 27,233 | ipynb | Jupyter Notebook | DataTalks_GettingKnowingData I.ipynb | gympohnpimol/Pandas-Data-Talks | 33c6638958ce9d0d6605946973beba8b8f1ca0c2 | [
"MIT"
] | 1 | 2020-07-16T20:48:04.000Z | 2020-07-16T20:48:04.000Z | DataTalks_GettingKnowingData I.ipynb | gympohnpimol/Pandas-Data-Talks | 33c6638958ce9d0d6605946973beba8b8f1ca0c2 | [
"MIT"
] | null | null | null | DataTalks_GettingKnowingData I.ipynb | gympohnpimol/Pandas-Data-Talks | 33c6638958ce9d0d6605946973beba8b8f1ca0c2 | [
"MIT"
] | null | null | null | 24.847628 | 131 | 0.341351 | [
[
[
"# Getting and Knowing your Data\nimport dataset from https://raw.githubusercontent.com/justmarkham/DAT8/master/data/u.user ",
"_____no_output_____"
],
[
"#### Import the necessary libraries",
"_____no_output_____"
]
],
[
[
"import numpy as np\nimport pandas as pd",
"_____no_output_____"
]
],
[
[
"#### Assign it to a variable called users and use the 'user_id' as index and See the first 25 entries",
"_____no_output_____"
]
],
[
[
"df = pd.read_csv(\"https://raw.githubusercontent.com/justmarkham/DAT8/master/data/u.user\",sep='|', index_col=\"user_id\")\ndf.head(25)",
"_____no_output_____"
]
],
[
[
"#### See the last 10 entries",
"_____no_output_____"
]
],
[
[
"df.tail(10)",
"_____no_output_____"
]
],
[
[
"#### The number of observations in the dataset",
"_____no_output_____"
]
],
[
[
"df.shape[0]",
"_____no_output_____"
]
],
[
[
"#### The number of columns in the dataset",
"_____no_output_____"
]
],
[
[
"df.shape[1]",
"_____no_output_____"
]
],
[
[
"#### Name of all the columns",
"_____no_output_____"
]
],
[
[
"df.columns",
"_____no_output_____"
]
],
[
[
"#### Dataset index",
"_____no_output_____"
]
],
[
[
"df.index",
"_____no_output_____"
]
],
[
[
"#### Data type of each column",
"_____no_output_____"
]
],
[
[
"df.dtypes",
"_____no_output_____"
]
],
[
[
"#### Observation only the occupation column",
"_____no_output_____"
]
],
[
[
"df[\"occupation\"]",
"_____no_output_____"
]
],
[
[
"#### The number of occupations in this dataset",
"_____no_output_____"
]
],
[
[
"df.occupation.nunique()",
"_____no_output_____"
]
],
[
[
"#### The most frequent occupation in dataset",
"_____no_output_____"
]
],
[
[
"df.occupation.value_counts()",
"_____no_output_____"
],
[
"df.occupation.value_counts().head()",
"_____no_output_____"
],
[
"df.occupation.value_counts().head().index[0]",
"_____no_output_____"
]
],
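  [
   [
    "# Equivalent one-liner (illustrative sketch): pandas' mode() also returns the most\n# frequent value(s) of a Series.\ndf.occupation.mode()[0]",
    "_____no_output_____"
   ]
  ],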
[
[
"#### Summarize the DataFrame",
"_____no_output_____"
]
],
[
[
"df.describe()",
"_____no_output_____"
]
],
[
[
"#### Summarize all the columns",
"_____no_output_____"
]
],
[
[
"df.describe(include = \"all\")",
"_____no_output_____"
]
],
[
[
"#### Summarize only the gender column",
"_____no_output_____"
]
],
[
[
"df.gender.describe()",
"_____no_output_____"
]
],
[
[
"#### The mean age of dataframe",
"_____no_output_____"
]
],
[
[
"round(df.age.mean())",
"_____no_output_____"
]
],
[
[
"#### The occupation with least occurrence",
"_____no_output_____"
]
],
[
[
"df.occupation.value_counts().tail()",
"_____no_output_____"
]
]
] | [
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] | [
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
]
] |
d09c970004589e16e562c74bd977898bffe86fdc | 19,282 | ipynb | Jupyter Notebook | notebooks/03_categorical_pipeline.ipynb | parmentelat/scikit-learn-mooc | 5b2db41e131fe69f9c0cd8cf5e42828afa13cd30 | [
"CC-BY-4.0"
] | 1 | 2022-02-17T13:13:52.000Z | 2022-02-17T13:13:52.000Z | notebooks/03_categorical_pipeline.ipynb | parmentelat/scikit-learn-mooc | 5b2db41e131fe69f9c0cd8cf5e42828afa13cd30 | [
"CC-BY-4.0"
] | null | null | null | notebooks/03_categorical_pipeline.ipynb | parmentelat/scikit-learn-mooc | 5b2db41e131fe69f9c0cd8cf5e42828afa13cd30 | [
"CC-BY-4.0"
] | null | null | null | 32.736842 | 155 | 0.627269 | [
[
[
"# Encoding of categorical variables\n\nIn this notebook, we will present typical ways of dealing with\n**categorical variables** by encoding them, namely **ordinal encoding** and\n**one-hot encoding**.",
"_____no_output_____"
],
[
"Let's first load the entire adult dataset containing both numerical and\ncategorical data.",
"_____no_output_____"
]
],
[
[
"import pandas as pd\n\nadult_census = pd.read_csv(\"../datasets/adult-census.csv\")\n# drop the duplicated column `\"education-num\"` as stated in the first notebook\nadult_census = adult_census.drop(columns=\"education-num\")\n\ntarget_name = \"class\"\ntarget = adult_census[target_name]\n\ndata = adult_census.drop(columns=[target_name])",
"_____no_output_____"
]
],
[
[
"\n## Identify categorical variables\n\nAs we saw in the previous section, a numerical variable is a\nquantity represented by a real or integer number. These variables can be\nnaturally handled by machine learning algorithms that are typically composed\nof a sequence of arithmetic instructions such as additions and\nmultiplications.\n\nIn contrast, categorical variables have discrete values, typically\nrepresented by string labels (but not only) taken from a finite list of\npossible choices. For instance, the variable `native-country` in our dataset\nis a categorical variable because it encodes the data using a finite list of\npossible countries (along with the `?` symbol when this information is\nmissing):",
"_____no_output_____"
]
],
[
[
"data[\"native-country\"].value_counts().sort_index()",
"_____no_output_____"
]
],
[
[
"How can we easily recognize categorical columns among the dataset? Part of\nthe answer lies in the columns' data type:",
"_____no_output_____"
]
],
[
[
"data.dtypes",
"_____no_output_____"
]
],
[
[
"If we look at the `\"native-country\"` column, we observe its data type is\n`object`, meaning it contains string values.\n\n## Select features based on their data type\n\nIn the previous notebook, we manually defined the numerical columns. We could\ndo a similar approach. Instead, we will use the scikit-learn helper function\n`make_column_selector`, which allows us to select columns based on\ntheir data type. We will illustrate how to use this helper.",
"_____no_output_____"
]
],
[
[
"from sklearn.compose import make_column_selector as selector\n\ncategorical_columns_selector = selector(dtype_include=object)\ncategorical_columns = categorical_columns_selector(data)\ncategorical_columns",
"_____no_output_____"
]
],
[
[
"Here, we created the selector by passing the data type to include; we then\npassed the input dataset to the selector object, which returned a list of\ncolumn names that have the requested data type. We can now filter out the\nunwanted columns:",
"_____no_output_____"
]
],
[
[
"data_categorical = data[categorical_columns]\ndata_categorical.head()",
"_____no_output_____"
],
[
"print(f\"The dataset is composed of {data_categorical.shape[1]} features\")",
"_____no_output_____"
]
],
[
[
"In the remainder of this section, we will present different strategies to\nencode categorical data into numerical data which can be used by a\nmachine-learning algorithm.",
"_____no_output_____"
],
[
"## Strategies to encode categories\n\n### Encoding ordinal categories\n\nThe most intuitive strategy is to encode each category with a different\nnumber. The `OrdinalEncoder` will transform the data in such manner.\nWe will start by encoding a single column to understand how the encoding\nworks.",
"_____no_output_____"
]
],
[
[
"from sklearn.preprocessing import OrdinalEncoder\n\neducation_column = data_categorical[[\"education\"]]\n\nencoder = OrdinalEncoder()\neducation_encoded = encoder.fit_transform(education_column)\neducation_encoded",
"_____no_output_____"
]
],
[
[
"We see that each category in `\"education\"` has been replaced by a numeric\nvalue. We could check the mapping between the categories and the numerical\nvalues by checking the fitted attribute `categories_`.",
"_____no_output_____"
]
],
[
[
"encoder.categories_",
"_____no_output_____"
]
],
[
[
"Now, we can check the encoding applied on all categorical features.",
"_____no_output_____"
]
],
[
[
"data_encoded = encoder.fit_transform(data_categorical)\ndata_encoded[:5]",
"_____no_output_____"
],
[
"print(\n f\"The dataset encoded contains {data_encoded.shape[1]} features\")",
"_____no_output_____"
]
],
[
[
"We see that the categories have been encoded for each feature (column)\nindependently. We also note that the number of features before and after the\nencoding is the same.\n\nHowever, be careful when applying this encoding strategy:\nusing this integer representation leads downstream predictive models\nto assume that the values are ordered (0 < 1 < 2 < 3... for instance).\n\nBy default, `OrdinalEncoder` uses a lexicographical strategy to map string\ncategory labels to integers. This strategy is arbitrary and often\nmeaningless. For instance, suppose the dataset has a categorical variable\nnamed `\"size\"` with categories such as \"S\", \"M\", \"L\", \"XL\". We would like the\ninteger representation to respect the meaning of the sizes by mapping them to\nincreasing integers such as `0, 1, 2, 3`.\nHowever, the lexicographical strategy used by default would map the labels\n\"S\", \"M\", \"L\", \"XL\" to 2, 1, 0, 3, by following the alphabetical order.\n\nThe `OrdinalEncoder` class accepts a `categories` constructor argument to\npass categories in the expected ordering explicitly. You can find more\ninformation in the\n[scikit-learn documentation](https://scikit-learn.org/stable/modules/preprocessing.html#encoding-categorical-features)\nif needed.\n\nIf a categorical variable does not carry any meaningful order information\nthen this encoding might be misleading to downstream statistical models and\nyou might consider using one-hot encoding instead (see below).\n\n### Encoding nominal categories (without assuming any order)\n\n`OneHotEncoder` is an alternative encoder that prevents the downstream\nmodels to make a false assumption about the ordering of categories. For a\ngiven feature, it will create as many new columns as there are possible\ncategories. For a given sample, the value of the column corresponding to the\ncategory will be set to `1` while all the columns of the other categories\nwill be set to `0`.\n\nWe will start by encoding a single feature (e.g. `\"education\"`) to illustrate\nhow the encoding works.",
"_____no_output_____"
]
],
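  [
   [
    "# Aside (illustrative sketch, not part of the original notebook): if the categories do\n# carry an order, you can pass it explicitly to OrdinalEncoder via the `categories`\n# argument, e.g. for a hypothetical \"size\" feature as mentioned above. The one-hot\n# encoding example continues in the next cell.\nimport pandas as pd\nfrom sklearn.preprocessing import OrdinalEncoder\n\nsizes = pd.DataFrame({\"size\": [\"S\", \"XL\", \"M\", \"L\"]})\nOrdinalEncoder(categories=[[\"S\", \"M\", \"L\", \"XL\"]]).fit_transform(sizes)",
    "_____no_output_____"
   ]
  ],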
[
[
"from sklearn.preprocessing import OneHotEncoder\n\nencoder = OneHotEncoder(sparse=False)\neducation_encoded = encoder.fit_transform(education_column)\neducation_encoded",
"_____no_output_____"
]
],
[
[
"<div class=\"admonition note alert alert-info\">\n<p class=\"first admonition-title\" style=\"font-weight: bold;\">Note</p>\n<p><tt class=\"docutils literal\">sparse=False</tt> is used in the <tt class=\"docutils literal\">OneHotEncoder</tt> for didactic purposes, namely\neasier visualization of the data.</p>\n<p class=\"last\">Sparse matrices are efficient data structures when most of your matrix\nelements are zero. They won't be covered in detail in this course. If you\nwant more details about them, you can look at\n<a class=\"reference external\" href=\"https://scipy-lectures.org/advanced/scipy_sparse/introduction.html#why-sparse-matrices\">this</a>.</p>\n</div>",
"_____no_output_____"
],
[
"We see that encoding a single feature will give a NumPy array full of zeros\nand ones. We can get a better understanding using the associated feature\nnames resulting from the transformation.",
"_____no_output_____"
]
],
[
[
"feature_names = encoder.get_feature_names_out(input_features=[\"education\"])\neducation_encoded = pd.DataFrame(education_encoded, columns=feature_names)\neducation_encoded",
"_____no_output_____"
]
],
[
[
"As we can see, each category (unique value) became a column; the encoding\nreturned, for each sample, a 1 to specify which category it belongs to.\n\nLet's apply this encoding on the full dataset.",
"_____no_output_____"
]
],
[
[
"print(\n f\"The dataset is composed of {data_categorical.shape[1]} features\")\ndata_categorical.head()",
"_____no_output_____"
],
[
"data_encoded = encoder.fit_transform(data_categorical)\ndata_encoded[:5]",
"_____no_output_____"
],
[
"print(\n f\"The encoded dataset contains {data_encoded.shape[1]} features\")",
"_____no_output_____"
]
],
[
[
"Let's wrap this NumPy array in a dataframe with informative column names as\nprovided by the encoder object:",
"_____no_output_____"
]
],
[
[
"columns_encoded = encoder.get_feature_names_out(data_categorical.columns)\npd.DataFrame(data_encoded, columns=columns_encoded).head()",
"_____no_output_____"
]
],
[
[
"Look at how the `\"workclass\"` variable of the 3 first records has been\nencoded and compare this to the original string representation.\n\nThe number of features after the encoding is more than 10 times larger than\nin the original data because some variables such as `occupation` and\n`native-country` have many possible categories.",
"_____no_output_____"
],
[
"### Choosing an encoding strategy\n\nChoosing an encoding strategy will depend on the underlying models and the\ntype of categories (i.e. ordinal vs. nominal).",
"_____no_output_____"
],
[
"<div class=\"admonition note alert alert-info\">\n<p class=\"first admonition-title\" style=\"font-weight: bold;\">Note</p>\n<p class=\"last\">In general <tt class=\"docutils literal\">OneHotEncoder</tt> is the encoding strategy used when the\ndownstream models are <strong>linear models</strong> while <tt class=\"docutils literal\">OrdinalEncoder</tt> is often a\ngood strategy with <strong>tree-based models</strong>.</p>\n</div>",
"_____no_output_____"
],
[
"\nUsing an `OrdinalEncoder` will output ordinal categories. This means\nthat there is an order in the resulting categories (e.g. `0 < 1 < 2`). The\nimpact of violating this ordering assumption is really dependent on the\ndownstream models. Linear models will be impacted by misordered categories\nwhile tree-based models will not.\n\nYou can still use an `OrdinalEncoder` with linear models but you need to be\nsure that:\n- the original categories (before encoding) have an ordering;\n- the encoded categories follow the same ordering as the original\n  categories.\nThe **next exercise** highlights the issue of misusing `OrdinalEncoder` with\na linear model.\n\nOne-hot encoding categorical variables with high cardinality can cause \ncomputational inefficiency in tree-based models. Because of this, it is not recommended\nto use `OneHotEncoder` in such cases even if the original categories do not \nhave a given order. We will show this in the **final exercise** of this sequence.",
"_____no_output_____"
],
[
"## Evaluate our predictive pipeline\n\nWe can now integrate this encoder inside a machine learning pipeline like we\ndid with numerical data: let's train a linear classifier on the encoded data\nand check the generalization performance of this machine learning pipeline using\ncross-validation.\n\nBefore we create the pipeline, we have to linger on the `native-country`.\nLet's recall some statistics regarding this column.",
"_____no_output_____"
]
],
[
[
"data[\"native-country\"].value_counts()",
"_____no_output_____"
]
],
[
[
"We see that the `Holand-Netherlands` category is occurring rarely. This will\nbe a problem during cross-validation: if the sample ends up in the test set\nduring splitting then the classifier would not have seen the category during\ntraining and will not be able to encode it.\n\nIn scikit-learn, there are two solutions to bypass this issue:\n\n* list all the possible categories and provide it to the encoder via the\n keyword argument `categories`;\n* use the parameter `handle_unknown`.\n\nHere, we will use the latter solution for simplicity.",
"_____no_output_____"
],
[
"<div class=\"admonition tip alert alert-warning\">\n<p class=\"first admonition-title\" style=\"font-weight: bold;\">Tip</p>\n<p class=\"last\">Be aware the <tt class=\"docutils literal\">OrdinalEncoder</tt> exposes as well a parameter\n<tt class=\"docutils literal\">handle_unknown</tt>. It can be set to <tt class=\"docutils literal\">use_encoded_value</tt> and by setting\n<tt class=\"docutils literal\">unknown_value</tt> to handle rare categories. You are going to use these\nparameters in the next exercise.</p>\n</div>",
"_____no_output_____"
],
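[
"For instance, a small sketch of these two parameters; the value `-1` is an arbitrary placeholder chosen only for this illustration:\n\n```python\nfrom sklearn.preprocessing import OrdinalEncoder\n\nencoder = OrdinalEncoder(handle_unknown='use_encoded_value', unknown_value=-1)\n# categories never seen during fit are encoded as -1 at transform time\n```",
"_____no_output_____"
],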
[
"We can now create our machine learning pipeline.",
"_____no_output_____"
]
],
[
[
"from sklearn.pipeline import make_pipeline\nfrom sklearn.linear_model import LogisticRegression\n\nmodel = make_pipeline(\n OneHotEncoder(handle_unknown=\"ignore\"), LogisticRegression(max_iter=500)\n)",
"_____no_output_____"
]
],
[
[
"<div class=\"admonition note alert alert-info\">\n<p class=\"first admonition-title\" style=\"font-weight: bold;\">Note</p>\n<p class=\"last\">Here, we need to increase the maximum number of iterations to obtain a fully\nconverged <tt class=\"docutils literal\">LogisticRegression</tt> and silence a <tt class=\"docutils literal\">ConvergenceWarning</tt>. Contrary\nto the numerical features, the one-hot encoded categorical features are all\non the same scale (values are 0 or 1), so they would not benefit from\nscaling. In this case, increasing <tt class=\"docutils literal\">max_iter</tt> is the right thing to do.</p>\n</div>",
"_____no_output_____"
],
[
"Finally, we can check the model's generalization performance only using the\ncategorical columns.",
"_____no_output_____"
]
],
[
[
"from sklearn.model_selection import cross_validate\ncv_results = cross_validate(model, data_categorical, target)\ncv_results",
"_____no_output_____"
],
[
"scores = cv_results[\"test_score\"]\nprint(f\"The accuracy is: {scores.mean():.3f} +/- {scores.std():.3f}\")",
"_____no_output_____"
]
],
[
[
"As you can see, this representation of the categorical variables is\nslightly more predictive of the revenue than the numerical variables\nthat we used previously.",
"_____no_output_____"
],
[
"\nIn this notebook we have:\n* seen two common strategies for encoding categorical features: **ordinal\n encoding** and **one-hot encoding**;\n* used a **pipeline** to use a **one-hot encoder** before fitting a logistic\n regression.",
"_____no_output_____"
]
]
] | [
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown"
] | [
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown",
"markdown",
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown",
"markdown",
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown",
"markdown"
]
] |
d09ca6a31470fa801b98ed90135b6b9904fd7437 | 9,692 | ipynb | Jupyter Notebook | examples/notebooks/plots_boxplots.ipynb | chengevo/statsmodels | c28e6479ace0f0965001c55fb652b2a431bbd158 | [
"BSD-3-Clause"
] | null | null | null | examples/notebooks/plots_boxplots.ipynb | chengevo/statsmodels | c28e6479ace0f0965001c55fb652b2a431bbd158 | [
"BSD-3-Clause"
] | null | null | null | examples/notebooks/plots_boxplots.ipynb | chengevo/statsmodels | c28e6479ace0f0965001c55fb652b2a431bbd158 | [
"BSD-3-Clause"
] | null | null | null | 28.759644 | 121 | 0.531985 | [
[
[
"# Box Plots",
"_____no_output_____"
],
[
"The following illustrates some options for the boxplot in statsmodels. These include `violinplot` and `beanplot`.",
"_____no_output_____"
]
],
[
[
"%matplotlib inline\n\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport statsmodels.api as sm",
"_____no_output_____"
]
],
[
[
"## Bean Plots",
"_____no_output_____"
],
[
"The following example is taken from the docstring of `beanplot`.\n\nWe use the American National Election Survey 1996 dataset, which has Party\nIdentification of respondents as independent variable and (among other\ndata) age as dependent variable.",
"_____no_output_____"
]
],
[
[
"data = sm.datasets.anes96.load_pandas()\nparty_ID = np.arange(7)\nlabels = [\"Strong Democrat\", \"Weak Democrat\", \"Independent-Democrat\",\n \"Independent-Independent\", \"Independent-Republican\",\n \"Weak Republican\", \"Strong Republican\"]",
"_____no_output_____"
]
],
[
[
"Group age by party ID, and create a bean plot with it:",
"_____no_output_____"
]
],
[
[
"plt.rcParams['figure.subplot.bottom'] = 0.23 # keep labels visible\nplt.rcParams['figure.figsize'] = (10.0, 8.0) # make plot larger in notebook\nage = [data.exog['age'][data.endog == id] for id in party_ID]\nfig = plt.figure()\nax = fig.add_subplot(111)\nplot_opts={'cutoff_val':5, 'cutoff_type':'abs',\n 'label_fontsize':'small',\n 'label_rotation':30}\nsm.graphics.beanplot(age, ax=ax, labels=labels,\n plot_opts=plot_opts)\nax.set_xlabel(\"Party identification of respondent.\")\nax.set_ylabel(\"Age\")\n#plt.show()",
"_____no_output_____"
],
[
"def beanplot(data, plot_opts={}, jitter=False):\n \"\"\"helper function to try out different plot options\n \"\"\"\n fig = plt.figure()\n ax = fig.add_subplot(111)\n plot_opts_ = {'cutoff_val':5, 'cutoff_type':'abs',\n 'label_fontsize':'small',\n 'label_rotation':30}\n plot_opts_.update(plot_opts)\n sm.graphics.beanplot(data, ax=ax, labels=labels,\n jitter=jitter, plot_opts=plot_opts_)\n ax.set_xlabel(\"Party identification of respondent.\")\n ax.set_ylabel(\"Age\")",
"_____no_output_____"
],
[
"fig = beanplot(age, jitter=True)",
"_____no_output_____"
],
[
"fig = beanplot(age, plot_opts={'violin_width': 0.5, 'violin_fc':'#66c2a5'})",
"_____no_output_____"
],
[
"fig = beanplot(age, plot_opts={'violin_fc':'#66c2a5'})",
"_____no_output_____"
],
[
"fig = beanplot(age, plot_opts={'bean_size': 0.2, 'violin_width': 0.75, 'violin_fc':'#66c2a5'})",
"_____no_output_____"
],
[
"fig = beanplot(age, jitter=True, plot_opts={'violin_fc':'#66c2a5'})",
"_____no_output_____"
],
[
"fig = beanplot(age, jitter=True, plot_opts={'violin_width': 0.5, 'violin_fc':'#66c2a5'})",
"_____no_output_____"
]
],
[
[
"## Advanced Box Plots",
"_____no_output_____"
],
[
"Based on the example script `example_enhanced_boxplots.py` (by Ralf Gommers)",
"_____no_output_____"
]
],
[
[
"import numpy as np\nimport matplotlib.pyplot as plt\n\nimport statsmodels.api as sm\n\n\n# Necessary to make horizontal axis labels fit\nplt.rcParams['figure.subplot.bottom'] = 0.23\n\ndata = sm.datasets.anes96.load_pandas()\nparty_ID = np.arange(7)\nlabels = [\"Strong Democrat\", \"Weak Democrat\", \"Independent-Democrat\",\n \"Independent-Independent\", \"Independent-Republican\",\n \"Weak Republican\", \"Strong Republican\"]\n\n# Group age by party ID.\nage = [data.exog['age'][data.endog == id] for id in party_ID]",
"_____no_output_____"
],
[
"# Create a violin plot.\nfig = plt.figure()\nax = fig.add_subplot(111)\n\nsm.graphics.violinplot(age, ax=ax, labels=labels,\n plot_opts={'cutoff_val':5, 'cutoff_type':'abs',\n 'label_fontsize':'small',\n 'label_rotation':30})\n\nax.set_xlabel(\"Party identification of respondent.\")\nax.set_ylabel(\"Age\")\nax.set_title(\"US national election '96 - Age & Party Identification\")",
"_____no_output_____"
],
[
"# Create a bean plot.\nfig2 = plt.figure()\nax = fig2.add_subplot(111)\n\nsm.graphics.beanplot(age, ax=ax, labels=labels,\n plot_opts={'cutoff_val':5, 'cutoff_type':'abs',\n 'label_fontsize':'small',\n 'label_rotation':30})\n\nax.set_xlabel(\"Party identification of respondent.\")\nax.set_ylabel(\"Age\")\nax.set_title(\"US national election '96 - Age & Party Identification\")",
"_____no_output_____"
],
[
"# Create a jitter plot.\nfig3 = plt.figure()\nax = fig3.add_subplot(111)\n\nplot_opts={'cutoff_val':5, 'cutoff_type':'abs', 'label_fontsize':'small',\n 'label_rotation':30, 'violin_fc':(0.8, 0.8, 0.8),\n 'jitter_marker':'.', 'jitter_marker_size':3, 'bean_color':'#FF6F00',\n 'bean_mean_color':'#009D91'}\nsm.graphics.beanplot(age, ax=ax, labels=labels, jitter=True,\n plot_opts=plot_opts)\n\nax.set_xlabel(\"Party identification of respondent.\")\nax.set_ylabel(\"Age\")\nax.set_title(\"US national election '96 - Age & Party Identification\")",
"_____no_output_____"
],
[
"# Create an asymmetrical jitter plot.\nix = data.exog['income'] < 16 # incomes < $30k\nage = data.exog['age'][ix]\nendog = data.endog[ix]\nage_lower_income = [age[endog == id] for id in party_ID]\n\nix = data.exog['income'] >= 20 # incomes > $50k\nage = data.exog['age'][ix]\nendog = data.endog[ix]\nage_higher_income = [age[endog == id] for id in party_ID]\n\nfig = plt.figure()\nax = fig.add_subplot(111)\n\nplot_opts['violin_fc'] = (0.5, 0.5, 0.5)\nplot_opts['bean_show_mean'] = False\nplot_opts['bean_show_median'] = False\nplot_opts['bean_legend_text'] = 'Income < \\$30k'\nplot_opts['cutoff_val'] = 10\nsm.graphics.beanplot(age_lower_income, ax=ax, labels=labels, side='left',\n jitter=True, plot_opts=plot_opts)\nplot_opts['violin_fc'] = (0.7, 0.7, 0.7)\nplot_opts['bean_color'] = '#009D91'\nplot_opts['bean_legend_text'] = 'Income > \\$50k'\nsm.graphics.beanplot(age_higher_income, ax=ax, labels=labels, side='right',\n jitter=True, plot_opts=plot_opts)\n\nax.set_xlabel(\"Party identification of respondent.\")\nax.set_ylabel(\"Age\")\nax.set_title(\"US national election '96 - Age & Party Identification\")\n\n\n# Show all plots.\n#plt.show()",
"_____no_output_____"
]
]
] | [
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] | [
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code",
"code",
"code",
"code"
]
] |
d09cae1476c510574e20bd32fba6ed0568ec86f7 | 211,044 | ipynb | Jupyter Notebook | Keras-DeepRecommender-Clothing-Shoes-Jewelry/2_Modeling/matrix_facto_10_embeddings_100_epochs.ipynb | zirubak/dse260-CapStone-Amazon | 4c90c1cf6979d837af7eba1c9806da5f48b7655a | [
"Apache-2.0"
] | 1 | 2020-02-17T21:15:00.000Z | 2020-02-17T21:15:00.000Z | Keras-DeepRecommender-Clothing-Shoes-Jewelry/2_Modeling/matrix_facto_10_embeddings_100_epochs.ipynb | zirubak/dse260-CapStone-Amazon | 4c90c1cf6979d837af7eba1c9806da5f48b7655a | [
"Apache-2.0"
] | null | null | null | Keras-DeepRecommender-Clothing-Shoes-Jewelry/2_Modeling/matrix_facto_10_embeddings_100_epochs.ipynb | zirubak/dse260-CapStone-Amazon | 4c90c1cf6979d837af7eba1c9806da5f48b7655a | [
"Apache-2.0"
] | 3 | 2020-02-11T08:03:10.000Z | 2020-07-28T16:48:04.000Z | 173.555921 | 172,932 | 0.893439 | [
[
[
"# TEST for matrix_facto_10_embeddings_100_epochs\n\n# Deep recommender on top of Amazon’s Clean Clothing Shoes and Jewelry explicit rating dataset\n\nFrame the recommendation system as a rating prediction machine learning problem and create a hybrid architecture that mixes the collaborative and content based filtering approaches:\n- Collaborative part: Predict item ratings in order to recommend to the user items that he is likely to rate high.\n- Content based: use metadata inputs (such as price and title) about items to find similar items to recommend.\n\n### - Create 2 explicit recommendation engine models based on 2 machine learning architectures using Keras: \n 1. a matrix factorization model \n 2. a deep neural network model.\n\n\n### Compare the results of the different models and configurations to find the \"best\" predicting model\n\n### Use the best model for recommending items to users",
"_____no_output_____"
]
],
[
[
"### name of model\nmodname = 'matrix_facto_10_embeddings_100_epochs'\n\n### number of epochs\nnum_epochs = 100\n\n### size of embedding\nembedding_size = 10",
"_____no_output_____"
],
[
"# import sys\n\n# !{sys.executable} -m pip install --upgrade pip\n# !{sys.executable} -m pip install sagemaker-experiments\n# !{sys.executable} -m pip install pandas\n# !{sys.executable} -m pip install numpy\n# !{sys.executable} -m pip install matplotlib\n# !{sys.executable} -m pip install boto3\n# !{sys.executable} -m pip install sagemaker\n# !{sys.executable} -m pip install pyspark\n# !{sys.executable} -m pip install ipython-autotime\n# !{sys.executable} -m pip install surprise\n# !{sys.executable} -m pip install smart_open\n# !{sys.executable} -m pip install pyarrow\n# !{sys.executable} -m pip install fastparquet",
"_____no_output_____"
],
[
"# Check Java version \n# !sudo yum -y update",
"_____no_output_____"
],
[
"# # Need to use Java 1.8.0\n# !sudo yum remove jre-1.7.0-openjdk -y",
"_____no_output_____"
],
[
"!java -version",
"openjdk version \"11.0.1\" 2018-10-16 LTS\nOpenJDK Runtime Environment Zulu11.2+3 (build 11.0.1+13-LTS)\nOpenJDK 64-Bit Server VM Zulu11.2+3 (build 11.0.1+13-LTS, mixed mode)\n"
],
[
"# !sudo update-alternatives --config java",
"_____no_output_____"
],
[
"# !pip install pyarrow fastparquet\n# !pip install ipython-autotime\n# !pip install tqdm pydot pydotplus pydot_ng",
"_____no_output_____"
],
[
"#### To measure all running time\n# https://github.com/cpcloud/ipython-autotime\n\n%load_ext autotime",
"_____no_output_____"
],
[
"%pylab inline\nimport warnings\nwarnings.filterwarnings(\"ignore\")\n%matplotlib inline\nimport re\nimport seaborn as sbn\nimport nltk\nimport tqdm as tqdm\nimport sqlite3\nimport pandas as pd\nimport numpy as np\nfrom pandas import DataFrame \nimport string\nimport pydot \nimport pydotplus\nimport pydot_ng\nimport pickle\nimport time\nimport gzip\nimport os\nos.getcwd()\n \nimport matplotlib.pyplot as plt\nfrom math import floor,ceil\n\n#from nltk.corpus import stopwords\n#stop = stopwords.words(\"english\")\nfrom nltk.stem.porter import PorterStemmer\nenglish_stemmer=nltk.stem.SnowballStemmer('english')\nfrom nltk.tokenize import word_tokenize\n\nfrom sklearn.metrics import accuracy_score, confusion_matrix,roc_curve, auc,classification_report, mean_squared_error, mean_absolute_error\nfrom sklearn.feature_extraction.text import CountVectorizer, TfidfTransformer\nfrom sklearn.model_selection import train_test_split\nfrom sklearn import metrics\nfrom sklearn.svm import LinearSVC\nfrom sklearn.neighbors import NearestNeighbors\nfrom sklearn.linear_model import LogisticRegression\nfrom sklearn import neighbors\nfrom scipy.spatial.distance import cosine\nfrom sklearn.feature_selection import SelectKBest\nfrom IPython.display import SVG\n\n# Tensorflow\nimport tensorflow as tf\n\n#Keras\nfrom keras.models import Sequential, Model, load_model, save_model\nfrom keras.callbacks import ModelCheckpoint\nfrom keras.layers import Dense, Activation, Dropout, Input, Masking, TimeDistributed, LSTM, Conv1D, Embedding\nfrom keras.layers import GRU, Bidirectional, BatchNormalization, Reshape\nfrom keras.optimizers import Adam\nfrom keras.layers.core import Reshape, Dropout, Dense\nfrom keras.layers.merge import Multiply, Dot, Concatenate\nfrom keras.layers.embeddings import Embedding\nfrom keras import optimizers\nfrom keras.callbacks import ModelCheckpoint\nfrom keras.utils.vis_utils import model_to_dot",
"Populating the interactive namespace from numpy and matplotlib\nWARNING:tensorflow:From /home/ec2-user/anaconda3/envs/tensorflow_p36/lib/python3.6/site-packages/tensorflow_core/__init__.py:1467: The name tf.estimator.inputs is deprecated. Please use tf.compat.v1.estimator.inputs instead.\n\ntime: 3.5 s\n"
]
],
[
[
"### Set and Check GPUs",
"_____no_output_____"
]
],
[
[
"#Session\nfrom keras import backend as K\n\ndef set_check_gpu():\n cfg = K.tf.ConfigProto()\n cfg.gpu_options.per_process_gpu_memory_fraction =1 # allow all of the GPU memory to be allocated\n # for 8 GPUs\n # cfg.gpu_options.visible_device_list = \"0,1,2,3,4,5,6,7\" # \"0,1\"\n # for 1 GPU\n cfg.gpu_options.visible_device_list = \"0\"\n #cfg.gpu_options.allow_growth = True # # Don't pre-allocate memory; dynamically allocate the memory used on the GPU as-needed\n #cfg.log_device_placement = True # to log device placement (on which device the operation ran)\n sess = K.tf.Session(config=cfg)\n K.set_session(sess) # set this TensorFlow session as the default session for Keras\n\n print(\"* TF version: \", [tf.__version__, tf.test.is_gpu_available()])\n print(\"* List of GPU(s): \", tf.config.experimental.list_physical_devices() )\n print(\"* Num GPUs Available: \", len(tf.config.experimental.list_physical_devices('GPU'))) \n \n \n os.environ[\"CUDA_DEVICE_ORDER\"] = \"PCI_BUS_ID\";\n # set for 8 GPUs\n# os.environ[\"CUDA_VISIBLE_DEVICES\"] = \"0,1,2,3,4,5,6,7\";\n # set for 1 GPU\n os.environ[\"CUDA_VISIBLE_DEVICES\"] = \"0\";\n\n # Tf debugging option\n tf.debugging.set_log_device_placement(True)\n\n gpus = tf.config.experimental.list_physical_devices('GPU')\n\n if gpus:\n try:\n # Currently, memory growth needs to be the same across GPUs\n for gpu in gpus:\n tf.config.experimental.set_memory_growth(gpu, True)\n logical_gpus = tf.config.experimental.list_logical_devices('GPU')\n print(len(gpus), \"Physical GPUs,\", len(logical_gpus), \"Logical GPUs\")\n except RuntimeError as e:\n # Memory growth must be set before GPUs have been initialized\n print(e)\n\n# print(tf.config.list_logical_devices('GPU'))\n print(tf.config.experimental.list_physical_devices('GPU'))\n print(\"Num GPUs Available: \", len(tf.config.experimental.list_physical_devices('GPU')))",
"time: 6.19 ms\n"
],
[
"set_check_gpu()",
"* TF version: ['1.15.2', True]\n* List of GPU(s): [PhysicalDevice(name='/physical_device:CPU:0', device_type='CPU'), PhysicalDevice(name='/physical_device:XLA_CPU:0', device_type='XLA_CPU'), PhysicalDevice(name='/physical_device:XLA_GPU:0', device_type='XLA_GPU'), PhysicalDevice(name='/physical_device:GPU:0', device_type='GPU')]\n* Num GPUs Available: 1\n1 Physical GPUs, 1 Logical GPUs\n[PhysicalDevice(name='/physical_device:GPU:0', device_type='GPU')]\nNum GPUs Available: 1\ntime: 188 ms\n"
],
[
"# reset GPU memory& Keras Session\ndef reset_keras():\n try:\n del classifier\n del model \n except:\n pass\n\n K.clear_session()\n \n K.get_session().close()\n# sess = K.get_session()\n\n cfg = K.tf.ConfigProto()\n cfg.gpu_options.per_process_gpu_memory_fraction \n# cfg.gpu_options.visible_device_list = \"0,1,2,3,4,5,6,7\" # \"0,1\"\n cfg.gpu_options.visible_device_list = \"0\" # \"0,1\"\n cfg.gpu_options.allow_growth = True # dynamically grow the memory used on the GPU\n\n sess = K.tf.Session(config=cfg)\n K.set_session(sess) # set this TensorFlow session as the default session for Keras\n ",
"time: 2.51 ms\n"
]
],
[
[
"## Load dataset and analysis using Spark",
"_____no_output_____"
],
[
"## Download and prepare Data:\n#### 1. Read the data:\n#### Read the data from the reviews dataset of Amazon. \n#### Use the dataset in which all users and items have at least 5 reviews. \n\n### Location of dataset: https://nijianmo.github.io/amazon/index.html",
"_____no_output_____"
]
],
[
[
"import pandas as pd\n\nimport boto3\nimport sagemaker\nfrom sagemaker import get_execution_role\nfrom sagemaker.session import Session\nfrom sagemaker.analytics import ExperimentAnalytics\n\nimport gzip\nimport json\n\nfrom pyspark.ml import Pipeline\nfrom pyspark.sql.types import StructField, StructType, StringType, DoubleType\nfrom pyspark.ml.feature import StringIndexer, VectorIndexer, OneHotEncoder, VectorAssembler\nfrom pyspark.sql.functions import *\n\n# spark imports\nfrom pyspark.sql import SparkSession\nfrom pyspark.sql.functions import UserDefinedFunction, explode, desc\nfrom pyspark.sql.types import StringType, ArrayType\nfrom pyspark.ml.evaluation import RegressionEvaluator\n\nimport os\nimport pandas as pd\nimport pyarrow\nimport fastparquet\n\n# from pandas_profiling import ProfileReport",
"time: 553 ms\n"
],
[
"# !aws s3 cp s3://dse-cohort5-group1/2-Keras-DeepRecommender/dataset/Clean_Clothing_Shoes_and_Jewelry_5_clean.parquet ./data/",
"time: 1.05 ms\n"
],
[
"!ls -alh ./data",
"total 3.3G\ndrwxrwxr-x 5 ec2-user ec2-user 4.0K May 26 16:08 .\ndrwxrwxr-x 8 ec2-user ec2-user 4.0K May 26 19:47 ..\n-rw-rw-r-- 1 ec2-user ec2-user 308M May 26 15:35 Clean_Clothing_Shoes_and_Jewelry_5_clean.parquet\ndrwxrwxr-x 2 ec2-user ec2-user 4.0K May 26 15:46 Cleaned_meta_Clothing_Shoes_and_Jewelry.parquet\n-rw-rw-r-- 1 ec2-user ec2-user 1.2G Nov 21 2019 Clothing_Shoes_and_Jewelry_5.json.gz\ndrwxrwxr-x 2 ec2-user ec2-user 4.0K May 26 15:34 Clothing_Shoes_and_Jewelry_5.parquet\n-rw-rw-r-- 1 ec2-user ec2-user 31 May 26 15:34 for_dataset.txt\ndrwxrwxr-x 2 ec2-user ec2-user 4.0K May 26 15:34 .ipynb_checkpoints\n-rw-rw-r-- 1 ec2-user ec2-user 1.5G Oct 15 2019 meta_Clothing_Shoes_and_Jewelry.json.gz\n-rw-rw-r-- 1 ec2-user ec2-user 71M May 26 16:08 ratings_test.parquet\n-rw-rw-r-- 1 ec2-user ec2-user 282M May 26 16:08 ratings_train.parquet\ntime: 131 ms\n"
]
],
[
[
"### Read cleaned dataset from parquet files",
"_____no_output_____"
]
],
[
[
"review_data = pd.read_parquet(\"./data/Clean_Clothing_Shoes_and_Jewelry_5_clean.parquet\")",
"time: 4.25 s\n"
],
[
"review_data[:3]",
"_____no_output_____"
],
[
"review_data.shape",
"_____no_output_____"
]
],
[
[
"### 2. Arrange and clean the data",
"_____no_output_____"
],
[
"Rearrange the columns by relevance and rename column names",
"_____no_output_____"
]
],
[
[
"review_data.columns",
"_____no_output_____"
],
[
"review_data = review_data[['asin', 'image', 'summary', 'reviewText', 'overall', 'reviewerID', 'reviewerName', 'reviewTime']]\n\nreview_data.rename(columns={ 'overall': 'score','reviewerID': 'user_id', 'reviewerName': 'user_name'}, inplace=True)\n\n#the variables names after rename in the modified data frame\nlist(review_data)",
"_____no_output_____"
]
],
[
[
"# Add Metadata \n\n### Metadata includes descriptions, price, sales-rank, brand info, and co-purchasing links\n- asin - ID of the product, e.g. 0000031852\n- title - name of the product\n- price - price in US dollars (at time of crawl)\n- imUrl - url of the product image\n- related - related products (also bought, also viewed, bought together, buy after viewing)\n- salesRank - sales rank information\n- brand - brand name\n- categories - list of categories the product belongs to",
"_____no_output_____"
]
],
[
[
"# !aws s3 cp s3://dse-cohort5-group1/2-Keras-DeepRecommender/dataset/Cleaned_meta_Clothing_Shoes_and_Jewelry.parquet ./data/",
"_____no_output_____"
],
[
"all_info = pd.read_parquet(\"./data/Cleaned_meta_Clothing_Shoes_and_Jewelry.parquet\")",
"_____no_output_____"
],
[
"all_info.head(n=5)",
"_____no_output_____"
]
],
[
[
"### Arrange and clean the data",
"_____no_output_____"
],
[
"- Cleaning, handling missing data, normalization, etc:\n- For the algorithm in keras to work, remap all item_ids and user_ids to an interger between 0 and the total number of users or the total number of items",
"_____no_output_____"
]
],
[
[
"all_info.columns",
"_____no_output_____"
],
[
"items = all_info.asin.unique()\nitem_map = {i:val for i,val in enumerate(items)}\ninverse_item_map = {val:i for i,val in enumerate(items)}\nall_info[\"old_item_id\"] = all_info[\"asin\"] # copying for join with metadata\nall_info[\"item_id\"] = all_info[\"asin\"].map(inverse_item_map)\nitems = all_info.item_id.unique()\nprint (\"We have %d unique items in metadata \"%items.shape[0])",
"_____no_output_____"
],
[
"all_info['description'] = all_info['description'].fillna(all_info['title'].fillna('no_data'))\nall_info['title'] = all_info['title'].fillna(all_info['description'].fillna('no_data').apply(str).str[:20])\nall_info['image'] = all_info['image'].fillna('no_data')\nall_info['price'] = pd.to_numeric(all_info['price'],errors=\"coerce\")\nall_info['price'] = all_info['price'].fillna(all_info['price'].median()) ",
"_____no_output_____"
],
[
"users = review_data.user_id.unique()\nuser_map = {i:val for i,val in enumerate(users)}\ninverse_user_map = {val:i for i,val in enumerate(users)}\nreview_data[\"old_user_id\"] = review_data[\"user_id\"] \nreview_data[\"user_id\"] = review_data[\"user_id\"].map(inverse_user_map)\n\nitems_reviewed = review_data.asin.unique()\nreview_data[\"old_item_id\"] = review_data[\"asin\"] # copying for join with metadata\nreview_data[\"item_id\"] = review_data[\"asin\"].map(inverse_item_map)\n\nitems_reviewed = review_data.item_id.unique()\nusers = review_data.user_id.unique()",
"_____no_output_____"
],
[
"print (\"We have %d unique users\"%users.shape[0])\nprint (\"We have %d unique items reviewed\"%items_reviewed.shape[0])\n# We have 192403 unique users in the \"small\" dataset\n# We have 63001 unique items reviewed in the \"small\" dataset",
"_____no_output_____"
],
[
"review_data.head(3)",
"_____no_output_____"
]
],
[
[
"## Adding the review count and average to the metadata",
"_____no_output_____"
]
],
[
[
"#items_nb = review_data['old_item_id'].value_counts().reset_index()\nitems_avg = review_data.drop(['summary','reviewText','user_id','asin','user_name','reviewTime','old_user_id','item_id'],axis=1).groupby('old_item_id').agg(['count','mean']).reset_index()\nitems_avg.columns= ['old_item_id','num_ratings','avg_rating']\n#items_avg.head(5)\nitems_avg['num_ratings'].describe()",
"_____no_output_____"
],
[
"all_info = pd.merge(all_info,items_avg,how='left',left_on='asin',right_on='old_item_id')\npd.set_option('display.max_colwidth', 100)\nall_info.head(2)",
"_____no_output_____"
]
],
[
[
"# Explicit feedback (Reviewed Dataset) Recommender System",
"_____no_output_____"
],
[
"### Explicit feedback is when users voluntarily give rating information on what they like and dislike.\n\n- In this case, I have explicit item ratings ranging from one to five.\n- Framed the recommendation system as a rating prediction machine learning problem: \n    - Predict an item's ratings in order to be able to recommend to a user an item that he is likely to rate high if he buys it.\n\n### To evaluate the model, I randomly separate the data into a training and test set. ",
"_____no_output_____"
]
],
[
[
"ratings_train, ratings_test = train_test_split( review_data, test_size=0.1, random_state=0)",
"_____no_output_____"
],
[
"ratings_train.shape",
"_____no_output_____"
],
[
"ratings_test.shape",
"_____no_output_____"
]
],
[
[
"## Adding Metadata to the train set\nCreate an architecture that mixes the collaborative and content based filtering approaches:\n```\n- Collaborative Part: Predict items ratings to recommend to the user items which he is likely to rate high according to learnt item & user embeddings (learn similarity from interactions).\n- Content based part: Use metadata inputs (such as price and title) about items to recommend to the user contents similar to those he rated high (learn similarity of item attributes).\n```\n\n#### Adding the title and price - Add the metadata of the items in the training and test datasets.",
"_____no_output_____"
]
],
[
[
"# # creating metadata mappings \n# titles = all_info['title'].unique()\n# titles_map = {i:val for i,val in enumerate(titles)}\n# inverse_titles_map = {val:i for i,val in enumerate(titles)}\n\n# price = all_info['price'].unique()\n# price_map = {i:val for i,val in enumerate(price)}\n# inverse_price_map = {val:i for i,val in enumerate(price)}\n\n# print (\"We have %d prices\" %price.shape)\n# print (\"We have %d titles\" %titles.shape)\n\n\n# all_info['price_id'] = all_info['price'].map(inverse_price_map)\n# all_info['title_id'] = all_info['title'].map(inverse_titles_map)\n\n# # creating dict from \n# item2prices = {}\n# for val in all_info[['item_id','price_id']].dropna().drop_duplicates().iterrows():\n# item2prices[val[1][\"item_id\"]] = val[1][\"price_id\"]\n\n# item2titles = {}\n# for val in all_info[['item_id','title_id']].dropna().drop_duplicates().iterrows():\n# item2titles[val[1][\"item_id\"]] = val[1][\"title_id\"]\n \n\n\n# # populating the rating dataset with item metadata info\n# ratings_train[\"price_id\"] = ratings_train[\"item_id\"].map(lambda x : item2prices[x])\n# ratings_train[\"title_id\"] = ratings_train[\"item_id\"].map(lambda x : item2titles[x])\n\n\n# # populating the test dataset with item metadata info\n# ratings_test[\"price_id\"] = ratings_test[\"item_id\"].map(lambda x : item2prices[x])\n# ratings_test[\"title_id\"] = ratings_test[\"item_id\"].map(lambda x : item2titles[x])\n",
"_____no_output_____"
]
],
[
[
"## create rating train/test dataset and upload into S3",
"_____no_output_____"
]
],
[
[
"# !aws s3 cp s3://dse-cohort5-group1/2-Keras-DeepRecommender/dataset/ratings_test.parquet ./data/\n# !aws s3 cp s3://dse-cohort5-group1/2-Keras-DeepRecommender/dataset/ratings_train.parquet ./data/",
"_____no_output_____"
],
[
"ratings_test = pd.read_parquet('./data/ratings_test.parquet')\nratings_train = pd.read_parquet('./data/ratings_train.parquet')",
"_____no_output_____"
],
[
"ratings_train[:3]",
"_____no_output_____"
],
[
"ratings_train.shape",
"_____no_output_____"
]
],
[
[
"# Define embeddings\n### The $\\underline{embeddings}$ are low-dimensional hidden representations of users and items, \n### i.e. for each item I can find its properties and for each user I can encode how much they like those properties so I can determine attitudes or preferences of users by a small number of hidden factors \n\n### Throughout the training, I learn two new low-dimensional dense representations: one embedding for the users and another one for the items.\n",
"_____no_output_____"
]
],
[
[
"price = all_info['price'].unique()\ntitles = all_info['title'].unique()",
"_____no_output_____"
]
],
[
[
"# 1. Matrix factorization approach\n",
"_____no_output_____"
]
],
[
[
"# declare input embeddings to the model\n# User input\nuser_id_input = Input(shape=[1], name='user')\n# Item Input\nitem_id_input = Input(shape=[1], name='item')\nprice_id_input = Input(shape=[1], name='price')\ntitle_id_input = Input(shape=[1], name='title')\n\n# define the size of embeddings as a parameter\n# Check 5, 10 , 15, 20, 50\nuser_embedding_size = embedding_size \nitem_embedding_size = embedding_size\nprice_embedding_size = embedding_size\ntitle_embedding_size = embedding_size\n\n# apply an embedding layer to all inputs\nuser_embedding = Embedding(output_dim=user_embedding_size, input_dim=users.shape[0],\n input_length=1, name='user_embedding')(user_id_input)\n\nitem_embedding = Embedding(output_dim=item_embedding_size, input_dim=items_reviewed.shape[0],\n input_length=1, name='item_embedding')(item_id_input)\n\nprice_embedding = Embedding(output_dim=price_embedding_size, input_dim=price.shape[0],\n input_length=1, name='price_embedding')(price_id_input)\n\ntitle_embedding = Embedding(output_dim=title_embedding_size, input_dim=titles.shape[0],\n input_length=1, name='title_embedding')(title_id_input)\n\n# reshape from shape (batch_size, input_length,embedding_size) to (batch_size, embedding_size). \nuser_vecs = Reshape([user_embedding_size])(user_embedding)\nitem_vecs = Reshape([item_embedding_size])(item_embedding)\nprice_vecs = Reshape([price_embedding_size])(price_embedding)\ntitle_vecs = Reshape([title_embedding_size])(title_embedding)",
"_____no_output_____"
]
],
[
[
"### Matrix Factorisation works on the principle that we can learn the user and the item embeddings, and then predict the rating for each user-item by performing a dot (or scalar) product between the respective user and item embedding. \n\n",
"_____no_output_____"
]
],
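[
[
"As a toy illustration with made-up numbers (not embeddings learnt from this dataset), the predicted rating is simply the scalar product of the two embedding vectors:\n\n```python\nimport numpy as np\n\nuser_vec = np.array([0.8, 0.1, 0.5])  # one user's embedding\nitem_vec = np.array([0.9, 0.2, 0.4])  # one item's embedding\npredicted_rating = np.dot(user_vec, item_vec)  # 0.94\n```",
"_____no_output_____"
]
],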
[
[
"# Applying matrix factorization: declare the output as being the dot product between the two embeddings: items and users\ny = Dot(1, normalize=False)([user_vecs, item_vecs])",
"_____no_output_____"
],
[
"!mkdir -p ./models",
"_____no_output_____"
],
[
"# create model\nmodel = Model(inputs=\n [\n user_id_input,\n item_id_input\n ], \n outputs=y)\n\n# compile model\nmodel.compile(loss='mse',\n optimizer=\"adam\" )\n\n# set save location for model\nsave_path = \"./models\"\nthename = save_path + '/' + modname + '.h5'\nmcheck = ModelCheckpoint(thename, monitor='val_loss', save_best_only=True)\n\n# fit model\nhistory = model.fit([ratings_train[\"user_id\"]\n , ratings_train[\"item_id\"]\n ]\n , ratings_train[\"score\"]\n , batch_size=64\n , epochs=num_epochs\n , validation_split=0.2\n , callbacks=[mcheck]\n , shuffle=True)",
"_____no_output_____"
],
[
"# Save the fitted model history to a file\nwith open('./histories/' + modname + '.pkl' , 'wb') as file_pi: pickle.dump(history.history, file_pi)\n \nprint(\"Save history in \", './histories/' + modname + '.pkl')",
"_____no_output_____"
],
[
"def disp_model(path,file,suffix):\n model = load_model(path+file+suffix) \n ## Summarise the model \n model.summary() \n # Extract the learnt user and item embeddings, i.e., a table with number of items and users rows and columns, with number of columns is the dimension of the trained embedding.\n # In our case, the embeddings correspond exactly to the weights of the model:\n weights = model.get_weights()\n print (\"embeddings \\ weights shapes\",[w.shape for w in weights]) \n return model\n \nmodel_path = \"./models/\"",
"_____no_output_____"
],
[
"def plt_pickle(path,file,suffix):\n with open(path+file+suffix , 'rb') as file_pi: \n thepickle= pickle.load(file_pi)\n plot(thepickle[\"loss\"],label ='Train Error ' + file,linestyle=\"--\")\n plot(thepickle[\"val_loss\"],label='Validation Error ' + file) \n plt.legend()\n plt.xlabel(\"Epoch\")\n plt.ylabel(\"Error\") \n ##plt.ylim(0, 0.1)\n return pd.DataFrame(thepickle,columns =['loss','val_loss'])\n\nhist_path = \"./histories/\"",
"_____no_output_____"
],
[
"model=disp_model(model_path, modname, '.h5')",
"_____no_output_____"
],
[
"# Display the model using keras\nSVG(model_to_dot(model).create(prog='dot', format='svg'))",
"_____no_output_____"
],
[
"x=plt_pickle(hist_path, modname, '.pkl')\nx.head(20).transpose()",
"_____no_output_____"
]
]
] | [
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] | [
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
d09cb197f62ad88d529eb8ec437d4ddcf2ac4c99 | 3,673 | ipynb | Jupyter Notebook | DFS/1211/1080. Insufficient Nodes in Root to Leaf Paths.ipynb | YuHe0108/Leetcode | 90d904dde125dd35ee256a7f383961786f1ada5d | [
"Apache-2.0"
] | 1 | 2020-08-05T11:47:47.000Z | 2020-08-05T11:47:47.000Z | DFS/1211/1080. Insufficient Nodes in Root to Leaf Paths.ipynb | YuHe0108/LeetCode | b9e5de69b4e4d794aff89497624f558343e362ad | [
"Apache-2.0"
] | null | null | null | DFS/1211/1080. Insufficient Nodes in Root to Leaf Paths.ipynb | YuHe0108/LeetCode | b9e5de69b4e4d794aff89497624f558343e362ad | [
"Apache-2.0"
] | null | null | null | 29.150794 | 81 | 0.463654 | [
[
[
"class TreeNode:\n    def __init__(self, x):\n        self.val = x\n        self.left = None\n        self.right = None\n\nclass Solution:\n\n    def __dfs(self, node, s, limit):\n        \"\"\"\n        Post-order traversal.\n        :param node: the node currently being visited\n        :param s: the sum accumulated so far\n        :param limit: the limit given in the problem\n        :return: whether node should be deleted; True means delete, False means keep\n        \"\"\"\n        # Base case first: if the root-to-leaf sum is below limit, this leaf must be deleted\n        if node.left is None and node.right is None:\n            return s + node.val < limit\n\n        # Assume both subtrees get pruned by default; note: the initial value must not be False\n        l_tree_deleted = True\n        r_tree_deleted = True\n\n        # If there is a left subtree, recurse into it first\n        if node.left:\n            l_tree_deleted = self.__dfs(node.left, s + node.val, limit)\n\n        # If there is a right subtree, recurse into it as well\n        if node.right:\n            r_tree_deleted = self.__dfs(node.right, s + node.val, limit)\n\n        # With the verdicts for both subtrees known, the current node performs the deletions\n        if l_tree_deleted:\n            node.left = None\n        if r_tree_deleted:\n            node.right = None\n\n        # Only when both subtrees were deleted is the current node itself unnecessary to keep\n        return l_tree_deleted and r_tree_deleted\n\n    def sufficientSubset(self, root: TreeNode, limit: int) -> TreeNode:\n        root_deleted = self.__dfs(root, 0, limit)\n        if root_deleted:\n            return None\n        return root",
"_____no_output_____"
],
[
"class Solution:\n    def dfs(self, node, s, limit):\n        \"\"\"\n        Post-order traversal.\n        :param node: the node currently being visited\n        :param s: the sum accumulated so far\n        :param limit: the limit given in the problem\n        :return: whether node should be deleted; True means delete, False means keep\n        \"\"\"\n        if node.left is None and node.right is None: # check whether the current leaf should be deleted\n            return s + node.val < limit\n        \n        l_tree_delete = True\n        r_tree_delete = True\n        if node.left:\n            l_tree_delete = self.dfs(node.left, s + node.val, limit)\n        if node.right:\n            r_tree_delete = self.dfs(node.right, s + node.val, limit)\n        \n        if l_tree_delete:\n            node.left = None\n        if r_tree_delete:\n            node.right = None\n        return l_tree_delete and r_tree_delete\n    \n    \n    def sufficientSubset(self, root, limit: int):\n        root_deleted = self.dfs(root, 0, limit)\n        if root_deleted:\n            return None\n        return root",
"_____no_output_____"
]
]
] | [
"code"
] | [
[
"code",
"code"
]
] |
d09cb1ef31676bee26f699c75666198debede4a3 | 1,604 | ipynb | Jupyter Notebook | Untitled1.ipynb | shashinmishra/machine_translation_of_dates | 234168f6751ea8d68f2d94144d6fc540d50fbd95 | [
"MIT"
] | null | null | null | Untitled1.ipynb | shashinmishra/machine_translation_of_dates | 234168f6751ea8d68f2d94144d6fc540d50fbd95 | [
"MIT"
] | null | null | null | Untitled1.ipynb | shashinmishra/machine_translation_of_dates | 234168f6751ea8d68f2d94144d6fc540d50fbd95 | [
"MIT"
] | null | null | null | 20.303797 | 60 | 0.513092 | [
[
[
"import tensorflow as tf\nimport keras as K\nimport pydot\nimport numpy as np\nimport babel as bb",
"Using TensorFlow backend.\n"
],
[
"print (\"Keras Version is: \", K.__version__)\nprint (\"Tensorflow Version is: \", tf.__version__)\nprint(\"Pydot Version is: \", pydot.__version__)\nprint(\"Numpy Version is: \", np.__version__)\nprint(\"Babel Version is: \", bb.__version__)",
"Keras Version is: 2.0.7\nTensorflow Version is: 1.2.1\nPydot Version is: 1.2.4\nNumpy Version is: 1.11.3\nBabel Version is: 2.5.3\n"
]
]
] | [
"code"
] | [
[
"code",
"code"
]
] |
d09cb4420866a09b42f76bf9bb8e9d70c208d508 | 8,751 | ipynb | Jupyter Notebook | patch/webscaping.ipynb | patchikoooo/data-science-from-scratch | 500ee3077a5625a0bdc502a53e5c3b860b1eebec | [
"MIT"
] | null | null | null | patch/webscaping.ipynb | patchikoooo/data-science-from-scratch | 500ee3077a5625a0bdc502a53e5c3b860b1eebec | [
"MIT"
] | null | null | null | patch/webscaping.ipynb | patchikoooo/data-science-from-scratch | 500ee3077a5625a0bdc502a53e5c3b860b1eebec | [
"MIT"
] | null | null | null | 41.473934 | 2,712 | 0.621758 | [
[
[
"from bs4 import BeautifulSoup\nimport requests\n\nhtml = requests.get(\"https://www.oreilly.com/\").text\nsoup = BeautifulSoup(html, 'html5lib')",
"_____no_output_____"
],
[
"# get first paragraph\nprint(soup.p)",
"<p class=\"mobileHidden\">Build skills. <span class=\"nowrap\">Solve problems.</span></p>\n"
],
[
"print(soup.p.text) # print text of the first paragraph\nprint(soup.p.text.split()) # print words in list of the first paragraph",
"Build skills. Solve problems.\n['Build', 'skills.', 'Solve', 'problems.']\n"
],
[
"# extract a tag's attributes by treating it like a dict\nprint(soup.p['class'])      # raises KeyError if attribute not found\nprint(soup.p.get('class'))  # returns None if attribute not found",
"['mobileHidden']\n['mobileHidden']\n"
],
[
"# get multiple tags at once\nprint(soup('p')) # or soup.find_all('p')\nprint(\"\\n\\n\")\n\n# get only the tags that have a 'class' attribute\nparagraphs_with_ids = [p for p in soup('p') if p.get('class')]\nprint(paragraphs_with_ids)",
"[<p class=\"mobileHidden\">Build skills. <span class=\"nowrap\">Solve problems.</span></p>, <p>New expert playlists: Collections of handpicked content from <span class=\"nowrap\">industry leaders</span></p>, <p>OâReilly online learning gives your team the knowledge they need to stay ahead with on-demand access to the latest OâReilly books, videos, and live training. Build skills with learning paths, live online courses, and collections of content selected by expertsâor solve a problem quickly through books and videos. Follow your organizationâs progress and go in-depth with reporting and insights tools. OâReilly members: Explore all our <a class=\"nowrap\" href=\"https://learning.oreilly.com/playlists/discover/\">expert playlists here.</a></p>, <p>Give your team the knowledge they need to stay ahead with on-demand access to the latest OâReilly books, videos, and live training courses through OâReilly online learning.</p>, <p>Build skills with learning paths and live training courses or solve a problem quickly through books and videos. Follow your organizationâs progress and go in depth with reporting and insights tools. <a class=\"textCTA-small features-seeAll\" href=\"https://www.oreilly.com/online-learning/features.html\">See all features</a></p>, <p class=\"conferences-detail-tagline\">Cover the full scope of <span class=\"nowrap\">software architecture.</span></p>, <p class=\"conferences-detail-location\">New York, NY</p>, <p class=\"conferences-detail-dates\">February 23â26, 2020</p>, <p class=\"conferences-detail-presenter\">Maggie Carroll, senior engineer, MAG Aerospace</p>, <p class=\"conferences-detail-tagline\">Build for tomorrowâwithout interrupting the <span class=\"nowrap\">day-to-day.</span></p>, <p class=\"conferences-detail-location\">Santa Clara, CA</p>, <p class=\"conferences-detail-dates\">June 15â18, 2020</p>, <p class=\"conferences-detail-presenter\">Sam Newman, Technologist</p>, <p>OâReilly learning provides individuals, teams, and businesses with expert-created and curated information covering all the areas that will shape our futureâincluding artificial intelligence, operations, data, UX design, finance, leadership, and more.</p>, <p>Take O’Reilly online learning with you and learn anywhere, anytime on your phone or tablet. Download the app <span class=\"nowrap\">today and:</span></p>, <p>© 2019, O’Reilly Media, Inc. All trademarks and registered trademarks appearing on oreilly.com are the property of their respective owners.</p>, <p><a href=\"/terms/\">Terms of Service</a> ⢠<a href=\"/privacy.html\">Privacy Policy</a> ⢠<a href=\"/about/editorial_independence.html\">Editorial Independence</a></p>]\n\n\n\n[<p class=\"mobileHidden\">Build skills. <span class=\"nowrap\">Solve problems.</span></p>, <p class=\"conferences-detail-tagline\">Cover the full scope of <span class=\"nowrap\">software architecture.</span></p>, <p class=\"conferences-detail-location\">New York, NY</p>, <p class=\"conferences-detail-dates\">February 23â26, 2020</p>, <p class=\"conferences-detail-presenter\">Maggie Carroll, senior engineer, MAG Aerospace</p>, <p class=\"conferences-detail-tagline\">Build for tomorrowâwithout interrupting the <span class=\"nowrap\">day-to-day.</span></p>, <p class=\"conferences-detail-location\">Santa Clara, CA</p>, <p class=\"conferences-detail-dates\">June 15â18, 2020</p>, <p class=\"conferences-detail-presenter\">Sam Newman, Technologist</p>]\n"
],
[
"# find tags with a specific class\nprint(soup('p', {'class': 'conferences-detail-presenter'}))\nprint(soup('p', 'conferences-detail-presenter'))",
"[<p class=\"conferences-detail-presenter\">Maggie Carroll, senior engineer, MAG Aerospace</p>, <p class=\"conferences-detail-presenter\">Sam Newman, Technologist</p>]\n[<p class=\"conferences-detail-presenter\">Maggie Carroll, senior engineer, MAG Aerospace</p>, <p class=\"conferences-detail-presenter\">Sam Newman, Technologist</p>]\n"
],
[
"[p.get('class') for p in paragraphs_with_ids]",
"_____no_output_____"
],
[
"\"\"\"\nWhenever you want to scrape data from a website you should first check to see\nif it has some sort of access policy looking at terms and conditions\n\nWe should also check for robots.txt file that tells webcrawlers how to behave\n\"\"\"\n\n# try http://shop.oreilly.com/robots.txt\n# Crawl-delay: 30\n# Request-rate: 1/30\n\n# The first tells us that we should wait 30 seconds between requests.\n# The second says that we should request only one page every 30 seconds\n# So basically they're two different ways of saying the same thing",
"_____no_output_____"
]
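,
[
"# A minimal sketch of respecting such a crawl delay (30 seconds here, per the robots.txt above);\n# the list of URLs is only an illustration, not part of the original example\nimport time\nimport requests\n\nurls_to_fetch = ['http://shop.oreilly.com/']\nfor url in urls_to_fetch:\n    html = requests.get(url).text\n    time.sleep(30)  # wait 30 seconds between requests",
"_____no_output_____"
]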
]
] | [
"code"
] | [
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
d09ccdf132c0ea143c71cc9719c617cc0a193bef | 4,156 | ipynb | Jupyter Notebook | Python-100-Days/Day66-80/code/Day68.ipynb | Little-Potato-1990/learn_python | 9e54d150ef73e4bf53f8cd9b28a2a8bc65593fe1 | [
"Apache-2.0"
] | 3 | 2022-01-15T19:06:19.000Z | 2022-01-18T16:47:27.000Z | Day66-80/code/Day68.ipynb | bdfd/4.5_Data-Science-Python-Zero2Hero- | 9dafe90b8112fdc3d07e1aa02e41ed3f019f733c | [
"MIT"
] | null | null | null | Day66-80/code/Day68.ipynb | bdfd/4.5_Data-Science-Python-Zero2Hero- | 9dafe90b8112fdc3d07e1aa02e41ed3f019f733c | [
"MIT"
] | 1 | 2022-01-14T13:18:51.000Z | 2022-01-14T13:18:51.000Z | 19.420561 | 99 | 0.493263 | [
[
[
"import numpy as np\nimport pandas as pd\nimport matplotlib.pyplot as plt",
"_____no_output_____"
],
[
"%matplotlib inline\n%config InlineBackend.figure_format='svg'",
"_____no_output_____"
],
[
"plt.rcParams['font.sans-serif'] = 'FZJKai-Z03S'\nplt.rcParams['axes.unicode_minus'] = False",
"_____no_output_____"
],
[
"ser1 = pd.Series(data=[320, 180, 300, 405], index=['一季度', '二季度', '三季度', '四季度'])\nser1",
"_____no_output_____"
],
[
"ser2 = pd.Series({'一季度': 320, '二季度': 180, '三季度': 300, '四季度': 405})\nser2",
"_____no_output_____"
],
[
"print(ser2[0], ser2[2], ser2[-1])\nser2[0], ser2[-1] = 350, 360 \nprint(ser2)",
"_____no_output_____"
],
[
"print(ser2['一季度'], ser2['三季度'])\nser2['一季度'] = 380\nprint(ser2)",
"_____no_output_____"
],
[
"print(ser2[1:3])\nprint(ser2['二季度': '四季度'])\nser2[1:3] = 400, 500\nprint(ser2)",
"_____no_output_____"
],
[
"print(ser2[['二季度', '四季度']])\nser2[['二季度', '四季度']] = 500, 520\nprint(ser2)",
"_____no_output_____"
],
[
"print(ser2[ser2 >= 500])",
"_____no_output_____"
],
[
"# 求和\nprint(ser2.sum())\n# 求均值\nprint(ser2.mean())\n# 求最大\nprint(ser2.max())\n# 求最小\nprint(ser2.min())\n# 计数\nprint(ser2.count())\n# 求标准差\nprint(ser2.std())\n# 求方差\nprint(ser2.var())\n# 求中位数\nprint(ser2.median())",
"_____no_output_____"
],
[
"ser2.describe()",
"_____no_output_____"
],
[
"ser3 = pd.Series(data=['apple', 'banana', 'apple', 'pitaya', 'apple', 'pitaya', 'durian'])\nser3.value_counts()",
"_____no_output_____"
],
[
"ser4 = pd.Series(data=[10, 20, np.NaN, 30, np.NaN])\nser4.dropna()",
"_____no_output_____"
],
[
"ser4.fillna(value=40)",
"_____no_output_____"
],
[
"ser4.fillna(method='ffill')",
"_____no_output_____"
]
]
] | [
"code"
] | [
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
d09cef6dc3174440d0201accbbe953103fb57707 | 48,749 | ipynb | Jupyter Notebook | Moringa_Data_Science_Prep_W4_Independent_Project_2021_07_Cindy_Gachuhi_Python_IP.ipynb | CindyMG/Week4IP | 85abaf8a14f5a3461a27af4926d29fcf0d671098 | [
"MIT"
] | null | null | null | Moringa_Data_Science_Prep_W4_Independent_Project_2021_07_Cindy_Gachuhi_Python_IP.ipynb | CindyMG/Week4IP | 85abaf8a14f5a3461a27af4926d29fcf0d671098 | [
"MIT"
] | null | null | null | Moringa_Data_Science_Prep_W4_Independent_Project_2021_07_Cindy_Gachuhi_Python_IP.ipynb | CindyMG/Week4IP | 85abaf8a14f5a3461a27af4926d29fcf0d671098 | [
"MIT"
] | null | null | null | 38.968026 | 292 | 0.27816 | [
[
[
"<a href=\"https://colab.research.google.com/github/CindyMG/Week4IP/blob/main/Moringa_Data_Science_Prep_W4_Independent_Project_2021_07_Cindy_Gachuhi_Python_IP.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>",
"_____no_output_____"
]
],
[
[
"#let us import the pandas library\nimport pandas as pd",
"_____no_output_____"
]
],
[
[
"From the following data sources, we will acquire our datasets for analysis:\n\nhttp://bit.ly/autolib_dataset\n\nhttps://drive.google.com/a/moringaschool.com/file/d/13DXF2CFWQLeYxxHFekng8HJnH_jtbfpN/view?usp=sharing",
"_____no_output_____"
]
],
[
[
"# let us create a dataframe from the following url:\n# http://bit.ly/autolib_dataset\n\ndf_url = \"http://bit.ly/autolib_dataset\"\nAutolib_dataset = pd.read_csv(df_url)\nAutolib_dataset",
"_____no_output_____"
],
[
"# let us identify the columns with null values and drop them\n#\nAutolib_dataset.isnull()\nAutolib_dataset.dropna(axis=1,how='all',inplace=True)\nAutolib_dataset\n",
"_____no_output_____"
],
[
"# Dropping unnecessary columns\nD_autolib= Autolib_dataset.drop(Autolib_dataset.columns[[8,9,10,15,17,18,19]], axis = 1)\nD_autolib",
"_____no_output_____"
],
[
"# let us access the hour column from our dataframe\nD_autolib['hour']",
"_____no_output_____"
],
[
"# Now, we want to identify the most popular hour in which the Blue cars are picked up\n# To do this, we are going to use the mode() function\n#\nD_autolib['hour'].mode()",
"_____no_output_____"
]
]
] | [
"markdown",
"code",
"markdown",
"code"
] | [
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code"
]
] |
d09cfe67d032d00845958995a1e40ece593a587c | 12,336 | ipynb | Jupyter Notebook | Python Essentials B7 Assignment Day2.ipynb | dhanyasingh/LetsUpgrade-Python-B7 | f58c1f929bdfef8ae0da163514e9624fb9f6f7c0 | [
"Apache-2.0"
] | null | null | null | Python Essentials B7 Assignment Day2.ipynb | dhanyasingh/LetsUpgrade-Python-B7 | f58c1f929bdfef8ae0da163514e9624fb9f6f7c0 | [
"Apache-2.0"
] | null | null | null | Python Essentials B7 Assignment Day2.ipynb | dhanyasingh/LetsUpgrade-Python-B7 | f58c1f929bdfef8ae0da163514e9624fb9f6f7c0 | [
"Apache-2.0"
] | null | null | null | 17.878261 | 101 | 0.464656 | [
[
[
"### Assignment 2 Batch 7",
"_____no_output_____"
],
[
"Ans.1. **List properties:** ordered, iterable, mutable, can contain multiple data types\n##### List default functions are:\n- append() - adds a value or item at the end of the list\n- index() - returns the index of a given list item\n- count() - returns the number of occurrences of a value ",
"_____no_output_____"
]
],
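[
[
"A quick illustrative sketch of index(), which is not demonstrated in the cells below (the list here is a throwaway example):\n\n```python\nfruits = ['apple', 'banana', 'cherry']\nfruits.index('banana')  # 1\n```",
"_____no_output_____"
]
],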
[
[
"list1 = ['Abhilasha','Anamika','Dhanya',1,2,3,4]",
"_____no_output_____"
],
[
"list1.append('Matu')\nlist1",
"_____no_output_____"
],
[
"list1.remove('Matu')",
"_____no_output_____"
],
[
"list1",
"_____no_output_____"
],
[
"list1.append(1)",
"_____no_output_____"
],
[
"list1",
"_____no_output_____"
],
[
"list1.count(1)",
"_____no_output_____"
],
[
"list1.pop(-1)",
"_____no_output_____"
],
[
"list1.clear() #Remove all items from list.\n",
"_____no_output_____"
],
[
"list1",
"_____no_output_____"
]
],
[
[
"Ans.2. **Dictionary properties:**\nunordered, iterable, mutable, can contain multiple data types\n- Made of key-value pairs\n- Keys must be unique, and can be strings, numbers, or tuples\n- Values can be any type\n\n##### Dictionaries default functions:\n- get()- retrieving a value from dictionary\n- items()\n- keys()\n- pop()\n",
"_____no_output_____"
]
],
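[
[
"A small sketch of the listed dictionary methods, using a throwaway dictionary:\n\n```python\nd = {'dad': 'Sachin', 'mom': 'Geeta', 'size': 6}\nd.get('dad')   # 'Sachin' (returns None if the key is missing)\nd.keys()       # dict_keys(['dad', 'mom', 'size'])\nd.items()      # dict_items([('dad', 'Sachin'), ('mom', 'Geeta'), ('size', 6)])\nd.pop('size')  # 6, and removes the key from the dictionary\n```",
"_____no_output_____"
]
],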
[
[
"# create an empty dictionary (two ways)\nempty_dict = {}\nempty_dict = dict()",
"_____no_output_____"
],
[
"# create a dictionary (two ways)\nfamily = {'dad':'Sachin', 'mom':'Geeta', 'size':6}\nfamily = dict(dad='Sachin', mom='Geeta', size=6)\nfamily",
"_____no_output_____"
],
[
"# pass a key to return its value\nfamily['dad']",
"_____no_output_____"
],
[
"# return the number of key-value pairs\nlen(family)",
"_____no_output_____"
],
[
"'Geeta' in family.values()",
"_____no_output_____"
],
[
"# add a new entry\nfamily['cat'] = 'snowball'\nfamily",
"_____no_output_____"
]
],
[
[
"Ans.3. **Set properties:** unordered, iterable, mutable, can contain multiple data types\n- Made of unique elements (strings, numbers, or tuples)\n- Like dictionaries, but with keys only (no values)\n- Handy building blocks for more complex data structures\n- Used for set operations such as union, intersection, difference, and disjoint checks ",
"_____no_output_____"
]
],
[
[
"st = {1,2,4,5,'Dhanya', 7777}",
"_____no_output_____"
],
[
"st1 = {'Matu',7777, 6,7,8,9}",
"_____no_output_____"
],
[
"st.intersection(st1)",
"_____no_output_____"
],
[
"st.issubset(st1)",
"_____no_output_____"
],
[
"st1.issubset(st)",
"_____no_output_____"
],
[
"st.union(st1)",
"_____no_output_____"
],
[
"st.difference(st1)",
"_____no_output_____"
],
[
"st.isdisjoint(st1)",
"_____no_output_____"
]
],
[
[
"Ans.4. **Tuple properties:** ordered, iterable, immutable, can contain multiple data types\nLike lists, but they don't change size",
"_____no_output_____"
]
],
[
[
"# create a tuple directly\ndigits = (0, 1, 'two')",
"_____no_output_____"
],
[
"# create a tuple from a list\ndigits = tuple([0, 1, 'two'])",
"_____no_output_____"
],
[
"digits[2]",
"_____no_output_____"
],
[
"len(digits)",
"_____no_output_____"
],
[
"digits.count(0) #count no.of instances",
"_____no_output_____"
],
[
"digits.index(1)",
"_____no_output_____"
]
],
[
[
"Ans.5. **String and its default methods:** stores an ordered, immutable sequence of characters; supports operations such as concatenation and repetition, shown below. ",
"_____no_output_____"
]
],
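[
[
"A brief, illustrative sketch of a few common built-in string methods (the sample string matches the one used below):\n\n```python\ns = 'Kumari Himanshi'\ns.upper()    # 'KUMARI HIMANSHI'\ns.lower()    # 'kumari himanshi'\ns.split()    # ['Kumari', 'Himanshi']\nlen(s)       # 15\n```",
"_____no_output_____"
]
],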
[
[
"name1 = 'Kumari Himanshi'\nname2 = 'Piyushi Srivastava'",
"_____no_output_____"
],
[
"name1 + \" \" + name2",
"_____no_output_____"
],
[
"type(name1)",
"_____no_output_____"
],
[
"name1*2 #repetition of string",
"_____no_output_____"
],
[
"name2*5",
"_____no_output_____"
]
]
] | [
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] | [
[
"markdown",
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code"
]
] |
d09d01c1a05649446cb6fcab27da697c493efba4 | 116,478 | ipynb | Jupyter Notebook | notebooks/Module1_unit5/4_AreaStatsTimeSeries.ipynb | LaurenZ-IHE/WAPOROCW | 7e1357f23a142ea4394404f4462ed7ceb1f47009 | [
"CC0-1.0"
] | 7 | 2020-10-08T21:09:06.000Z | 2022-02-08T08:48:31.000Z | notebooks/Module1_unit5/4_AreaStatsTimeSeries.ipynb | LaurenZ-IHE/WAPOROCW | 7e1357f23a142ea4394404f4462ed7ceb1f47009 | [
"CC0-1.0"
] | 9 | 2020-10-03T04:26:57.000Z | 2022-03-01T10:56:01.000Z | notebooks/Module1_unit5/4_AreaStatsTimeSeries.ipynb | LaurenZ-IHE/WAPOROCW | 7e1357f23a142ea4394404f4462ed7ceb1f47009 | [
"CC0-1.0"
] | 5 | 2020-09-30T05:45:21.000Z | 2022-02-08T08:48:38.000Z | 58.50226 | 27,472 | 0.621929 | [
[
[
"# Description\n\nThis notebook is used to request computation of average time-series of a WaPOR data layer for an area using WaPOR API.\n\nYou will need WaPOR API Token to use this notebook ",
"_____no_output_____"
],
[
"# Step 1: Read API Token\n\nGet your API token from https://wapor.apps.fao.org/profile. Enter your API token when running the cell below.",
"_____no_output_____"
]
],
[
[
"import requests\nimport pandas as pd\n\npath_query=r'https://io.apps.fao.org/gismgr/api/v1/query/'\npath_sign_in=r'https://io.apps.fao.org/gismgr/api/v1/iam/sign-in/'\n\nAPIToken=input('Your API token: ')",
"Your API token: Enter your API token\n"
]
],
[
[
"# Step 2: Get Authorization AccessToken\n\nUsing the input API token to get AccessToken for authorization",
"_____no_output_____"
]
],
[
[
"resp_signin=requests.post(path_sign_in,headers={'X-GISMGR-API-KEY':APIToken})\nresp_signin = resp_signin.json()\nAccessToken=resp_signin['response']['accessToken']\nAccessToken",
"_____no_output_____"
]
],
[
[
"# Step 3: Write Query Payload\n\nFor more examples of AreaStatsTimeSeries query payloads, \nvisit https://io.apps.fao.org/gismgr/api/v1/swagger-ui/examples/AreaStatsTimeSeries.txt",
"_____no_output_____"
]
],
[
[
"crs=\"EPSG:4326\" #coordinate reference system\ncube_code=\"L1_PCP_E\"\nworkspace='WAPOR_2'\nstart_date=\"2009-01-01\"\nend_date=\"2019-01-01\"\n\n#get datacube measure\ncube_url=f'https://io.apps.fao.org/gismgr/api/v1/catalog/workspaces/{workspace}/cubes/{cube_code}/measures'\nresp=requests.get(cube_url).json()\nmeasure=resp['response']['items'][0]['code']\nprint('MEASURE: ',measure)\n\n#get datacube time dimension\ncube_url=f'https://io.apps.fao.org/gismgr/api/v1/catalog/workspaces/{workspace}/cubes/{cube_code}/dimensions'\nresp=requests.get(cube_url).json()\nitems=pd.DataFrame.from_dict(resp['response']['items'])\ndimension=items[items.type=='TIME']['code'].values[0]\nprint('DIMENSION: ',dimension)",
"MEASURE: WATER_MM\nDIMENSION: DAY\n"
]
],
[
[
"## Define area by coordinate extent",
"_____no_output_____"
]
],
[
[
"bbox= [37.95883206252312, 7.89534, 43.32093, 12.3873979377346] #latlon\nxmin,ymin,xmax,ymax=bbox[0],bbox[1],bbox[2],bbox[3]\nPolygon=[\n [xmin,ymin],\n [xmin,ymax],\n [xmax,ymax],\n [xmax,ymin],\n [xmin,ymin]\n ]\nquery_areatimeseries={\n \"type\": \"AreaStatsTimeSeries\",\n \"params\": {\n \"cube\": {\n \"code\": cube_code, #cube_code\n \"workspaceCode\": workspace, #workspace code: use WAPOR for v1.0 and WAPOR_2 for v2.1\n \"language\": \"en\"\n },\n \"dimensions\": [\n {\n \"code\": dimension, #use DAY DEKAD MONTH or YEAR\n \"range\": f\"[{start_date},{end_date})\" #start date and endate\n }\n ],\n \"measures\": [\n measure\n ],\n \"shape\": {\n \"type\": \"Polygon\",\n \"properties\": {\n \"name\": crs #coordinate reference system\n },\n \"coordinates\": [\n Polygon\n ]\n }\n }\n}\n\nquery_areatimeseries",
"_____no_output_____"
]
],
[
[
"## OR define area by reading GeoJSON",
"_____no_output_____"
]
],
[
[
"import ogr\nshp_fh=r\".\\data\\Awash_shapefile.shp\"\n\nshpfile=ogr.Open(shp_fh)\nlayer=shpfile.GetLayer()\nepsg_code=layer.GetSpatialRef().GetAuthorityCode(None)\nshape=layer.GetFeature(0).ExportToJson(as_object=True)['geometry'] #get geometry of shapefile in JSON string\nshape[\"properties\"]={\"name\": \"EPSG:{0}\".format(epsg_code)}#latlon projection \n\nquery_areatimeseries={\n \"type\": \"AreaStatsTimeSeries\",\n \"params\": {\n \"cube\": {\n \"code\": cube_code,\n \"workspaceCode\": workspace,\n \"language\": \"en\"\n },\n \"dimensions\": [\n {\n \"code\": dimension,\n \"range\": f\"[{start_date},{end_date})\"\n }\n ],\n \"measures\": [\n measure\n ],\n \"shape\": shape\n }\n}\n\nquery_areatimeseries",
"_____no_output_____"
]
],
[
[
"# Step 4: Post the Query Payload with AccessToken in Header \n\nThe response contains a URL that can be used to query the job. ",
"_____no_output_____"
]
],
[
[
"resp_query=requests.post(path_query,headers={'Authorization':'Bearer {0}'.format(AccessToken)},\n json=query_areatimeseries)\nresp_query = resp_query.json()\njob_url=resp_query['response']['links'][0]['href']\n\njob_url",
"_____no_output_____"
]
],
[
[
"# Step 5: Get Job Results.\n\nIt will take some time for the job to finish. When it does, its status changes from 'RUNNING' to 'COMPLETED' or 'COMPLETED WITH ERRORS'. If it is COMPLETED, the area time series results can be retrieved from the response 'output'.",
"_____no_output_____"
]
],
[
[
"i=0\nprint('RUNNING',end=\" \")\nwhile i==0: \n resp = requests.get(job_url)\n resp=resp.json()\n \n if resp['response']['status']=='RUNNING':\n print('.',end =\" \")\n if resp['response']['status']=='COMPLETED':\n results=resp['response']['output']\n df=pd.DataFrame(results['items'],columns=results['header'])\n i=1\n if resp['response']['status']=='COMPLETED WITH ERRORS':\n print(resp['response']['log'])\n i=1 \ndf",
"RUNNING . . . . . . . . . . . . . . "
],
[
"df.index=pd.to_datetime(df.day,format='%Y-%m-%d')\ndf.plot()",
"_____no_output_____"
]
]
] | [
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] | [
[
"markdown",
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
]
] |
d09d0bd323e7d23f1cb23db821380651c70ca4d1 | 297,250 | ipynb | Jupyter Notebook | notebook/REIT-Industrial.ipynb | piinghel/TopicModelling | 0ee0183aa43331f243a7c35891a81e5da93408a8 | [
"Apache-2.0"
] | null | null | null | notebook/REIT-Industrial.ipynb | piinghel/TopicModelling | 0ee0183aa43331f243a7c35891a81e5da93408a8 | [
"Apache-2.0"
] | null | null | null | notebook/REIT-Industrial.ipynb | piinghel/TopicModelling | 0ee0183aa43331f243a7c35891a81e5da93408a8 | [
"Apache-2.0"
] | null | null | null | 45.716703 | 10,761 | 0.420091 | [
[
[
"import os \nos.chdir(\"C:\\\\Users\\\\Pieter-Jan\\\\Documents\\\\Work\\\\Candriam\\\\nlp\\\\ESG\\\\top2Vec\\\\TopicModelling\")",
"_____no_output_____"
],
[
"from modules import Top2Vec_custom\nimport pandas as pd\nimport numpy as np\nfrom sklearn.metrics.pairwise import cosine_similarity\nimport pickle\nimport plotly.express as px",
"_____no_output_____"
],
[
"%reload_ext autoreload\n%autoreload 2",
"_____no_output_____"
],
[
"df = pd.read_csv(\"data\\\\CRS_processed_PyMuPDF_REIT-Industrial.txt\", sep=\"\\t\")",
"_____no_output_____"
],
[
"df.shape",
"_____no_output_____"
],
[
"df.head(2)",
"_____no_output_____"
],
[
"paragraphs = df[\"paragraph\"].values.tolist()",
"_____no_output_____"
],
[
"# %%time\n# model_distilBert = Top2Vec_custom.Top2Vec(\n# documents=paragraphs, \n# embedding_model='distiluse-base-multilingual-cased',\n# load_doc_embed=False,\n# save_doc_embed=True,\n# path_doc_embed=\"output/distBert_embedding_REIT-Industrial.npy\"\n# )",
"2020-12-19 12:51:39,162 - top2vec - INFO - Pre-processing documents for training\n2020-12-19 12:51:39,162 - top2vec - INFO - Pre-processing documents for training\nINFO:top2vec:Pre-processing documents for training\n2020-12-19 12:51:41,020 - top2vec - INFO - Downloading distiluse-base-multilingual-cased model\n2020-12-19 12:51:41,020 - top2vec - INFO - Downloading distiluse-base-multilingual-cased model\nINFO:top2vec:Downloading distiluse-base-multilingual-cased model\n2020-12-19 12:59:13,055 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-19 12:59:13,055 - top2vec - INFO - Creating lower dimension embedding of documents\nINFO:top2vec:Creating lower dimension embedding of documents\n2020-12-19 12:59:13,057 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-19 12:59:13,057 - top2vec - INFO - Creating lower dimension embedding of documents\nINFO:top2vec:Creating lower dimension embedding of documents\n2020-12-19 12:59:39,758 - top2vec - INFO - Finding dense areas of documents\n2020-12-19 12:59:39,758 - top2vec - INFO - Finding dense areas of documents\nINFO:top2vec:Finding dense areas of documents\n2020-12-19 12:59:39,761 - top2vec - INFO - Finding dense areas of documents\n2020-12-19 12:59:39,761 - top2vec - INFO - Finding dense areas of documents\nINFO:top2vec:Finding dense areas of documents\n2020-12-19 12:59:39,904 - top2vec - INFO - Finding topics\n2020-12-19 12:59:39,904 - top2vec - INFO - Finding topics\nINFO:top2vec:Finding topics\n"
],
[
"# %%time\n# model_distilBert = Top2Vec_custom.Top2Vec(\n# documents=paragraphs, \n# embedding_model='distiluse-base-multilingual-cased',\n# load_doc_embed=True,\n# save_doc_embed=False,\n# path_doc_embed=\"output/distBert_embedding_REIT-Industrial.npy\"\n# )",
"2020-12-19 12:59:41,214 - top2vec - INFO - Pre-processing documents for training\n2020-12-19 12:59:41,214 - top2vec - INFO - Pre-processing documents for training\nINFO:top2vec:Pre-processing documents for training\n2020-12-19 12:59:43,400 - top2vec - INFO - Downloading distiluse-base-multilingual-cased model\n2020-12-19 12:59:43,400 - top2vec - INFO - Downloading distiluse-base-multilingual-cased model\nINFO:top2vec:Downloading distiluse-base-multilingual-cased model\n2020-12-19 12:59:58,713 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-19 12:59:58,713 - top2vec - INFO - Creating lower dimension embedding of documents\nINFO:top2vec:Creating lower dimension embedding of documents\n2020-12-19 12:59:58,716 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-19 12:59:58,716 - top2vec - INFO - Creating lower dimension embedding of documents\nINFO:top2vec:Creating lower dimension embedding of documents\n2020-12-19 13:00:19,453 - top2vec - INFO - Finding dense areas of documents\n2020-12-19 13:00:19,453 - top2vec - INFO - Finding dense areas of documents\nINFO:top2vec:Finding dense areas of documents\n2020-12-19 13:00:19,455 - top2vec - INFO - Finding dense areas of documents\n2020-12-19 13:00:19,455 - top2vec - INFO - Finding dense areas of documents\nINFO:top2vec:Finding dense areas of documents\n2020-12-19 13:00:19,593 - top2vec - INFO - Finding topics\n2020-12-19 13:00:19,593 - top2vec - INFO - Finding topics\nINFO:top2vec:Finding topics\n"
],
[
"# model_dir = \"output/distilBert_REIT-Industrial.sav\" \n# with open(model_dir, 'wb') as file: \n# pickle.dump(model_distilBert, file)",
"_____no_output_____"
],
[
"model_dir = \"output/distilBert_REIT-Industrial.sav\" \nwith open(model_dir, 'rb') as file: \n model_distilBert = pickle.load(file)",
"_____no_output_____"
],
[
"topic_words, word_scores, topic_nums = model_distilBert.get_topics()",
"_____no_output_____"
],
[
"topic_sizes, topic_nums = model_distilBert.get_topic_sizes()",
"_____no_output_____"
],
[
"# scores of the top words for each topic\npd.DataFrame(word_scores).T",
"_____no_output_____"
],
[
"topics_top2Vec = pd.DataFrame(topic_words).iloc[:,0:10]\ntopics_top2Vec[\"size\"] = topic_sizes\ntopics_top2Vec",
"_____no_output_____"
]
],
[
[
"## Key word / sentence topic loading",
"_____no_output_____"
]
],
[
[
"keyword_embed = model_distilBert.embed([\"volunteering\"])\nres = cosine_similarity(keyword_embed, model_distilBert.topic_vectors)",
"_____no_output_____"
],
[
"scores = pd.DataFrame(res, index=[\"Cosine similarity\"]).T\nscores[\"Topic\"] = list(range(0,len(scores)))",
"_____no_output_____"
],
[
"scores[\"Top words\"] = scores[\"Topic\"].apply(lambda x: list(topics_top2Vec.iloc[x,0:3]))",
"_____no_output_____"
],
[
"scores.sort_values(by=\"Cosine similarity\", ascending=False, inplace=True)\nscores.head(10)",
"_____no_output_____"
],
[
"fig = px.bar(scores.iloc[0:10,:], x='Topic', y='Cosine similarity', text=\"Top words\", title='10 highest topic loadings')\nfig.update_layout(xaxis=dict(type='category'),\n                  xaxis_title=\"Topic number\")\nfig.show()",
"_____no_output_____"
]
],
[
[
"## Most similar documents for a topic",
"_____no_output_____"
]
],
[
[
"documents, document_scores, document_ids = model_distilBert.search_documents_by_topic(topic_num=5, num_docs=2)\nfor doc, score, doc_id in zip(documents, document_scores, document_ids):\n print(f\"Document: {doc_id}, Filename (Company and year): {df.iloc[doc_id,:].filename}, Score: {score}\")\n print(\"-----------\")\n print(doc)\n print(\"-----------\")\n print()",
"Document: 3234, Filename (Company and year): Prologis-2016.pdf, Score: 0.8135864734649658\n-----------\nSpace for Good Prologis developed Space for Good to provide temporary space in our distribution facilities rent-free to help charitable organizations address seasonal and short-term needs and provide disaster relief. Space for Good demonstrates Prologis commitment to corporate citizenship and willingness to form meaningful public/private partnerships on behalf of our local communities. Highlights for 2016 Prologis and the Prologis Foundation made $1.6 million in donations and matching gifts to nonprofit organizations working in the areas of education, human welfare and disaster relief. On IMPACT Day, Prologis employees volunteered 7,000 hours to benefit 60 different nonprofit organizations. Employees volunteered an additional 3,420 hours to various organizations throughout the year. OurSpace for Goodprogram, which provides warehouse spaceat free or reduced rental rate tononprofits and nongovernmental organizations (NGOs), gave82months of rent-free space to 12 non-\n-----------\n\nDocument: 2680, Filename (Company and year): Prologis-2012.pdf, Score: 0.7971879243850708\n-----------\nSome of the organizations which received financial support from the Prologis Foundation in 2012 included: American Red Cross Boys and Girls Clubs Habitat for Humanity Junior Achievement Metro Volunteers Ronald McDonald House Charities Space for Good Prologis developed the Space for Good program to provide warehouse space to charitable organizations in need of such space on temporary basis to address seasonal needs and respond to disaster relief efforts. Space for Good is an example of program that aligns our business with our charitable activities in way that directly benefits the needs of the local communities where we operate. In 2012, Prologis donated more than 4 million square feet (400,000 square meters) of distribution center space on temporary basis to organizations in need. This space represents value of more\n-----------\n\n"
],
[
"unique_labels = set(model_distilBert.clustering.labels_)",
"_____no_output_____"
],
[
"model_distilBert._create_topic_vectors()",
"_____no_output_____"
],
[
"df[\"topic\"] = model_distilBert.clustering.labels_",
"_____no_output_____"
],
[
"out = pd.DataFrame(df.groupby([\"filename\",\"topic\"]).count().iloc[:,0])",
"_____no_output_____"
],
[
"out_sorted = (out.iloc[out.index.get_level_values(0) == out.index.get_level_values(0)[0],:].\nsort_values(out.columns[0], ascending=False))\nout_sorted[\"topic\"] = out_sorted.index.get_level_values(1)\nout_sorted[\"top words\"] = out_sorted[\"topic\"].apply(lambda x: list(topics_top2Vec.iloc[x, 0:3]) if x >= 0 else list([\"Noise topic\"]))",
"_____no_output_____"
],
[
"out_sorted",
"_____no_output_____"
],
[
"fig = px.bar(out_sorted.head(10), x='topic', y=out.columns[0], text=\"top words\", title='10 highest topic counts')\nfig.update_layout(xaxis=dict(type='category'),\n xaxis_title=\"Topic number\",\n yaxis_title=\"Count\")\nfig.show()",
"_____no_output_____"
],
[
"model_distilBert._deduplicate_topics()\nmodel_distilBert.topic_vectors.shape",
"_____no_output_____"
],
[
"model_distilBert.get_num_topics()",
"_____no_output_____"
]
],
[
[
"## Update model",
"_____no_output_____"
]
],
[
[
"model_distilBert.n_components = 5\nmodel_distilBert.ngram_range = (1,4)\nmodel_distilBert._update_steps(documents=paragraphs, step=1)",
"2020-12-17 17:39:48,537 - top2vec - INFO - Pre-processing documents for training\n2020-12-17 17:39:48,537 - top2vec - INFO - Pre-processing documents for training\n2020-12-17 17:39:48,537 - top2vec - INFO - Pre-processing documents for training\n2020-12-17 17:39:48,537 - top2vec - INFO - Pre-processing documents for training\n2020-12-17 17:39:48,537 - top2vec - INFO - Pre-processing documents for training\n2020-12-17 17:39:48,537 - top2vec - INFO - Pre-processing documents for training\n2020-12-17 17:39:48,537 - top2vec - INFO - Pre-processing documents for training\n2020-12-17 17:39:48,537 - top2vec - INFO - Pre-processing documents for training\n2020-12-17 17:39:48,537 - top2vec - INFO - Pre-processing documents for training\n2020-12-17 17:39:48,537 - top2vec - INFO - Pre-processing documents for training\n2020-12-17 17:39:48,537 - top2vec - INFO - Pre-processing documents for training\n2020-12-17 17:39:48,537 - top2vec - INFO - Pre-processing documents for training\n2020-12-17 17:39:48,537 - top2vec - INFO - Pre-processing documents for training\n2020-12-17 17:39:48,537 - top2vec - INFO - Pre-processing documents for training\n2020-12-17 17:39:48,537 - top2vec - INFO - Pre-processing documents for training\n2020-12-17 17:39:48,537 - top2vec - INFO - Pre-processing documents for training\n2020-12-17 17:39:48,537 - top2vec - INFO - Pre-processing documents for training\n2020-12-17 17:39:48,537 - top2vec - INFO - Pre-processing documents for training\n2020-12-17 17:39:48,537 - top2vec - INFO - Pre-processing documents for training\n2020-12-17 17:39:48,537 - top2vec - INFO - Pre-processing documents for training\n2020-12-17 17:39:48,537 - top2vec - INFO - Pre-processing documents for training\n2020-12-17 17:39:48,537 - top2vec - INFO - Pre-processing documents for training\n2020-12-17 17:39:48,537 - top2vec - INFO - Pre-processing documents for training\n2020-12-17 17:39:48,537 - top2vec - INFO - Pre-processing documents for training\n2020-12-17 17:39:48,537 - top2vec - INFO - Pre-processing documents for training\n2020-12-17 17:39:48,537 - top2vec - INFO - Pre-processing documents for training\n2020-12-17 17:39:48,537 - top2vec - INFO - Pre-processing documents for training\n2020-12-17 17:39:48,537 - top2vec - INFO - Pre-processing documents for training\n2020-12-17 17:39:48,537 - top2vec - INFO - Pre-processing documents for training\n2020-12-17 17:39:48,537 - top2vec - INFO - Pre-processing documents for training\n2020-12-17 17:39:48,537 - top2vec - INFO - Pre-processing documents for training\n2020-12-17 17:39:48,537 - top2vec - INFO - Pre-processing documents for training\n2020-12-17 17:39:48,537 - top2vec - INFO - Pre-processing documents for training\n2020-12-17 17:39:48,537 - top2vec - INFO - Pre-processing documents for training\n2020-12-17 17:39:48,537 - top2vec - INFO - Pre-processing documents for training\nINFO:top2vec:Pre-processing documents for training\n2020-12-17 17:40:59,406 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,406 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,406 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,406 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,406 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,406 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,406 - top2vec - INFO - Creating lower dimension embedding 
of documents\n2020-12-17 17:40:59,406 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,406 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,406 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,406 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,406 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,406 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,406 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,406 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,406 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,406 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,406 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,406 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,406 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,406 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,406 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,406 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,406 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,406 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,406 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,406 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,406 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,406 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,406 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,406 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,406 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,406 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,406 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,406 - top2vec - INFO - Creating lower dimension embedding of documents\nINFO:top2vec:Creating lower dimension embedding of documents\n2020-12-17 17:40:59,489 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,489 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,489 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,489 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,489 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,489 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,489 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,489 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,489 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,489 - top2vec - INFO - Creating lower dimension 
embedding of documents\n2020-12-17 17:40:59,489 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,489 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,489 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,489 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,489 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,489 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,489 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,489 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,489 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,489 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,489 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,489 - top2vec - INFO - Creating lower dimension embedding of documents\n2020-12-17 17:40:59,489 - top2vec - INFO - Creating lower dimension embedding of documents\n"
],
[
"topic_words, word_scores, topic_nums = model_distilBert.get_topics()\ntopic_sizes, topic_nums = model_distilBert.get_topic_sizes()\ntopics_top2Vec = pd.DataFrame(topic_words).iloc[:,0:10]\ntopics_top2Vec[\"size\"] = topic_sizes\ntopics_top2Vec",
"_____no_output_____"
]
]
] | [
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] | [
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
]
] |
d09d2be4ab5240937e36ac2d11089799b0bfea52 | 218,312 | ipynb | Jupyter Notebook | climate_starter.ipynb | ishanku/sqlalchemy-challenge | 2851b042265a68eb30919318c772707018c945be | [
"ADSL"
] | null | null | null | climate_starter.ipynb | ishanku/sqlalchemy-challenge | 2851b042265a68eb30919318c772707018c945be | [
"ADSL"
] | null | null | null | climate_starter.ipynb | ishanku/sqlalchemy-challenge | 2851b042265a68eb30919318c772707018c945be | [
"ADSL"
] | null | null | null | 112.069815 | 41,527 | 0.819648 | [
[
[
"%matplotlib inline\nfrom matplotlib import style\nstyle.use('fivethirtyeight')\nimport matplotlib.pyplot as plt",
"_____no_output_____"
],
[
"import numpy as np\nimport pandas as pd",
"_____no_output_____"
],
[
"import datetime as dt",
"_____no_output_____"
]
],
[
[
"# Reflect Tables into SQLAlchemy ORM",
"_____no_output_____"
]
],
[
[
"# Python SQL toolkit and Object Relational Mapper\nimport sqlalchemy\nfrom sqlalchemy.ext.automap import automap_base\nfrom sqlalchemy.orm import Session\nfrom sqlalchemy import create_engine, func, inspect",
"_____no_output_____"
],
[
"engine = create_engine(\"sqlite:///Resources/hawaii.sqlite\")",
"_____no_output_____"
],
[
"# reflect an existing database into a new model\nBase = automap_base()\n# reflect the tables\nBase.prepare(engine, reflect=True)",
"_____no_output_____"
],
[
"# We can view all of the classes that automap found\nBase.classes.keys()",
"_____no_output_____"
],
[
"# Save references to each table\nMeasurement = Base.classes.measurement\nStation = Base.classes.station",
"_____no_output_____"
],
[
"# Create our session (link) from Python to the DB\nsession = Session(engine)",
"_____no_output_____"
],
[
"inspector = inspect(engine)",
"_____no_output_____"
]
],
[
[
"# Exploratory Climate Analysis",
"_____no_output_____"
]
],
[
[
"columns = inspector.get_columns('Measurement')\nfor column in columns:\n print(column[\"name\"], column[\"type\"])",
"id INTEGER\nstation TEXT\ndate TEXT\nprcp FLOAT\ntobs FLOAT\n"
],
[
"columns = inspector.get_columns('Station')\nfor column in columns:\n print(column[\"name\"], column[\"type\"])",
"id INTEGER\nstation TEXT\nname TEXT\nlatitude FLOAT\nlongitude FLOAT\nelevation FLOAT\n"
],
[
"# Design a query to retrieve the last 12 months of precipitation data and plot the results\n# Calculate the date 1 year ago from the last data point in the database\nLatestDate=np.ravel(session.query(Measurement.date).order_by(Measurement.date.desc()).first())\nLatestDate=str(LatestDate).replace(\"-\",\"\").replace(\"'\",\"\").replace(\"[\",\"\").replace(\"]\",\"\")\nLatestDate",
"_____no_output_____"
],
[
"#Date Calculation Using regex\nimport re\n#Split Year, Month and Date to form a Date time format\nCYear=int(re.sub(r'(\\d{4})(\\d{2})(\\d{2})', r'\\1', LatestDate))\nCMonth=int(re.sub(r'(\\d{4})(\\d{2})(\\d{2})', r'\\2', LatestDate))\nCDay=int(re.sub(r'(\\d{4})(\\d{2})(\\d{2})', r'\\3', LatestDate))\nLatestDateFormat = dt.datetime(CYear,CMonth,CDay)\n#Subract a year\nfrom dateutil.relativedelta import relativedelta\nOneYearAgoDate =(LatestDateFormat) + relativedelta(years=-1)\n# Convert Back to queriable pattern\nLatest = re.sub(r'(\\d{4})(\\d{2})(\\d{2})', r'\\1-\\2-\\3', LatestDate)\nOYear=str(OneYearAgoDate.year)\nOMonth=str(OneYearAgoDate.month)\nODay=str(OneYearAgoDate.day)\nif len(OMonth) == 1:\n OMonth= \"0\" + OMonth\nif len(ODay) == 1:\n ODay= \"0\" + ODay\nOneYearAgo = OYear + \"-\" + OMonth + \"-\" + ODay\nLatest,OneYearAgo",
"_____no_output_____"
],
[
"# Perform a query to retrieve the data and precipitation scores\nLastYearPreciptitationData=session.query(Measurement.date,Measurement.prcp).filter(Measurement.date >= OneYearAgo).order_by(Measurement.date.desc()).all()",
"_____no_output_____"
],
[
"session.query(Measurement.date,Measurement.prcp).filter(Measurement.date >= OneYearAgo).order_by(Measurement.date.desc()).count()",
"_____no_output_____"
],
[
"# Save the query results as a Pandas DataFrame and set the index to the date column\nLPData=pd.DataFrame()\nfor L in LastYearPreciptitationData:\n df=pd.DataFrame({'Date':[L[0]],\"Prcp\":[L[1]]})\n LPData=LPData.append(df)\n# Sort the dataframe by date\nLPData=LPData.set_index('Date').sort_values(by=\"Date\",ascending=False)\nLPData.head(10)",
"_____no_output_____"
]
],
[
[
"",
"_____no_output_____"
]
],
[
[
"# Use Pandas Plotting with Matplotlib to plot the data\nLPData.plot(rot=90);\nplt.ylim(0,7)\nplt.xlabel(\"Date\")\nplt.ylabel(\"Rain (Inches)\")\nplt.title(\"Precipitation Analysis\")\nplt.legend([\"Precipitation\"])\nplt.savefig(\"./Output/Figure1.png\")\nplt.show()",
"_____no_output_____"
],
[
"# Use Pandas to calcualte the summary statistics for the precipitation data\nLPData.describe()",
"_____no_output_____"
]
],
[
[
"",
"_____no_output_____"
]
],
[
[
"# Design a query to show how many stations are available in this dataset?\n# ---- From Measurement Data\nsession.query(Measurement.station).group_by(Measurement.station).count()\n#----From Station Date\nsession.query(Station).count()",
"_____no_output_____"
],
[
"#-- Method 1 -- Using DataFrame\n# What are the most active stations? (i.e. what stations have the most rows)?\n# List the stations and the counts in descending order.\nStations=session.query(Measurement.station,Measurement.tobs).all()\nstation_df=pd.DataFrame()\nfor s in Stations:\n df=pd.DataFrame({\"Station\":[s.station],\"Tobs\":[s.tobs]})\n station_df=station_df.append(df)\nActiveStation=station_df.Station.value_counts()\nActiveStation",
"_____no_output_____"
],
[
"#-- Method 2 -- Using Direct Query\nActiveStationList=session.query(Measurement.station,func.count(Measurement.tobs)).group_by(Measurement.station).order_by(func.count(Measurement.tobs).desc()).all()\nActiveStationList",
"_____no_output_____"
],
[
"# Using the station id from the previous query, calculate the lowest temperature recorded, \n# highest temperature recorded, and average temperature of the most active station?\nstation_df[station_df.Station == 'USC00519281'].Tobs.min(),station_df[station_df.Station == 'USC00519281'].Tobs.max(),station_df[station_df.Station == 'USC00519281'].Tobs.mean()",
"_____no_output_____"
],
[
"# Choose the station with the highest number of temperature observations.\nprint(f\"The Station with Highest Number of temperature obervations is {ActiveStationList[0][0]} and the No of Observations are {ActiveStationList[0][1]}\")",
"The Station with Highest Number of temperature obervations is USC00519281 and the No of Observations are 2772\n"
],
[
"# Query the last 12 months of temperature observation data for this station and plot the results as a histogram\nLast12TempO=session.query(Measurement.tobs).filter(Measurement.date > OneYearAgo).filter(Measurement.station==ActiveStationList[0][0]).all()\ndf=pd.DataFrame(Last12TempO)\nplt.hist(df['tobs'],12,color='purple',hatch=\"/\",edgecolor=\"yellow\")\nplt.xlabel(\"Temperature\",fontsize=14)\nplt.ylabel(\"Frequency\", fontsize=14)\nplt.title(\"One Year Temperature (For Station USC00519281)\",fontsize=14)\nlabels=[\"Temperature obervation\"]\nplt.legend(labels)\nplt.savefig(\"./Output/Figure2.png\")\nplt.show()",
"_____no_output_____"
]
],
[
[
"",
"_____no_output_____"
]
],
[
[
"# This function called `calc_temps` will accept start date and end date in the format '%Y-%m-%d' \n# and return the minimum, average, and maximum temperatures for that range of dates\ndef calc_temps(start_date, end_date):\n \"\"\"TMIN, TAVG, and TMAX for a list of dates.\n \n Args:\n start_date (string): A date string in the format %Y-%m-%d\n end_date (string): A date string in the format %Y-%m-%d\n \n Returns:\n TMIN, TAVE, and TMAX\n \"\"\"\n \n return session.query(func.min(Measurement.tobs), func.avg(Measurement.tobs), func.max(Measurement.tobs)).\\\n filter(Measurement.date >= start_date).filter(Measurement.date <= end_date).all()\n\n# function usage example\nprint(calc_temps('2012-02-28', '2012-03-05'))",
"[(62.0, 69.57142857142857, 74.0)]\n"
],
[
"#----First Sample\n# Use your previous function `calc_temps` to calculate the tmin, tavg, and tmax \n# for your trip using the previous year's data for those same dates.\nTemperatureAverageLast12Months=calc_temps(OneYearAgo, Latest)\nprint(TemperatureAverageLast12Months)",
"[(58.0, 74.59058295964125, 87.0)]\n"
],
[
"#----Second Sample\ncalc_temps('2015-08-21', '2016-08-21')",
"_____no_output_____"
],
[
"# Plot the results from your previous query as a bar chart. \n# Use \"Trip Avg Temp\" as your Title\n# Use the average temperature for the y value\n# Use the peak-to-peak (tmax-tmin) value as the y error bar (yerr)\nError = TemperatureAverageLast12Months[0][2]-TemperatureAverageLast12Months[0][0]\nAverageTemp = TemperatureAverageLast12Months[0][1]\nMinTemp = TemperatureAverageLast12Months[0][0]\nMaxTemp = TemperatureAverageLast12Months[0][2]\n\nfig, ax = plt.subplots(figsize=(5,6))\nbar_chart = ax.bar(1 , AverageTemp, color= 'salmon', tick_label='',yerr=Error, alpha=0.6)\nax.set_xlabel(\"Trip\")\nax.set_ylabel(\"Temp (F)\")\nax.set_title(\"Trip Avg Temp\")\n\ndef autolabels(rects):\n for rect in rects:\n h=rect.get_height()\n \n#label the bars \nautolabels(bar_chart) \nplt.ylim(0, 100)\nplt.xlim(0,2)\nax.xaxis.grid()\nfig.tight_layout()\nplt.savefig(\"./Output/temperature.png\")\nplt.show()",
"_____no_output_____"
],
[
"# Plot the results from your previous query as a bar chart. \n# Use \"Trip Avg Temp\" as your Title\n# Use the average temperature for the y value\n# Use the peak-to-peak (tmax-tmin) value as the y error bar (yerr)\n",
"_____no_output_____"
],
[
"TripStartTime= '2016-08-21'\nTripEndTime = '2016-08-30'\n\nFirstStep = [Station.station, Station.name, Station.latitude, Station.longitude, Station.elevation, func.sum(Measurement.prcp)]\nPlaceForTrip = session.query(*FirstStep).\\\n filter(Measurement.station == Station.station).\\\n filter(Measurement.date >= TripStartTime).\\\n filter(Measurement.date <= TripEndTime).\\\n group_by(Station.name).order_by(func.sum(Measurement.prcp).desc()).all()\nprint (PlaceForTrip)",
"[('USC00516128', 'MANOA LYON ARBO 785.2, HI US', 21.3331, -157.8025, 152.4, 7.560000000000001), ('USC00519281', 'WAIHEE 837.5, HI US', 21.45167, -157.84888999999998, 32.9, 7.479999999999999), ('USC00513117', 'KANEOHE 838.1, HI US', 21.4234, -157.8015, 14.6, 4.16), ('USC00514830', 'KUALOA RANCH HEADQUARTERS 886.9, HI US', 21.5213, -157.8374, 7.0, 2.55), ('USC00519523', 'WAIMANALO EXPERIMENTAL FARM, HI US', 21.33556, -157.71139, 19.5, 2.4), ('USC00519397', 'WAIKIKI 717.2, HI US', 21.2716, -157.8168, 3.0, 0.69), ('USC00517948', 'PEARL CITY, HI US', 21.3934, -157.9751, 11.9, 0.06)]\n"
],
[
"# Calculate the total amount of rainfall per weather station for your trip dates using the previous year's matching dates.\n# Sort this in descending order by precipitation amount and list the station, name, latitude, longitude, and elevation\n\n",
"[('USC00516128', 'MANOA LYON ARBO 785.2, HI US', 21.3331, -157.8025, 152.4, 0.31), ('USC00519281', 'WAIHEE 837.5, HI US', 21.45167, -157.84888999999998, 32.9, 0.25), ('USC00518838', 'UPPER WAHIAWA 874.3, HI US', 21.4992, -158.0111, 306.6, 0.1), ('USC00513117', 'KANEOHE 838.1, HI US', 21.4234, -157.8015, 14.6, 0.060000000000000005), ('USC00511918', 'HONOLULU OBSERVATORY 702.2, HI US', 21.3152, -157.9992, 0.9, 0.0), ('USC00514830', 'KUALOA RANCH HEADQUARTERS 886.9, HI US', 21.5213, -157.8374, 7.0, 0.0), ('USC00517948', 'PEARL CITY, HI US', 21.3934, -157.9751, 11.9, 0.0), ('USC00519397', 'WAIKIKI 717.2, HI US', 21.2716, -157.8168, 3.0, 0.0), ('USC00519523', 'WAIMANALO EXPERIMENTAL FARM, HI US', 21.33556, -157.71139, 19.5, 0.0)]\n"
]
],
[
[
"## Optional Challenge Assignment",
"_____no_output_____"
]
],
[
[
"# Create a query that will calculate the daily normals \n# (i.e. the averages for tmin, tmax, and tavg for all historic data matching a specific month and day)\n\ndef daily_normals(date):\n \"\"\"Daily Normals.\n \n Args:\n date (str): A date string in the format '%m-%d'\n \n Returns:\n A list of tuples containing the daily normals, tmin, tavg, and tmax\n \n \"\"\"\n \n sel = [func.min(Measurement.tobs), func.avg(Measurement.tobs), func.max(Measurement.tobs)]\n return session.query(*sel).filter(func.strftime(\"%m-%d\", Measurement.date) == date).all()\n \ndaily_normals(\"01-01\")",
"_____no_output_____"
],
[
"# calculate the daily normals for your trip\n# push each tuple of calculations into a list called `normals`\nnormals=[]\n# Set the start and end date of the trip\nTripStartTime= '2016-08-21'\nTripEndTime = '2016-08-30'\n# Stip off the year and save a list of %m-%d strings\nTripStartTime=TripStartTime.replace(\"-\",\"\")\nStartDate=int(re.sub(r'(\\d{4})(\\d{2})(\\d{2})', r'\\3', TripStartTime))\nTripEndTime=TripEndTime.replace(\"-\",\"\")\nEndDate=int(re.sub(r'(\\d{4})(\\d{2})(\\d{2})', r'\\3', TripEndTime))\nTripMonth=re.sub(r'(\\d{4})(\\d{2})(\\d{2})', r'\\2', TripEndTime)\nif len(TripMonth) == 1:\n TripMonth= \"0\" + TripMonth\n# Use the start and end date to create a range of dates\nDates = [f\"{TripMonth}-{num}\" for num in range(StartDate, EndDate)]\n\n# Loop through the list of %m-%d strings and calculate the normals for each date\nfor d in Dates:\n Normal = daily_normals(d)\n normals.extend(Normal)\n \nnormals",
"_____no_output_____"
],
[
"# Load the previous query results into a Pandas DataFrame and add the `trip_dates` range as the `date` index\nTempMin = [x[0] for x in normals]\nTempAvg = [x[1] for x in normals]\nTempMax = [x[2] for x in normals]\nSYear=int(re.sub(r'(\\d{4})(\\d{2})(\\d{2})', r'\\1', TripStartTime))\nTripDatesYear = [f\"{SYear}-{d}\" for d in Dates]\nTripDatesYear\n\ntrip_normals = pd.DataFrame({\"TempMin\":TempMin, \"TempAvg\":TempAvg, \"TempMax\":TempMax, \"date\":TripDatesYear}).set_index(\"date\")\ntrip_normals.head()",
"_____no_output_____"
],
[
"# Plot the daily normals as an area plot with `stacked=False`\ntrip_normals.plot(kind=\"area\", stacked=False)\nplt.legend(loc=\"right\")\nplt.ylabel(\"Temperature (F)\")\nplt.xticks(range(len(trip_normals.index)), trip_normals.index, rotation=\"60\")\nplt.savefig(\"./Output/daily-normals.png\")\nplt.show()",
"_____no_output_____"
],
[
"# Plot the daily normals as an area plot with `stacked=False`\n",
"_____no_output_____"
]
]
] | [
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] | [
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code"
]
] |
d09d3e271fe4775183730c2a8815dc34e22b3591 | 26,323 | ipynb | Jupyter Notebook | examples/010-2020-03-22-marks-r-scratchpad.ipynb | cs224/pybnl | 16d04c7fa44c905608c41a41f45e9ac3c5db89eb | [
"Apache-2.0"
] | 10 | 2018-07-26T01:08:39.000Z | 2021-12-26T14:42:31.000Z | examples/010-2020-03-22-marks-r-scratchpad.ipynb | cs224/pybnl | 16d04c7fa44c905608c41a41f45e9ac3c5db89eb | [
"Apache-2.0"
] | null | null | null | examples/010-2020-03-22-marks-r-scratchpad.ipynb | cs224/pybnl | 16d04c7fa44c905608c41a41f45e9ac3c5db89eb | [
"Apache-2.0"
] | 4 | 2019-03-19T03:58:35.000Z | 2020-01-23T23:29:38.000Z | 29.118363 | 156 | 0.324925 | [
[
[
"library(bnlearn)",
"\nAttaching package: ‘bnlearn’\n\nThe following object is masked from ‘package:stats’:\n\n sigma\n\n"
],
[
"data(learning.test)",
"_____no_output_____"
],
[
"learning.test",
"_____no_output_____"
],
[
"value = si.hiton.pc(learning.test, test=\"mc-mi\", undirected=FALSE)\nvalue",
"_____no_output_____"
],
[
"value2 = cextend(value)\nvalue2",
"_____no_output_____"
],
[
"value2_arcs = arcs(value2)\nvalue2_arcs",
"_____no_output_____"
],
[
"typeof(value2_arcs)",
"_____no_output_____"
],
[
"mode(value2_arcs)",
"_____no_output_____"
],
[
"class(value2_arcs)",
"_____no_output_____"
],
[
"dimnames(value2_arcs)",
"_____no_output_____"
],
[
"value2_arcs[TRUE, 'from']",
"_____no_output_____"
],
[
"value2_arcs[TRUE, 'to']",
"_____no_output_____"
],
[
"value2_arcs[TRUE]",
"_____no_output_____"
],
[
"value2$arcs",
"_____no_output_____"
],
[
"value2$arcs[1, TRUE]",
"_____no_output_____"
],
[
"value2_arcs[1, TRUE]",
"_____no_output_____"
]
]
] | [
"code"
] | [
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
]
] |
d09d42da2fe665c685ec8a25041e818f956688da | 127,986 | ipynb | Jupyter Notebook | examples/FBDD/fragment search.ipynb | kzfm/pychembldb | 0dde6a05b2dc138e0be7f8f27f57b5c2a42e23c7 | [
"CC0-1.0"
] | 8 | 2020-01-16T00:43:46.000Z | 2021-11-27T18:26:12.000Z | examples/FBDD/fragment search.ipynb | iwatobipen/pychembldb | 0dde6a05b2dc138e0be7f8f27f57b5c2a42e23c7 | [
"CC0-1.0"
] | null | null | null | examples/FBDD/fragment search.ipynb | iwatobipen/pychembldb | 0dde6a05b2dc138e0be7f8f27f57b5c2a42e23c7 | [
"CC0-1.0"
] | 3 | 2020-05-31T05:54:33.000Z | 2021-11-15T04:31:07.000Z | 50.210279 | 340 | 0.643969 | [
[
[
"from pychembldb import *",
"_____no_output_____"
],
[
"for a in chembldb.query(Assay).filter(Assay.description.like(\"% kinase %\")):\n acts = []\n for act in a.activities:\n if act.compound.molecule.property is not None and act.compound.molecule.property.mw_freebase is not None and act.compound.molecule.property.mw_freebase < 250:\n acts.append(act.compound.molecule)\n\n if len(acts) > 20:\n print(a.chembl_id)\n print(a.description, \"\\n\")\n for m in acts:\n print(m.chembl_id, m.structure.canonical_smiles)\n print()\n",
"CHEMBL660806\nInhibitory activity against cyclin-dependent kinase 1-cyclin B in starfish \n\nCHEMBL77264 COc1ccc(cc1)c2[nH]c3nccnc3c2C\nCHEMBL307944 Oc1cccc(c1)c2cc3[nH]ccnc3n2\nCHEMBL305627 o1cccc1c2cc3[nH]ccnc3n2\nCHEMBL77584 Oc1ccccc1c2cc3[nH]ccnc3n2\nCHEMBL80785 Oc1ccc(cc1)c2cc3[nH]ccnc3n2\nCHEMBL77019 Clc1ccc(cc1)c2cc3[nH]ccnc3n2\nCHEMBL77416 c1ccc(nc1)c2cc3[nH]ccnc3n2\nCHEMBL77066 COc1ccc(cc1)c2cc3[nH]ccnc3n2\nCHEMBL305901 c1ccc(cc1)c2cc3[nH]ccnc3n2\nCHEMBL306263 Fc1ccc(cc1)c2cc3[nH]ccnc3n2\nCHEMBL74015 c1csc(c1)c2cc3[nH]ccnc3n2\nCHEMBL78264 c1ccc2c(cccc2c1)c3cc4[nH]ccnc4n3\nCHEMBL75756 c1ccc(cc1)c2cc3ccccc3[nH]2\nCHEMBL311935 Cc1c[nH]c2cc(nc2n1)c3ccccc3\nCHEMBL77697 N#Cc1ccc(cc1)c2cc3[nH]ccnc3n2\nCHEMBL78165 Cc1c([nH]c2nccnc12)c3ccc(Cl)cc3\nCHEMBL80644 COc1cccc(c1)c2cc3[nH]ccnc3n2\nCHEMBL77458 CN(C)c1ccc(cc1)c2cc3[nH]ccnc3n2\nCHEMBL78232 Cn1c(cc2nccnc12)c3ccccc3\nCHEMBL307943 Cc1ccc(cc1)c2cc3[nH]ccnc3n2\nCHEMBL77590 COc1ccccc1c2cc3[nH]ccnc3n2\nCHEMBL77641 Cc1c([nH]c2nccnc12)c3ccc(O)cc3\nCHEMBL307392 c1ccc(cc1)c2cc3[nH]c4ccccc4nc3n2\nCHEMBL76559 c1c[nH]c2cc(nc2n1)c3ccsc3\nCHEMBL79711 COc1ccc(cc1)c2cc3ccc[nH]c3n2\n\nCHEMBL661115\nInhibition of starfish oocyte (Marthasteria glacialis) Cyclin-dependent kinase 1-cyclin B1 \n\nCHEMBL270982 CCC(=C)COc1nc(N)nc2[nH]cnc12\nCHEMBL269881 Nc1nc(OCC2CCCCC2)c3nc[nH]c3n1\nCHEMBL429409 Nc1nc(OCC2=CCCCC2)c3nc[nH]c3n1\nCHEMBL405259 Nc1nc(OCC2=CCCC2)c3nc[nH]c3n1\nCHEMBL271564 CC\\C=C\\CCOc1nc(N)nc2[nH]cnc12\nCHEMBL269872 CC(C)CCOc1nc(N)nc2[nH]cnc12\nCHEMBL115498 Nc1nc(OCC2CCC=CC2)c3[nH]cnc3n1\nCHEMBL114250 CC(C)Oc1[nH]c(N)nc2ncnc12\nCHEMBL270980 CC(C)COc1nc(N)nc2[nH]cnc12\nCHEMBL270483 Nc1nc(OCC#C)c2nc[nH]c2n1\nCHEMBL325053 Nc1nc(OCC=C)c2[nH]cnc2n1\nCHEMBL271593 Nc1nc(OCC2CCCC2)c3nc[nH]c3n1\nCHEMBL407877 CC(C)C(=C)COc1nc(N)nc2[nH]cnc12\nCHEMBL270946 CCCCOc1nc(N)nc2[nH]cnc12\nCHEMBL402453 CCC(C)COc1nc(N)nc2[nH]cnc12\nCHEMBL407876 CC(=C)COc1nc(N)nc2[nH]cnc12\nCHEMBL407874 Nc1nc(OCc2ccccc2)c3nc[nH]c3n1\nCHEMBL272872 Nc1nc(OCCCCC=C)c2nc[nH]c2n1\nCHEMBL272692 CCCOc1nc(N)nc2[nH]cnc12\nCHEMBL406495 CCCCCOc1nc(N)nc2[nH]cnc12\nCHEMBL259244 CCC(C)Oc1nc(N)nc2[nH]cnc12\n\nCHEMBL663599\nInhibitory activity against recombinant mammalian cyclin-dependent kinase 5 (p35 NCK5a) expressed in Escherichia coli \n\nCHEMBL77264 COc1ccc(cc1)c2[nH]c3nccnc3c2C\nCHEMBL305627 o1cccc1c2cc3[nH]ccnc3n2\nCHEMBL77584 Oc1ccccc1c2cc3[nH]ccnc3n2\nCHEMBL80785 Oc1ccc(cc1)c2cc3[nH]ccnc3n2\nCHEMBL77416 c1ccc(nc1)c2cc3[nH]ccnc3n2\nCHEMBL77066 COc1ccc(cc1)c2cc3[nH]ccnc3n2\nCHEMBL305901 c1ccc(cc1)c2cc3[nH]ccnc3n2\nCHEMBL306263 Fc1ccc(cc1)c2cc3[nH]ccnc3n2\nCHEMBL74015 c1csc(c1)c2cc3[nH]ccnc3n2\nCHEMBL78264 c1ccc2c(cccc2c1)c3cc4[nH]ccnc4n3\nCHEMBL311935 Cc1c[nH]c2cc(nc2n1)c3ccccc3\nCHEMBL77697 N#Cc1ccc(cc1)c2cc3[nH]ccnc3n2\nCHEMBL78165 Cc1c([nH]c2nccnc12)c3ccc(Cl)cc3\nCHEMBL80644 COc1cccc(c1)c2cc3[nH]ccnc3n2\nCHEMBL77458 CN(C)c1ccc(cc1)c2cc3[nH]ccnc3n2\nCHEMBL78232 Cn1c(cc2nccnc12)c3ccccc3\nCHEMBL307943 Cc1ccc(cc1)c2cc3[nH]ccnc3n2\nCHEMBL77590 COc1ccccc1c2cc3[nH]ccnc3n2\nCHEMBL77641 Cc1c([nH]c2nccnc12)c3ccc(O)cc3\nCHEMBL307392 c1ccc(cc1)c2cc3[nH]c4ccccc4nc3n2\nCHEMBL76559 c1c[nH]c2cc(nc2n1)c3ccsc3\n\nCHEMBL666137\nPercentage inhibition against cyclin dependent kinase 2 (CDK 2) at a concentration of 50 uM \n\nCHEMBL14489 CCCc1ncc(N)c2c3ccccc3[nH]c12\nCHEMBL14475 CCOc1ccc2c(c1)[nH]c3c(C)nccc23\nCHEMBL340807 COc1ccc2c3CCN=C(C)c3[nH]c2c1\nCHEMBL14156 COC(=O)Cc1nccc2c3ccccc3[nH]c12\nCHEMBL280224 COc1ccc2[nH]c3c(nccc3c2c1)C(=O)O\nCHEMBL14402 CC(C)(C)c1[nH]ccc2c3ccccc3nc12\nCHEMBL442140 
CCc1nccc2c3ccc(F)cc3[nH]c12\nCHEMBL278193 COc1ccc2[nH]c3c(C)nccc3c2c1\nCHEMBL14338 c1ccc(cc1)c2[nH]ccc3c4ccccc4nc23\nCHEMBL280050 Cc1ccc2[nH]c3c(C)nccc3c2c1\nCHEMBL129177 CC1=NCCc2c1[nH]c3cc(O)ccc23\nCHEMBL14787 Cc1nccc2c3cc(F)ccc3[nH]c12\nCHEMBL273547 Cc1[nH]cnc1c2[nH]ccc3c4ccccc4nc23\nCHEMBL14355 Cc1nccc2c3ccc(OCCO)cc3[nH]c12\nCHEMBL14616 Cc1cccc2[nH]c3c(C)nccc3c12\nCHEMBL276792 Cc1nccc2c1[nH]c3cccc(F)c23\nCHEMBL14445 Cc1nccc2c3ccc(F)cc3[nH]c12\nCHEMBL278683 CCCc1nccc2c3ccccc3[nH]c12\nCHEMBL274452 c1ccc2c(c1)[nH]c3c(nccc23)c4c[nH]cn4\nCHEMBL275224 c1ccc2c(c1)[nH]c3cnccc23\nCHEMBL269538 COc1ccc2c(c1)[nH]c3c(C)nccc23\nCHEMBL274972 FC(F)(F)c1[nH]ccc2c3ccccc3nc12\nCHEMBL12014 Cc1nccc2c3ccccc3[nH]c12\nCHEMBL14302 CC(C)c1nccc2c3ccccc3[nH]c12\nCHEMBL14500 Cc1ccc2c(c1)[nH]c3c(C)nccc23\nCHEMBL14441 CCCc1nccc2c3ccc(F)cc3[nH]c12\nCHEMBL486817 CC1=C2NC3=CC(=O)C=CC3=C2C=CN1\n\nCHEMBL666138\nPercentage inhibition against cyclin dependent kinase 5 (CDK 5) at a concentration of 50 uM \n\nCHEMBL14489 CCCc1ncc(N)c2c3ccccc3[nH]c12\nCHEMBL14475 CCOc1ccc2c(c1)[nH]c3c(C)nccc23\nCHEMBL340807 COc1ccc2c3CCN=C(C)c3[nH]c2c1\nCHEMBL14156 COC(=O)Cc1nccc2c3ccccc3[nH]c12\nCHEMBL280224 COc1ccc2[nH]c3c(nccc3c2c1)C(=O)O\nCHEMBL14402 CC(C)(C)c1[nH]ccc2c3ccccc3nc12\nCHEMBL442140 CCc1nccc2c3ccc(F)cc3[nH]c12\nCHEMBL278193 COc1ccc2[nH]c3c(C)nccc3c2c1\nCHEMBL14338 c1ccc(cc1)c2[nH]ccc3c4ccccc4nc23\nCHEMBL280050 Cc1ccc2[nH]c3c(C)nccc3c2c1\nCHEMBL129177 CC1=NCCc2c1[nH]c3cc(O)ccc23\nCHEMBL14787 Cc1nccc2c3cc(F)ccc3[nH]c12\nCHEMBL273547 Cc1[nH]cnc1c2[nH]ccc3c4ccccc4nc23\nCHEMBL14355 Cc1nccc2c3ccc(OCCO)cc3[nH]c12\nCHEMBL14616 Cc1cccc2[nH]c3c(C)nccc3c12\nCHEMBL276792 Cc1nccc2c1[nH]c3cccc(F)c23\nCHEMBL14445 Cc1nccc2c3ccc(F)cc3[nH]c12\nCHEMBL278683 CCCc1nccc2c3ccccc3[nH]c12\nCHEMBL274452 c1ccc2c(c1)[nH]c3c(nccc23)c4c[nH]cn4\nCHEMBL275224 c1ccc2c(c1)[nH]c3cnccc23\nCHEMBL269538 COc1ccc2c(c1)[nH]c3c(C)nccc23\nCHEMBL274972 FC(F)(F)c1[nH]ccc2c3ccccc3nc12\nCHEMBL12014 Cc1nccc2c3ccccc3[nH]c12\nCHEMBL14302 CC(C)c1nccc2c3ccccc3[nH]c12\nCHEMBL14500 Cc1ccc2c(c1)[nH]c3c(C)nccc23\nCHEMBL14441 CCCc1nccc2c3ccc(F)cc3[nH]c12\nCHEMBL486817 CC1=C2NC3=CC(=O)C=CC3=C2C=CN1\n\nCHEMBL808038\nThe compound was evaluated for its binding affinity towards phosphotyrosine binding pocket of Src protein tyrosine kinase SH2 domain \n\nCHEMBL26128 OP(=O)(O)Oc1ccccc1\nCHEMBL24231 OP(=O)(O)Oc1ccc(cc1)[N+](=O)[O-]\nCHEMBL287216 Nc1ccc(OP(=O)(O)O)cc1\nCHEMBL286678 OP(=O)(O)Oc1ccccc1C=O\nCHEMBL285665 OC1=CC=CC2=CC=CN(OP(=O)(O)O)C12\nCHEMBL26001 OC(c1ccc2ccccc2c1)P(=O)(O)O\nCHEMBL24060 COP(=O)(O)Oc1ccccc1C=O\nCHEMBL26124 OC(C1CCCCC1)P(=O)(O)O\nCHEMBL284362 CCc1ccc(NC(=O)P(=O)(O)O)cc1\nCHEMBL25431 OP(=O)(O)Oc1ccc2ccccc2c1\nCHEMBL24648 COc1ccccc1C(O)P(=O)(O)O\nCHEMBL279466 OP(=O)(O)Oc1c(C=O)cccc1C=O\nCHEMBL287276 OP(=O)(O)C(=O)NCc1ccccc1\nCHEMBL287306 OP(=O)(O)Oc1cccc(C=O)c1\nCHEMBL24124 COc1ccc(OP(=O)(O)O)cc1\nCHEMBL25186 COc1cccc(C=O)c1OP(=O)(O)O\nCHEMBL24720 OC(c1ccccc1)P(=O)(O)O\nCHEMBL26349 Cc1cccc(C)c1OP(=O)(O)O\nCHEMBL287555 Cc1cccc(OP(=O)(O)O)c1\nCHEMBL26404 OP(=O)(O)Oc1ccccc1Cl\nCHEMBL287275 OC(=O)c1ccccc1OP(=O)(O)O\nCHEMBL26549 Oc1cccc2CCCN(OP(=O)(O)O)c12\nCHEMBL25780 CC(C)(C)c1ccc(OP(=O)(O)O)cc1\nCHEMBL24730 CCCOc1ccc(cc1)C(O)P(=O)(O)O\nCHEMBL24447 OP(=O)(O)C(=O)NCCc1ccccc1\nCHEMBL284997 COC(=O)c1ccc(OP(=O)(O)O)cc1\nCHEMBL26438 CC(C)OP(=O)(O)Oc1ccccc1C=O\nCHEMBL24745 OP(=O)(O)Oc1cccc2ccccc12\nCHEMBL24645 CC(C)c1ccc(cc1)C(O)P(=O)(O)O\nCHEMBL286758 OC(c1cccc2ccccc12)P(=O)(O)O\nCHEMBL24101 COC(=O)c1cccc(OP(=O)(O)O)c1\nCHEMBL24164 
Cc1ccc(OP(=O)(O)O)cc1\n\nCHEMBL831280\nInhibition of human Protein kinase C alpha \n\nCHEMBL539424 Cn1cnc2c(ncnc12)c3ccc(F)cc3\nCHEMBL359730 N(c1ccccc1)c2ncnc3[nH]cnc23\nCHEMBL361227 Fc1ccc(Nc2[nH]cnc3ncnc23)cc1\nCHEMBL366984 O(c1ccccc1)c2ncnc3[nH]cnc23\nCHEMBL367695 Fc1ccc(Oc2[nH]cnc3ncnc23)cc1\nCHEMBL321749 Cn1cnc2c(Nc3ccccc3)ncnc12\nCHEMBL362629 Cn1cnc2c(Nc3ccc(F)cc3)ncnc12\nCHEMBL175600 S(c1ccccc1)c2ncnc3[nH]cnc23\nCHEMBL175603 Fc1ccc(Sc2ncnc3[nH]cnc23)cc1\nCHEMBL360302 S(c1ccccc1)c2[nH]cnc3nccc23\nCHEMBL367533 Fc1ccc(Sc2[nH]cnc3nccc23)cc1\nCHEMBL179751 Cn1cnc2c(Sc3ccccc3)ncnc12\nCHEMBL397434 C(Sc1ncnc2[nH]cnc12)c3ccccc3\nCHEMBL175816 c1ccc(cc1)[n+]2c[nH]c3ncncc23\nCHEMBL175817 Fc1ccc(cc1)[n+]2c[nH]c3ncncc23\nCHEMBL369622 C(c1ccccc1)[n+]2c[nH]c3ncncc23\nCHEMBL179515 Fc1ccc(C[n+]2c[nH]c3ncncc23)cc1\nCHEMBL7944 Clc1ncnc2c1ncn2Cc3ccccc3\nCHEMBL368715 Clc1ncnc2ncn(Cc3ccccc3)c12\nCHEMBL266094 Nc1ncnc2c1ncn2Cc3ccccc3\nCHEMBL426037 Nc1ncnc2c1ncn2Cc3ccc(F)cc3\nCHEMBL180422 Nc1ncnc2ncn(Cc3ccc(F)cc3)c12\nCHEMBL367930 C(c1ccccc1)c2[nH]cnc3ncnc23\nCHEMBL179158 Fc1ccc(Cc2[nH]cnc3ncnc23)cc1\nCHEMBL367470 Fc1ccc(\\C=C\\c2[nH]cnc3ncnc23)cc1\nCHEMBL176186 Cn1cnc2c(\\C=C\\c3ccccc3)ncnc12\nCHEMBL179217 Cn1cnc2c(ncnc12)c3ccccc3\nCHEMBL226345 Nc1[nH]cnc2ncnc12\n\nCHEMBL831286\nInhibition of human cAMP-dependent protein kinase (PKA) \n\nCHEMBL539424 Cn1cnc2c(ncnc12)c3ccc(F)cc3\nCHEMBL359730 N(c1ccccc1)c2ncnc3[nH]cnc23\nCHEMBL361227 Fc1ccc(Nc2[nH]cnc3ncnc23)cc1\nCHEMBL366984 O(c1ccccc1)c2ncnc3[nH]cnc23\nCHEMBL367695 Fc1ccc(Oc2[nH]cnc3ncnc23)cc1\nCHEMBL321749 Cn1cnc2c(Nc3ccccc3)ncnc12\nCHEMBL362629 Cn1cnc2c(Nc3ccc(F)cc3)ncnc12\nCHEMBL175600 S(c1ccccc1)c2ncnc3[nH]cnc23\nCHEMBL175603 Fc1ccc(Sc2ncnc3[nH]cnc23)cc1\nCHEMBL360302 S(c1ccccc1)c2[nH]cnc3nccc23\nCHEMBL367533 Fc1ccc(Sc2[nH]cnc3nccc23)cc1\nCHEMBL179751 Cn1cnc2c(Sc3ccccc3)ncnc12\nCHEMBL397434 C(Sc1ncnc2[nH]cnc12)c3ccccc3\nCHEMBL175816 c1ccc(cc1)[n+]2c[nH]c3ncncc23\nCHEMBL175817 Fc1ccc(cc1)[n+]2c[nH]c3ncncc23\nCHEMBL369622 C(c1ccccc1)[n+]2c[nH]c3ncncc23\nCHEMBL179515 Fc1ccc(C[n+]2c[nH]c3ncncc23)cc1\nCHEMBL7944 Clc1ncnc2c1ncn2Cc3ccccc3\nCHEMBL368715 Clc1ncnc2ncn(Cc3ccccc3)c12\nCHEMBL266094 Nc1ncnc2c1ncn2Cc3ccccc3\nCHEMBL426037 Nc1ncnc2c1ncn2Cc3ccc(F)cc3\nCHEMBL180422 Nc1ncnc2ncn(Cc3ccc(F)cc3)c12\nCHEMBL367930 C(c1ccccc1)c2[nH]cnc3ncnc23\nCHEMBL179158 Fc1ccc(Cc2[nH]cnc3ncnc23)cc1\nCHEMBL367470 Fc1ccc(\\C=C\\c2[nH]cnc3ncnc23)cc1\nCHEMBL176186 Cn1cnc2c(\\C=C\\c3ccccc3)ncnc12\nCHEMBL179217 Cn1cnc2c(ncnc12)c3ccccc3\nCHEMBL226345 Nc1[nH]cnc2ncnc12\n\nCHEMBL830592\nInhibition of human Protein kinase B alpha at 30 uM \n\nCHEMBL539424 Cn1cnc2c(ncnc12)c3ccc(F)cc3\nCHEMBL359730 N(c1ccccc1)c2ncnc3[nH]cnc23\nCHEMBL361227 Fc1ccc(Nc2[nH]cnc3ncnc23)cc1\nCHEMBL366984 O(c1ccccc1)c2ncnc3[nH]cnc23\nCHEMBL367695 Fc1ccc(Oc2[nH]cnc3ncnc23)cc1\nCHEMBL321749 Cn1cnc2c(Nc3ccccc3)ncnc12\nCHEMBL362629 Cn1cnc2c(Nc3ccc(F)cc3)ncnc12\nCHEMBL175600 S(c1ccccc1)c2ncnc3[nH]cnc23\nCHEMBL175603 Fc1ccc(Sc2ncnc3[nH]cnc23)cc1\nCHEMBL360302 S(c1ccccc1)c2[nH]cnc3nccc23\nCHEMBL367533 Fc1ccc(Sc2[nH]cnc3nccc23)cc1\nCHEMBL179751 Cn1cnc2c(Sc3ccccc3)ncnc12\nCHEMBL397434 C(Sc1ncnc2[nH]cnc12)c3ccccc3\nCHEMBL175816 c1ccc(cc1)[n+]2c[nH]c3ncncc23\nCHEMBL175817 Fc1ccc(cc1)[n+]2c[nH]c3ncncc23\nCHEMBL369622 C(c1ccccc1)[n+]2c[nH]c3ncncc23\nCHEMBL179515 Fc1ccc(C[n+]2c[nH]c3ncncc23)cc1\nCHEMBL7944 Clc1ncnc2c1ncn2Cc3ccccc3\nCHEMBL368715 Clc1ncnc2ncn(Cc3ccccc3)c12\nCHEMBL266094 Nc1ncnc2c1ncn2Cc3ccccc3\nCHEMBL426037 Nc1ncnc2c1ncn2Cc3ccc(F)cc3\nCHEMBL180422 Nc1ncnc2ncn(Cc3ccc(F)cc3)c12\n"
]
]
] | [
"code"
] | [
[
"code",
"code"
]
] |
d09d4517c33038e9fd4bb156813c9748dce658ca | 22,634 | ipynb | Jupyter Notebook | code/randomForest/bostonRegression.ipynb | Knowledge-Precipitation-Tribe/Machine-Learning | 0523e000b7a823e4592d872888f3066c90bb2f51 | [
"MIT"
] | 1 | 2021-03-30T08:41:40.000Z | 2021-03-30T08:41:40.000Z | code/randomForest/bostonRegression.ipynb | Knowledge-Precipitation-Tribe/Machine-Learning | 0523e000b7a823e4592d872888f3066c90bb2f51 | [
"MIT"
] | null | null | null | code/randomForest/bostonRegression.ipynb | Knowledge-Precipitation-Tribe/Machine-Learning | 0523e000b7a823e4592d872888f3066c90bb2f51 | [
"MIT"
] | 1 | 2021-06-21T02:19:58.000Z | 2021-06-21T02:19:58.000Z | 63.937853 | 12,744 | 0.791022 | [
[
[
"import numpy as np\nimport pandas as pd\nimport matplotlib.pyplot as plt\n\nfrom sklearn.tree import DecisionTreeRegressor\nfrom sklearn.ensemble import RandomForestRegressor\nfrom sklearn.datasets import load_boston\n\nfrom sklearn.model_selection import train_test_split, cross_val_score\nfrom sklearn import metrics\nfrom sklearn.impute import SimpleImputer",
"_____no_output_____"
],
[
"boston = load_boston()\nregressor = RandomForestRegressor(n_estimators=100, random_state=0)\ncross_val_score(regressor, boston.data, boston.target, cv=10, scoring=\"neg_mean_squared_error\")",
"_____no_output_____"
],
[
"sorted(metrics.SCORERS.keys())",
"_____no_output_____"
]
],
[
[
"# Filling missing values with a random forest",
"_____no_output_____"
]
],
[
[
"dataset = load_boston()\ndataset.data.shape\n# 506*13 = 6578 values in total\nX_full, y_full = dataset.data, dataset.target\nn_samples = X_full.shape[0]\nn_features = X_full.shape[1]",
"_____no_output_____"
]
],
[
[
"Add missing values to the dataset",
"_____no_output_____"
]
],
[
[
"# First decide what proportion of missing data we want to introduce; here we assume 50%, i.e. 3289 missing values in total\nrng = np.random.RandomState(0)\nmissing_rate = 0.5\nn_missing_samples = int(np.floor(n_samples * n_features * missing_rate))\n# np.floor rounds down and returns a float ending in .0\n\n# The missing values should be scattered randomly over all rows and columns, and each missing value needs a row index and a column index\n# If we create an array of 3289 row indices between 0 and 506 and 3289 column indices between 0 and 13, we can\n# use those indices to set any 3289 positions in the data to NaN\n# We then fill the missing values with 0, with the mean and with a random forest, and compare how the regression performs\n\nmissing_features = rng.randint(0,n_features,n_missing_samples)\nmissing_samples = rng.randint(0,n_samples,n_missing_samples)\n\n#missing_samples = rng.choice(dataset.data.shape[0],n_missing_samples,replace=False)\n# We are drawing 3289 values here, far more than our 506 samples, so we use randint, which samples with replacement. If we needed\n# fewer values than our 506 samples, we could use np.random.choice instead; choice draws distinct random numbers,\n# which spreads the missing values out and keeps them from concentrating in a few rows\n\nX_missing = X_full.copy()\ny_missing = y_full.copy()\n\nX_missing[missing_samples,missing_features] = np.nan\nX_missing = pd.DataFrame(X_missing)\n# Converting to a DataFrame makes the later operations more convenient; numpy is extremely fast for matrix arithmetic, but pandas is more comfortable for indexing and similar operations",
"_____no_output_____"
]
],
[
[
"Fill with 0 and with the mean",
"_____no_output_____"
]
],
[
[
"# Impute with the mean\nfrom sklearn.impute import SimpleImputer\nimp_mean = SimpleImputer(missing_values=np.nan, strategy='mean')\nX_missing_mean = imp_mean.fit_transform(X_missing)\n\n# Impute with 0\nimp_0 = SimpleImputer(missing_values=np.nan, strategy=\"constant\",fill_value=0)\nX_missing_0 = imp_0.fit_transform(X_missing)",
"_____no_output_____"
]
],
[
[
"Fill missing values with a random forest",
"_____no_output_____"
]
],
[
[
"\"\"\"\nFilling missing values with random forest regression\nAny regression learns from a feature matrix and then predicts a continuous label y. This works because the regression algorithm\nassumes there is some relationship between the feature matrix and the label. In fact, label and features are interchangeable: in a\nproblem that predicts house price from district, environment and number of nearby schools, we can just as well use environment,\nnumber of nearby schools and house price to predict district. Regression-based imputation uses exactly this idea.\n\nFor a dataset with n features in which feature T has missing values, we treat T as the label, and the other n-1 features plus the\noriginal label form a new feature matrix. The rows where T is not missing have both features and a label; the rows where T is missing have only features and no label, and those are the values we need to predict.\n\n\nOther n-1 features + original label for rows where T is not missing: X_train\nValues of T that are not missing: Y_train\n\nOther n-1 features + original label for rows where T is missing: X_test\nValues of T that are missing: unknown, the Y_test we need to predict\n\nThis approach works very well when one feature has many missing values while the other features are largely complete.\n\nWhat if features other than T also have missing values?\nThe answer is to iterate over all features, starting with the one that has the fewest missing values (filling it requires the least accurate information).\nWhile filling one feature, the missing values of the other features are temporarily replaced with 0. After each regression prediction, the predicted values are written back into the original feature matrix before moving on to the next\nfeature. After each pass one fewer feature contains missing values, so fewer and fewer features need the 0 placeholder. By the time\nwe reach the last feature (the one with the most missing values), no other feature needs to be padded with 0 any more,\nand the large amount of information already imputed by regression can be used to fill this most incomplete feature.\nOnce every feature has been visited, the data is complete and contains no missing values.\n\"\"\"\n\nX_missing_reg = X_missing.copy()\n# order the columns by their number of missing values, from fewest to most\nsortindex = np.argsort(X_missing_reg.isnull().sum(axis=0)).values\n\nfor i in sortindex:\n    # build the new feature matrix and the new label\n    df = X_missing_reg\n    fillc = df.iloc[:,i]\n    df = pd.concat([df.iloc[:,df.columns != i],pd.DataFrame(y_full)],axis=1)\n    # in the new feature matrix, fill the columns that contain missing values with 0\n    df_0 =SimpleImputer(missing_values=np.nan,strategy='constant',fill_value=0).fit_transform(df)\n    # split into our training and test sets\n    Ytrain = fillc[fillc.notnull()]\n    Ytest = fillc[fillc.isnull()]\n    Xtrain = df_0[Ytrain.index,:]\n    Xtest = df_0[Ytest.index,:]\n    # use random forest regression to predict the missing values\n    rfc = RandomForestRegressor(n_estimators=100)\n    rfc = rfc.fit(Xtrain, Ytrain)\n    Ypredict = rfc.predict(Xtest)\n    # write the imputed values back into the original feature matrix\n    X_missing_reg.loc[X_missing_reg.iloc[:,i].isnull(),i] = Ypredict",
"_____no_output_____"
]
],
[
[
"对填补好的数据进行建模",
"_____no_output_____"
]
],
[
[
"#对所有数据进行建模,取得MSE结果\nX = [X_full,X_missing_mean,X_missing_0,X_missing_reg]\nmse = []\nstd = []\n\nfor x in X:\n estimator = RandomForestRegressor(random_state=0, n_estimators=100)\n scores = cross_val_score(estimator,x,y_full,scoring='neg_mean_squared_error',cv=5).mean()\n mse.append(scores * -1)",
"_____no_output_____"
],
[
"x_labels = ['Full data',\n 'Zero Imputation',\n 'Mean Imputation',\n 'Regressor Imputation']\ncolors = ['r', 'g', 'b', 'orange']\n\nplt.figure(figsize=(12, 6))\nax = plt.subplot(111)\nfor i in np.arange(len(mse)):\n ax.barh(i, mse[i],color=colors[i], alpha=0.6, align='center')\nax.set_title('Imputation Techniques with Boston Data')\nax.set_xlim(left=np.min(mse) * 0.9,\n right=np.max(mse) * 1.1)\nax.set_yticks(np.arange(len(mse)))\nax.set_xlabel('MSE')\nax.set_yticklabels(x_labels)\nplt.show()",
"_____no_output_____"
]
]
] | [
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] | [
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
]
] |
d09d4b7cae82850ad4a78a3e47d71cf911c50eac | 8,087 | ipynb | Jupyter Notebook | aata/normal-sage-exercises.ipynb | johnperry-math/cocalc-examples | 394479e972dc2b74211113bbb43bc1ec4ec9978c | [
"Apache-2.0",
"CC-BY-4.0"
] | 13 | 2017-09-06T23:04:59.000Z | 2021-04-05T11:08:51.000Z | aata/normal-sage-exercises.ipynb | johnperry-math/cocalc-examples | 394479e972dc2b74211113bbb43bc1ec4ec9978c | [
"Apache-2.0",
"CC-BY-4.0"
] | 9 | 2018-02-01T15:58:28.000Z | 2021-07-14T15:18:35.000Z | aata/normal-sage-exercises.ipynb | johnperry-math/cocalc-examples | 394479e972dc2b74211113bbb43bc1ec4ec9978c | [
"Apache-2.0",
"CC-BY-4.0"
] | 10 | 2017-10-26T17:30:03.000Z | 2021-12-11T07:25:28.000Z | 539.133333 | 1,228 | 0.710152 | [
[
[
"%%html\n<link href=\"http://mathbook.pugetsound.edu/beta/mathbook-content.css\" rel=\"stylesheet\" type=\"text/css\" />\n<link href=\"https://aimath.org/mathbook/mathbook-add-on.css\" rel=\"stylesheet\" type=\"text/css\" />\n<style>.subtitle {font-size:medium; display:block}</style>\n<link href=\"https://fonts.googleapis.com/css?family=Open+Sans:400,400italic,600,600italic\" rel=\"stylesheet\" type=\"text/css\" />\n<link href=\"https://fonts.googleapis.com/css?family=Inconsolata:400,700&subset=latin,latin-ext\" rel=\"stylesheet\" type=\"text/css\" /><!-- Hide this cell. -->\n<script>\nvar cell = $(\".container .cell\").eq(0), ia = cell.find(\".input_area\")\nif (cell.find(\".toggle-button\").length == 0) {\nia.after(\n $('<button class=\"toggle-button\">Toggle hidden code</button>').click(\n function (){ ia.toggle() }\n )\n )\nia.hide()\n}\n</script>\n",
"_____no_output_____"
]
],
[
[
"**Important:** to view this notebook properly you will need to execute the cell above, which assumes you have an Internet connection. It should already be selected, or place your cursor anywhere above to select. Then press the \"Run\" button in the menu bar above (the right-pointing arrowhead), or press Shift-Enter on your keyboard.",
"_____no_output_____"
],
[
"$\\newcommand{\\identity}{\\mathrm{id}}\n\\newcommand{\\notdivide}{\\nmid}\n\\newcommand{\\notsubset}{\\not\\subset}\n\\newcommand{\\lcm}{\\operatorname{lcm}}\n\\newcommand{\\gf}{\\operatorname{GF}}\n\\newcommand{\\inn}{\\operatorname{Inn}}\n\\newcommand{\\aut}{\\operatorname{Aut}}\n\\newcommand{\\Hom}{\\operatorname{Hom}}\n\\newcommand{\\cis}{\\operatorname{cis}}\n\\newcommand{\\chr}{\\operatorname{char}}\n\\newcommand{\\Null}{\\operatorname{Null}}\n\\newcommand{\\lt}{<}\n\\newcommand{\\gt}{>}\n\\newcommand{\\amp}{&}\n$",
"_____no_output_____"
],
[
"<div class=\"mathbook-content\"><h2 class=\"heading hide-type\" alt=\"Exercises 10.5 Sage Exercises\"><span class=\"type\">Section</span><span class=\"codenumber\">10.5</span><span class=\"title\">Sage Exercises</span></h2><a href=\"normal-sage-exercises.ipynb\" class=\"permalink\">¶</a></div>",
"_____no_output_____"
],
[
"<div class=\"mathbook-content\"><article class=\"exercise-like\" id=\"exercise-381\"><h6 class=\"heading\"><span class=\"codenumber\">1</span></h6><p id=\"p-1698\">Build every subgroup of the alternating group on 5 symbols, $A_5\\text{,}$ and check that each is not a normal subgroup (except for the two trivial cases). This command might take a couple seconds to run. Compare this with the time needed to run the <code class=\"code-inline tex2jax_ignore\">.is_simple()</code> method and realize that there is a significant amount of theory and cleverness brought to bear in speeding up commands like this. (It is possible that your Sage installation lacks <abbr class=\"acronym\">GAP</abbr>'s “Table of Marks” library and you will be unable to compute the list of subgroups.)</p></article></div>",
"_____no_output_____"
],
[
"<div class=\"mathbook-content\"><article class=\"exercise-like\" id=\"exercise-382\"><h6 class=\"heading\"><span class=\"codenumber\">2</span></h6><p id=\"p-1699\">Consider the quotient group of the group of symmetries of an $8$-gon, formed with the cyclic subgroup of order $4$ generated by a quarter-turn. Use the <code class=\"code-inline tex2jax_ignore\">coset_product</code> function to determine the Cayley table for this quotient group. Use the number of each coset, as produced by the <code class=\"code-inline tex2jax_ignore\">.cosets()</code> method as names for the elements of the quotient group. You will need to build the table “by hand” as there is no easy way to have Sage's Cayley table command do this one for you. You can build a table in the Sage Notebook pop-up editor (shift-click on a blue line) or you might read the documentation of the <code class=\"code-inline tex2jax_ignore\">html.table()</code> method.</p></article></div>",
"_____no_output_____"
],
[
"<div class=\"mathbook-content\"><article class=\"exercise-like\" id=\"exercise-383\"><h6 class=\"heading\"><span class=\"codenumber\">3</span></h6><p id=\"p-1700\">Consider the cyclic subgroup of order $4$ in the symmetries of an $8$-gon. Verify that the subgroup is normal by first building the raw left and right cosets (without using the <code class=\"code-inline tex2jax_ignore\">.cosets()</code> method) and then checking their equality in Sage, all with a single command that employs sorting with the <code class=\"code-inline tex2jax_ignore\">sorted()</code> command.</p></article></div>",
"_____no_output_____"
],
[
"<div class=\"mathbook-content\"><article class=\"exercise-like\" id=\"exercise-384\"><h6 class=\"heading\"><span class=\"codenumber\">4</span></h6><p id=\"p-1701\">Again, use the same cyclic subgroup of order $4$ in the group of symmetries of an $8$-gon. Check that the subgroup is normal by using part (2) of Theorem <a href=\"section-factor-groups.ipynb#theorem-normal-equivalents\" class=\"xref\" alt=\"Theorem 10.3 \" title=\"Theorem 10.3 \">10.3</a>. Construct a one-line command that does the complete check and returns <code class=\"code-inline tex2jax_ignore\">True</code>. Maybe sort the elements of the subgroup <code class=\"code-inline tex2jax_ignore\">S</code> first, then slowly build up the necessary lists, commands, and conditions in steps. Notice that this check does not require ever building the cosets.</p></article></div>",
"_____no_output_____"
],
[
"<div class=\"mathbook-content\"><article class=\"exercise-like\" id=\"exercise-385\"><h6 class=\"heading\"><span class=\"codenumber\">5</span></h6><p id=\"p-1702\">Repeat the demonstration from the previous subsection that for the symmetries of a tetrahedron, a cyclic subgroup of order $3$ results in an undefined coset multiplication. Above, the default setting for the <code class=\"code-inline tex2jax_ignore\">.cosets()</code> method builds right cosets — but in this problem, work instead with left cosets. You need to choose two cosets to multiply, and then demonstrate two choices for representatives that lead to different results for the product of the cosets.</p></article></div>",
"_____no_output_____"
],
[
"<div class=\"mathbook-content\"><article class=\"exercise-like\" id=\"exercise-386\"><h6 class=\"heading\"><span class=\"codenumber\">6</span></h6><p id=\"p-1703\">Construct some dihedral groups of order $2n$ (i.e. symmetries of an $n$-gon, $D_{n}$ in the text, <code class=\"code-inline tex2jax_ignore\">DihedralGroup(n)</code> in Sage). Maybe all of them for $3\\leq n \\leq 100\\text{.}$ For each dihedral group, construct a list of the orders of each of the normal subgroups (so use <code class=\"code-inline tex2jax_ignore\">.normal_subgroups()</code>). You may need to wait ten or twenty seconds for this to finish - be patient. Observe enough examples to hypothesize a pattern to your observations, check your hypothesis against each of your examples and then state your hypothesis clearly.</p><p id=\"p-1704\">Can you predict how many normal subgroups there are in the dihedral group $D_{470448}$ without using Sage to build all the normal subgroups? Can you <em class=\"emphasis\">describe</em> all of the normal subgroups of a dihedral group in a way that would let us predict all of the normal subgroups of $D_{470448}$ without using Sage?</p></article></div>",
"_____no_output_____"
]
]
] | [
"code",
"markdown"
] | [
[
"code"
],
[
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown",
"markdown"
]
] |
d09d735b92e28b210af969c8edae18c30ec2c695 | 134,253 | ipynb | Jupyter Notebook | figures/Figure_6_cohort_VE.ipynb | cobeylab/FluAImprinting | c9f6d85df888a71a7f5dce7f0775514f4164b48d | [
"Apache-2.0"
] | 2 | 2020-10-23T06:17:55.000Z | 2021-06-22T02:55:18.000Z | figures/Figure_6_cohort_VE.ipynb | cobeylab/FluAImprinting | c9f6d85df888a71a7f5dce7f0775514f4164b48d | [
"Apache-2.0"
] | null | null | null | figures/Figure_6_cohort_VE.ipynb | cobeylab/FluAImprinting | c9f6d85df888a71a7f5dce7f0775514f4164b48d | [
"Apache-2.0"
] | 1 | 2020-07-14T03:01:06.000Z | 2020-07-14T03:01:06.000Z | 268.506 | 47,108 | 0.897529 | [
[
[
"from matplotlib import pyplot as plt\nimport pandas as pd\nimport seaborn as sns\nfrom matplotlib import rcParams\nimport numpy as np\n%matplotlib inline\n\nrcParams['font.sans-serif'] = 'arial'\npal = sns.xkcd_palette(['dark sky blue', 'light sky blue', 'deep red']).as_hex()\n\nimprinting_df = pd.read_csv('../data/imprinting_function_birth_year.csv')\npop_df = pd.read_csv('../data/demography_by_birth_year.csv')\n\nprofiles = pd.read_csv('../final_results_for_ms/15-100/DAHVcohort_subtype.profile_liks.csv', index_col='param')\n\n\nimprinting_df = imprinting_df[imprinting_df.Season==2018]\npop_df = pop_df[pop_df.Season==2018]\ndef make_pie_scatter(X, Y, r1, r2, ax, colors, size=200, edgecolor='#666666'):\n x = [0] + np.cos(np.linspace(0, 2 * np.pi * r1, 1000)).tolist()\n y = [0] + np.sin(np.linspace(0, 2 * np.pi * r1, 1000)).tolist()\n xy1 = np.column_stack([x, y])\n s1 = np.abs(xy1).max()\n\n x = [0] + np.cos(np.linspace(2 * np.pi * r1, 2 * np.pi * r2, 1000)).tolist()\n y = [0] + np.sin(np.linspace(2 * np.pi * r1, 2 * np.pi * r2, 1000)).tolist()\n xy2 = np.column_stack([x, y])\n s2 = np.abs(xy2).max()\n\n x = [0] + np.cos(np.linspace(2 * np.pi * r2, 2 * np.pi, 1000)).tolist()\n y = [0] + np.sin(np.linspace(2 * np.pi * r2, 2 * np.pi, 1000)).tolist()\n xy3 = np.column_stack([x, y])\n s3 = np.abs(xy3).max()\n\n \n ax.scatter([X], [Y], marker=(xy1),\n s=size, facecolor=colors[0],\n edgecolor=edgecolor)\n ax.scatter([X], [Y], marker=(xy2),\n s=size, facecolor=colors[1],\n edgecolor=edgecolor)\n ax.scatter([X], [Y], marker=(xy3),\n s=size, facecolor=colors[2],\n edgecolor=edgecolor)\n \n\n\ndef get_imprinting_probs(cohort_label):\n min_birth_year, max_birth_year = cohort_label.split('-')\n min_birth_year = int(min_birth_year)\n max_birth_year = int(max_birth_year)\n m = imprinting_df[(imprinting_df.Birth_year >= min_birth_year) &\n (imprinting_df.Birth_year <= max_birth_year)].sort_values('Birth_year')\n p = pop_df[(pop_df.Birth_year >= min_birth_year) &\n (pop_df.Birth_year <= max_birth_year)].sort_values('Birth_year')\n weights = np.array(p.Population / p.sum().Population)\n h1 = sum(m['H1'] * weights)\n h2 = sum(m['H2'] * weights)\n h3 = sum(m['H3'] * weights)\n\n return(h1, h2, h3)\n\nx = []\ny = []\nax0 = plt.subplot(111)\nax0.plot([0, 1], [0, 1], '--', color='#cccccc', zorder=0)\nflip = ['1968-1977']\nfor param, row in profiles.iterrows():\n if type(param) == str:\n if 'h1' in param and 'VE' in param:\n label = param.split('_')[1].replace('.','-')\n if label == '2003-2007':\n label = '2003-2006'\n if label == '1917-1952':\n label = '1918-1952'\n h1, h2, h3 = get_imprinting_probs(label)\n print(h1,h2,h3,label)\n row2 = profiles.loc[param.replace('h1', 'h3'), ]\n if row.mle != 0.5:\n #y.append(row.mle)\n #x.append(row2.mle)\n\n if label in flip:\n ax0.text(row2.mle - 0.02, row.mle -0.03, label, va='center', ha='right', size=9)\n else:\n ax0.text(row2.mle + 0.02, row.mle -0.03, label, va='center', size=9)\n # errorbars\n ax0.hlines(row.mle, row2.prof_min, row2.prof_max, linestyle='-', color='#aaaaaa', zorder=0)\n ax0.vlines(row2.mle, row.prof_min, row.prof_max, linestyle='-', color='#aaaaaa', zorder=0)\n make_pie_scatter(row2.mle, row.mle, h1, h1+h2, ax0, pal)\n#ax0.plot(x, y, 'o', markeredgecolor='purple', color='white')\n#ax0.set_ylim(0, 1.05)\n\nl_h1, = plt.plot([100, 100], [100, 100], 's', color=pal[0], markersize=10, label='H1N1', markeredgecolor='k')\nl_h2, = plt.plot([100, 100], [100, 100], 's', color=pal[1], markersize=10, label='H3N2', markeredgecolor='k')\nl_h3, = plt.plot([100, 
100], [100, 100], 's', color=pal[2], markersize=10, label='H3N2', markeredgecolor='k')\nplt.legend((l_h1, l_h2, l_h3), ('H1N1', 'H2N2', 'H3N2'), ncol=3, loc='upper center', bbox_to_anchor=(0.5, -0.2), title='Imprinting subtype')\n\nplt.xticks(np.arange(0, 1.1, 0.1), range(0,110,10))\nplt.yticks(np.arange(0, 1.1, 0.1), range(0,110,10))\n\nax0.set_xlim(-0.05, 1.05)\nax0.set_ylim(-0.01, 1.05)\nax0.set_xlabel('Cohort-specific VE for H3N2 (%)', weight='bold')\nax0.set_ylabel('Cohort-specific VE for H1N1 (%)', weight='bold')\nplt.gcf().set_size_inches(4,4)",
"0.23822795785154927 0.0 0.761039397301948 1998-2002\n0.14567991199338928 0.0 0.8542306303000468 1988-1997\n0.3619136781221029 0.0 0.6380816888964732 1978-1987\n0.18256659020313182 6.29290070516151e-05 0.8173700301788517 1968-1977\n0.12036063535637873 0.7018086316647366 0.1778307207007203 1953-1967\n0.9525354061728994 0.046219535865558194 0.0012450578755783233 1918-1952\n"
],
[
"import glob\nimport pandas as pd\nimport seaborn as sns\nfrom matplotlib import pyplot as plt\nfrom matplotlib import rcParams\nfrom matplotlib.colors import ListedColormap\nimport numpy as np\n%matplotlib inline\n\ndf = pd.read_csv('../final_results_for_ms/15-100/result_summary.csv', index_col='Unnamed: 0')\n\npal = sns.color_palette('colorblind').as_hex()\nflatui = ['white', pal[3], 'darkgreen', 'lightgreen']\nmy_cmap = ListedColormap(sns.color_palette(flatui).as_hex())\n\n\nrcParams['font.sans-serif'] = 'arial'\nsns.set_context('paper')\nfull_model='DAHNV'\n\nfinal_df = pd.DataFrame(columns=['D', 'E', 'Ap', 'vac_cov', 'Nu', 'A', 'N2', 'H_sub','H_group', 'V_constant', 'V_age', 'V_season', 'V_imprinting', 'V_cohort', 'cAIC'])\nrow = 0\n\n\ndf = df.iloc[1:, ]\n\nexclude = ['DAHVage_subtype', 'DAHVcohort_subtype', 'DAHNVageseason_subtype', 'DAHNVageseason_group',\n 'DAHNVcohortseason_subtype', 'DAHNVcohortseason_group', 'DAVage', 'DAVcohort', 'DAVimprinting', 'DAVseason']\n\n\nfor model, r in df.iterrows():\n if model not in exclude:\n if 'Vage' in model:\n V = 'V_age'\n final_df.loc[row, V] = 1\n elif 'Vseason' in model:\n V = 'V_season'\n final_df.loc[row, V] = 1\n elif 'Vimprinting' in model:\n V = 'V_imprinting'\n final_df.loc[row, V] = 1\n elif 'Vcohort' in model:\n V = 'V_cohort'\n final_df.loc[row, V] = 1\n elif 'Vmean' in model:\n V = 'V_constant'\n final_df.loc[row, V] = 1\n\n if 'H' in model:\n if 'subtype' in model:\n final_df.loc[row, 'H_sub'] = 1\n elif 'group' in model:\n final_df.loc[row, 'H_group'] = 1\n\n if 'N' in model:\n\n if r['N2m'] != 0:\n final_df.loc[row, 'N2'] = 0.5\n else:\n final_df.loc[row, 'N2'] = 0.5\n final_df.loc[row, 'A'] = 1\n final_df.loc[row, 'D'] = 0.25\n final_df.loc[row, 'E'] = 0.25\n final_df.loc[row, 'Ap'] = 0.25\n final_df.loc[row, 'vac_cov'] = 0.25\n final_df.loc[row, 'Nu'] = 0.25\n #final_df.loc[row, '']\n final_df.loc[row, 'cAIC'] = r.cAIC\n row += 1\n\nfinal_df = final_df.sort_values('cAIC')\nfinal_df = final_df.fillna(0)\n#final_df['cAIC'] = [np.exp(-0.5 * (c - min(final_df['cAIC']))) for c in final_df['cAIC']]\n#final_df.index = [\"%.4f\" % (c/sum(final_df.cAIC)) for c in final_df['cAIC']]\nfinal_df.index = [\"%.4f\" % (c - min(final_df['cAIC'])) for c in final_df['cAIC']]\nfinal_df = final_df.loc[:, final_df.columns != 'cAIC']\nfinal_df.columns = ['Demography',\n 'Enrollment fraction',\n 'Approachment fraction',\n 'Healthcare-seeking behavior among vaccinated',\n 'Nursing home residency',\n 'Age-specific risk of medically attended influenza A infection',\n 'N2 imprinting',\n 'HA imprinting (subtype)',\n 'HA imprinting (group)',\n 'Vaccine effectiveness (constant)',\n 'Vaccine effectiveness (age-specific)',\n 'Vaccine effectiveness (season-specific)',\n 'Vaccine effectiveness (imprinting-specific)',\n 'Vaccine effectiveness (cohort-specific)']\nsns.heatmap(final_df, cmap=my_cmap, linewidths=1, linecolor='black', cbar=False, yticklabels=1)\nax = plt.gca()\nax.xaxis.tick_top()\nplt.yticks(rotation=0, fontsize=10)\nplt.xticks(rotation=45, ha='left', weight='bold')\nplt.ylabel('Δ cAIC', weight='bold')\nf = plt.gcf()\nf.set_size_inches(5.5, 5.5)\nplt.tight_layout()",
"_____no_output_____"
],
[
"import pandas as pd\nfrom matplotlib import pyplot as plt\nfrom matplotlib import rcParams, patches\nimport seaborn as sns\nimport numpy as np\n%matplotlib inline\n\nH1_cohort_expectations = '../final_results_for_ms/15-100/DAHVcohort_subtype_H1_expectations.csv'\nH1_age_expectations = '../final_results_for_ms/15-100/DAHVage_subtype_H1_expectations.csv'\nH3_cohort_expectations = '../final_results_for_ms/15-100/DAHVcohort_subtype_H3_expectations.csv'\nH3_age_expectations = '../final_results_for_ms/15-100/DAHVage_subtype_H3_expectations.csv'\n\ndef get_labels(age_classes):\n labels = []\n for l in age_classes:\n if l == '65-100':\n labels.append('65+')\n else:\n labels.append(l.replace('v',''))\n return labels\n\ndef season_to_label(season):\n if season == 2009.5:\n label = '2009Pan'\n else:\n label = str(int(season) - 1) + '-' + str(int(season))\n return label\n\nrcParams['font.sans-serif'] = 'arial'\nrcParams['font.size'] = 10\nrcParams['font.weight'] = 'medium'\npal = sns.xkcd_palette(['dark sky blue', 'sky', 'deep red', 'baby pink']).as_hex()\nh3_seasons = [2008, 2011, 2012, 2013, 2015, 2017, 2018]\n\n\nkwargs={'linewidth': 1,\n 'zorder': 10,\n 'color': '#8a8a8a'}\nsubplot = 1\nexcess = []\nresid_co = 0\nresid_ag = 0\nfor season in range(2008, 2019):\n if season not in h3_seasons and season != 2009.5:\n df_cohort = pd.read_csv(H1_cohort_expectations, index_col=0)\n df_age = pd.read_csv(H1_age_expectations, index_col=0)\n df_cohort = df_cohort[df_cohort.vac_status == 'vaccinated']\n df_age = df_age[df_age.vac_status == 'vaccinated']\n \n plt.subplot(4,3,subplot)\n codf = df_cohort[df_cohort.season==season].copy()\n agdf = df_age[df_age.season==season].copy()\n \n \n final_df = pd.merge(codf, agdf, suffixes=['_co', '_ag'], on=['age_group', 'Observed'])\n final_df['Excess_co'] = final_df.Observed - final_df.Prediction_co\n\n \n \n final_df['Excess_ag'] = final_df.Observed - final_df.Prediction_ag\n new_rows = []\n x = []\n y1 = []\n y2 = []\n x1 = []\n x2 = []\n start = -0.2\n for index, row in final_df.iterrows():\n new_rows.append([row.age_group, row.Excess_co, 'Cohort VE'])\n new_rows.append([row.age_group, row.Excess_ag, 'Age VE'])\n x.append(start)\n x.append(start + 0.4)\n \n y2.append(row.ci_high_co - row.Prediction_co)\n y2.append(row.ci_high_ag - row.Prediction_ag)\n y1.append(row.ci_low_co - row.Prediction_co)\n y1.append(row.ci_low_ag - row.Prediction_ag)\n \n x1.append(start-0.1)\n x2.append(start+0.1)\n \n x1.append(start + 0.4 - 0.1)\n x2.append(start + 0.4 + 0.1)\n start += 1\n plotdf = pd.DataFrame(new_rows, columns = ['Age group', 'Excess cases', 'VE type'])\n plt.vlines(x=x, ymin=y1, ymax=y2, **kwargs)\n plt.hlines(y=y1, xmin = x1, xmax=x2, **kwargs)\n plt.hlines(y=y2, xmin = x1, xmax=x2, **kwargs)\n ax = sns.barplot(data=plotdf, x='Age group', y='Excess cases', hue='VE type', palette=pal[0:2],edgecolor='#333333')\n ax.legend_.remove()\n check1 = final_df[(final_df.Observed < final_df.ci_low_ag) | (final_df.Observed > final_df.ci_high_ag)].copy()\n check2 = final_df[(final_df.Observed < final_df.ci_low_co) | (final_df.Observed > final_df.ci_high_co)].copy()\n \n\n elif season != 2009.5:\n df_cohort = pd.read_csv(H3_cohort_expectations, index_col=0)\n df_age = pd.read_csv(H3_age_expectations, index_col=0)\n df_cohort = df_cohort[df_cohort.vac_status == 'vaccinated']\n df_age = df_age[df_age.vac_status == 'vaccinated']\n \n plt.subplot(4,3,subplot)\n codf = df_cohort[df_cohort.season==season].copy()\n agdf = df_age[df_age.season==season].copy()\n\n \n final_df = 
pd.merge(codf, agdf, suffixes=['_co', '_ag'], on=['age_group', 'Observed'])\n final_df['Excess_co'] = final_df.Observed - final_df.Prediction_co\n \n final_df['Excess_ag'] = final_df.Observed - final_df.Prediction_ag\n\n \n new_rows = []\n x = []\n x1 = []\n x2 = []\n y1 = []\n y2 = []\n start = -0.2\n for index, row in final_df.iterrows():\n new_rows.append([row.age_group, row.Excess_co, 'Cohort VE'])\n new_rows.append([row.age_group, row.Excess_ag, 'Age VE'])\n x.append(start)\n x.append(start + 0.4)\n \n y2.append(row.ci_high_co - row.Prediction_co)\n y2.append(row.ci_high_ag - row.Prediction_ag)\n y1.append(row.ci_low_co - row.Prediction_co)\n y1.append(row.ci_low_ag - row.Prediction_ag)\n \n x1.append(start-0.1)\n x2.append(start+0.1)\n \n x1.append(start + 0.4 - 0.1)\n x2.append(start + 0.4 + 0.1)\n start += 1\n plotdf = pd.DataFrame(new_rows, columns = ['Age group', 'Excess cases', 'VE type'])\n plt.vlines(x=x, ymin=y1, ymax=y2, **kwargs)\n plt.hlines(y=y1, xmin = x1, xmax=x2, **kwargs)\n plt.hlines(y=y2, xmin = x1, xmax=x2, **kwargs)\n ax = sns.barplot(data=plotdf, x='Age group', y='Excess cases', hue='VE type', palette=pal[2:],edgecolor='#333333')\n ax.legend_.remove()\n check1 = final_df[(final_df.Observed < final_df.ci_low_ag) | (final_df.Observed > final_df.ci_high_ag)].copy()\n check2 = final_df[(final_df.Observed < final_df.ci_low_co) | (final_df.Observed > final_df.ci_high_co)].copy()\n \n plt.title(str(season - 1) + '-' + str(season), weight='bold')\n plt.axhline(0, color='black', linewidth=1)\n ticks, labels = plt.xticks()\n if subplot not in [1,4,7,10]:\n plt.ylabel('')\n else:\n plt.ylabel('Exceess cases\\namong vaccinated\\nindividuals', weight='bold')\n if subplot not in [9, 10, 11]:\n plt.xlabel('')\n plt.xticks(ticks, [])\n else:\n plt.xlabel('Age group\\n(years)', weight='bold')\n plt.xticks(ticks, labels, rotation=45, ha='right')\n plt.gcf().align_ylabels()\n subplot += 1\n\nxmin, xmax = plt.xlim()\nplt.gcf().set_size_inches(5.5, 7)\nplt.tight_layout()\nb1, = plt.bar([10], [0], color=pal[0], edgecolor='#333333', label='H1N1 unvaccinated')\nb2, = plt.bar([10], [0], color=pal[1], edgecolor='#333333', label='H1N1 vaccinated')\nb3, = plt.bar([10], [0], color=pal[2], edgecolor='#333333', label='H3N2 unvaccinated')\nb4, = plt.bar([10], [0], color=pal[3], edgecolor='#333333', label='H3N2 vaccinated')\n\nplt.legend((b1, b2,b3,b4),\n ('H1N1 cohort VE model',\n 'H1N1 age VE model',\n 'H3N2 cohort VE model',\n 'H3N2 age VE model'),\n loc='center',\n bbox_to_anchor=(0.5, -1.6),\n ncol=2)\nplt.xlim(xmin, xmax)",
"_____no_output_____"
]
]
] | [
"code"
] | [
[
"code",
"code",
"code"
]
] |
d09d7a5b8c87098b674e064d65b745ccceecce5b | 1,204 | ipynb | Jupyter Notebook | Chapter01/unittest_chapter1/.ipynb_checkpoints/exercise 12-checkpoint.ipynb | arifmudi/The-Data-Wrangling-Workshop | c325f6fa1c6daf8dd22e9705df48ce2644217a73 | [
"MIT"
] | 22 | 2020-06-27T04:21:49.000Z | 2022-03-08T04:39:44.000Z | Chapter01/unittest_chapter1/.ipynb_checkpoints/exercise 12-checkpoint.ipynb | arifmudi/The-Data-Wrangling-Workshop | c325f6fa1c6daf8dd22e9705df48ce2644217a73 | [
"MIT"
] | 2 | 2021-02-02T22:49:16.000Z | 2021-06-02T02:09:21.000Z | Chapter01/unittest_chapter1/.ipynb_checkpoints/exercise 12-checkpoint.ipynb | Hubertus444/The-Data-Wrangling-Workshop | ddad20f8676602ac6624e72e802769fcaff45b0f | [
"MIT"
] | 46 | 2020-04-20T13:04:11.000Z | 2022-03-22T05:23:52.000Z | 18.242424 | 43 | 0.496678 | [
[
[
"def test_exercise_12_1(x) -> bool:\n return x == 'Hello World!'",
"_____no_output_____"
],
[
"def test_exercise_12_3(x) -> bool:\n return x == 'o'",
"_____no_output_____"
],
[
"def test_exercise_12_4(x) -> bool:\n return x == '!'",
"_____no_output_____"
],
[
"def test_exercise_12_5(x) -> bool:\n return x == '!'",
"_____no_output_____"
]
]
] | [
"code"
] | [
[
"code",
"code",
"code",
"code"
]
] |
d09d7c3ec96d23cb9aa98726a7ede4c03c60b91f | 153,762 | ipynb | Jupyter Notebook | sklearn/notes/ensemble_gradient_boosting.ipynb | shamik-biswas-rft/CodeSnippets | d27621ab65fd9bd1c5195db028eb5a42a469d420 | [
"MIT"
] | 1 | 2022-03-08T11:28:58.000Z | 2022-03-08T11:28:58.000Z | sklearn/notes/ensemble_gradient_boosting.ipynb | shamik-biswas-rft/CodeSnippets | d27621ab65fd9bd1c5195db028eb5a42a469d420 | [
"MIT"
] | null | null | null | sklearn/notes/ensemble_gradient_boosting.ipynb | shamik-biswas-rft/CodeSnippets | d27621ab65fd9bd1c5195db028eb5a42a469d420 | [
"MIT"
] | 1 | 2022-03-08T10:28:33.000Z | 2022-03-08T10:28:33.000Z | 255.843594 | 39,064 | 0.917307 | [
[
[
"# Gradient-boosting decision tree (GBDT)\n\nIn this notebook, we will present the gradient boosting decision tree\nalgorithm and contrast it with AdaBoost.\n\nGradient-boosting differs from AdaBoost due to the following reason: instead\nof assigning weights to specific samples, GBDT will fit a decision tree on\nthe residuals error (hence the name \"gradient\") of the previous tree.\nTherefore, each new tree in the ensemble predicts the error made by the\nprevious learner instead of predicting the target directly.\n\nIn this section, we will provide some intuition about the way learners are\ncombined to give the final prediction. In this regard, let's go back to our\nregression problem which is more intuitive for demonstrating the underlying\nmachinery.",
"_____no_output_____"
]
],
[
[
"import pandas as pd\nimport numpy as np\n\n# Create a random number generator that will be used to set the randomness\nrng = np.random.RandomState(0)\n\n\ndef generate_data(n_samples=50):\n \"\"\"Generate synthetic dataset. Returns `data_train`, `data_test`,\n `target_train`.\"\"\"\n x_max, x_min = 1.4, -1.4\n len_x = x_max - x_min\n x = rng.rand(n_samples) * len_x - len_x / 2\n noise = rng.randn(n_samples) * 0.3\n y = x ** 3 - 0.5 * x ** 2 + noise\n\n data_train = pd.DataFrame(x, columns=[\"Feature\"])\n data_test = pd.DataFrame(np.linspace(x_max, x_min, num=300),\n columns=[\"Feature\"])\n target_train = pd.Series(y, name=\"Target\")\n\n return data_train, data_test, target_train\n\n\ndata_train, data_test, target_train = generate_data()",
"_____no_output_____"
],
[
"import matplotlib.pyplot as plt\nimport seaborn as sns\n\nsns.scatterplot(x=data_train[\"Feature\"], y=target_train, color=\"black\",\n alpha=0.5)\n_ = plt.title(\"Synthetic regression dataset\")",
"_____no_output_____"
]
],
[
[
"As we previously discussed, boosting will be based on assembling a sequence\nof learners. We will start by creating a decision tree regressor. We will set\nthe depth of the tree so that the resulting learner will underfit the data.",
"_____no_output_____"
]
],
[
[
"from sklearn.tree import DecisionTreeRegressor\n\ntree = DecisionTreeRegressor(max_depth=3, random_state=0)\ntree.fit(data_train, target_train)\n\ntarget_train_predicted = tree.predict(data_train)\ntarget_test_predicted = tree.predict(data_test)",
"_____no_output_____"
]
],
[
[
"Using the term \"test\" here refers to data that was not used for training.\nIt should not be confused with data coming from a train-test split, as it\nwas generated in equally-spaced intervals for the visual evaluation of the\npredictions.",
"_____no_output_____"
]
],
[
[
"# plot the data\nsns.scatterplot(x=data_train[\"Feature\"], y=target_train, color=\"black\",\n alpha=0.5)\n# plot the predictions\nline_predictions = plt.plot(data_test[\"Feature\"], target_test_predicted, \"--\")\n\n# plot the residuals\nfor value, true, predicted in zip(data_train[\"Feature\"],\n target_train,\n target_train_predicted):\n lines_residuals = plt.plot([value, value], [true, predicted], color=\"red\")\n\nplt.legend([line_predictions[0], lines_residuals[0]],\n [\"Fitted tree\", \"Residuals\"])\n_ = plt.title(\"Prediction function together \\nwith errors on the training set\")",
"_____no_output_____"
]
],
[
[
"<div class=\"admonition tip alert alert-warning\">\n<p class=\"first admonition-title\" style=\"font-weight: bold;\">Tip</p>\n<p class=\"last\">In the cell above, we manually edited the legend to get only a single label\nfor all the residual lines.</p>\n</div>\nSince the tree underfits the data, its accuracy is far from perfect on the\ntraining data. We can observe this in the figure by looking at the difference\nbetween the predictions and the ground-truth data. We represent these errors,\ncalled \"Residuals\", by unbroken red lines.\n\nIndeed, our initial tree was not expressive enough to handle the complexity\nof the data, as shown by the residuals. In a gradient-boosting algorithm, the\nidea is to create a second tree which, given the same data `data`, will try\nto predict the residuals instead of the vector `target`. We would therefore\nhave a tree that is able to predict the errors made by the initial tree.\n\nLet's train such a tree.",
"_____no_output_____"
]
],
[
[
"residuals = target_train - target_train_predicted\n\ntree_residuals = DecisionTreeRegressor(max_depth=5, random_state=0)\ntree_residuals.fit(data_train, residuals)\n\ntarget_train_predicted_residuals = tree_residuals.predict(data_train)\ntarget_test_predicted_residuals = tree_residuals.predict(data_test)",
"_____no_output_____"
],
[
"sns.scatterplot(x=data_train[\"Feature\"], y=residuals, color=\"black\", alpha=0.5)\nline_predictions = plt.plot(\n data_test[\"Feature\"], target_test_predicted_residuals, \"--\")\n\n# plot the residuals of the predicted residuals\nfor value, true, predicted in zip(data_train[\"Feature\"],\n residuals,\n target_train_predicted_residuals):\n lines_residuals = plt.plot([value, value], [true, predicted], color=\"red\")\n\nplt.legend([line_predictions[0], lines_residuals[0]],\n [\"Fitted tree\", \"Residuals\"], bbox_to_anchor=(1.05, 0.8),\n loc=\"upper left\")\n_ = plt.title(\"Prediction of the previous residuals\")",
"_____no_output_____"
]
],
[
[
"We see that this new tree only manages to fit some of the residuals. We will\nfocus on a specific sample from the training set (i.e. we know that the\nsample will be well predicted using two successive trees). We will use this\nsample to explain how the predictions of both trees are combined. Let's first\nselect this sample in `data_train`.",
"_____no_output_____"
]
],
[
[
"sample = data_train.iloc[[-2]]\nx_sample = sample['Feature'].iloc[0]\ntarget_true = target_train.iloc[-2]\ntarget_true_residual = residuals.iloc[-2]",
"_____no_output_____"
]
],
[
[
"Let's plot the previous information and highlight our sample of interest.\nLet's start by plotting the original data and the prediction of the first\ndecision tree.",
"_____no_output_____"
]
],
[
[
"# Plot the previous information:\n# * the dataset\n# * the predictions\n# * the residuals\n\nsns.scatterplot(x=data_train[\"Feature\"], y=target_train, color=\"black\",\n alpha=0.5)\nplt.plot(data_test[\"Feature\"], target_test_predicted, \"--\")\nfor value, true, predicted in zip(data_train[\"Feature\"],\n target_train,\n target_train_predicted):\n lines_residuals = plt.plot([value, value], [true, predicted], color=\"red\")\n\n# Highlight the sample of interest\nplt.scatter(sample, target_true, label=\"Sample of interest\",\n color=\"tab:orange\", s=200)\nplt.xlim([-1, 0])\nplt.legend(bbox_to_anchor=(1.05, 0.8), loc=\"upper left\")\n_ = plt.title(\"Tree predictions\")",
"_____no_output_____"
]
],
[
[
"Now, let's plot the residuals information. We will plot the residuals\ncomputed from the first decision tree and show the residual predictions.",
"_____no_output_____"
]
],
[
[
"# Plot the previous information:\n# * the residuals committed by the first tree\n# * the residual predictions\n# * the residuals of the residual predictions\n\nsns.scatterplot(x=data_train[\"Feature\"], y=residuals,\n color=\"black\", alpha=0.5)\nplt.plot(data_test[\"Feature\"], target_test_predicted_residuals, \"--\")\nfor value, true, predicted in zip(data_train[\"Feature\"],\n residuals,\n target_train_predicted_residuals):\n lines_residuals = plt.plot([value, value], [true, predicted], color=\"red\")\n\n# Highlight the sample of interest\nplt.scatter(sample, target_true_residual, label=\"Sample of interest\",\n color=\"tab:orange\", s=200)\nplt.xlim([-1, 0])\nplt.legend()\n_ = plt.title(\"Prediction of the residuals\")",
"_____no_output_____"
]
],
[
[
"For our sample of interest, our initial tree is making an error (small\nresidual). When fitting the second tree, the residual in this case is\nperfectly fitted and predicted. We will quantitatively check this prediction\nusing the fitted tree. First, let's check the prediction of the initial tree\nand compare it with the true value.",
"_____no_output_____"
]
],
[
[
"print(f\"True value to predict for \"\n f\"f(x={x_sample:.3f}) = {target_true:.3f}\")\n\ny_pred_first_tree = tree.predict(sample)[0]\nprint(f\"Prediction of the first decision tree for x={x_sample:.3f}: \"\n f\"y={y_pred_first_tree:.3f}\")\nprint(f\"Error of the tree: {target_true - y_pred_first_tree:.3f}\")",
"True value to predict for f(x=-0.517) = -0.393\nPrediction of the first decision tree for x=-0.517: y=-0.145\nError of the tree: -0.248\n"
]
],
[
[
"As we visually observed, we have a small error. Now, we can use the second\ntree to try to predict this residual.",
"_____no_output_____"
]
],
[
[
"print(f\"Prediction of the residual for x={x_sample:.3f}: \"\n f\"{tree_residuals.predict(sample)[0]:.3f}\")",
"Prediction of the residual for x=-0.517: -0.248\n"
]
],
[
[
"We see that our second tree is capable of predicting the exact residual\n(error) of our first tree. Therefore, we can predict the value of `x` by\nsumming the prediction of all the trees in the ensemble.",
"_____no_output_____"
]
],
[
[
"y_pred_first_and_second_tree = (\n y_pred_first_tree + tree_residuals.predict(sample)[0]\n)\nprint(f\"Prediction of the first and second decision trees combined for \"\n f\"x={x_sample:.3f}: y={y_pred_first_and_second_tree:.3f}\")\nprint(f\"Error of the tree: {target_true - y_pred_first_and_second_tree:.3f}\")",
"Prediction of the first and second decision trees combined for x=-0.517: y=-0.393\nError of the tree: 0.000\n"
]
],
[
[
"We chose a sample for which only two trees were enough to make the perfect\nprediction. However, we saw in the previous plot that two trees were not\nenough to correct the residuals of all samples. Therefore, one needs to\nadd several trees to the ensemble to successfully correct the error\n(i.e. the second tree corrects the first tree's error, while the third tree\ncorrects the second tree's error and so on).\n\nWe will compare the generalization performance of random-forest and gradient\nboosting on the California housing dataset.",
"_____no_output_____"
]
],
[
[
"from sklearn.datasets import fetch_california_housing\nfrom sklearn.model_selection import cross_validate\n\ndata, target = fetch_california_housing(return_X_y=True, as_frame=True)\ntarget *= 100 # rescale the target in k$",
"_____no_output_____"
],
[
"from sklearn.ensemble import GradientBoostingRegressor\n\ngradient_boosting = GradientBoostingRegressor(n_estimators=200)\ncv_results_gbdt = cross_validate(\n gradient_boosting, data, target, scoring=\"neg_mean_absolute_error\",\n n_jobs=2,\n)",
"_____no_output_____"
],
[
"print(\"Gradient Boosting Decision Tree\")\nprint(f\"Mean absolute error via cross-validation: \"\n f\"{-cv_results_gbdt['test_score'].mean():.3f} +/- \"\n f\"{cv_results_gbdt['test_score'].std():.3f} k$\")\nprint(f\"Average fit time: \"\n f\"{cv_results_gbdt['fit_time'].mean():.3f} seconds\")\nprint(f\"Average score time: \"\n f\"{cv_results_gbdt['score_time'].mean():.3f} seconds\")",
"_____no_output_____"
],
[
"from sklearn.ensemble import RandomForestRegressor\n\nrandom_forest = RandomForestRegressor(n_estimators=200, n_jobs=2)\ncv_results_rf = cross_validate(\n random_forest, data, target, scoring=\"neg_mean_absolute_error\",\n n_jobs=2,\n)",
"_____no_output_____"
],
[
"print(\"Random Forest\")\nprint(f\"Mean absolute error via cross-validation: \"\n f\"{-cv_results_rf['test_score'].mean():.3f} +/- \"\n f\"{cv_results_rf['test_score'].std():.3f} k$\")\nprint(f\"Average fit time: \"\n f\"{cv_results_rf['fit_time'].mean():.3f} seconds\")\nprint(f\"Average score time: \"\n f\"{cv_results_rf['score_time'].mean():.3f} seconds\")",
"_____no_output_____"
]
],
[
[
"In term of computation performance, the forest can be parallelized and will\nbenefit from using multiple cores of the CPU. In terms of scoring\nperformance, both algorithms lead to very close results.\n\nHowever, we see that the gradient boosting is a very fast algorithm to\npredict compared to random forest. This is due to the fact that gradient\nboosting uses shallow trees. We will go into details in the next notebook\nabout the hyperparameters to consider when optimizing ensemble methods.",
"_____no_output_____"
]
]
] | [
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown"
] | [
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code",
"code",
"code",
"code"
],
[
"markdown"
]
] |
d09d904c6a4315d7add8f45d32d22a2fa51af933 | 91,360 | ipynb | Jupyter Notebook | White House/White House.ipynb | frankbearzou/Data-analysis | 9402d0db0fbdef9c742b49963b32742306582aec | [
"MIT"
] | null | null | null | White House/White House.ipynb | frankbearzou/Data-analysis | 9402d0db0fbdef9c742b49963b32742306582aec | [
"MIT"
] | null | null | null | White House/White House.ipynb | frankbearzou/Data-analysis | 9402d0db0fbdef9c742b49963b32742306582aec | [
"MIT"
] | null | null | null | 58.489117 | 29,048 | 0.634019 | [
[
[
"%ls",
"\u001b[0m\u001b[01;32m2015_white_house.csv\u001b[0m* \u001b[01;32mBasics.ipynb\u001b[0m*\r\n"
],
[
"import numpy as np\nimport pandas as pd\n%matplotlib inline\nimport matplotlib.pyplot as plt",
"_____no_output_____"
],
[
"white_house = pd.read_csv(\"2015_white_house.csv\")",
"_____no_output_____"
],
[
"white_house.head(5)",
"_____no_output_____"
],
[
"white_house.shape",
"_____no_output_____"
],
[
"white_house.iloc[0]",
"_____no_output_____"
],
[
"white_house",
"_____no_output_____"
],
[
"plt.hist(white_house[\"Salary\"])\nplt.show()",
"_____no_output_____"
]
],
[
[
"Untill now, we overview the functionality that Jupyter notebook provides for us.",
"_____no_output_____"
],
[
"## How does length of employee titles correlate to salary?",
"_____no_output_____"
]
],
[
[
"position_title = white_house[\"Position Title\"]\ntitle_length = position_title.apply(len)\nsalary = white_house[\"Salary\"]",
"_____no_output_____"
],
[
"from scipy.stats.stats import pearsonr",
"_____no_output_____"
],
[
"pearsonr(title_length, salary)",
"_____no_output_____"
],
[
"plt.scatter(title_length, salary)\nplt.xlabel(\"title length\")\nplt.ylabel(\"salary\")\nplt.title(\"Title length - Salary Scatter Plot\")\nplt.show()",
"_____no_output_____"
]
],
[
[
"## How much does the White House pay in total salary?",
"_____no_output_____"
]
],
[
[
"white_house[\"Salary\"].sum()",
"_____no_output_____"
]
],
[
[
"## Who are the highest and lowest paid staffers?",
"_____no_output_____"
]
],
[
[
"max_salary = white_house[\"Salary\"].max()\n\nmax_salary_column = white_house[\"Salary\"] == max_salary\n\nwhite_house.loc[max_salary_column].reset_index(drop = True)",
"_____no_output_____"
],
[
"min_salary = white_house[\"Salary\"].min()\n\nmin_salary_column = white_house[\"Salary\"] == min_salary\n\nwhite_house.loc[min_salary_column].reset_index(drop = True)",
"_____no_output_____"
]
],
[
[
"## What words are the most common in titles?",
"_____no_output_____"
]
],
[
[
"words = {}\nfor title in position_title:\n title_list = title.split()\n for word in title_list:\n if word not in words:\n words[word] = 1\n else:\n words[word] += 1",
"_____no_output_____"
],
[
"import operator\nsorted_words = sorted(words.items(), key=operator.itemgetter(1), reverse = True)",
"_____no_output_____"
],
[
"sorted_words",
"_____no_output_____"
]
]
] | [
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code"
] | [
[
"code",
"code",
"code",
"code",
"code",
"code",
"code",
"code"
],
[
"markdown",
"markdown"
],
[
"code",
"code",
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
]
] |
d09d98fc41a5e8019681231254da89f90613c681 | 4,139 | ipynb | Jupyter Notebook | docs/examples/parallel.ipynb | zmoon/xmovie | 023abcb3c14c7c21c90d665c41892f271dc8b4cd | [
"MIT"
] | null | null | null | docs/examples/parallel.ipynb | zmoon/xmovie | 023abcb3c14c7c21c90d665c41892f271dc8b4cd | [
"MIT"
] | null | null | null | docs/examples/parallel.ipynb | zmoon/xmovie | 023abcb3c14c7c21c90d665c41892f271dc8b4cd | [
"MIT"
] | null | null | null | 23.787356 | 182 | 0.557139 | [
[
[
"# Saving frames in parallel",
"_____no_output_____"
]
],
[
[
"import xarray as xr\nfrom xmovie import Movie\n\nds = xr.tutorial.open_dataset('air_temperature').isel(time=slice(0,200))",
"_____no_output_____"
],
[
"# Creating the movie object\nmov = Movie(ds.air, vmin=230, vmax=310)",
"_____no_output_____"
]
],
[
[
"The creation of a movie can take quite long for datasets with many timesteps, creating many frames in a loop.",
"_____no_output_____"
]
],
[
[
"%%time\nmov.save('movie.mov', overwrite_existing=True)",
"Movie created at movie.mov\nCPU times: user 34.7 s, sys: 1.02 s, total: 35.7 s\nWall time: 59.4 s\n"
]
],
[
[
"You can speed up the frame creation by activating the `parallel` option. This will save the frames using dask.\n\nFor this to work you need to chunk the input dataarray with a single step along the dimension that represent your frames (`framedim`).",
"_____no_output_____"
]
],
[
[
"mov_parallel = Movie(ds.air.chunk({'time':1}), vmin=230, vmax=310)",
"_____no_output_____"
],
[
"%%time\nmov_parallel.save(\n 'movie_parallel.mov',\n parallel=True,\n overwrite_existing=True,\n)",
"Movie created at movie_parallel.mov\nCPU times: user 38.8 s, sys: 1.46 s, total: 40.3 s\nWall time: 48.3 s\n"
]
],
[
[
"You can pass arguments to the dask `.compute()` call with `parallel_compute_kwargs` to tune for your particular setup.",
"_____no_output_____"
]
],
[
[
"%%time\nmov_parallel.save(\n 'movie_parallel_modified.mov',\n parallel=True,\n overwrite_existing=True,\n parallel_compute_kwargs=dict(scheduler=\"processes\", num_workers=8)\n)",
"Movie created at movie_parallel.mov\nCPU times: user 4.84 s, sys: 249 ms, total: 5.09 s\nWall time: 33.6 s\n"
]
],
[
[
"Thats not bad, a 50% time saving (keeping in mind that the time needed for the ffmpeg call is included). We expect speedups to be even bigger when using higher resolution data.",
"_____no_output_____"
]
]
] | [
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown"
] | [
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
],
[
"code"
],
[
"markdown"
]
] |
d09da4e0ff157995e372da5347f6ad91b6038657 | 7,866 | ipynb | Jupyter Notebook | _doc/notebooks/td2a/td2a_correction_session_5.ipynb | mohamedelkansouli/Ensae_py | 8bc867bd2081c259c793fadfa8be5dcc7bd1400b | [
"MIT"
] | null | null | null | _doc/notebooks/td2a/td2a_correction_session_5.ipynb | mohamedelkansouli/Ensae_py | 8bc867bd2081c259c793fadfa8be5dcc7bd1400b | [
"MIT"
] | null | null | null | _doc/notebooks/td2a/td2a_correction_session_5.ipynb | mohamedelkansouli/Ensae_py | 8bc867bd2081c259c793fadfa8be5dcc7bd1400b | [
"MIT"
] | null | null | null | 36.929577 | 181 | 0.364989 | [
[
[
"# 2A.i - Modèle relationnel, analyse d'incidents dans le transport aérien - correction\n\nManipulation de données avec les dataframes, jointures. Correction inachevée...",
"_____no_output_____"
]
],
[
[
"from jyquickhelper import add_notebook_menu\nadd_notebook_menu()",
"_____no_output_____"
]
],
[
[
"### Données\n\nLe code suivant télécharge les données nécessaires [tp_2a_5_compagnies.zip](http://www.xavierdupre.fr/enseignement/complements/tp_2a_5_compagnies.zip).",
"_____no_output_____"
]
],
[
[
"import pyensae\npyensae.download_data(\"tp_2a_5_compagnies.zip\")",
"_____no_output_____"
]
]
] | [
"markdown",
"code",
"markdown",
"code"
] | [
[
"markdown"
],
[
"code"
],
[
"markdown"
],
[
"code"
]
] |
d09dacebdaf2fc5c4886e0aa383095df839c61c4 | 62,613 | ipynb | Jupyter Notebook | Chapter01.ipynb | lnsongxf/Applied_Computational_Economics_and_Finance | f14661bfbfa711d49539bda290d4be5a25087185 | [
"MIT"
] | 19 | 2018-05-09T08:17:44.000Z | 2021-12-26T07:02:17.000Z | Chapter01.ipynb | lnsongxf/Applied_Computational_Economics_and_Finance | f14661bfbfa711d49539bda290d4be5a25087185 | [
"MIT"
] | null | null | null | Chapter01.ipynb | lnsongxf/Applied_Computational_Economics_and_Finance | f14661bfbfa711d49539bda290d4be5a25087185 | [
"MIT"
] | 11 | 2017-12-15T13:39:35.000Z | 2021-05-15T15:06:02.000Z | 52.221018 | 16,701 | 0.606919 | [
[
[
"# Introduction\n\n\n## 1.1 Some Apparently Simple Questions\n\n## 1.2 An Alternative Analytic Framework\n\nSolved to a high degree of accuracy using numerical method",
"_____no_output_____"
]
],
[
[
"!pip install --user quantecon",
"\u001b[33mThe directory '/home/jovyan/.cache/pip/http' or its parent directory is not owned by the current user and the cache has been disabled. Please check the permissions and owner of that directory. If executing pip with sudo, you may want sudo's -H flag.\u001b[0m\r\n\u001b[33mThe directory '/home/jovyan/.cache/pip' or its parent directory is not owned by the current user and caching wheels has been disabled. check the permissions and owner of that directory. If executing pip with sudo, you may want sudo's -H flag.\u001b[0m\r\nRequirement already satisfied: quantecon in /home/jonduan/.local/lib/python3.5/site-packages\r\n"
],
[
"\n\n",
"_____no_output_____"
],
[
"import numpy as np\nimport numpy.linalg as la\nfrom numba import *\nfrom __future__ import division\n#from quantecon.quad import qnwnorm\n",
"_____no_output_____"
]
],
[
[
"\nSuppose now that the economist is presented with a demand function\n\n$$q = 0.5* p^{-0.2} + 0.5*p^{-0.5}$$\n\none that is the sum a domestic demand term and an export demand term.\n\nsuppose that the economist is asked to find the price that clears the\nmarket of, say, a quantity of 2 units.\n\n\n",
"_____no_output_____"
]
],
[
[
"#%pylab inline\n%pylab notebook\n# pylab Populating the interactive namespace from numpy and matplotlib\n# numpy for numerical computation\n# matplotlib for ploting",
"Populating the interactive namespace from numpy and matplotlib\n"
],
[
"#http://matplotlib.org/api/pyplot_api.html#matplotlib.pyplot.plot\np = np.linspace(0.01,0.5, 100)\nq = .5 * p **-.2 + .5 * p ** -.5 - 2\nplot(q,p)\nx1,x2,y1,y2 = 2, 2, 0, 0.5\nplot((x1, x2), (y1, y2), 'k-')",
"_____no_output_____"
],
[
"# example 1.2\n\np = 0.25\nfor i in range(100):\n deltap = (.5 * p **-.2 + .5 * p ** -.5 - 2)/(.1 * p **-1.2 + .25 * p **-1.5)\n p = p + deltap\n if abs(deltap) < 1.e-8: # accuracy\n break\n#https://stackoverflow.com/questions/20457038/python-how-to-round-down-to-2-decimals\nprint('The market clean price is {:0.2f} '.format(p))\n",
"The market clean price is 0.15 \n"
]
],
[
[
"Consider now the rational expectations commodity market model with government\nintervention. The source of difficulty in solving this problem is the need to\nevaluate the truncated expectation of a continuous distribution.\n\nThe economist would replace the original normal yield distribution\nwith a discrete distribution that has identical lower moments, say one that assumes\nvalues y1; y2; ... ; yn with probabilities w1; w2; ...; wn.",
"_____no_output_____"
]
],
[
[
"# https://github.com/QuantEcon/QuantEcon.py/blob/master/quantecon/quad.py\ndef qnwnorm(n, mu=None, sig2=None, usesqrtm=False):\n \"\"\"\n Computes nodes and weights for multivariate normal distribution\n Parameters\n ----------\n n : int or array_like(float)\n A length-d iterable of the number of nodes in each dimension\n mu : scalar or array_like(float), optional(default=zeros(d))\n The means of each dimension of the random variable. If a scalar\n is given, that constant is repeated d times, where d is the\n number of dimensions\n sig2 : array_like(float), optional(default=eye(d))\n A d x d array representing the variance-covariance matrix of the\n multivariate normal distribution.\n Returns\n -------\n nodes : np.ndarray(dtype=float)\n Quadrature nodes\n weights : np.ndarray(dtype=float)\n Weights for quadrature nodes\n Notes\n -----\n Based of original function ``qnwnorm`` in CompEcon toolbox by\n Miranda and Fackler\n References\n ----------\n Miranda, Mario J, and Paul L Fackler. Applied Computational\n Economics and Finance, MIT Press, 2002.\n \"\"\"\n n = np.asarray(n)\n d = n.size\n\n if mu is None:\n mu = np.zeros((d,1))\n else:\n mu = np.asarray(mu).reshape(-1, 1)\n\n if sig2 is None:\n sig2 = np.eye(d)\n else:\n sig2 = np.asarray(sig2).reshape(d, d)\n\n if all([x.size == 1 for x in [n, mu, sig2]]):\n nodes, weights = _qnwnorm1(n)\n else:\n nodes = []\n weights = []\n\n for i in range(d):\n _1d = _qnwnorm1(n[i])\n nodes.append(_1d[0])\n weights.append(_1d[1])\n\n nodes = gridmake(*nodes)\n weights = ckron(*weights[::-1])\n\n if usesqrtm:\n new_sig2 = la.sqrtm(sig2)\n else: # cholesky\n new_sig2 = la.cholesky(sig2)\n\n if d > 1:\n nodes = new_sig2.dot(nodes) + mu # Broadcast ok\n else: # nodes.dot(sig) will not be aligned in scalar case.\n nodes = nodes * new_sig2 + mu\n\n return nodes.squeeze(), weights\n\n\n\ndef _qnwnorm1(n):\n \"\"\"\n Compute nodes and weights for quadrature of univariate standard\n normal distribution\n Parameters\n ----------\n n : int\n The number of nodes\n Returns\n -------\n nodes : np.ndarray(dtype=float)\n An n element array of nodes\n nodes : np.ndarray(dtype=float)\n An n element array of weights\n Notes\n -----\n Based of original function ``qnwnorm1`` in CompEcon toolbox by\n Miranda and Fackler\n References\n ----------\n Miranda, Mario J, and Paul L Fackler. Applied Computational\n Economics and Finance, MIT Press, 2002.\n \"\"\"\n maxit = 100\n pim4 = 1 / np.pi**(0.25)\n m = np.fix((n + 1) / 2).astype(int)\n nodes = np.zeros(n)\n weights = np.zeros(n)\n\n for i in range(m):\n if i == 0:\n z = np.sqrt(2*n+1) - 1.85575 * ((2 * n + 1)**(-1 / 6.1))\n elif i == 1:\n z = z - 1.14 * (n ** 0.426) / z\n elif i == 2:\n z = 1.86 * z + 0.86 * nodes[0]\n elif i == 3:\n z = 1.91 * z + 0.91 * nodes[1]\n else:\n z = 2 * z + nodes[i-2]\n\n its = 0\n\n while its < maxit:\n its += 1\n p1 = pim4\n p2 = 0\n for j in range(1, n+1):\n p3 = p2\n p2 = p1\n p1 = z * math.sqrt(2.0/j) * p2 - math.sqrt((j - 1.0) / j) * p3\n\n pp = math.sqrt(2 * n) * p2\n z1 = z\n z = z1 - p1/pp\n if abs(z - z1) < 1e-14:\n break\n\n if its == maxit:\n raise ValueError(\"Failed to converge in _qnwnorm1\")\n\n nodes[n - 1 - i] = z\n nodes[i] = -z\n weights[i] = 2 / (pp*pp)\n weights[n - 1 - i] = weights[i]\n\n weights /= math.sqrt(math.pi)\n nodes = nodes * math.sqrt(2.0)\n\n return nodes, weights\n",
"_____no_output_____"
],
[
"# example 1.2\n\ny, w = qnwnorm(10, 1, 0.1)\na = 1\nfor it in range(100):\n aold = a\n p = 3 - 2 * a * y\n f = w.dot(np.maximum(p, 1))\n a = 0.5 + 0.5 * f\n if abs(a - aold) < 1.e-8:\n break\n\nprint('The rational expectations equilibrium acreage is {:0.2f} '.format(a) )\nprint('The expected market price is {:0.2f} '.format(np.dot(w, p)) )\nprint('The expected effective producer price is {:0.2f} '.format(f) )\n\n",
"The rational expectations equilibrium acreage is 1.10 \nThe expected market price is 0.81 \nThe expected effective producer price is 1.19 \n"
]
],
[
[
"The economist has combined Gaussian quadrature techniques and fixed-point function iteration methods to solve the problem.",
"_____no_output_____"
]
]
] | [
"markdown",
"code",
"markdown",
"code",
"markdown",
"code",
"markdown"
] | [
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code",
"code"
],
[
"markdown"
],
[
"code",
"code"
],
[
"markdown"
]
] |