# neural-chatbot/train.py
import numpy as np
import json
from keras.optimizers import Adam, SGD
from keras.models import Sequential
from keras.layers import Embedding, Dense, Dropout, Flatten, PReLU
from keras.preprocessing.text import Tokenizer
from keras_self_attention import SeqSelfAttention, SeqWeightedAttention
with open("dataset.json", "r") as f:
    dset = json.load(f)
dset_size = len(dset)
tokenizer = Tokenizer()
tokenizer.fit_on_texts(list(dset.keys()))
emb_size = 128 # size of each word vector in the input (how much information one word can carry)
vocab_size = len(tokenizer.word_index) + 1 # Tokenizer has no get_vocabulary(); word_index holds the vocabulary, +1 because index 0 is reserved for padding
inp_len = 10 # limit on the input length: inputs longer than 10 words are truncated
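
# The file as shown never builds training arrays from the dataset. A minimal sketch of
# that step, assuming dataset.json maps an input phrase to a response and each entry
# becomes its own output class (this framing is an assumption, not part of the original):
from keras.preprocessing.sequence import pad_sequences

X = tokenizer.texts_to_sequences(list(dset.keys())) # words -> integer ids
X = pad_sequences(X, maxlen=inp_len) # pad/truncate every input to inp_len words
y = np.arange(dset_size) # one class index per dataset entry
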
model = Sequential()
model.add(Embedding(input_dim=vocab_size, output_dim=emb_size, input_length=inp_len))
model.add(SeqSelfAttention()) # the attention layer lets the model learn which words carry the main information in the text, instead of memorizing the text itself
model.add(Flatten()) # SeqSelfAttention and the Embedding layer output a sequence (one vector of numbers per word), so flatten it into a single vector before the Dense layers
model.add(Dense(1024, activation="relu"))
model.add(Dropout(0.5)) # dropout makes the task harder by removing random information; 0.5 means 50% of the activations are reset to 0, so the model has to focus on what's actually important instead of learning from data that's there by accident
model.add(Dense(dset_size, activation="linear")) # the choice of activation here doesn't matter that much; linear does nothing at all to the output, softmax might be a better fit but that's left to test later
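
# The original file does not show the compile/fit calls, so this is a hedged sketch:
# the optimizer, loss, epoch count, and batch size are assumptions. Training on class
# indices with a linear output layer pairs with a from-logits loss.
from keras.losses import SparseCategoricalCrossentropy

model.compile(optimizer=Adam(), loss=SparseCategoricalCrossentropy(from_logits=True), metrics=["accuracy"])
model.fit(X, y, epochs=10, batch_size=8)
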
model.save("chatbot.keras") # save the model to a file