"""Train a tiny feed-forward n-gram language model on a toy corpus.

Builds (prefix -> next-word) training pairs from four sentences, fits an
Embedding -> Flatten -> Dense softmax network on the pre-padded prefixes,
then greedily extends a seed string one word at a time, printing the
growing text after each step.
"""
import numpy as np
import tensorflow as tf
from tensorflow.keras.preprocessing.sequence import pad_sequences
from tensorflow.keras.preprocessing.text import Tokenizer

corpus = [
    'This is a simple example',
    'Language modeling is interesting',
    'Neural networks are powerful',
    'Feed-forward networks are common in NLP',
]


def _build_training_data(texts, tokenizer, total_words):
    """Return (X, y, max_sequence_length) n-gram training data.

    Every prefix of length >= 2 of each tokenized sentence becomes one
    example: the prefix minus its last token is the input, the last token
    (one-hot encoded over the vocabulary) is the target.
    """
    input_sequences = []
    for line in texts:
        token_list = tokenizer.texts_to_sequences([line])[0]
        for i in range(1, len(token_list)):
            input_sequences.append(token_list[:i + 1])
    max_sequence_length = max(len(seq) for seq in input_sequences)
    padded = pad_sequences(input_sequences,
                           maxlen=max_sequence_length, padding='pre')
    X, y = padded[:, :-1], padded[:, -1]
    y = tf.keras.utils.to_categorical(y, num_classes=total_words)
    return X, y, max_sequence_length


def _build_model(total_words, input_length):
    """Return a compiled feed-forward next-word classifier."""
    model = tf.keras.Sequential([
        tf.keras.layers.Embedding(total_words, 50, input_length=input_length),
        tf.keras.layers.Flatten(),
        tf.keras.layers.Dense(100, activation='relu'),
        tf.keras.layers.Dense(total_words, activation='softmax'),
    ])
    model.compile(optimizer='adam',
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])
    return model


def _generate(model, tokenizer, max_sequence_length, seed_text, next_words):
    """Greedily append `next_words` predicted words to `seed_text`.

    Prints the growing text after each appended word (matching the
    original script's per-step output) and returns the final string.
    """
    for _ in range(next_words):
        token_list = tokenizer.texts_to_sequences([seed_text])[0]
        token_list = pad_sequences([token_list],
                                   maxlen=max_sequence_length - 1,
                                   padding='pre')
        # FIX: model.predict returns shape (1, vocab); np.argmax(..., axis=-1)
        # is a 1-element array. Extract the scalar instead of comparing an
        # int against an ndarray (fragile size-1 truthiness in the original).
        predicted_id = int(np.argmax(model.predict(token_list), axis=-1)[0])
        # FIX: O(1) reverse lookup via index_word instead of scanning
        # word_index on every generated word. Falls back to "" (as the
        # original did) if the id is unknown, e.g. the padding index 0.
        output_word = tokenizer.index_word.get(predicted_id, "")
        seed_text += " " + output_word
        print(seed_text)
    return seed_text


def main():
    tokenizer = Tokenizer()
    tokenizer.fit_on_texts(corpus)
    # +1 because Keras reserves index 0 for padding.
    total_words = len(tokenizer.word_index) + 1
    X, y, max_sequence_length = _build_training_data(
        corpus, tokenizer, total_words)
    model = _build_model(total_words, max_sequence_length - 1)
    model.fit(X, y, epochs=100, verbose=1)
    _generate(model, tokenizer, max_sequence_length, "Neural networks", 5)


if __name__ == "__main__":
    main()
Standard input is empty
Epoch 1/100 16/16 [==============================] - 0s 10ms/sample - loss: 2.8845 - acc: 0.0000e+00 Epoch 2/100 16/16 [==============================] - 0s 101us/sample - loss: 2.8632 - acc: 0.0000e+00 Epoch 3/100 16/16 [==============================] - 0s 80us/sample - loss: 2.8431 - acc: 0.1250 Epoch 4/100 16/16 [==============================] - 0s 93us/sample - loss: 2.8238 - acc: 0.3125 Epoch 5/100 16/16 [==============================] - 0s 93us/sample - loss: 2.8044 - acc: 0.4375 Epoch 6/100 16/16 [==============================] - 0s 89us/sample - loss: 2.7849 - acc: 0.5000 Epoch 7/100 16/16 [==============================] - 0s 151us/sample - loss: 2.7653 - acc: 0.5625 Epoch 8/100 16/16 [==============================] - 0s 133us/sample - loss: 2.7452 - acc: 0.6250 Epoch 9/100 16/16 [==============================] - 0s 128us/sample - loss: 2.7246 - acc: 0.6875 Epoch 10/100 16/16 [==============================] - 0s 126us/sample - loss: 2.7032 - acc: 0.6875 Epoch 11/100 16/16 [==============================] - 0s 124us/sample - loss: 2.6809 - acc: 0.6875 Epoch 12/100 16/16 [==============================] - 0s 124us/sample - loss: 2.6577 - acc: 0.6875 Epoch 13/100 16/16 [==============================] - 0s 88us/sample - loss: 2.6332 - acc: 0.6875 Epoch 14/100 16/16 [==============================] - 0s 82us/sample - loss: 2.6074 - acc: 0.6875 Epoch 15/100 16/16 [==============================] - 0s 80us/sample - loss: 2.5806 - acc: 0.6875 Epoch 16/100 16/16 [==============================] - 0s 78us/sample - loss: 2.5525 - acc: 0.6875 Epoch 17/100 16/16 [==============================] - 0s 76us/sample - loss: 2.5229 - acc: 0.6875 Epoch 18/100 16/16 [==============================] - 0s 81us/sample - loss: 2.4918 - acc: 0.6875 Epoch 19/100 16/16 [==============================] - 0s 80us/sample - loss: 2.4592 - acc: 0.6875 Epoch 20/100 16/16 [==============================] - 0s 84us/sample - loss: 2.4253 - acc: 0.6875 Epoch 21/100 16/16 
[==============================] - 0s 81us/sample - loss: 2.3899 - acc: 0.6875 Epoch 22/100 16/16 [==============================] - 0s 88us/sample - loss: 2.3530 - acc: 0.6875 Epoch 23/100 16/16 [==============================] - 0s 98us/sample - loss: 2.3147 - acc: 0.6875 Epoch 24/100 16/16 [==============================] - 0s 102us/sample - loss: 2.2750 - acc: 0.6875 Epoch 25/100 16/16 [==============================] - 0s 104us/sample - loss: 2.2338 - acc: 0.6875 Epoch 26/100 16/16 [==============================] - 0s 107us/sample - loss: 2.1913 - acc: 0.6875 Epoch 27/100 16/16 [==============================] - 0s 105us/sample - loss: 2.1474 - acc: 0.6875 Epoch 28/100 16/16 [==============================] - 0s 93us/sample - loss: 2.1020 - acc: 0.6875 Epoch 29/100 16/16 [==============================] - 0s 92us/sample - loss: 2.0555 - acc: 0.7500 Epoch 30/100 16/16 [==============================] - 0s 96us/sample - loss: 2.0080 - acc: 0.7500 Epoch 31/100 16/16 [==============================] - 0s 91us/sample - loss: 1.9593 - acc: 0.7500 Epoch 32/100 16/16 [==============================] - 0s 90us/sample - loss: 1.9097 - acc: 0.7500 Epoch 33/100 16/16 [==============================] - 0s 88us/sample - loss: 1.8590 - acc: 0.7500 Epoch 34/100 16/16 [==============================] - 0s 87us/sample - loss: 1.8076 - acc: 0.7500 Epoch 35/100 16/16 [==============================] - 0s 88us/sample - loss: 1.7554 - acc: 0.7500 Epoch 36/100 16/16 [==============================] - 0s 87us/sample - loss: 1.7027 - acc: 0.7500 Epoch 37/100 16/16 [==============================] - 0s 88us/sample - loss: 1.6495 - acc: 0.7500 Epoch 38/100 16/16 [==============================] - 0s 83us/sample - loss: 1.5962 - acc: 0.8125 Epoch 39/100 16/16 [==============================] - 0s 83us/sample - loss: 1.5427 - acc: 0.8125 Epoch 40/100 16/16 [==============================] - 0s 80us/sample - loss: 1.4890 - acc: 0.8125 Epoch 41/100 16/16 [==============================] - 
0s 81us/sample - loss: 1.4349 - acc: 0.8125 Epoch 42/100 16/16 [==============================] - 0s 78us/sample - loss: 1.3805 - acc: 0.8125 Epoch 43/100 16/16 [==============================] - 0s 80us/sample - loss: 1.3260 - acc: 0.8125 Epoch 44/100 16/16 [==============================] - 0s 78us/sample - loss: 1.2718 - acc: 0.8125 Epoch 45/100 16/16 [==============================] - 0s 78us/sample - loss: 1.2177 - acc: 0.8750 Epoch 46/100 16/16 [==============================] - 0s 77us/sample - loss: 1.1642 - acc: 0.8750 Epoch 47/100 16/16 [==============================] - 0s 80us/sample - loss: 1.1114 - acc: 0.8750 Epoch 48/100 16/16 [==============================] - 0s 77us/sample - loss: 1.0594 - acc: 0.8750 Epoch 49/100 16/16 [==============================] - 0s 79us/sample - loss: 1.0084 - acc: 0.8750 Epoch 50/100 16/16 [==============================] - 0s 76us/sample - loss: 0.9585 - acc: 0.8750 Epoch 51/100 16/16 [==============================] - 0s 78us/sample - loss: 0.9100 - acc: 0.8750 Epoch 52/100 16/16 [==============================] - 0s 75us/sample - loss: 0.8631 - acc: 0.8750 Epoch 53/100 16/16 [==============================] - 0s 77us/sample - loss: 0.8181 - acc: 0.8750 Epoch 54/100 16/16 [==============================] - 0s 75us/sample - loss: 0.7746 - acc: 0.8750 Epoch 55/100 16/16 [==============================] - 0s 77us/sample - loss: 0.7328 - acc: 0.8750 Epoch 56/100 16/16 [==============================] - 0s 74us/sample - loss: 0.6928 - acc: 0.9375 Epoch 57/100 16/16 [==============================] - 0s 77us/sample - loss: 0.6548 - acc: 0.9375 Epoch 58/100 16/16 [==============================] - 0s 77us/sample - loss: 0.6185 - acc: 0.9375 Epoch 59/100 16/16 [==============================] - 0s 90us/sample - loss: 0.5839 - acc: 1.0000 Epoch 60/100 16/16 [==============================] - 0s 117us/sample - loss: 0.5511 - acc: 1.0000 Epoch 61/100 16/16 [==============================] - 0s 118us/sample - loss: 0.5200 - acc: 
1.0000 Epoch 62/100 16/16 [==============================] - 0s 120us/sample - loss: 0.4905 - acc: 1.0000 Epoch 63/100 16/16 [==============================] - 0s 121us/sample - loss: 0.4627 - acc: 1.0000 Epoch 64/100 16/16 [==============================] - 0s 118us/sample - loss: 0.4364 - acc: 1.0000 Epoch 65/100 16/16 [==============================] - 0s 119us/sample - loss: 0.4115 - acc: 1.0000 Epoch 66/100 16/16 [==============================] - 0s 117us/sample - loss: 0.3880 - acc: 1.0000 Epoch 67/100 16/16 [==============================] - 0s 120us/sample - loss: 0.3659 - acc: 1.0000 Epoch 68/100 16/16 [==============================] - 0s 120us/sample - loss: 0.3451 - acc: 1.0000 Epoch 69/100 16/16 [==============================] - 0s 127us/sample - loss: 0.3254 - acc: 1.0000 Epoch 70/100 16/16 [==============================] - 0s 137us/sample - loss: 0.3068 - acc: 1.0000 Epoch 71/100 16/16 [==============================] - 0s 149us/sample - loss: 0.2894 - acc: 1.0000 Epoch 72/100 16/16 [==============================] - 0s 152us/sample - loss: 0.2730 - acc: 1.0000 Epoch 73/100 16/16 [==============================] - 0s 148us/sample - loss: 0.2575 - acc: 1.0000 Epoch 74/100 16/16 [==============================] - 0s 140us/sample - loss: 0.2429 - acc: 1.0000 Epoch 75/100 16/16 [==============================] - 0s 150us/sample - loss: 0.2291 - acc: 1.0000 Epoch 76/100 16/16 [==============================] - 0s 150us/sample - loss: 0.2161 - acc: 1.0000 Epoch 77/100 16/16 [==============================] - 0s 153us/sample - loss: 0.2038 - acc: 1.0000 Epoch 78/100 16/16 [==============================] - 0s 140us/sample - loss: 0.1922 - acc: 1.0000 Epoch 79/100 16/16 [==============================] - 0s 136us/sample - loss: 0.1813 - acc: 1.0000 Epoch 80/100 16/16 [==============================] - 0s 132us/sample - loss: 0.1710 - acc: 1.0000 Epoch 81/100 16/16 [==============================] - 0s 131us/sample - loss: 0.1614 - acc: 1.0000 Epoch 82/100 
16/16 [==============================] - 0s 129us/sample - loss: 0.1523 - acc: 1.0000 Epoch 83/100 16/16 [==============================] - 0s 129us/sample - loss: 0.1438 - acc: 1.0000 Epoch 84/100 16/16 [==============================] - 0s 128us/sample - loss: 0.1358 - acc: 1.0000 Epoch 85/100 16/16 [==============================] - 0s 128us/sample - loss: 0.1283 - acc: 1.0000 Epoch 86/100 16/16 [==============================] - 0s 130us/sample - loss: 0.1213 - acc: 1.0000 Epoch 87/100 16/16 [==============================] - 0s 129us/sample - loss: 0.1146 - acc: 1.0000 Epoch 88/100 16/16 [==============================] - 0s 131us/sample - loss: 0.1084 - acc: 1.0000 Epoch 89/100 16/16 [==============================] - 0s 126us/sample - loss: 0.1026 - acc: 1.0000 Epoch 90/100 16/16 [==============================] - 0s 129us/sample - loss: 0.0972 - acc: 1.0000 Epoch 91/100 16/16 [==============================] - 0s 124us/sample - loss: 0.0920 - acc: 1.0000 Epoch 92/100 16/16 [==============================] - 0s 129us/sample - loss: 0.0873 - acc: 1.0000 Epoch 93/100 16/16 [==============================] - 0s 131us/sample - loss: 0.0828 - acc: 1.0000 Epoch 94/100 16/16 [==============================] - 0s 96us/sample - loss: 0.0786 - acc: 1.0000 Epoch 95/100 16/16 [==============================] - 0s 85us/sample - loss: 0.0746 - acc: 1.0000 Epoch 96/100 16/16 [==============================] - 0s 79us/sample - loss: 0.0709 - acc: 1.0000 Epoch 97/100 16/16 [==============================] - 0s 78us/sample - loss: 0.0675 - acc: 1.0000 Epoch 98/100 16/16 [==============================] - 0s 75us/sample - loss: 0.0642 - acc: 1.0000 Epoch 99/100 16/16 [==============================] - 0s 77us/sample - loss: 0.0612 - acc: 1.0000 Epoch 100/100 16/16 [==============================] - 0s 76us/sample - loss: 0.0584 - acc: 1.0000 Neural networks are Neural networks are powerful Neural networks are powerful in Neural networks are powerful in nlp Neural networks are 
powerful in nlp common
WARNING:tensorflow:From /usr/local/lib/python2.7/dist-packages/tensorflow/python/ops/resource_variable_ops.py:435: colocate_with (from tensorflow.python.framework.ops) is deprecated and will be removed in a future version. Instructions for updating: Colocations handled automatically by placer. WARNING:tensorflow:From /usr/local/lib/python2.7/dist-packages/tensorflow/python/ops/math_ops.py:3066: to_int32 (from tensorflow.python.ops.math_ops) is deprecated and will be removed in a future version. Instructions for updating: Use tf.cast instead.