import numpy as np
import tensorflow as tf
from tensorflow.keras .models import Sequential
from tensorflow.keras .layers import Embedding, SimpleRNN, Dense
# Corpus and character vocabulary.
text = "This is a sample text"
c = sorted(set(text))  # unique characters, sorted for a stable index mapping
char_to_index = {char: index for index, char in enumerate(c)}
index_to_char = {index: char for index, char in enumerate(c)}

# Encode the whole corpus as a list of integer character indices.
t_indices = [char_to_index[char] for char in text]

# Build (input window, next character) training pairs with a sliding window.
# NOTE(review): len(text) is 21 and seq_len is 20, so exactly ONE training
# sample is produced — far too little data for the model to learn anything.
seq_len = 20
seq = [t_indices[i:i + seq_len] for i in range(len(t_indices) - seq_len)]
n_char = [t_indices[i + seq_len] for i in range(len(t_indices) - seq_len)]
X, y = np.array(seq), np.array(n_char)
# Character-level language model: embed each character index, run a SimpleRNN
# over the seq_len-step window, and emit a softmax distribution over the
# vocabulary for the next character.
model = Sequential()
model.add(Embedding(input_dim=len(c), output_dim=50, input_length=seq_len))
model.add(SimpleRNN(100, return_sequences=False))
model.add(Dense(len(c), activation="softmax"))

# Integer class targets + softmax output -> sparse categorical cross-entropy.
model.compile(loss="sparse_categorical_crossentropy", optimizer="adam")
model.fit(X, y, batch_size=64, epochs=20)
# Seed text for generation.  Note it differs from the training corpus
# ("txt" vs "text"), but every character it contains is in the vocabulary,
# so char_to_index lookups cannot fail.
s_text = "This is a sample txt"
g_text = s_text
num_chars_to_generate = 100

for _ in range(num_chars_to_generate):
    s_indices = [char_to_index[char] for char in s_text]
    # The Embedding layer was built with input_length=seq_len, so the model
    # must always see exactly seq_len timesteps: left-pad short seeds with
    # index 0 (the space character) and truncate long seeds to the last
    # seq_len characters.  (The original code never truncated, so any seed
    # longer than seq_len would fail at predict time.)
    if len(s_indices) < seq_len:
        s_indices = [0] * (seq_len - len(s_indices)) + s_indices
    else:
        s_indices = s_indices[-seq_len:]
    window = np.array(s_indices).reshape(1, -1)
    # verbose=0 suppresses the per-call progress bar predict() would
    # otherwise print on every one of the 100 iterations.
    next_index = int(model.predict(window, verbose=0).argmax())
    # Renamed from n_index/n_char: the original n_char shadowed the
    # training-target list of the same name defined during data prep.
    next_char = index_to_char[next_index]
    g_text += next_char
    s_text = s_text[1:] + next_char  # slide the context window forward one char

print(g_text)
# NOTE(review): removed a stray base64-encoded duplicate of this script that
# was pasted here as a bare line, making the whole file a SyntaxError.