I wrote this code by following this tutorial: https://www.geeksforgeeks.org/deploy-a-chatbot-using-tensorflow-in-python/
# Imports
import random
import json
import pickle
import numpy as np
import nltk
from keras.models import Sequential
from nltk.stem import WordNetLemmatizer
from keras.layers import Dense, Activation, Dropout
from keras.optimizers import SGD

lemmatizer = WordNetLemmatizer()

# Reading the JSON file
intents = json.loads(open("intents2.json").read())

# Creating empty lists to store data
words = []
classes = []
documents = []
ignoreLetters = ["?", "!", ".", ","]

for intent in intents['intents']:
    for pattern in intent['patterns']:
        # Separate words from patterns
        wordList = nltk.word_tokenize(pattern)
        words.extend(wordList)
        # Associate patterns with respective tags
        documents.append((wordList, intent['tag']))
        # Append tags to class list
        if intent['tag'] not in classes:
            classes.append(intent['tag'])

# Storing the root words or lemmas
words = [lemmatizer.lemmatize(word)
         for word in words if word not in ignoreLetters]
words = sorted(set(words))

# Save words and class list to binary files
pickle.dump(words, open('words.pkl', 'wb'))
pickle.dump(classes, open('classes.pkl', 'wb'))

# Training data
training = []
outputEmpty = [0] * len(classes)

for document in documents:
    bag = []
    wordPatterns = document[0]
    wordPatterns = [lemmatizer.lemmatize(word.lower()) for word in wordPatterns]
    for word in words:
        bag.append(1) if word in wordPatterns else bag.append(0)
    # Make a copy of outputEmpty
    outputRow = list(outputEmpty)
    outputRow[classes.index(document[1])] = 1
    training.append([bag, outputRow])

random.shuffle(training)
training = np.array(training)

# Split data
trainX = list(training[:, 0])
trainY = list(training[:, 1])

# Create sequential machine learning model
model = Sequential()
model.add(Dense(128, input_shape=(len(trainX[0]),), activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(64, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(len(trainX[0]), activation='softmax'))

# Compile model
sgd = SGD(learning_rate=0.01, weight_decay=1e-6, momentum=0.9, nesterov=True)
model.compile(loss='categorical_crossentropy',
              optimizer=sgd, metrics=['accuracy'])

hist = model.fit(np.array(trainX), np.array(trainY),
                 epochs=200, batch_size=5, verbose=True)  # Offending line

# Save model
model.save("chatbotmodel.h5", hist)
print("Whooohooo! Chatbot model training is success!")
But when I try to run training.py, I get this error:
Traceback (most recent call last):
  File "C:\Users\user\Documents\G4G Bot\training.py", line 83, in <module>
    hist=model.fit(np.array(trainX),np.array(trainY),
  File "C:\Users\user\AppData\Local\Programs\Python\Python310\lib\site-packages\keras\src\utils\traceback_utils.py", line 70, in error_handler
    raise e.with_traceback(filtered_tb) from None
  File "C:\Users\user\AppData\Local\Temp\__autograph_generated_filegfvyeton.py", line 15, in tf__train_function
    retval_ = ag__.converted_call(ag__.ld(step_function), (ag__.ld(self), ag__.ld(iterator)), None, fscope)
ValueError: in user code:

    File "C:\Users\user\AppData\Local\Programs\Python\Python310\lib\site-packages\keras\src\engine\training.py", line 1401, in train_function  *
        return step_function(self, iterator)
    File "C:\Users\user\AppData\Local\Programs\Python\Python310\lib\site-packages\keras\src\engine\training.py", line 1384, in step_function  **
        outputs = model.distribute_strategy.run(run_step, args=(data,))
    File "C:\Users\user\AppData\Local\Programs\Python\Python310\lib\site-packages\keras\src\engine\training.py", line 1373, in run_step  **
        outputs = model.train_step(data)
    File "C:\Users\user\AppData\Local\Programs\Python\Python310\lib\site-packages\keras\src\engine\training.py", line 1151, in train_step
        loss = self.compute_loss(x, y, y_pred, sample_weight)
    File "C:\Users\user\AppData\Local\Programs\Python\Python310\lib\site-packages\keras\src\engine\training.py", line 1209, in compute_loss
        return self.compiled_loss(
    File "C:\Users\user\AppData\Local\Programs\Python\Python310\lib\site-packages\keras\src\engine\compile_utils.py", line 277, in __call__
        loss_value = loss_obj(y_t, y_p, sample_weight=sw)
    File "C:\Users\user\AppData\Local\Programs\Python\Python310\lib\site-packages\keras\src\losses.py", line 143, in __call__
        losses = call_fn(y_true, y_pred)
    File "C:\Users\user\AppData\Local\Programs\Python\Python310\lib\site-packages\keras\src\losses.py", line 270, in call  **
        return ag_fn(y_true, y_pred, **self._fn_kwargs)
    File "C:\Users\user\AppData\Local\Programs\Python\Python310\lib\site-packages\keras\src\losses.py", line 2221, in categorical_crossentropy
        return backend.categorical_crossentropy(
    File "C:\Users\user\AppData\Local\Programs\Python\Python310\lib\site-packages\keras\src\backend.py", line 5573, in categorical_crossentropy
        target.shape.assert_is_compatible_with(output.shape)

    ValueError: Shapes (5, 6) and (5, 35) are incompatible
Is this another case of an old, deprecated package, or is there a fix?
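In case it helps narrow things down, my guess (not confirmed) is that the two shapes in the error correspond to my one-hot label rows and the width of the model's last layer. A quick check along these lines, reusing the variables already defined in the script above and run right after model.compile(), should show where the 6 and the 35 come from:

# Hypothetical sanity check: compare the label width with the model's output width.
print("len(words):", len(words))                  # size of each bag-of-words input vector
print("len(classes):", len(classes))              # size of each one-hot outputRow
print("trainY shape:", np.array(trainY).shape)    # should be (num_patterns, len(classes))
print("model output shape:", model.output_shape)  # width of the last Dense layer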