Create a chatbot - Paris 2024 Olympics
In this article, I show how to create a simple chatbot with TensorFlow.
For the data, I use a Kaggle dataset from the Paris 2024 Olympics to get sentences for the training stage.
You can find the complete code on my GitHub: https://github.com/victordalet/Kaggle_analysis/tree/feat/paris_2024_olympics
A TensorFlow chatbot dataset looks like the snippet below: it contains tags, patterns, and the various possible responses. Our goal is to take the different sequences from the Olympics dataset and append them to a file in this format.
{ "intents": [ { "tag": "google", "patterns": [ "google", "search", "internet" ], "responses": [ "Redirecting to Google..." ] },
I read the default chatbot dataset from its JSON file and the Olympics CSV, then split and process the CSV to add its sentences to the JSON:
import json


class CreateDataset:
    def __init__(self):
        self.json_path = 'data.json'
        self.csv_path = '../paris-2024-faq.csv'

        # Load the default chatbot intents.
        with open(self.json_path) as file:
            self.dataset = json.load(file)

        # The FAQ CSV is split on ';': a field containing '?' is treated as a
        # question (the pattern of a new intent) and the field that follows it
        # as that intent's response.
        with open(self.csv_path, 'r') as f:
            dataset_split = f.read().split(";")
        question = False
        for data in dataset_split:
            if question:
                question = False
                self.dataset["intents"][-1]["responses"].append(data)
            if "?" in data:
                question = True
                self.dataset["intents"].append({
                    "tag": "",
                    "patterns": [data],
                    "responses": []
                })

        # Write the merged intents back to data.json.
        with open(self.json_path, 'w') as f:
            json.dump(self.dataset, f)
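Since all the work happens in the constructor, running the script is just a matter of instantiating the class. A minimal sketch, assuming the file is saved as create_dataset.py:

if __name__ == "__main__":
    # Reads data.json and the FAQ CSV, then writes the merged intents back.
    CreateDataset()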
For the training stage, I adapted the TensorFlow example.
If you take my code and run it, pass the number of epochs you want as the first argument (for example, python train.py 200, assuming the script is saved as train.py).
Create a saves directory where your model will be stored, and put in it the classes.pkl and words.pkl files from the GitHub repository linked at the beginning of this article.
import random
import json
import pickle
import sys

import numpy as np
import nltk
from nltk.stem import WordNetLemmatizer
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout
from tensorflow.keras.optimizers import SGD


class Train:
    words: list
    classes: list
    documents: list
    ignore_letters: list
    training: list
    output_empty: list
    train_x: list
    train_y: list
    model: Sequential
    epochs: int

    def __init__(self):
        self.lemmatizer = WordNetLemmatizer()
        self.intents = json.loads(open('data.json').read())
        self.words = []
        self.classes = []
        self.documents = []
        self.training = []
        self.ignore_letters = ['?', '!']
        self.epochs = int(sys.argv[1])  # number of epochs, first CLI argument

    def run(self):
        self.download_nltk_data()
        self.load_training_data()
        self.prepare_training_data()
        self.build_neural_network()
        self.train()

    @staticmethod
    def download_nltk_data():
        nltk.download('punkt')
        nltk.download('wordnet')

    def load_training_data(self):
        # Tokenize every pattern and remember which tag it belongs to.
        for intent in self.intents['intents']:
            for pattern in intent['patterns']:
                word_list = nltk.word_tokenize(pattern)
                self.words.extend(word_list)
                self.documents.append((word_list, intent['tag']))
                if intent['tag'] not in self.classes:
                    self.classes.append(intent['tag'])

    def prepare_training_data(self):
        # Lemmatize and deduplicate the vocabulary, then persist it for inference.
        self.words = [self.lemmatizer.lemmatize(word)
                      for word in self.words if word not in self.ignore_letters]
        self.words = sorted(set(self.words))
        self.classes = sorted(set(self.classes))
        pickle.dump(self.words, open('saves/words.pkl', 'wb'))
        pickle.dump(self.classes, open('saves/classes.pkl', 'wb'))
        self.output_empty = [0] * len(self.classes)
        for document in self.documents:
            # Bag-of-words vector: 1 if the vocabulary word appears in the pattern.
            word_patterns = [self.lemmatizer.lemmatize(word.lower())
                             for word in document[0]]
            bag = [1 if word in word_patterns else 0 for word in self.words]
            output_row = list(self.output_empty)
            output_row[self.classes.index(document[1])] = 1
            self.training.append([bag, output_row])
        random.shuffle(self.training)
        # Rows hold lists of different lengths, so build an object array.
        self.training = np.array(self.training, dtype=object)
        self.train_x = list(self.training[:, 0])
        self.train_y = list(self.training[:, 1])

    def build_neural_network(self):
        self.model = Sequential()
        self.model.add(Dense(128, input_shape=(len(self.train_x[0]),), activation='relu'))
        self.model.add(Dropout(0.5))
        self.model.add(Dense(64, activation='relu'))
        self.model.add(Dropout(0.5))
        self.model.add(Dense(len(self.train_y[0]), activation='softmax'))
        sgd = SGD(learning_rate=0.01, momentum=0.9, nesterov=True)  # 'lr' is deprecated
        self.model.compile(loss='categorical_crossentropy', optimizer=sgd,
                           metrics=['accuracy'])

    def train(self):
        self.model.fit(np.array(self.train_x), np.array(self.train_y),
                       epochs=self.epochs, batch_size=5, verbose=1)
        self.model.save('saves/chatbot_model.model')


if __name__ == "__main__":
    Train().run()
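To make the encoding concrete, here is a small standalone illustration of the bag-of-words vectors that prepare_training_data builds; the vocabulary and sentence below are invented for the example:

# Standalone sketch of the bag-of-words encoding used during training.
# Vocabulary and sentence are made up for illustration.
vocabulary = ["event", "olympics", "paris", "ticket", "when"]
sentence_words = ["when", "olympics", "paris"]

bag = [1 if word in sentence_words else 0 for word in vocabulary]
print(bag)  # -> [0, 1, 1, 0, 1]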
I create a ChatBot class with a test method that takes arbitrary messages from the user.
You can use the get_response method to plug this chatbot into your application; for example, I call it from a Flask API in one of my projects to serve the chatbot on a website (a sketch of such a wrapper follows the class below).
import random
import json
import pickle

import numpy as np
import nltk
from nltk.stem import WordNetLemmatizer
from tensorflow.keras.models import load_model


class ChatBot:
    lemmatizer: WordNetLemmatizer
    intents: dict
    words: list
    classes: list
    ERROR_THRESHOLD = 0.25  # ignore predictions below this probability

    def __init__(self):
        self.download_nltk_data()
        self.lemmatizer = WordNetLemmatizer()
        self.intents = json.loads(open('data.json').read())
        self.words = pickle.load(open('saves/words.pkl', 'rb'))
        self.classes = pickle.load(open('saves/classes.pkl', 'rb'))
        self.model = load_model('saves/chatbot_model.model')

    @staticmethod
    def download_nltk_data():
        nltk.download('punkt')
        nltk.download('wordnet')

    def clean_up_sentence(self, sentence):
        # Tokenize and lemmatize, mirroring the preprocessing used at training time.
        sentence_words = nltk.word_tokenize(sentence)
        return [self.lemmatizer.lemmatize(word) for word in sentence_words]

    def bag_of_words(self, sentence):
        # Encode the sentence against the training vocabulary.
        sentence_words = self.clean_up_sentence(sentence)
        bag = [0] * len(self.words)
        for w in sentence_words:
            for i, word in enumerate(self.words):
                if word == w:
                    bag[i] = 1
        return np.array(bag)

    def predict_class(self, sentence):
        bow = self.bag_of_words(sentence)
        res = self.model.predict(np.array([bow]))[0]
        # Keep every intent above the threshold, most probable first.
        results = [[i, r] for i, r in enumerate(res) if r > self.ERROR_THRESHOLD]
        results.sort(key=lambda x: x[1], reverse=True)
        return [{'intent': self.classes[r[0]], 'probability': str(r[1])}
                for r in results]

    def get_response(self, intents_list):
        # Pick a random response from the intent with the highest probability.
        tag = intents_list[0]['intent']
        for intent in self.intents['intents']:
            if intent['tag'] == tag:
                return random.choice(intent['responses'])

    def test(self):
        # Simple REPL: type a message, get the predicted response.
        while True:
            message = input("")
            ints = self.predict_class(message)
            print(self.get_response(ints))
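As mentioned above, get_response makes it easy to plug the chatbot into a web application. Below is a minimal sketch of a Flask wrapper; the route, the JSON payload shape, and the chatbot module name are assumptions for illustration, not taken from the repository:

from flask import Flask, jsonify, request

from chatbot import ChatBot  # assumed module name for the class above

app = Flask(__name__)
bot = ChatBot()


@app.route("/chat", methods=["POST"])
def chat():
    # Expects a JSON body such as {"message": "..."} (assumed payload shape).
    message = request.get_json()["message"]
    intents = bot.predict_class(message)
    return jsonify({"response": bot.get_response(intents)})


if __name__ == "__main__":
    app.run()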