Winky's profile picture

Published by

published

Category: Web, HTML, Tech

The latest and best version of my Chatbot

You do have to create a folder structure:
a main folder containing the programs and modules; then an xml folder; a standard folder with all the AIML files; a brain folder with collections.brn and brain.brn; and an images folder with JPEG images.
I've been experimenting with this code for the last few days. It doesn't have the fancy features of Penny or Mabel, but it seems to understand context and sentences, and is less likely to get confused or lose track of the topic. She still does occasionally, but the least of the three, and I assume that with some fine-tuning she can be much better.

import aiml
import os
from tkinter import *
from PIL import Image, ImageTk
import xml.etree.ElementTree as ET
import logging
import threading
import random
import time
import re
import nltk
from nltk.tokenize import word_tokenize
from nltk.corpus import wordnet
import networkx as nx
import collections
import pickle
# Initialize NLTK's WordNet
nltk.download('wordnet')
import importlib
import subprocess
from nltk.sentiment import SentimentIntensityAnalyzer
from nltk.chunk import ne_chunk
import pickle
from PIL import Image, ImageTk

nltk.download('vader_lexicon')
nltk.download('averaged_perceptron_tagger')
nltk.download('maxent_ne_chunker')
nltk.download('words')

# Seed data persisted for the bot's "brain"; currently a placeholder mapping.
collections_data = {'example_key': 'example_value'}  # Define collections_data

# Resolve the brain folder relative to this script (abspath guards against
# __file__ being a bare filename), and create it up front: the original
# wrote into 'brain' before anything guaranteed the folder existed.
current_dir = os.path.dirname(os.path.abspath(__file__))
folder_path = os.path.join(current_dir, 'brain')
os.makedirs(folder_path, exist_ok=True)

# Persist the same data under both filenames the bot expects.
for _brain_name in ('collections.pkl', 'collections.brn'):
    with open(os.path.join(folder_path, _brain_name), 'wb') as file:
        pickle.dump(collections_data, file)

sid = SentimentIntensityAnalyzer()

def import_or_install(package, pip_name=None):
    """Import *package*; if it is missing, install it via pip.

    Args:
        package: importable module name (e.g. 'PIL').
        pip_name: optional PyPI distribution name when it differs from the
            import name (e.g. 'Pillow' for 'PIL'); defaults to *package*.
    """
    import sys
    try:
        importlib.import_module(package)
    except ImportError:
        print(f"{package} is not installed. Installing...")
        # sys.executable -m pip targets the *current* interpreter's
        # environment; a bare 'pip' on PATH may belong to another Python.
        subprocess.check_call([sys.executable, '-m', 'pip', 'install', pip_name or package])
# Only genuinely third-party packages belong here: stdlib modules
# (os, tkinter, logging, threading, random, time, re, xml.etree...) ship
# with Python, and running `pip install os` fails outright.
# NOTE(review): import names and PyPI names differ for some of these
# ('PIL' installs as 'Pillow', 'aiml' as 'python-aiml') — verify the
# install path before relying on auto-install.
required_packages = ['aiml', 'PIL', 'nltk', 'networkx']
# Check and install missing packages
for package in required_packages:
    import_or_install(package)
def word_tokenize(text):
    """Tokenize *text* and tally token frequencies.

    NOTE(review): this deliberately shadows nltk.tokenize.word_tokenize
    imported at the top of the file, and returns a pair instead of a list —
    callers (send_message) rely on the (tokens, counts) shape.

    Returns:
        (tokens, word_counts): the token list and a collections.Counter
        mapping each token to its occurrence count.
    """
    toks = nltk.word_tokenize(text)
    return toks, collections.Counter(toks)

def chunk_text(text):
    """Split *text* into sentences and run NE chunking on each.

    Returns:
        A list with one chunk tree per sentence (POS-tagged tokens passed
        through nltk's named-entity chunker).
    """
    return [
        ne_chunk(nltk.pos_tag(nltk.word_tokenize(sentence)))
        for sentence in nltk.sent_tokenize(text)
    ]

def generate_random_response():
    """Return one idle-chat prompt, chosen uniformly at random."""
    idle_prompts = (
        "Hello there!",
        "Anyone there?",
        "I see you!",
        "Am I alone?",
        "Anyone here?",
        "Who woke me up?",
        "Did i lose you?",
    )
    return random.choice(idle_prompts)
# Timestamp of the user's last message; the idle-watcher thread compares
# against this to decide when to emit a random prompt.
last_activity_time = time.time()
# Maps each exact user sentence to how many times it has been entered.
repeated_sentences = {}
# Timestamped debug logging for the whole app.
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(message)s')
def create_xml_file(file_name):
    """Write a fresh XML skeleton to *file_name*.

    The file gets a <root> element containing empty <sentiment> and
    <chunk> sections, which append_to_xml later fills in.
    """
    doc_root = ET.Element("root")
    ET.SubElement(doc_root, "sentiment")
    ET.SubElement(doc_root, "chunk")
    ET.ElementTree(doc_root).write(file_name)

def append_to_xml(file_name, element_name, element_text):
    """Append a value to the XML log at *file_name*.

    For element_name "sentiment" a <score> child is appended under the
    <sentiment> section; for "chunk" an <entity> child under <chunk>.
    Any other element_name leaves the content untouched (the file is
    still rewritten, matching the original behavior).

    The original crashed with AttributeError when the target section was
    missing; this version creates the section on demand instead.
    """
    tree = ET.parse(file_name)
    root = tree.getroot()

    child_tag = {"sentiment": "score", "chunk": "entity"}.get(element_name)
    if child_tag is not None:
        parent = root.find(element_name)
        if parent is None:
            # Be robust to files that predate create_xml_file's skeleton.
            parent = ET.SubElement(root, element_name)
        ET.SubElement(parent, child_tag).text = element_text

    tree.write(file_name)

def update_xml_element(file_name, element_name, new_text):
    """Overwrite the text of every *element_name* element in *file_name*.

    Parses the file, rewrites each matching element's text in place, and
    saves the document back to the same path.
    """
    tree = ET.parse(file_name)
    for node in tree.getroot().iter(element_name):
        node.text = new_text
    tree.write(file_name)

# Create a unique identifier for the chatbot
bot_id = "my_unique_chatbot_id"

# Load AIML files from the 'standard' folder. Only learn files with AIML
# extensions so stray files (.DS_Store, READMEs, ...) don't break startup —
# the original fed every directory entry to kernel.learn().
kernel = aiml.Kernel()
aiml_path = "standard"
for file_name in os.listdir(aiml_path):
    if file_name.lower().endswith(('.aiml', '.xml')):
        kernel.learn(os.path.join(aiml_path, file_name))

# Ensure the 'brain' folder exists, then save an initial brain file if one
# is not already present.
os.makedirs("brain", exist_ok=True)
brain_file = "brain/brain.brn"
if not os.path.isfile(brain_file):
    kernel.saveBrain(brain_file)
# Create the GUI for the chatbot
root = Tk()
root.title("AIML Chatbot")
chat_log = Text(root)
chat_log.pack()

# Text input field
entry = Entry(root, width=50)
entry.pack()

def on_enter(event):
    """Send the current message when the user presses Enter."""
    send_message()

# Bind the "Enter" key to the text input field
entry.bind("<Return>", on_enter)

# Load the avatar image and display it at half size.
# NOTE: Image.ANTIALIAS was removed in Pillow 10; LANCZOS is the same filter.
image_path = "images/billie4.jpeg"
image = Image.open(image_path)
new_size = tuple(int(dim * 0.5) for dim in image.size)
resized_image = image.resize(new_size, Image.LANCZOS)
photo = ImageTk.PhotoImage(resized_image)
image_label = Label(root, image=photo)
image_label.image = photo  # keep a reference so Tk doesn't garbage-collect it
image_label.pack()  # the original packed the label twice; once is enough

def check_user_activity():
    """Background watcher: after 4 minutes with no user message, post a
    random prompt into the chat log, then keep polling every 10 seconds.

    NOTE(review): this inserts into the Tk Text widget from a non-GUI
    thread; tkinter is generally not thread-safe, so this may need to be
    routed through root.after(...) — confirm on the target platform.
    """
    global last_activity_time
    inactive_time = 240  # 4 minutes in seconds
    while True:
        current_time = time.time()
        if current_time - last_activity_time > inactive_time:
            response = generate_random_response()
            chat_log.insert(END, "Bot: " + response + "\n")
            # Reset the timer so prompts repeat at most every 4 minutes.
            last_activity_time = current_time
        time.sleep(10)  # Check every 10 seconds
# Daemon thread so the watcher dies with the main GUI loop.
activity_thread = threading.Thread(target=check_user_activity)
activity_thread.daemon = True
activity_thread.start()

# Function to handle user input and bot response
def send_message():
    """Handle one user turn: read the entry box, analyze the text, and post
    the bot's reply into the chat log.

    Side effects: updates last_activity_time and repeated_sentences,
    clears the entry widget, and scrolls the chat log to the bottom.
    """
    global last_activity_time
    global repeated_sentences

    # Get user input from the text input field
    user_input = entry.get()

    # Empty input: nothing to analyze or respond to.
    if not user_input:
        chat_log.insert(END, "Bot: I can't hear you.\n")
        return

    # If the user repeats the same sentence more than three times, answer
    # with a short acknowledgement instead of re-processing it.
    if user_input in repeated_sentences:
        repeated_sentences[user_input] += 1
        if repeated_sentences[user_input] > 3:
            responses = ["I heard you the first time.", "Got it, thanks!", "Understood."]
            response = random.choice(responses)
            chat_log.insert(END, f"Bot: {response}\n")
            return
    else:
        repeated_sentences[user_input] = 1

    # Tokenize only once we know we are actually processing this message.
    tokens, word_counts = word_tokenize(user_input)

    # Sentiment analysis — reuse the module-level `sid` analyzer instead of
    # constructing a new SentimentIntensityAnalyzer on every message.
    sentiment_scores = sid.polarity_scores(user_input)
    sentiment = sentiment_scores['compound']

    # Named entity recognition on the user input (currently informational;
    # the result is not yet fed back into the response).
    named_entities = ne_chunk(nltk.pos_tag(tokens))

    # Generate a response from the chatbot and reset the idle timer.
    response = kernel.respond(user_input)
    last_activity_time = time.time()

    chat_log.insert(END, "You: " + user_input + "\n")
    chat_log.insert(END, "Bot: " + response + "\n")

    entry.delete(0, END)

    # Keep the newest messages visible.
    chat_log.see(END)


    
# Create send button
send_button = Button(root, text="Send", command=send_message)
send_button.pack()
root.mainloop()


0 Kudos

Comments

Displaying 0 of 0 comments ( View all | Add Comment )