Added complete code documentation

Matias Espinoza 2023-03-20 15:30:26 -03:00
parent 63d2c1ddf1
commit c74ad8d0fc
11 changed files with 159 additions and 25 deletions

View File

@@ -7,32 +7,60 @@ import { Runnable } from '@/models/runnable';
import { Logger } from '@/logger';
export class Api implements AI, Runnable {
/**
* Logger instance
* @private
*/
private _logger: Logger;
/**
* OpenAI API instance
* @private
*/
private _api!: OpenAIApi;
/**
* OpenAI API configuration
* @private
*/
private readonly _configuration: Configuration;
/**
* Create API instance
*/
constructor() {
this._logger = new Logger(Api.name);
/**
* Create OpenAI API configuration with API key
*/
this._configuration = new Configuration({
apiKey: process.env.OPENAI_API_KEY,
});
}
/**
* Initialize OpenAI API service
*/
run(): void {
try {
this._api = new OpenAIApi(this._configuration);
this._logger.service.info('OpenAI Service has been initialized successfully.');
this._api = new OpenAIApi(this._configuration); // Create API instance
this._logger.service.info('OpenAI Service has been initialized successfully.'); // Log service initialization
} catch (error) {
this._logger.service.error(`Failed to start OpenAI Service: ${error}`);
process.exit(1);
this._logger.service.error(`Failed to start OpenAI Service: ${error}`); // Log service initialization error
process.exit(1); // Exit process
}
}
/**
* Get the chat completion from the OpenAI API
* @param chatHistory
*/
async chatCompletion(chatHistory: ChatCompletionRequestMessage[])
: Promise<ChatCompletionResponseMessage> {
/**
* Create the chat completion request and return the response, or throw on error
*/
const request = await this._api.createChatCompletion({
model: 'gpt-3.5-turbo',
messages: chatHistory,

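For context, a minimal caller for this class (hypothetical, not part of this commit) only needs the run() and chatCompletion() signatures documented above:

import { ChatCompletionRequestMessage } from 'openai';
import { Api } from '@/api';

// Assumes OPENAI_API_KEY is set, e.g. via dotenv as in index.ts
const ai = new Api();
ai.run(); // Builds the OpenAIApi instance from the configuration

const history: ChatCompletionRequestMessage[] = [{ role: 'user', content: 'Hello!' }];
ai.chatCompletion(history)
  .then((message) => console.log(message.content))
  .catch((error) => console.error(error));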
View File

@@ -9,26 +9,32 @@ import { Command } from '@/bot/models/command';
export const ChatCommand: Command = {
name: 'chat',
description: 'Say anything to the Chat bot',
description: 'Chat with the bot',
type: ApplicationCommandType.ChatInput,
options: [
{
name: 'question',
description: 'Question for the Chat bot',
description: 'The question you want to ask the bot',
required: true,
type: ApplicationCommandOptionType.String,
},
{
name: 'ephemeral',
description: 'If you set \'false\' the message will be persisted over time',
description: 'Whether the response should be ephemeral (only visible to you)',
required: false,
type: ApplicationCommandOptionType.Boolean,
},
],
execute: async (client: Client, interaction: CommandInteraction, ai) => {
/**
* Get the chat history from the channel
*/
const channel = client.channels.cache.get(interaction.channelId) as TextChannel;
const messages = await channel.messages.fetch({ limit: 100 });
const messages = await channel.messages.fetch({ limit: 100 }); // Get the last 100 messages from the channel
/**
* Filter the messages from the user and get the embeds from the messages
*/
const chatHistory: ChatCompletionRequestMessage[] = [];
const consistentMessages = messages
.filter((x) => x.interaction?.user.id === interaction.user.id);
@@ -38,18 +44,27 @@ export const ChatCommand: Command = {
.flatMap((item) => item.data);
embed.forEach((item) => {
/**
* Create the message object from the embed and add it to the chat history
*/
const message: ChatCompletionRequestMessage = {
role: item.footer?.text === 'embed-question' ? 'user' : 'assistant',
content: item.description || 'An error occurred during the process, please try again later.',
};
chatHistory.push(message);
chatHistory.push(message); // Add the message to the chat history
});
/**
* Get the options from the interaction
*/
const interactionResolver = (interaction.options as CommandInteractionOptionResolver);
const question = interactionResolver.getString('question') || undefined;
const ephemeral = interactionResolver.getBoolean('ephemeral') || true;
const question = interactionResolver.getString('question') || undefined; // Default to undefined
const ephemeral = interactionResolver.getBoolean('ephemeral') ?? true; // Default to true only when the option is omitted
/**
* Add the current question to the chat history
*/
const currentQuestion: ChatCompletionRequestMessage = {
role: 'user',
content: question || 'An error occurred during the process, please try again later.',
@@ -57,10 +72,16 @@ export const ChatCommand: Command = {
chatHistory.push(currentQuestion);
/**
* Get the answer from the AI
*/
const answer = await ai?.chatCompletion(chatHistory)
.then((response) => response.content)
.catch((error: Error) => error.message);
/**
* Add the current answer to the chat history and reply to the user on the channel
*/
await interaction.followUp({
ephemeral,
fetchReply: true,
@@ -70,7 +91,7 @@ export const ChatCommand: Command = {
title: '✥ Question',
description: question,
footer: {
text: 'embed-question',
text: 'embed-question', // embed-question marks the embed as a question from the user to the bot
},
},
{
@@ -78,7 +99,7 @@ export const ChatCommand: Command = {
title: '✥ Answer',
description: answer,
footer: {
text: 'embed-answer',
text: 'embed-answer', // embed-answer marks the embed as an answer from the bot to the user
},
},
],

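To make the footer convention concrete, a rebuilt history for one prior exchange plus the new question would look roughly like this (illustrative values only, not from the commit):

import { ChatCompletionRequestMessage } from 'openai';

const exampleHistory: ChatCompletionRequestMessage[] = [
  { role: 'user', content: 'What is TypeScript?' },                  // rebuilt from an 'embed-question' footer
  { role: 'assistant', content: 'A typed superset of JavaScript.' }, // rebuilt from an 'embed-answer' footer
  { role: 'user', content: 'Does it need a compile step?' },         // the current question appended last
];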
View File

@@ -8,14 +8,26 @@ export const ClearCommand: Command = {
description: 'Delete your interactions with the Chat bot',
type: ApplicationCommandType.ChatInput,
execute: async (client: Client, interaction: CommandInteraction) => {
/**
* Get the chat history from the channel and filter the messages from the user
*/
const channel = client.channels.cache.get(interaction.channelId) as TextChannel;
const messages = await channel.messages.fetch({ limit: 100 });
const messages = await channel.messages.fetch({ limit: 100 }); // Get the last 100 messages from the channel
const consistentMessages = messages
.filter((x) => x.interaction?.user.id === interaction.user.id);
/**
* Delete the messages from the channel
*/
if (channel.type === ChannelType.GuildText) {
/**
* Bulk delete the messages if the channel is a guild text channel
*/
await channel.bulkDelete(consistentMessages);
} else {
/**
* Delete the messages one by one if the channel is a DM channel
*/
await messages.forEach((message) => {
if (message.author.id !== client.user?.id) return;
message.delete();

View File

@@ -3,6 +3,9 @@ import { PingCommand } from '@/bot/commands/pingCommand';
import { ChatCommand } from '@/bot/commands/chatCommand';
import { ClearCommand } from '@/bot/commands/clearCommand';
/**
* Export all the registered commands as a single array for centralized management
*/
export const commands: Command[] = [
PingCommand,
ChatCommand,

View File

@@ -7,6 +7,9 @@ export const PingCommand: Command = {
type: ApplicationCommandType.ChatInput,
execute: async (client: Client, interaction: CommandInteraction) => {
const content = 'Pong';
/**
* Reply to the interaction with the pong message
*/
await interaction.followUp({
ephemeral: true,
content,

View File

@@ -8,16 +8,31 @@ import { AI } from '@/models/ai';
import { commands } from '@/bot/commands';
export class Bot implements Runnable {
/**
* Logger instance
* @private
*/
private _logger: Logger;
/**
* AI instance
* @private
*/
private readonly _ai: AI;
/**
* Discord API client instance
* @private
*/
private readonly _client: Client;
constructor(ai: AI) {
this._logger = new Logger(Bot.name);
this._ai = ai;
/**
* Create Discord API client instance with intents and partials
*/
this._client = new Client({
intents: [
IntentsBitField.Flags.Guilds,
@@ -26,12 +41,20 @@ export class Bot implements Runnable {
IntentsBitField.Flags.DirectMessages,
],
partials: [
Partials.Channel,
Partials.Channel, // For DMs
],
});
}
/**
* Handle slash commands from Discord API
* @param interaction
* @private
*/
private async handleSlashCommand(interaction: CommandInteraction): Promise<void> {
/**
* Find the command by name and execute it, or log a warning and return if it is not found
*/
const slashCommand = commands.find((command) => command.name === interaction.commandName);
if (!slashCommand) {
this._logger.service.warning(`SlashCommand [${interaction.commandName}] not found.`);
@@ -39,36 +62,56 @@ export class Bot implements Runnable {
return;
}
await interaction.deferReply();
this._logger.service.debug(`SlashCommand [${interaction.commandName}] executed properly.`);
await slashCommand.execute(this._client, interaction, this._ai);
await interaction.deferReply(); // Defer reply to show loading state
this._logger.service.debug(`SlashCommand [${interaction.commandName}] executed properly.`); // Log command execution
await slashCommand.execute(this._client, interaction, this._ai); // Execute command
}
/**
* Initialize Discord API service
*/
run(): void {
/**
* Log in to the Discord API and exit the process if the login fails
*/
this._client.login(process.env.DISCORD_API_KEY).then(() => {
this._logger.service.info('Discord Service has been initialized successfully.');
this._logger.service.info('Discord Service has been initialized successfully.'); // Log service initialization
}).catch((error) => {
this._logger.service.error(`Failed to start Discord Service: ${error}`);
process.exit(1);
this._logger.service.error(`Failed to start Discord Service: ${error}`); // Log service initialization error
process.exit(1); // Exit process
});
this._client.on('ready', async () => {
/**
* Check that the client user and application are available before continuing
*/
if (!this._client.user || !this._client.application) {
return;
}
// Set status for show command
/**
* Set the activity status to advertise the /chat command
*/
this._client.user?.setActivity({
name: '/chat',
type: ActivityType.Listening,
});
/**
* Register the slash commands with the bot application
*/
await this._client.application.commands.set(commands);
});
/**
* Handle the interactionCreate event
*/
this._client.on('interactionCreate', async (interaction: Interaction) => {
/**
* Check if interaction is command or chat input command
*/
if (interaction.isCommand() || interaction.isChatInputCommand()) {
await this.handleSlashCommand(interaction);
await this.handleSlashCommand(interaction); // Handle slash command
}
});
}
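As a hypothetical illustration (not shown in this commit), wiring the two services together only relies on Api implementing the AI interface that the Bot constructor expects:

import { Api } from '@/api';
import { Bot } from '@/bot';

// Assumes OPENAI_API_KEY and DISCORD_API_KEY are set, e.g. via dotenv as in index.ts
const api = new Api();
api.run(); // Initialize the OpenAI service first

const bot = new Bot(api); // Api satisfies the AI interface expected by Bot
bot.run(); // Log in to Discord; slash commands are registered on the 'ready' event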

View File

@@ -2,5 +2,11 @@ import { CommandInteraction, ChatInputApplicationCommandData, Client } from 'dis
import { AI } from '@/models/ai';
export interface Command extends ChatInputApplicationCommandData {
/**
* Execute the command with the given parameters
* @param client
* @param interaction
* @param ai
*/
execute: (client: Client, interaction: CommandInteraction, ai?: AI) => Promise<void>;
}
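A hypothetical command that satisfies this interface (not part of the commit) mirrors the shape of PingCommand above:

import { ApplicationCommandType, Client, CommandInteraction } from 'discord.js';
import { Command } from '@/bot/models/command';

export const EchoCommand: Command = {
  name: 'echo',
  description: 'Repeat a fixed message back to you',
  type: ApplicationCommandType.ChatInput,
  execute: async (client: Client, interaction: CommandInteraction) => {
    await interaction.followUp({ ephemeral: true, content: 'echo' }); // The optional ai parameter is unused here
  },
};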

View File

@@ -3,12 +3,12 @@ import { Bot } from '@/bot';
import { Api } from '@/api';
/**
* Configure dotenv.
* Load environment variables from .env file
*/
dotenv.config();
/**
* OpenAI contained in API Module.
* OpenAI service contained in the Api module.
*/
const api = new Api();
api.run();

View File

@@ -4,8 +4,16 @@ import {
import process from 'process';
export class Logger {
/**
* Winston logger instance
* @protected
*/
protected _logger: WinstonLogger;
/**
* Create logger instance
* @param serviceName
*/
constructor(serviceName: string) {
this._logger = createLogger({
level: process.env.NODE_ENV === 'dev' ? 'debug' : 'info',
@@ -23,6 +31,9 @@ });
});
}
/**
* Get logger instance
*/
get service() {
return this._logger;
}
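A short usage sketch (hypothetical, based only on the constructor and getter above):

import { Logger } from '@/logger';

const logger = new Logger('ExampleService'); // Service name passed to the logger
logger.service.info('ExampleService is up.');
logger.service.debug('Shown when NODE_ENV is set to dev.');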

View File

@@ -1,6 +1,10 @@
import { ChatCompletionRequestMessage, ChatCompletionResponseMessage } from 'openai';
export interface AI {
/**
* Get the chat completion from the OpenAI API
* @param chatHistory
*/
chatCompletion(chatHistory: ChatCompletionRequestMessage[]):
Promise<ChatCompletionResponseMessage>;
}
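Because the bot depends only on this interface, a stub implementation (hypothetical, e.g. for local testing) can stand in for the OpenAI-backed Api:

import { ChatCompletionRequestMessage, ChatCompletionResponseMessage } from 'openai';
import { AI } from '@/models/ai';

export class EchoAI implements AI {
  async chatCompletion(chatHistory: ChatCompletionRequestMessage[]):
  Promise<ChatCompletionResponseMessage> {
    const last = chatHistory[chatHistory.length - 1]; // Echo the most recent message back
    return { role: 'assistant', content: last?.content ?? 'Nothing to echo.' };
  }
}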

View File

@@ -1,3 +1,6 @@
export interface Runnable {
/**
* Run the service instance
*/
run(): void;
}
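Any service that exposes a run() method satisfies this contract; a minimal hypothetical example:

import { Runnable } from '@/models/runnable';

class HealthCheck implements Runnable {
  run(): void {
    console.log('Health check started.'); // A real service would start timers, servers, etc.
  }
}

new HealthCheck().run();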