Initial upload of the project in its first usable version

This commit is contained in:
Matias Espinoza
2023-03-13 05:41:45 -03:00
commit 46c766fd38
21 changed files with 3045 additions and 0 deletions

38
src/api/index.ts Normal file
View File

@ -0,0 +1,38 @@
import {
ChatCompletionRequestMessage, ChatCompletionResponseMessage, Configuration, OpenAIApi,
} from 'openai';
import { AI } from '@/models/ai';
import { Runnable } from '@/models/runnable';
import { Logger } from '@/logger';
export class Api implements AI, Runnable {
  /** Logger scoped to this service. */
  private _logger: Logger;

  /** OpenAI SDK client built from the configuration below. */
  private _api: OpenAIApi;

  /** SDK configuration; reads the API key from the environment. */
  private readonly _configuration: Configuration;

  constructor() {
    this._logger = new Logger(Api.name);

    // OPENAI_API_KEY is expected in the environment (loaded via dotenv in src/index.ts).
    this._configuration = new Configuration({
      apiKey: process.env.OPENAI_API_KEY,
    });

    this._api = new OpenAIApi(this._configuration);
  }

  /**
   * Runnable lifecycle hook. The client is usable as soon as it is
   * constructed, so this only logs that the service is up.
   */
  run(): void {
    this._logger.service.info('OpenAI Service has been initialized successfully.');
  }

  /**
   * Send the accumulated conversation to the chat completion endpoint.
   *
   * @param chatHistory - Ordered user/assistant messages forming the context.
   * @returns The first choice's message from the completion response.
   * @throws Error when the API returns no message. (Previously the value was
   *   force-cast with `as`, which could hand `undefined` to callers.)
   */
  async chatCompletion(chatHistory: ChatCompletionRequestMessage[])
    : Promise<ChatCompletionResponseMessage> {
    const request = await this._api.createChatCompletion({
      model: 'gpt-3.5-turbo',
      messages: chatHistory,
    });

    const message = request.data.choices[0]?.message;
    if (!message) {
      throw new Error('OpenAI returned an empty completion.');
    }
    return message;
  }
}

View File

@ -0,0 +1,87 @@
import {
CommandInteraction,
Client,
CommandInteractionOptionResolver,
TextChannel, ApplicationCommandType, ApplicationCommandOptionType,
} from 'discord.js';
import { ChatCompletionRequestMessage } from 'openai';
import { Command } from '@/bot/models/command';
/**
 * Slash command /chat: asks the AI a question, threading in this user's
 * previous question/answer embeds from the channel as conversation history.
 */
export const ChatCommand: Command = {
  name: 'chat',
  description: 'Say anything to the Chat bot',
  type: ApplicationCommandType.ChatInput,
  options: [
    {
      name: 'question',
      description: 'Question for the Chat bot',
      required: true,
      type: ApplicationCommandOptionType.String,
    },
    {
      name: 'ephemeral',
      description: 'If you set \'false\' the message will be persisted over time',
      required: false,
      type: ApplicationCommandOptionType.Boolean,
    },
  ],
  execute: async (client: Client, interaction: CommandInteraction, ai) => {
    // Rebuild conversation context from the last 100 channel messages.
    const channel = client.channels.cache.get(interaction.channelId) as TextChannel;
    const messages = await channel.messages.fetch({ limit: 100 });

    const chatHistory: ChatCompletionRequestMessage[] = [];

    // Only this user's previous command interactions contribute history.
    const consistentMessages = messages
      .filter((x) => x.interaction?.user.id === interaction.user.id);

    const embed = consistentMessages.map((message) => message.embeds)
      .flatMap((item) => item)
      .flatMap((item) => item.data);

    embed.forEach((item) => {
      // The embed footer tags whether the embed held a question or an answer.
      const message: ChatCompletionRequestMessage = {
        role: item.footer?.text === 'embed-question' ? 'user' : 'assistant',
        content: item.description || 'Please notify an error in process',
      };
      chatHistory.push(message);
    });

    const interactionResolver = (interaction.options as CommandInteractionOptionResolver);
    const question = interactionResolver.getString('question') || undefined;
    // Fix: '|| true' discarded an explicit `false`, so answers could never be
    // persisted; '??' applies the default only when the option is omitted.
    const ephemeral = interactionResolver.getBoolean('ephemeral') ?? true;

    const currentQuestion: ChatCompletionRequestMessage = {
      role: 'user',
      content: question || 'Please notify an error in process',
    };
    chatHistory.push(currentQuestion);

    // Fix: the old catch handler returned the Error object itself, which is
    // not a valid embed description; fall back to the shared error text.
    const answer = await ai?.chatCompletion(chatHistory)
      .then((response) => response.content)
      .catch(() => 'Please notify an error in process');

    await interaction.followUp({
      ephemeral,
      fetchReply: true,
      embeds: [
        {
          color: 15844367,
          title: '✥ Question',
          description: question,
          footer: {
            text: 'embed-question',
          },
        },
        {
          color: 5763719,
          title: '✥ Answer',
          description: answer,
          footer: {
            text: 'embed-answer',
          },
        },
      ],
    });
  },
};

View File

@ -0,0 +1,25 @@
import {
CommandInteraction, Client, TextChannel, ChannelType, ApplicationCommandType,
} from 'discord.js';
import { Command } from '@/bot/models/command';
/**
 * Slash command /clear: removes this user's interactions with the bot from
 * the current channel (bulk delete in guild text channels; per-message
 * deletion of the bot's own messages elsewhere, e.g. DMs).
 */
export const ClearCommand: Command = {
  name: 'clear',
  description: 'Delete your interactions with the Chat bot',
  type: ApplicationCommandType.ChatInput,
  execute: async (client: Client, interaction: CommandInteraction) => {
    const channel = client.channels.cache.get(interaction.channelId) as TextChannel;
    const messages = await channel.messages.fetch({ limit: 100 });

    // Only messages created by this user's command interactions.
    const consistentMessages = messages
      .filter((x) => x.interaction?.user.id === interaction.user.id);

    if (channel.type === ChannelType.GuildText) {
      await channel.bulkDelete(consistentMessages);
    } else {
      // Fix: 'await messages.forEach(...)' awaited undefined and dropped
      // every delete() promise; collect them and await completion instead.
      const deletions = messages
        .filter((message) => message.author.id === client.user?.id)
        .map((message) => message.delete());
      await Promise.all(deletions);
    }

    // Fix: the dispatcher defers the reply before execute() runs (see
    // Bot.handleSlashCommand); without a follow-up the interaction stayed
    // in the "thinking" state forever.
    await interaction.followUp({
      ephemeral: true,
      content: 'Your interactions with the Chat bot have been deleted.',
    });
  },
};

10
src/bot/commands/index.ts Normal file
View File

@ -0,0 +1,10 @@
import { Command } from '@/bot/models/command';
import { PingCommand } from '@/bot/commands/pingCommand';
import { ChatCommand } from '@/bot/commands/chatCommand';
import { ClearCommand } from '@/bot/commands/clearCommand';
// Registry of every slash command the bot registers with Discord and
// dispatches from (see Bot.handleSlashCommand).
export const commands: Command[] = [
  PingCommand,
  ChatCommand,
  ClearCommand,
];

View File

@ -0,0 +1,15 @@
import { CommandInteraction, Client, ApplicationCommandType } from 'discord.js';
import { Command } from '@/bot/models/command';
/**
 * Minimal health-check command: replies "Pong" to the invoking user.
 */
export const PingCommand: Command = {
  name: 'ping',
  description: 'A very simple ping command',
  type: ApplicationCommandType.ChatInput,
  execute: async (client: Client, interaction: CommandInteraction) => {
    // The dispatcher has already deferred the reply, so answer via followUp;
    // only the invoking user sees it (ephemeral).
    await interaction.followUp({
      ephemeral: true,
      content: 'Pong',
    });
  },
};

73
src/bot/index.ts Normal file
View File

@ -0,0 +1,73 @@
import {
ActivityType, Client, CommandInteraction, IntentsBitField, Interaction, Partials,
} from 'discord.js';
import { Logger } from '@/logger';
import { Runnable } from '@/models/runnable';
import { AI } from '@/models/ai';
import { commands } from '@/bot/commands';
export class Bot implements Runnable {
  /** Logger scoped to this service. */
  private _logger: Logger;

  /** AI backend handed to commands that need chat completion. */
  private readonly _ai: AI;

  /** discord.js client instance. */
  private readonly _client: Client;

  constructor(ai: AI) {
    this._logger = new Logger(Bot.name);
    this._ai = ai;

    this._client = new Client({
      intents: [
        IntentsBitField.Flags.Guilds,
        IntentsBitField.Flags.GuildMessages,
        IntentsBitField.Flags.MessageContent,
        IntentsBitField.Flags.DirectMessages,
      ],
      partials: [
        Partials.Channel,
      ],
    });
  }

  /**
   * Look up the slash command named by the interaction, defer the reply,
   * and delegate to the command's execute handler.
   */
  private async handleSlashCommand(interaction: CommandInteraction): Promise<void> {
    const slashCommand = commands.find((command) => command.name === interaction.commandName);
    if (!slashCommand) {
      // Fix: the Logger's winston instance uses the default npm levels,
      // which expose 'warn' — calling 'warning' threw a TypeError.
      this._logger.service.warn(`SlashCommand [${interaction.commandName}] not found.`);
      await interaction.followUp({ content: 'An error has occurred' });
      return;
    }

    await interaction.deferReply();
    this._logger.service.debug(`SlashCommand [${interaction.commandName}] executed properly.`);
    await slashCommand.execute(this._client, interaction, this._ai);
  }

  /**
   * Runnable lifecycle hook: logs in, registers the slash commands once the
   * client is ready, and wires the interaction dispatcher.
   */
  run(): void {
    this._client.login(process.env.DISCORD_API_KEY).then(() => {
      this._logger.service.info('Discord Service has been initialized successfully.');
    }).catch((reason) => {
      this._logger.service.error(`Failed to start Discord Service: ${reason}`);
    });

    this._client.on('ready', async () => {
      // Set status to listening command
      this._client.user?.setActivity({
        name: '/chat',
        type: ActivityType.Listening,
      });

      if (!this._client.user || !this._client.application) {
        return;
      }

      // Overwrite the application's slash commands with the registry.
      await this._client.application.commands.set(commands);
    });

    this._client.on('interactionCreate', async (interaction: Interaction) => {
      // isCommand() already covers chat-input commands, so the former
      // additional isChatInputCommand() check was redundant.
      if (interaction.isCommand()) {
        await this.handleSlashCommand(interaction);
      }
    });
  }
}

View File

@ -0,0 +1,6 @@
import { CommandInteraction, ChatInputApplicationCommandData, Client } from 'discord.js';
import { AI } from '@/models/ai';
/**
 * Contract for a slash command: discord.js chat-input metadata plus the
 * handler invoked by the dispatcher (Bot.handleSlashCommand). The optional
 * AI is supplied only to commands that need chat completion.
 */
export interface Command extends ChatInputApplicationCommandData {
  execute: (client: Client, interaction: CommandInteraction, ai?: AI) => Promise<void>;
}

20
src/index.ts Normal file
View File

@ -0,0 +1,20 @@
import dotenv from 'dotenv';
import { Bot } from '@/bot';
import { Api } from '@/api';

/**
 * Load environment variables from .env (OPENAI_API_KEY, DISCORD_API_KEY)
 * before any service reads process.env.
 */
dotenv.config();

/**
 * OpenAI contained in API Module.
 */
const api = new Api();
api.run();

/**
 * Discord contained in Bot Module.
 */
const bot = new Bot(api);
bot.run();

29
src/logger/index.ts Normal file
View File

@ -0,0 +1,29 @@
import {
createLogger, format, Logger as WinstonLogger, transports,
} from 'winston';
import process from 'process';
/**
 * Thin wrapper around winston that tags every line with a service name.
 */
export class Logger {
  /** Underlying winston logger configured for this service. */
  protected _logger: WinstonLogger;

  /**
   * Build a console logger for the given service.
   * Debug level is enabled only when NODE_ENV === 'dev'.
   */
  constructor(serviceName: string) {
    const isDev = process.env.NODE_ENV === 'dev';

    // [timestamp] [service] level: message
    const lineFormat = format.printf(({
      timestamp, level, message, service,
    }) => `[${timestamp}] [${service}] ${level}: ${message}`);

    this._logger = createLogger({
      level: isDev ? 'debug' : 'info',
      transports: [new transports.Console()],
      format: format.combine(format.colorize(), format.timestamp(), lineFormat),
      defaultMeta: {
        service: serviceName,
      },
    });
  }

  /** The configured winston logger. */
  get service() {
    return this._logger;
  }
}

6
src/models/ai.ts Normal file
View File

@ -0,0 +1,6 @@
import { ChatCompletionRequestMessage, ChatCompletionResponseMessage } from 'openai';
/**
 * Contract for a chat-completion backend: takes the accumulated
 * conversation history and resolves with the model's reply message.
 */
export interface AI {
  chatCompletion(chatHistory: ChatCompletionRequestMessage[]):
  Promise<ChatCompletionResponseMessage>;
}

3
src/models/runnable.ts Normal file
View File

@ -0,0 +1,3 @@
/**
 * Minimal lifecycle contract: services expose run() to start themselves
 * (the call may kick off async work and return immediately, as Bot.run does).
 */
export interface Runnable {
  run(): void;
}