This commit is contained in:
Lukian LEIZOUR 2023-03-30 16:57:42 +02:00
parent 323d861852
commit 3a69d32510
3 changed files with 100 additions and 2 deletions

View file

@@ -14,6 +14,19 @@ const commands = [
],
},
{
name: 'quickgpt',
description: 'Make a quicker request to the GPT-3.5 API',
options: [
{
name: 'question',
description: 'The question you want to ask to the API',
type: ApplicationCommandOptionType.String,
required: true,
},
],
},
{
name : 'info',
description : 'do not use this command',
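The registration step is not part of this diff; a minimal sketch, assuming discord.js v14 and hypothetical CLIENT_ID / DISCORD_TOKEN environment variables, of how an array like commands is typically pushed to Discord so quickgpt shows up as a slash command:
const { REST, Routes } = require('discord.js');
// Hypothetical registration call, not taken from this repository.
const rest = new REST({ version: '10' }).setToken(process.env.DISCORD_TOKEN);
rest.put(Routes.applicationCommands(process.env.CLIENT_ID), { body: commands })
    .then(() => console.log('Slash commands registered'))
    .catch(console.error);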

View file

@@ -1,7 +1,7 @@
const discord = require('discord.js');
const { addToLogs } = require('../libs/botTools');
const { generateImage, answerQuestion, sendConv } = require('../libs/openAi');
const { generateImage, answerQuestion, sendConv, quickAnswer } = require('../libs/openAi');
const { incrementQuota, addConv, delConv, getConvs, addMessage, getMessages, isNewUser } = require('../libs/mysql');
const { commands } = require('../commands/commands');
@@ -72,6 +72,72 @@ async function gptrequest(interaction, client) {
}
async function quickGptrequest(interaction, client) {
await interaction.deferReply();
const quota = await isNewUser(interaction.member.user.id, interaction.member.user.username).catch((err) => {
console.log(err);
addToLogs(err);
});
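// Per-user token cap; the reply below tells the user it resets monthly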
if (quota >= 200000) {
const embed = new discord.EmbedBuilder()
.setColor(0xFABBDE)
.setAuthor({ name: "Quota exceeded", iconURL: client.user.displayAvatarURL() })
.setDescription("You have a quota of " + quota + " tokens, please wait until reset (every months)")
.setFooter({ text: "Powered by OpenAI https://www.openai.com/", iconURL: "https://seeklogo.com/images/O/open-ai-logo-8B9BFEDC26-seeklogo.com.png" });
interaction.editReply({ embeds: [embed] });
}
else {
quickAnswer(interaction.options.get('question').value).then((res) => {
incrementQuota(interaction.member.user.id, res.data.usage.total_tokens).catch((err) => {
console.log(err);
addToLogs(err);
});
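// Discord embed descriptions are capped at 4096 characters, so longer answers are refused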
if (res.data.choices[0].message.content.length > 4096) {
const embed = new discord.EmbedBuilder()
.setColor(0xFABBDE)
.setAuthor({ name: "Reply to : " + interaction.member.user.username, iconURL: "https://cdn.discordapp.com/avatars/" + interaction.member.user.id + "/" + interaction.member.user.avatar + ".jpeg" })
.setTitle("Question : " + interaction.options.get('question').value)
.setDescription("The answer is too long to be displayed, please try again with a shorter question.")
.setFooter({ text: "Powered by OpenAI https://www.openai.com/", iconURL: "https://seeklogo.com/images/O/open-ai-logo-8B9BFEDC26-seeklogo.com.png" });
console.log('[Discord] Sent answer to : ' + interaction.options.get('question').value);
addToLogs('[Discord] Sent answer to : ' + interaction.options.get('question').value);
interaction.editReply({ embeds: [embed] });
}
else {
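// Embed titles are capped at 256 characters; truncate with an ellipsis if needed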
title = "Question : " + interaction.options.get('question').value;
if (title.length > 256) {
title = title.slice(0, 253) + "...";
}
const embed = new discord.EmbedBuilder()
.setColor(0xFABBDE)
.setAuthor({ name: "Reply to : " + interaction.member.user.username, iconURL: "https://cdn.discordapp.com/avatars/" + interaction.member.user.id + "/" + interaction.member.user.avatar + ".jpeg" })
.setTitle(title)
.setDescription(res.data.choices[0].message.content)
.setFooter({ text: "Powered by OpenAI https://www.openai.com/", iconURL: "https://seeklogo.com/images/O/open-ai-logo-8B9BFEDC26-seeklogo.com.png" });
console.log('[Discord] Sent answer to : ' + interaction.options.get('question').value);
addToLogs('[Discord] Sent answer to : ' + interaction.options.get('question').value);
interaction.editReply({ embeds: [embed] });
}
}).catch((err) => {
console.log(err);
addToLogs(err);
interaction.editReply("Something went wrong");
})
console.log('[Discord] Generating answer to : ' + interaction.options.get('question').value);
addToLogs('[Discord] Generating answer to : ' + interaction.options.get('question').value);
}
}
async function addconv(interaction, client) {
await interaction.deferReply();
convs = await getConvs().catch((err) => {
@@ -105,6 +171,7 @@ async function addconv(interaction, client) {
}
async function delconv(interaction, client) {
await interaction.deferReply();
@@ -355,6 +422,10 @@ module.exports = {
gptrequest(interaction, client);
}
else if (interaction.commandName === 'quickgpt') {
quickGptrequest(interaction, client);
}
else if (interaction.commandName === 'info') {
console.log(interaction)
}

View file

@@ -26,6 +26,20 @@ async function answerQuestion(query) {
model: "gpt-4",
messages: [{ "role" : "user", "content" : query}],
temperature: 0.9,
max_tokens: 300,
}).catch((err) => {
console.log(err);
addToLogs("--> error : " + err);
})
return response;
}
async function quickAnswer(query) {
const response = await openai.createChatCompletion({
model: "gpt-3.5-turbo",
messages: [{ "role" : "user", "content" : query}],
temperature: 0.9,
}).catch((err) => {
console.log(err);
addToLogs("--> error : " + err);
@@ -47,4 +61,4 @@ async function sendConv (messages) {
return response;
}
module.exports = { generateImage, answerQuestion, sendConv };
module.exports = { generateImage, answerQuestion, sendConv, quickAnswer };
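A minimal standalone sketch of calling quickAnswer outside Discord, mirroring how the interaction handler above reads the completion; the sample question is hypothetical:
// quickAnswer resolves with the raw OpenAI response, so the text lives in
// res.data.choices[0].message.content and token usage in res.data.usage.total_tokens,
// exactly as the Discord handler reads them.
quickAnswer('Summarize the plot of Hamlet in one sentence.').then((res) => {
    console.log(res.data.choices[0].message.content);
    console.log('tokens used :', res.data.usage.total_tokens);
}).catch(console.error);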