From ab70c2ac1901f975c7df58ce44599e8945d9489f Mon Sep 17 00:00:00 2001
From: Lukian LEIZOUR
Date: Tue, 31 Jan 2023 12:11:25 +0100
Subject: [PATCH] Make answerQuestion async and wire the /q command into the
 Telegram and Discord handlers

---
 app.js         | 35 +++++++++++++++++++++++++++++------
 libs/openAi.js | 17 +++--------------
 2 files changed, 32 insertions(+), 20 deletions(-)

diff --git a/app.js b/app.js
index b949ade..da622dd 100644
--- a/app.js
+++ b/app.js
@@ -85,8 +85,20 @@ bot.command('g', ctx => {
     generateImage(ctx.message.text.slice(+3), ctx, bot)
 })
 
-bot.command('q', ctx => {
-    answerQuestion(ctx.message.text.slice(+3), ctx, bot)
+bot.command('q', async ctx => {
+    answerQuestion(ctx.message.text.slice(+3), ctx, bot).then((res) => {
+        console.log('[Telegram] Sent answer to : ' + ctx.message.text.slice(+3));
+        addToLogs('[Telegram] Sent answer to : ' + ctx.message.text.slice(+3));
+        bot.telegram.sendMessage(ctx.chat.id, res.data.choices[0].text.slice(+2), {});
+    }).catch((err) => {
+        console.log(err);
+        addToLogs(err);
+        bot.telegram.sendMessage(ctx.chat.id, "Something went wrong", {});
+    })
+
+    console.log('[Telegram] Generating answer to : ' + ctx.message.text.slice(+3));
+    addToLogs('[Telegram] Generating answer to : ' + ctx.message.text.slice(+3));
+    bot.telegram.sendMessage(ctx.chat.id, 'Generating the answer...', {});
 })
 
 bot.command('sb' , ctx => {
@@ -99,10 +111,21 @@ client.on('ready', () => {
     console.log(`Logged in as ${client.user.tag}!`);
 });
 
-client.on('messageCreate', msg => {
-    console.log(msg.content);
-    if (msg.content === 'ping') {
-        msg.reply('Pong!');
+client.on('messageCreate', async msg => {
+    if (msg.content.startsWith('/q')) {
+        answerQuestion(msg.content.slice(+3)).then((res) => {
+            console.log('[Discord] Sent answer to : ' + msg.content.slice(+3));
+            addToLogs('[Discord] Sent answer to : ' + msg.content.slice(+3));
+            msg.reply(res.data.choices[0].text.slice(+2));
+        }).catch((err) => {
+            console.log(err);
+            addToLogs(err);
+            msg.reply("Something went wrong");
+        })
+
+        console.log('[Discord] Generating answer to : ' + msg.content.slice(+3));
+        addToLogs('[Discord] Generating answer to : ' + msg.content.slice(+3));
+        msg.reply('Generating the answer...');
     }
 });
 
diff --git a/libs/openAi.js b/libs/openAi.js
index dbed5ff..ac67fab 100644
--- a/libs/openAi.js
+++ b/libs/openAi.js
@@ -33,8 +33,8 @@ function generateImage(query, ctx, bot) {
     })
 }
 
-function answerQuestion(query, ctx, bot) {
-    response = openai.createCompletion({
+async function answerQuestion(query) {
+    response = await openai.createCompletion({
         model: "text-davinci-003",
         prompt: query,
         max_tokens: 500,
@@ -42,19 +42,8 @@
     }).catch((err) => {
         console.log(err);
     })
-
-    console.log("--> answering the question " + query);
-    addToLogs("--> answering the question " + query)
-    bot.telegram.sendMessage(ctx.chat.id, "Generating the answer.", {});
-    response.then((res) => {
-        const text = res.data.choices[0].text.slice(+2);
-
-        bot.telegram.sendMessage(ctx.chat.id, text, {}).catch((err) => {
-            bot.telegram.sendMessage(ctx.chat.id, "Something went wrong.", {});
-            console.log("--> error while sending the answer : " + err);
-        })
-    })
+    return response;
 }
 
 
 module.exports = { generateImage, answerQuestion };
\ No newline at end of file