Write the reply without markdown.

This commit is contained in:
leo 2024-10-15 01:46:09 +05:00
parent a7556995af
commit af269e19b7
12 changed files with 59 additions and 6 deletions

@@ -9,3 +9,6 @@ RUN npm install
COPY /src /app/src
ENV NODE_PATH=./node_modules
ENV HELLO_FILE=hello.txt
ENV ALLOWED_FILE=allowed.list
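
The new ALLOWED_FILE variable only names the allow-list; none of the shown hunks read it. Below is a minimal sketch, not part of this commit, of how the bot could fill allowedUsers and back the auth() check that appears further down in index.js, assuming allowed.list holds one numeric Telegram user id per line:

```js
import { promises as fs } from 'fs';

let allowedUsers = [];

// Assumption: allowed.list contains one numeric Telegram user id per line.
async function loadAllowedUsers() {
  const raw = await fs.readFile(process.env.ALLOWED_FILE || 'allowed.list', 'utf8');
  allowedUsers = raw
    .split('\n')
    .map((line) => line.trim())
    .filter(Boolean)
    .map(Number);
}

// Hypothetical gate matching the `auth(ctx.message.from.id)` call in the handler.
async function auth(userId) {
  if (allowedUsers.length === 0) await loadAllowedUsers();
  return allowedUsers.includes(userId);
}
```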

@@ -1,6 +1,6 @@
### Simple Telegram bot for the ChatGPT OpenAI API
`tgbot-chatgpt` https://git.rozenlab.com/leo/tgbot-chatgpt
`tgbot-chatgpt`
#### Run the container:
@@ -20,6 +20,19 @@
#### Service commands:
Run with an explicit `APP_INSTANCE`: `APP_INSTANCE=4o docker compose up`
Rebuild the image: `APP_INSTANCE=4o docker compose up --build --force-recreate`
Stop and remove containers, networks, volumes, and images: `APP_INSTANCE=4o docker compose down -v`
Rebuild the image:
`APP_INSTANCE=4o docker compose -p 4o up --build --force-recreate`
`APP_INSTANCE=in docker compose -p in up --build --force-recreate`
Open a shell in the running container: `docker exec -it tgbot-chatgpt-4o /bin/sh`
Stop the containers:
`docker stop tgbot-chatgpt-4o`
`docker stop tgbot-chatgpt-in`

@@ -10,7 +10,7 @@ services:
- app_node_modules:/app/node_modules
- /data/secrets/${SERVER_DOMAIN}/tgbot-chatgpt/${APP_INSTANCE}/allowed.list:/app/allowed.list:ro
command: npm run start
restart: always
restart: unless-stopped
volumes:
node_modules:
app_node_modules:
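
With restart switched to unless-stopped and the README now documenting docker stop, a clean shutdown on SIGTERM matters. The shown hunks do not include any signal handling, but the usual Telegraf pattern looks like the sketch below; BOT_TOKEN is a placeholder for whatever token variable the project actually uses:

```js
import { Telegraf } from 'telegraf';

const bot = new Telegraf(process.env.BOT_TOKEN); // placeholder env name

bot.launch();

// Stop long polling when `docker stop` sends SIGTERM (or Ctrl+C sends SIGINT),
// so the container exits promptly instead of being killed after the timeout.
process.once('SIGINT', () => bot.stop('SIGINT'));
process.once('SIGTERM', () => bot.stop('SIGTERM'));
```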

logos/gpt-o1-raw.jpg: new executable file (binary, 65 KiB)

logos/gpt-o1.jpg: new executable file (binary, 18 KiB)

Modified image file (name not shown): 27 KiB before and after

logos/gpt4-raw.png: new executable file (binary, 148 KiB)

logos/gpt4.png: new executable file (binary, 42 KiB)

run/35.sh: new file (3 additions)

@@ -0,0 +1,3 @@
#!/bin/bash
APP_INSTANCE=35 docker compose -p 35 -f ../docker-compose.yml up -d

run/4o.sh: new file (3 additions)

@@ -0,0 +1,3 @@
#!/bin/bash
APP_INSTANCE=4o docker compose -p 4o -f ../docker-compose.yml up -d

run/in.sh: new file (3 additions)

@@ -0,0 +1,3 @@
#!/bin/bash
APP_INSTANCE=in docker compose -p in -f ../docker-compose.yml up -d

@@ -5,6 +5,8 @@ import OpenAI from 'openai';
import { promises as fs } from 'fs';
import { SocksProxyAgent } from 'socks-proxy-agent';
const isInstruct = process.env.API_INSTRUCT === 'true';
const proxyEnable = process.env.PROXY_ENABLE;
let proxyConfig = {};
@@ -12,6 +14,9 @@ const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });
const timeout = parseInt(process.env.TIMEOUT, 10);
let allowedUsers = [];
console.log("GPT_MODEL: " + process.env.GPT_MODEL);
console.log("API_INSTRUCT: " + process.env.API_INSTRUCT);
if(proxyEnable === 'true') {
const proxyHost = process.env.PROXY_HOST;
const proxyPort = process.env.PROXY_PORT;
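
The hunk cuts off before proxyConfig is populated, so the actual proxy wiring is not visible here. Below is a sketch of the usual approach with the openai v4 Node client, assuming proxyConfig ends up as per-request options carrying an httpAgent; PROXY_HOST and PROXY_PORT are the variables read just above:

```js
import { SocksProxyAgent } from 'socks-proxy-agent';

let proxyConfig = {};

if (process.env.PROXY_ENABLE === 'true') {
  // Hand a SOCKS agent to the openai client via request options;
  // the second argument to openai.chat.completions.create(...) accepts it.
  const proxyUrl = `socks5://${process.env.PROXY_HOST}:${process.env.PROXY_PORT}`;
  proxyConfig = { httpAgent: new SocksProxyAgent(proxyUrl) };
}
```
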
@@ -34,7 +39,10 @@ bot.command('start', initCommand);
async function chatGPT(content) {
try {
const chatCompletion = await openai.chat.completions.create({
messages: [{ role: 'user', content }],
messages: [
{ role: 'system', content: 'Ответ пиши без markdown.' }, // "Write the reply without markdown."
{ role: 'user', content }
],
model: process.env.GPT_MODEL,
}, proxyConfig);
return chatCompletion.choices[0].message.content;
@@ -43,11 +51,31 @@ async function chatGPT(content) {
}
}
async function chatGPTinstruct(content) {
try {
const chatCompletion = await openai.completions.create({
model: process.env.GPT_MODEL,
prompt: content,
max_tokens: 3000
}, proxyConfig);
return chatCompletion.choices[0].text;
} catch (e) {
console.log('Error while gpt chat', e.message);
}
}
bot.on(message('text'), async (ctx) => {
if (await auth(ctx.message.from.id)) {
try {
await ctx.reply(code('🕰️⏰🕙⏱️⏳...'));
const responce = await chatGPT(ctx.message.text);
let responce;
if(isInstruct) {
responce = await chatGPTinstruct(ctx.message.text);
} else {
responce = await chatGPT(ctx.message.text);
}
if(responce) {
await ctx.reply(responce);
} else {