Commit: feature: config system integrated
onury5506 committed Mar 28, 2023
1 parent ce24f6e commit 900471f
Showing 7 changed files with 72 additions and 8 deletions.
3 changes: 2 additions & 1 deletion .gitignore
```diff
@@ -1,2 +1,3 @@
 node_modules/*
-.env
+.env
+config.json
```
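A note on the new entry: a .gitignore pattern with no slash matches at any depth, so `config.json` also covers the `configFile/config.json` file that the new config module (below) creates and rewrites at runtime.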
11 changes: 6 additions & 5 deletions chatgpt/chatgpt.js
```diff
@@ -1,15 +1,15 @@
+import config from '../config/config.js'
 import Moderations from './moderations.js'
 import tokenCount from './tokenCount.js'
 
-const MAX_TOKENS = parseInt(process.env.CONVERSATION_START_PROMPT) ? parseInt(process.env.CONVERSATION_START_PROMPT) : 1000
-
 const chatGPT = {
     sendMessage: null,
 }
 
 chatGPT.sendMessage = async function (prompt) {
 
     const tokens = tokenCount(prompt)
+    const MAX_TOKENS = config.get("MAX_TOKEN")
 
     if (tokens > MAX_TOKENS / 2) {
         return `Please limit your prompt to a maximum of ${parseInt(MAX_TOKENS / 2)} tokens. Thank you.`
@@ -18,8 +18,8 @@ chatGPT.sendMessage = async function (prompt) {
     const messages = [
         {
             role: "system",
-            content: process.env.CONVERSATION_START_PROMPT.toLowerCase() != "false" ?
-                process.env.CONVERSATION_START_PROMPT.toLowerCase() :
+            content: config.get("CONVERSATION_START_PROMPT") != "" ?
+                config.get("CONVERSATION_START_PROMPT") :
                 "You are helpful assistant"
         },
         {
@@ -29,7 +29,7 @@ chatGPT.sendMessage = async function (prompt) {
     ]
 
     const data = {
-        model: process.env.OPENAI_MODEL,
+        model: config.get("OPENAI_MODEL"),
         messages,
         max_tokens: MAX_TOKENS - tokens
     }
@@ -60,6 +60,7 @@ export async function askQuestion(question, cb, opts = {}) {
             return;
         }
     } catch (e) {
+        console.log(e)
         cb(e)
         return;
     }
```
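Two things happen across these hunks: the token limit now comes from config.get("MAX_TOKEN") instead of the deleted env lookup (which, apparently a pre-existing slip, parsed CONVERSATION_START_PROMPT for a numeric limit), and the completion is given whatever the prompt leaves unused. A worked sketch of that budget arithmetic, with the numbers assumed for illustration:

```js
// Illustration of the budget rule above; values are assumed, not from the repo.
const MAX_TOKENS = 1000 // config.get("MAX_TOKEN"), the default in config.js below
const tokens = 400      // what tokenCount(prompt) might report for a prompt

// A prompt may occupy at most half the budget: 400 <= 500, so it is accepted.
const accepted = tokens <= MAX_TOKENS / 2

// The completion gets the remainder: max_tokens = 1000 - 400 = 600.
const completionBudget = MAX_TOKENS - tokens
console.log({ accepted, completionBudget }) // { accepted: true, completionBudget: 600 }
```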
49 changes: 49 additions & 0 deletions config/config.js
```diff
@@ -0,0 +1,49 @@
+import fs from "fs/promises"
+const PATH = "./configFile/config.json"
+let config = {
+    "OPENAI_MODEL":"gpt-3.5-turbo",
+    "MAX_TOKEN":1000,
+    "ENABLE_DIRECT_MESSAGES":false,
+    "CONVERSATION_START_PROMPT":"",
+    "USE_EMBED":true
+}
+
+load()
+
+function get(key){
+    return config[key]
+}
+
+function set(key,value){
+    config[key] = value
+}
+
+function save(){
+    fs.writeFile(PATH,JSON.stringify(config)).then(()=>{
+        console.log("Config saved!")
+    }).catch((err)=>{
+        console.error("Config save error! : ",err)
+    })
+}
+
+function load(){
+
+    fs.readFile(PATH).then((data)=>{
+        try{
+            data = data.toString()
+            config = JSON.parse(data)
+            console.log(new Date()+" --- Config loaded")
+        }catch(e){
+            console.error("Config file corrupted! : ",e)
+        }
+    }).catch(()=>{
+        save()
+    })
+}
+
+export default {
+    save,
+    load,
+    set,
+    get
+}
```
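For orientation, a hypothetical consumer of this module, sticking to the API the file actually exports:

```js
// Hypothetical usage of the config module defined above.
import config from './config/config.js'

console.log(config.get("OPENAI_MODEL")) // "gpt-3.5-turbo" until config.json overrides it
config.set("MAX_TOKEN", 2000)           // changes the in-memory object only
config.save()                           // persists it to ./configFile/config.json
```

Two consequences of the implementation are worth keeping in mind: load() is fired asynchronously at import time, so a get() that runs before the readFile promise settles still sees the hard-coded defaults; and when the file is missing, the catch branch calls save(), which creates ./configFile/config.json with those defaults on first run.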
7 changes: 7 additions & 0 deletions configFile/config.example.json
```diff
@@ -0,0 +1,7 @@
+{
+    "OPENAI_MODEL":"gpt-3.5-turbo",
+    "MAX_TOKEN":1000,
+    "ENABLE_DIRECT_MESSAGES":false,
+    "CONVERSATION_START_PROMPT":"",
+    "USE_EMBED":true
+}
```
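Given that load() creates configFile/config.json automatically when it is absent, this example file presumably serves as documentation of the expected shape (or as a template to copy by hand). Note the values are native JSON booleans and numbers, not the strings process.env would have returned — which matters for the comparisons in the next two files.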
3 changes: 2 additions & 1 deletion discord/discord_helpers.js
```diff
@@ -1,6 +1,7 @@
 import { EmbedBuilder, AttachmentBuilder } from 'discord.js'
 
 import stableDiffusion from '../huggingface/stablediffusion/stableDiffusion.js';
+import config from '../config/config.js';
 
 export const MAX_RESPONSE_CHUNK_LENGTH = 1500
 
@@ -116,7 +117,7 @@ export async function splitAndSendResponse(resp, user) {
 }
 
 export async function generateInteractionReply(interaction, user, question, content) {
-    if (process.env.USE_EMBED.toLowerCase() == "true") {
+    if (config.get("USE_EMBED")) {
         //embed
         const embed = createEmbedForAskCommand(user, question, content)
         await interaction.editReply({ embeds: [embed] }).catch(() => { })
```
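This hunk shows the comparison style the JSON-backed values call for: USE_EMBED now arrives as a real boolean, so the old string test process.env.USE_EMBED.toLowerCase() == "true" collapses to a plain truthiness check.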
4 changes: 3 additions & 1 deletion index.js
```diff
@@ -1,5 +1,7 @@
 import 'dotenv/config'
 import { Client, GatewayIntentBits, Partials, ChannelType } from 'discord.js'
+
+import config from './config/config.js'
 import { askQuestion } from './chatgpt/chatgpt.js'
 import { initDiscordCommands, handle_interaction_ask, handle_interaction_image, handle_interaction_remix } from './discord/discord_commands.js'
 import { splitAndSendResponse, MAX_RESPONSE_CHUNK_LENGTH } from './discord/discord_helpers.js'
@@ -25,7 +27,7 @@ async function main() {
     });
 
     client.on("messageCreate", async message => {
-        if (process.env.ENABLE_DIRECT_MESSAGES !== "true" || message.channel.type != ChannelType.DM || message.author.bot) {
+        if (config.get("ENABLE_DIRECT_MESSAGES") !== "true" || message.channel.type != ChannelType.DM || message.author.bot) {
            return;
        }
        const user = message.author
```
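One apparent wrinkle here: unlike the USE_EMBED change above, the value is still compared to the string "true", yet config.get("ENABLE_DIRECT_MESSAGES") now returns a JSON boolean, so the !== "true" test succeeds for both true and false and DMs stay disabled either way. A sketch of the guard under the assumption that the USE_EMBED pattern was the intent (reusing the file's own config and ChannelType imports):

```js
// Assumed intent, mirroring the USE_EMBED change in discord_helpers.js:
// the config value is already a boolean, so no string comparison is needed.
function shouldIgnoreMessage(message) {
    return !config.get("ENABLE_DIRECT_MESSAGES")
        || message.channel.type != ChannelType.DM
        || message.author.bot
}
```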
3 changes: 3 additions & 0 deletions package.json
```diff
@@ -4,6 +4,9 @@
   "description": "",
   "main": "index.js",
   "type": "module",
+  "nodemonConfig": {
+    "ignore": ["configFile/config.json"]
+  },
   "scripts": {
     "dev": "nodemon index.js",
     "start": "node index.js"
```
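The nodemonConfig block keeps the dev script from restarting itself in a loop: without the ignore entry, every config.save() write to configFile/config.json would trip nodemon's file watcher and relaunch the bot.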
