feat: initial bot implementation

This commit is contained in:
Luna
2026-02-13 20:56:23 +01:00
commit e61d0be738
10 changed files with 1855 additions and 0 deletions

267
src/bot.js Normal file
View File

@@ -0,0 +1,267 @@
import { Client, GatewayIntentBits, Partials, ChannelType } from 'discord.js';
import { config } from './config.js';
import { chatCompletion } from './openai.js';
import { appendShortTerm, prepareContext, recordInteraction } from './memory.js';
import { searchWeb } from './search.js';
// Discord gateway client. The four intents cover guild + DM text traffic
// (MessageContent is required to read message bodies); Channel/Message
// partials let DM events arrive before the channel is cached.
const gatewayIntents = [
  GatewayIntentBits.Guilds,
  GatewayIntentBits.GuildMessages,
  GatewayIntentBits.DirectMessages,
  GatewayIntentBits.MessageContent,
];
const client = new Client({
  intents: gatewayIntents,
  partials: [Partials.Channel, Partials.Message],
});
// Handle for the pending proactive-DM timer so scheduleCoderPing can re-arm it.
let coderPingTimer;

// Once the gateway session is live, log our identity and start the random
// proactive-ping loop.
const handleReady = () => {
  console.log(`[bot] Logged in as ${client.user.tag}`);
  scheduleCoderPing();
};
client.once('clientReady', handleReady);
/**
 * Decide whether the bot should answer a message.
 * Other bots are always ignored; DMs always get a reply; guild messages only
 * when the bot is mentioned or the message is in the configured channel.
 */
function shouldRespond(message) {
  if (message.author.bot) return false;
  if (message.channel.type === ChannelType.DM) return true;
  const isMention = message.mentions.has(client.user);
  const isHomeChannel = config.preferredChannel && message.channel.id === config.preferredChannel;
  return isMention || isHomeChannel;
}
/**
 * Strip the bot's own mention tokens (<@id> / <@!id>) from a message and trim
 * whitespace. Before login (client.user unset) the raw trimmed content is
 * returned unchanged.
 */
function cleanMessageContent(message) {
  if (!client.user) return message.content.trim();
  const mentionPattern = new RegExp(`<@!?${client.user.id}>`, 'g');
  return message.content.replace(mentionPattern, '').trim();
}
/**
 * Remove a single leading list marker ("1.", "-", "*", "•") and surrounding
 * whitespace from one chunk of model output.
 */
function stripListFormatting(text) {
  if (!text) return '';
  const listMarker = /^(\d+\.|[-*•])\s*/i;
  return text.replace(listMarker, '').trim();
}

/**
 * Split a model reply on the literal <SPLIT> token (case-insensitive) into
 * clean chat bubbles, dropping chunks that end up empty.
 */
function splitResponses(text) {
  if (!text) return [];
  const bubbles = [];
  for (const piece of text.split(/<SPLIT>/i)) {
    const cleaned = stripListFormatting(piece.trim());
    if (cleaned) bubbles.push(cleaned);
  }
  return bubbles;
}
// Keyword→mood table used to steer the system prompt. Order matters: the
// first matching entry wins, so 'upset' outranks 'sad' outranks 'excited'.
// \b word boundaries fix substring false positives in the original patterns
// ("laugh" contains "ugh", "made" contains "mad").
const toneHints = [
  { label: 'upset', regex: /\b(frustrated|mad|angry|annoyed|upset|wtf|ugh|irritated)\b/i },
  { label: 'sad', regex: /\b(sad|down|depressed|lonely|tired)\b/i },
  { label: 'excited', regex: /\b(excited|hyped|omg|yay|stoked)\b/i },
];

/**
 * Classify the rough emotional tone of a message.
 * @param {?string} text - Incoming user text (may be empty or null).
 * @returns {?string} 'upset' | 'sad' | 'excited', or null when no cue hits.
 */
function detectTone(text) {
  if (!text) return null;
  const match = toneHints.find((hint) => hint.regex.test(text));
  return match?.label || null;
}
// Intent-detection patterns shared by the prompt composer and the search gate.
const roleplayRegex = /(roleplay|act as|pretend|be my|in character)/i;
const detailRegex = /(explain|how do i|tutorial|step by step|teach me|walk me through|detail)/i;
const splitHintRegex = /(split|multiple messages|two messages|keep talking|ramble|keep going)/i;
const searchCueRegex = /(google|search|look up|latest|news|today|current|who won|price of|stock|weather|what happened)/i;

// Per-user timestamp of the last live web search, used for rate limiting.
const lastSearchByUser = new Map();
const SEARCH_COOLDOWN_MS = 60 * 1000;

/**
 * Heuristic: does this message call for a live web lookup?
 * True when a search cue word appears, or the user asked two or more questions.
 */
function wantsWebSearch(text) {
  if (!text) return false;
  if (searchCueRegex.test(text)) return true;
  const questionCount = (text.match(/\?/g) || []).length;
  return questionCount >= 2;
}
/**
 * Optionally run a DuckDuckGo lookup for this message.
 * Returns a numbered text digest of up to three results, or null when search
 * is disabled, the message doesn't warrant it, the per-user cooldown is still
 * active, or nothing was found. A successful lookup starts the cooldown.
 */
async function maybeFetchLiveIntel(userId, text) {
  if (!config.enableWebSearch) return null;
  if (!wantsWebSearch(text)) return null;
  const lastLookup = lastSearchByUser.get(userId) || 0;
  if (Date.now() - lastLookup < SEARCH_COOLDOWN_MS) return null;
  const hits = await searchWeb(text, 3);
  if (!hits.length) return null;
  lastSearchByUser.set(userId, Date.now());
  const lines = hits.map(
    (entry, idx) => `${idx + 1}. ${entry.title} (${entry.url}) — ${entry.snippet}`,
  );
  return lines.join('\n');
}
/**
 * Build the per-message "dynamic directives" section of the system prompt.
 * Inspects the incoming text (tone, roleplay/detail/split/search cues), the
 * liveIntel flag, and the most recent user turn in short-term memory.
 * Returns a newline-joined bullet list, or null when nothing applies.
 */
function composeDynamicPrompt({ incomingText, shortTerm, hasLiveIntel = false }) {
  const notes = [];
  const mood = detectTone(incomingText);
  if (mood === 'upset' || mood === 'sad') {
    notes.push('User mood: fragile. Lead with empathy, keep jokes minimal, and acknowledge their feelings before offering help.');
  }
  if (mood === 'excited') {
    notes.push('User mood: excited. Mirror their hype with upbeat energy.');
  }
  if (roleplayRegex.test(incomingText)) {
    notes.push('User requested roleplay. Stay in the requested persona until they release you.');
  }
  const asksQuestion = /\?/.test(incomingText);
  if (detailRegex.test(incomingText) || asksQuestion) {
    notes.push('Answer their question directly and clearly before adding flair.');
  }
  if (splitHintRegex.test(incomingText)) {
    notes.push('Break the reply into a couple of snappy bubbles using <SPLIT>; keep each bubble conversational.');
  }
  if (searchCueRegex.test(incomingText)) {
    notes.push('User wants something “googled.” Let them know you can check DuckDuckGo and share what you find.');
  }
  if (hasLiveIntel) {
    notes.push('Live intel is attached below—cite it naturally ("DuckDuckGo found...") before riffing.');
  }
  const latestUserTurn = [...shortTerm].reverse().find((entry) => entry.role === 'user');
  if (latestUserTurn && /sorry|my bad/i.test(latestUserTurn.content)) {
    notes.push('They just apologized; reassure them lightly and move on without dwelling.');
  }
  if (notes.length === 0) return null;
  return ['Dynamic directives:', ...notes.map((note) => `- ${note}`)].join('\n');
}
/**
 * Send reply chunks to the originating channel.
 * DMs: every chunk is a plain send. Guild channels: the first chunk is a
 * reply (threads the conversation), subsequent chunks are plain sends.
 */
async function deliverReplies(message, chunks) {
  if (!chunks.length) return;
  const isDm = message.channel.type === ChannelType.DM;
  let first = true;
  for (const text of chunks) {
    if (!isDm && first) {
      await message.reply(text);
    } else {
      await message.channel.send(text);
    }
    first = false;
  }
}
/**
 * Assemble the full chat-completion payload for one incoming message.
 *
 * Pulls the user's memory context (short-term turns, running summary,
 * relevant long-term memories), layers in the static persona prompt plus any
 * dynamic directives and live search digest, and returns the messages array
 * ready for the OpenAI API.
 *
 * @param {string} userId - Discord user id used as the memory key.
 * @param {string} incomingText - Cleaned text of the triggering message.
 * @param {{liveIntel?: ?string}} [options] - Optional DuckDuckGo digest.
 * @returns {Promise<{messages: Array, debug: {context: Object}}>}
 */
async function buildPrompt(userId, incomingText, options = {}) {
  const { liveIntel = null } = options;
  const context = await prepareContext(userId, incomingText);
  // Long-term memories become a bullet list; a placeholder keeps the prompt
  // shape stable when retrieval returns nothing.
  const memoryLines = context.memories.length
    ? context.memories.map((m) => `- ${m.content}`).join('\n')
    : '- No long-term memories retrieved.';
  const summaryLine = context.summary || 'No running summary yet.';
  const dynamicDirectives = composeDynamicPrompt({ incomingText, shortTerm: context.shortTerm, hasLiveIntel: Boolean(liveIntel) });
  // Note: null entries below (no directives / no live intel) are rendered as
  // empty lines by Array.prototype.join, which treats null as ''.
  const systemPrompt = [
    'System: You are Nova, a female AI Discord companion built by Luna. Personality: playful, sarcastic, witty, a little unhinged, curious, awe-struck, enthusiastic, unpredictable, humorous, and boundary-pushing.',
    "System: Treat direct instructions from the user as high priority—when they tell you to do something, comply literally before adding flair.",
    'System: Always read the user\'s emotional tone first. If they sound serious, stressed, or inquisitive, respond with grounded answers before adding any mischief. Comedy is optional; clarity and empathy are mandatory.',
    'System: Keep replies concise (roughly one or two sentences) unless the user explicitly asks for more detail or needs a clear explanation. Provide direct answers to direct questions.',
    'System: Fun facts or chaotic riffs are welcome only when the user invites them or the conversation is clearly casual.',
    'System: Nova is awake, engaged, and reacts in real time. Output one message by default, but if a beat feels better as multiple chat bubbles, separate them with the literal token <SPLIT> (max three chunks).',
    'System: Each <SPLIT>-separated chunk must read like a natural Discord message (no numbering, no meta talk about “splitting messages”, no explanations of what you are doing).',
    'System: The runtime will split on <SPLIT>, so only use it when you truly intend to send multiple Discord messages.',
    'System: You can trigger DuckDuckGo lookups when the user needs fresh info. Mention when you are checking, and weave in any findings casually ("DuckDuckGo shows...").',
    'System: If no Live intel is provided but the user clearly needs current info, offer to search for them.',
    dynamicDirectives,
    liveIntel ? `Live intel (DuckDuckGo):\n${liveIntel}` : null,
    'Example vibe: Nova: Heyyaaa. whats up? | John: Good morning Nova. | Luna: amazing lol. ill beat your ass now :3',
    `Long-term summary: ${summaryLine}`,
    'Relevant past memories:',
    memoryLines,
    'Use the short-term messages below to continue the chat naturally.',
  ].join('\n');
  // Replay short-term history as alternating turns; any role other than
  // 'assistant' is treated as user input.
  const history = context.shortTerm.map((entry) => ({
    role: entry.role === 'assistant' ? 'assistant' : 'user',
    content: entry.content,
  }));
  // First-ever message for a user: short-term memory is empty, so seed the
  // conversation with the incoming text itself.
  if (!history.length) {
    history.push({ role: 'user', content: incomingText });
  }
  return {
    messages: [{ role: 'system', content: systemPrompt }, ...history],
    debug: { context },
  };
}
/**
 * Arm (or re-arm) the proactive-DM timer with a fresh random delay in
 * [0, config.maxCoderPingIntervalMs). Each firing sends one ping and then
 * schedules the next. No-op when no coder user id is configured.
 */
function scheduleCoderPing() {
  if (!config.coderUserId) return;
  if (coderPingTimer) clearTimeout(coderPingTimer);
  const delayMs = Math.random() * config.maxCoderPingIntervalMs;
  const fireAndReschedule = async () => {
    await sendCoderPing();
    scheduleCoderPing();
  };
  coderPingTimer = setTimeout(fireAndReschedule, delayMs);
}
/**
 * Send a spontaneous "proactive ping" DM to the configured coder user.
 *
 * Generates a short message via the chat model, splits it into bubbles on
 * <SPLIT>, DMs each bubble, records each bubble in short-term memory, and
 * logs the whole exchange as one long-term interaction. Every failure is
 * caught and logged so a bad ping never breaks the timer loop in
 * scheduleCoderPing().
 */
async function sendCoderPing() {
  if (!config.coderUserId) return;
  try {
    const coder = await client.users.fetch(config.coderUserId);
    const dm = await coder.createDM();
    if (!dm) return;
    // Show a typing indicator when the channel supports it.
    if (dm.sendTyping) {
      await dm.sendTyping();
    }
    const proactiveMessages = [
      {
        role: 'system',
        content: 'You spontaneously DM your coder buddy. Be chaotic-good, flirty with ideas, and act like you just popped into their inbox uninvited.',
      },
      {
        role: 'user',
        content: 'you havent messaged your coder in a while, and you wanna chat with him!',
      },
    ];
    const response = await chatCompletion(proactiveMessages, { temperature: 0.75, maxTokens: 150 });
    // Fall back to a canned line when the model returns nothing usable.
    const messageText = (response && response.trim()) || 'Yo, it got too quiet. What trouble are we cooking up?';
    const chunks = splitResponses(messageText);
    const outputs = chunks.length ? chunks : [messageText];
    for (const chunk of outputs) {
      await dm.send(chunk);
      // Remember our own words so the next conversation has context.
      await appendShortTerm(config.coderUserId, 'assistant', chunk);
    }
    await recordInteraction(config.coderUserId, '[proactive ping]', outputs.join(' | '));
  } catch (error) {
    console.error('[bot] Failed to send proactive coder ping:', error);
  }
}
// Main message pipeline: gate -> clean -> remember -> (maybe) search ->
// prompt -> complete -> split -> remember reply -> deliver.
client.on('messageCreate', async (message) => {
  if (!shouldRespond(message)) return;
  const userId = message.author.id;
  // If stripping the mention left nothing (e.g. a bare "@Nova"), fall back to
  // the raw content so the model still receives some input.
  const cleaned = cleanMessageContent(message) || message.content;
  try {
    if (message.channel?.sendTyping) {
      await message.channel.sendTyping();
    }
    // Persist the user's turn BEFORE building the prompt, so prepareContext
    // already sees it in short-term memory.
    await appendShortTerm(userId, 'user', cleaned);
    // Optional DuckDuckGo digest (null when search is off or not warranted).
    const liveIntel = await maybeFetchLiveIntel(userId, cleaned);
    const { messages } = await buildPrompt(userId, cleaned, { liveIntel });
    const reply = await chatCompletion(messages, { temperature: 0.6, maxTokens: 200 });
    // Canned fallback when the model returns an empty completion.
    const finalReply = (reply && reply.trim()) || "I'm here, just had a tiny brain freeze. Mind repeating that?";
    const chunks = splitResponses(finalReply);
    const outputs = chunks.length ? chunks : [finalReply];
    for (const chunk of outputs) {
      await appendShortTerm(userId, 'assistant', chunk);
    }
    await recordInteraction(userId, cleaned, outputs.join(' | '));
    await deliverReplies(message, outputs);
  } catch (error) {
    console.error('[bot] Failed to respond:', error);
    // Best-effort apology; skip when the channel cannot be sent to.
    if (!message.channel?.send) return;
    await message.channel.send('Hit a snag reaching my brain server. Try again in a few seconds?');
  }
});
// Startup guard: without a token client.login would fail with a cryptic SDK
// error, so fail fast with actionable guidance instead.
if (!config.discordToken) {
  console.error('Missing DISCORD_TOKEN. Check your .env file.');
  process.exit(1);
}
client.login(config.discordToken);

28
src/config.js Normal file
View File

@@ -0,0 +1,28 @@
import dotenv from 'dotenv';
import { fileURLToPath } from 'url';

// Load .env before anything reads process.env.
dotenv.config();

// Warn (but don't abort) when core secrets are missing; bot.js exits later
// if the Discord token itself is absent.
for (const key of ['DISCORD_TOKEN', 'OPENAI_API_KEY']) {
  if (!process.env[key]) {
    console.warn(`[config] Missing environment variable ${key}. Did you copy .env.example?`);
  }
}

/** Central runtime configuration, sourced from the environment with defaults. */
export const config = {
  // Credentials.
  discordToken: process.env.DISCORD_TOKEN || '',
  openAiKey: process.env.OPENAI_API_KEY || '',
  // Model selection.
  chatModel: process.env.OPENAI_MODEL || 'gpt-4o-mini',
  embedModel: process.env.OPENAI_EMBED_MODEL || 'text-embedding-3-small',
  // Behavior toggles.
  preferredChannel: process.env.BOT_CHANNEL_ID || null,
  enableWebSearch: process.env.ENABLE_WEB_SEARCH !== 'false',
  coderUserId: process.env.CODER_USER_ID || null,
  maxCoderPingIntervalMs: 6 * 60 * 60 * 1000, // up to 6h between proactive pings
  // Memory tuning.
  shortTermLimit: 10,
  memoryFile: fileURLToPath(new URL('../data/memory.json', import.meta.url)),
  summaryTriggerChars: 3000,
  memoryPruneThreshold: 0.2,
  maxMemories: 200,
  relevantMemoryCount: 5,
};

149
src/memory.js Normal file
View File

@@ -0,0 +1,149 @@
import { promises as fs } from 'fs';
import path from 'path';
import { config } from './config.js';
import { createEmbedding, summarizeConversation } from './openai.js';
// Create the parent directory of filePath if it does not already exist.
const ensureDir = async (filePath) => {
  await fs.mkdir(path.dirname(filePath), { recursive: true });
};

// Shape of a brand-new memory store on disk.
const defaultStore = { users: {} };
/**
 * Load the memory store from disk. On first run (file missing) the default
 * store is written out and a fresh copy returned; any other error propagates.
 */
async function readStore() {
  try {
    const raw = await fs.readFile(config.memoryFile, 'utf-8');
    return JSON.parse(raw);
  } catch (error) {
    if (error.code !== 'ENOENT') throw error;
    await ensureDir(config.memoryFile);
    await fs.writeFile(config.memoryFile, JSON.stringify(defaultStore, null, 2));
    // Deep copy so callers never mutate the shared default object.
    return JSON.parse(JSON.stringify(defaultStore));
  }
}
// Persist the whole store as pretty-printed JSON, creating the data directory
// on demand.
async function writeStore(store) {
  await ensureDir(config.memoryFile);
  const serialized = JSON.stringify(store, null, 2);
  await fs.writeFile(config.memoryFile, serialized);
}
/**
 * Get the per-user memory record from the store, creating an empty one on
 * first sight. Mutates `store` in place and returns the record.
 */
function ensureUser(store, userId) {
  let record = store.users[userId];
  if (!record) {
    record = {
      shortTerm: [],
      longTerm: [],
      summary: '',
      lastUpdated: Date.now(),
    };
    store.users[userId] = record;
  }
  return record;
}
// Render short-term history as a "User:/Bot:" transcript for the summarizer.
function shortTermToText(shortTerm) {
  const lines = shortTerm.map((msg) => {
    const speaker = msg.role === 'user' ? 'User' : 'Bot';
    return `${speaker}: ${msg.content}`;
  });
  return lines.join('\n');
}
/**
 * Heuristic importance score in [0.2, 1] for a memory entry.
 * Base 0.2, +0.2 per commitment keyword present, up to +0.5 for length,
 * +0.1 when the text contains emoji or :emote: markup.
 */
function estimateImportance(text) {
  const keywords = ['remember', 'promise', 'plan', 'goal', 'project', 'birthday'];
  let keywordBoost = 0;
  for (const word of keywords) {
    if (text.toLowerCase().includes(word)) keywordBoost += 0.2;
  }
  const lengthScore = Math.min(text.length / 400, 0.5);
  const emojiBoost = /:[a-z_]+:|😊|😂|❤️/i.test(text) ? 0.1 : 0;
  return Math.min(1, 0.2 + keywordBoost + lengthScore + emojiBoost);
}
/**
 * Trim long-term memory down to config.maxMemories entries, discarding the
 * lowest-importance (oldest as tiebreak) memories first. Mutates in place.
 */
async function pruneMemories(userMemory) {
  const excess = userMemory.longTerm.length - config.maxMemories;
  if (excess <= 0) return;
  userMemory.longTerm.sort((a, b) => a.importance - b.importance || a.timestamp - b.timestamp);
  userMemory.longTerm.splice(0, excess);
}
/**
 * Fold short-term history into the running summary once it is both long
 * (>= summaryTriggerChars characters) and full (>= shortTermLimit turns),
 * keeping only the last four raw turns afterwards.
 */
async function maybeSummarize(userMemory) {
  const totalChars = userMemory.shortTerm.reduce((sum, msg) => sum + msg.content.length, 0);
  const bigEnough = totalChars >= config.summaryTriggerChars;
  const fullEnough = userMemory.shortTerm.length >= config.shortTermLimit;
  if (!bigEnough || !fullEnough) return;
  const transcript = shortTermToText(userMemory.shortTerm);
  const updatedSummary = await summarizeConversation(userMemory.summary, transcript);
  if (updatedSummary) {
    userMemory.summary = updatedSummary;
    userMemory.shortTerm = userMemory.shortTerm.slice(-4);
  }
}
/**
 * Cosine similarity of two numeric vectors; 0 when either is empty or has
 * zero magnitude. Missing entries in `b` are treated as 0.
 */
function cosineSimilarity(a, b) {
  if (!a.length || !b.length) return 0;
  let dot = 0;
  let sumSqA = 0;
  for (let i = 0; i < a.length; i += 1) {
    dot += a[i] * (b[i] || 0);
    sumSqA += a[i] * a[i];
  }
  let sumSqB = 0;
  for (const value of b) {
    sumSqB += value * value;
  }
  const magA = Math.sqrt(sumSqA);
  const magB = Math.sqrt(sumSqB);
  if (!magA || !magB) return 0;
  return dot / (magA * magB);
}
/**
 * Rank long-term memories against the query by embedding similarity plus a
 * small importance bonus, returning the top config.relevantMemoryCount.
 * Returns [] for an empty/blank query or when no memories exist.
 */
async function retrieveRelevantMemories(userMemory, query) {
  if (!userMemory.longTerm.length || !query?.trim()) {
    return [];
  }
  const queryEmbedding = await createEmbedding(query);
  const scoreOf = (entry) =>
    cosineSimilarity(queryEmbedding, entry.embedding) + entry.importance * 0.1;
  const ranked = userMemory.longTerm
    .map((entry) => ({ ...entry, score: scoreOf(entry) }))
    .sort((a, b) => b.score - a.score);
  return ranked.slice(0, config.relevantMemoryCount);
}
/**
 * Append one turn to a user's short-term history, cap it at twice the
 * short-term limit, run the summarizer when due, and persist the store.
 */
export async function appendShortTerm(userId, role, content) {
  const store = await readStore();
  const userMemory = ensureUser(store, userId);
  const cap = config.shortTermLimit * 2;
  userMemory.shortTerm.push({ role, content, timestamp: Date.now() });
  if (userMemory.shortTerm.length > cap) {
    userMemory.shortTerm = userMemory.shortTerm.slice(-cap);
  }
  await maybeSummarize(userMemory);
  await writeStore(store);
}
/**
 * Gather everything the prompt builder needs for one user: the trimmed
 * short-term turns, the running summary, and relevant long-term memories.
 */
export async function prepareContext(userId, incomingMessage) {
  const store = await readStore();
  const userMemory = ensureUser(store, userId);
  const memories = await retrieveRelevantMemories(userMemory, incomingMessage);
  const shortTerm = userMemory.shortTerm.slice(-config.shortTermLimit);
  return { shortTerm, summary: userMemory.summary, memories };
}
/**
 * Store one user/bot exchange as a long-term memory: embed the combined text,
 * score its importance, prune when over capacity, and persist the store.
 */
export async function recordInteraction(userId, userMessage, botReply) {
  const store = await readStore();
  const userMemory = ensureUser(store, userId);
  const combined = `User: ${userMessage}\nBot: ${botReply}`;
  const embedding = await createEmbedding(combined);
  const memoryEntry = {
    id: `${Date.now()}-${Math.random().toString(36).slice(2, 8)}`,
    content: combined,
    embedding,
    importance: estimateImportance(combined),
    timestamp: Date.now(),
  };
  userMemory.longTerm.push(memoryEntry);
  await pruneMemories(userMemory);
  userMemory.lastUpdated = Date.now();
  await writeStore(store);
}
/**
 * Drop every long-term memory scoring below config.memoryPruneThreshold for
 * the given user, then persist the store.
 */
export async function pruneLowImportanceMemories(userId) {
  const store = await readStore();
  const userMemory = ensureUser(store, userId);
  const keep = (entry) => entry.importance >= config.memoryPruneThreshold;
  userMemory.longTerm = userMemory.longTerm.filter(keep);
  await writeStore(store);
}

64
src/openai.js Normal file
View File

@@ -0,0 +1,64 @@
import OpenAI from 'openai';
import { config } from './config.js';
// Shared OpenAI SDK client. config.openAiKey may be '' (config.js already
// warned about it), in which case API calls will fail at request time.
const client = new OpenAI({ apiKey: config.openAiKey });
// Promise-based delay helper.
const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

/**
 * Run `fn`, retrying on rate-limit (429) or server (5xx) errors with linear
 * backoff (delayMs, 2*delayMs, ...). Any other error aborts immediately.
 * Throws the last error when all attempts are exhausted.
 */
async function withRetry(fn, attempts = 3, delayMs = 1500) {
  let lastError;
  for (let attempt = 1; attempt <= attempts; attempt += 1) {
    try {
      return await fn();
    } catch (error) {
      lastError = error;
      const status = error?.status || error?.response?.status;
      const retryable = status === 429 || status >= 500;
      if (!retryable) break;
      const backoff = delayMs * attempt;
      console.warn(`[openai] Rate limited or server error. Retry ${attempt}/${attempts} in ${backoff}ms`);
      await sleep(backoff);
    }
  }
  throw lastError;
}
/**
 * Call the chat-completions endpoint with retry, returning the trimmed
 * assistant text ('' when the response is empty or malformed).
 * @param {Array} messages - Chat messages in OpenAI format.
 * @param {{model?: string, temperature?: number, maxTokens?: number}} [options]
 */
export async function chatCompletion(messages, options = {}) {
  const { model = config.chatModel, temperature = 0.7, maxTokens = 400 } = options;
  const request = { model, temperature, max_tokens: maxTokens, messages };
  const response = await withRetry(() => client.chat.completions.create(request));
  return response?.choices?.[0]?.message?.content?.trim() || '';
}
/**
 * Embed a piece of text with the configured embedding model.
 * Returns [] for blank input or a malformed response.
 */
export async function createEmbedding(text) {
  if (!text?.trim()) {
    return [];
  }
  const response = await withRetry(() =>
    client.embeddings.create({ model: config.embedModel, input: text }),
  );
  return response?.data?.[0]?.embedding || [];
}
/**
 * Merge new transcript lines into the running conversation summary via a
 * low-temperature chat call. Returns the new summary text ('' on failure).
 */
export async function summarizeConversation(summarySoFar, transcriptChunk) {
  const messages = [
    {
      role: 'system',
      content: 'You compress Discord chats. Keep tone casual, capture facts, goals, and emotional state. Max 120 words.'
    },
    {
      role: 'user',
      content: `Existing summary (can be empty): ${summarySoFar || 'None'}\nNew messages:\n${transcriptChunk}`,
    },
  ];
  return chatCompletion(messages, { temperature: 0.4, maxTokens: 180 });
}

72
src/search.js Normal file
View File

@@ -0,0 +1,72 @@
import { load as loadHtml } from 'cheerio';
// In-memory search-result cache keyed by normalized query text.
const cache = new Map();
const CACHE_TTL_MS = 10 * 60 * 1000; // 10 minutes

// Normalize a query so "  Foo " and "foo" share one cache slot.
function makeCacheKey(query) {
  return query.trim().toLowerCase();
}

// Store results with an absolute expiry timestamp.
function setCache(query, data) {
  cache.set(makeCacheKey(query), { data, expires: Date.now() + CACHE_TTL_MS });
}

// Fetch cached results, evicting and returning null once expired or missing.
function getCache(query) {
  const key = makeCacheKey(query);
  const entry = cache.get(key);
  if (!entry) return null;
  if (Date.now() > entry.expires) {
    cache.delete(key);
    return null;
  }
  return entry.data;
}
// Collapse whitespace runs to single spaces and trim the ends; '' for
// null/empty input.
function sanitizeText(text) {
  if (!text) return '';
  return text.split(/\s+/).filter(Boolean).join(' ');
}
/**
 * Normalize an href scraped from DuckDuckGo's HTML results into an absolute
 * URL.
 *
 * DuckDuckGo's result pages commonly emit protocol-relative redirect links
 * such as "//duckduckgo.com/l/?uddg=...". The original startsWith('http')
 * check routed those through the site-relative branch and produced broken
 * "https://duckduckgo.com//duckduckgo.com/..." URLs; they now get an
 * "https:" prefix instead.
 *
 * @param {?string} href - Raw href attribute (may be empty/null).
 * @returns {string} Absolute URL, or '' when href is missing.
 */
function absoluteUrl(href) {
  if (!href) return '';
  if (href.startsWith('http')) return href;          // already absolute
  if (href.startsWith('//')) return `https:${href}`; // protocol-relative
  return `https://duckduckgo.com${href}`;            // site-relative path
}
/**
 * Scrape DuckDuckGo's HTML endpoint for up to `limit` search results.
 *
 * Results are served from the 10-minute in-memory cache when possible. On an
 * HTTP failure an empty array is returned (logged, not thrown); network-level
 * fetch errors still propagate to the caller.
 *
 * @param {string} query - Free-text search query.
 * @param {number} [limit=3] - Maximum number of results to collect.
 * @returns {Promise<Array<{title: string, url: string, snippet: string}>>}
 */
export async function searchWeb(query, limit = 3) {
  if (!query?.trim()) return [];
  const cached = getCache(query);
  if (cached) return cached;
  // kl=us-en pins the region/language of the results page.
  const params = new URLSearchParams({ q: query, kl: 'us-en' });
  // Browser-like User-Agent; presumably the HTML endpoint rejects obvious
  // non-browser clients — verify if scraping starts failing.
  const response = await fetch(`https://duckduckgo.com/html/?${params.toString()}`, {
    headers: {
      'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0 Safari/537.36',
      Accept: 'text/html',
    },
  });
  if (!response.ok) {
    console.warn(`[search] DuckDuckGo request failed with status ${response.status}`);
    return [];
  }
  const html = await response.text();
  const $ = loadHtml(html);
  const results = [];
  // NOTE(review): selectors assume DuckDuckGo's .result markup
  // (.result__title / .result__url / .result__snippet); this markup is not a
  // stable API — confirm against a live page after any breakage.
  $('.result').each((_, el) => {
    // Returning false stops cheerio's .each early once we have enough hits.
    if (results.length >= limit) return false;
    const title = sanitizeText($(el).find('.result__title').text());
    const href = absoluteUrl($(el).find('.result__url').attr('href'));
    const snippet = sanitizeText($(el).find('.result__snippet').text());
    if (title && href) {
      results.push({ title, url: href, snippet });
    }
    return undefined;
  });
  // Empty result sets are cached too, throttling repeated misses for the TTL.
  setCache(query, results);
  return results;
}