From f5fe606c949874b82bc7cb72bb135b971a8ba984 Mon Sep 17 00:00:00 2001
From: Joby <56961121+jobyid@users.noreply.github.com>
Date: Tue, 24 Dec 2024 09:59:26 +0000
Subject: [PATCH] mongodb adapter

Added an adapter that connects to MongoDB Atlas, allowing you to store
agent data in the cloud. If you are on an appropriate Atlas tier, the
adapter can also take advantage of Atlas Vector Search. Usage notes with
a minimal wiring sketch follow the diff.
---
 .env.example                               |   4 +
 .idea/.gitignore                           |   5 +
 agent/src/index.ts                         |  33 +-
 packages/adapter-mongodb/.npmignore        |   6 +
 packages/adapter-mongodb/eslint.config.mjs |   3 +
 packages/adapter-mongodb/package.json      |  26 +
 packages/adapter-mongodb/src/index.ts      | 910 +++++++++++++++++++++
 packages/adapter-mongodb/tsconfig.json     |  23 +
 packages/adapter-mongodb/tsup.config.ts    |  28 +
 9 files changed, 1036 insertions(+), 2 deletions(-)
 create mode 100644 .idea/.gitignore
 create mode 100644 packages/adapter-mongodb/.npmignore
 create mode 100644 packages/adapter-mongodb/eslint.config.mjs
 create mode 100644 packages/adapter-mongodb/package.json
 create mode 100644 packages/adapter-mongodb/src/index.ts
 create mode 100644 packages/adapter-mongodb/tsconfig.json
 create mode 100644 packages/adapter-mongodb/tsup.config.ts

diff --git a/.env.example b/.env.example
index b1f05998b7..6cfbca1474 100644
--- a/.env.example
+++ b/.env.example
@@ -335,3 +335,7 @@ STORY_PRIVATE_KEY= # Story private key
 STORY_API_BASE_URL= # Story API base URL
 STORY_API_KEY= # Story API key
 PINATA_JWT= # Pinata JWT for uploading files to IPFS
+
+# MongoDB
+MONGODB_CONNECTION_STRING= # MongoDB connection string
+MONGODB_DATABASE= # Name of the database in MongoDB Atlas
diff --git a/.idea/.gitignore b/.idea/.gitignore
new file mode 100644
index 0000000000..b58b603fea
--- /dev/null
+++ b/.idea/.gitignore
@@ -0,0 +1,5 @@
+# Default ignored files
+/shelf/
+/workspace.xml
+# Editor-based HTTP Client requests
+/httpRequests/
diff --git a/agent/src/index.ts b/agent/src/index.ts
index 1e49bae84f..2ac854c018 100644
--- a/agent/src/index.ts
+++ b/agent/src/index.ts
@@ -328,7 +328,37 @@ export function getTokenForProvider(
 }
 
 function initializeDatabase(dataDir: string) {
-    if (process.env.POSTGRES_URL) {
+    if (process.env.MONGODB_CONNECTION_STRING) {
+        elizaLogger.log("Initializing database on MongoDB Atlas");
+        const client = new MongoClient(process.env.MONGODB_CONNECTION_STRING, {
+            maxPoolSize: 100,
+            minPoolSize: 5,
+            maxIdleTimeMS: 60000,
+            connectTimeoutMS: 10000,
+            serverSelectionTimeoutMS: 5000,
+            socketTimeoutMS: 45000,
+            compressors: ['zlib'],
+            retryWrites: true,
+            retryReads: true
+        });
+
+        // Matches MONGODB_DATABASE in .env.example
+        const dbName = process.env.MONGODB_DATABASE || 'elizaAgent';
+        const db = new MongoDBDatabaseAdapter(client, dbName);
+
+        // Verify the connection. init() runs asynchronously, so a failure
+        // surfaces after this function has already returned the adapter.
+        db.init()
+            .then(() => {
+                elizaLogger.success(
+                    "Successfully connected to MongoDB Atlas"
+                );
+            })
+            .catch((error) => {
+                elizaLogger.error("Failed to connect to MongoDB Atlas:", error);
+                throw error; // Surfaces as an unhandled rejection, not to the caller
+            });
+
+        return db;
+    } else if (process.env.POSTGRES_URL) {
         elizaLogger.info("Initializing PostgreSQL connection...");
         const db = new PostgresDatabaseAdapter({
             connectionString: process.env.POSTGRES_URL,
@@ -350,7 +380,6 @@ function initializeDatabase(dataDir: string) {
     } else {
         const filePath = process.env.SQLITE_FILE ?? path.resolve(dataDir, "db.sqlite");
-        // ":memory:";
         const db = new SqliteDatabaseAdapter(new Database(filePath));
         return db;
     }
 }
diff --git a/packages/adapter-mongodb/.npmignore b/packages/adapter-mongodb/.npmignore
new file mode 100644
index 0000000000..078562ecea
--- /dev/null
+++ b/packages/adapter-mongodb/.npmignore
@@ -0,0 +1,6 @@
+*
+
+!dist/**
+!package.json
+!readme.md
+!tsup.config.ts
\ No newline at end of file
diff --git a/packages/adapter-mongodb/eslint.config.mjs b/packages/adapter-mongodb/eslint.config.mjs
new file mode 100644
index 0000000000..92fe5bbebe
--- /dev/null
+++ b/packages/adapter-mongodb/eslint.config.mjs
@@ -0,0 +1,3 @@
+import eslintGlobalConfig from "../../eslint.config.mjs";
+
+export default [...eslintGlobalConfig];
diff --git a/packages/adapter-mongodb/package.json b/packages/adapter-mongodb/package.json
new file mode 100644
index 0000000000..2eb0b78f4e
--- /dev/null
+++ b/packages/adapter-mongodb/package.json
@@ -0,0 +1,26 @@
+{
+    "name": "@ai16z/adapter-mongodb",
+    "version": "0.1.0",
+    "main": "dist/index.js",
+    "type": "module",
+    "types": "dist/index.d.ts",
+    "dependencies": {
+        "@ai16z/eliza": "workspace:*",
+        "mongodb": "^6.3.0",
+        "uuid": "^9.0.1"
+    },
+    "devDependencies": {
+        "@types/node": "^20.10.0",
+        "@types/uuid": "^9.0.0",
+        "tsup": "8.3.5",
+        "typescript": "^5.0.0"
+    },
+    "scripts": {
+        "build": "tsup --format esm --dts",
+        "dev": "tsup --format esm --dts --watch",
+        "lint": "eslint --fix --cache ."
+    },
+    "engines": {
+        "node": ">=16.0.0"
+    }
+}
diff --git a/packages/adapter-mongodb/src/index.ts b/packages/adapter-mongodb/src/index.ts
new file mode 100644
index 0000000000..f711ada52b
--- /dev/null
+++ b/packages/adapter-mongodb/src/index.ts
@@ -0,0 +1,910 @@
+import { MongoClient } from 'mongodb';
+import {
+    DatabaseAdapter,
+    IDatabaseCacheAdapter,
+    Account,
+    Actor,
+    GoalStatus,
+    Participant,
+    type Goal,
+    type Memory,
+    type Relationship,
+    type UUID,
+} from "@ai16z/eliza";
+import { v4 } from "uuid";
+
+export class MongoDBDatabaseAdapter
+    extends DatabaseAdapter
+    implements IDatabaseCacheAdapter
+{
+    private database: any;
+    private databaseName: string;
+    private hasVectorSearch: boolean;
+
+    constructor(client: MongoClient, databaseName: string) {
+        super();
+        this.db = client;
+        this.databaseName = databaseName;
+        this.hasVectorSearch = false;
+    }
+
+    async init() {
+        await this.db.connect();
+        this.database = this.db.db(this.databaseName);
+
+        // Track whether vector search is available
+        this.hasVectorSearch = false;
+
+        // Create required indexes
+        await Promise.all([
+            // Try to create an Atlas Vector Search index. createSearchIndex is
+            // only supported on Atlas deployments, so fall back gracefully
+            // everywhere else.
+            (async () => {
+                try {
+                    await this.database.collection('memories').createSearchIndex({
+                        name: "vector_index",
+                        type: "vectorSearch",
+                        definition: {
+                            fields: [
+                                {
+                                    type: "vector",
+                                    path: "embedding",
+                                    numDimensions: 1536,
+                                    similarity: "cosine"
+                                }
+                            ]
+                        }
+                    });
+                    this.hasVectorSearch = true;
+                    console.log("Vector search capabilities are available and enabled");
+                } catch (error) {
+                    console.log("Vector search not available, falling back to standard search", error);
+                    // Create a standard index on the embedding field instead
+                    await this.database.collection('memories').createIndex(
+                        { embedding: 1 }
+                    );
+                }
+            })(),
+
+            // Regular indexes that should always be created
+            this.database.collection('memories').createIndex(
+                { type: 1, roomId: 1, agentId: 1, createdAt: -1 }
+            ),
+            this.database.collection('participants').createIndex(
+                { userId: 1, roomId: 1 },
+                { unique: true }
+            ),
+            this.database.collection('memories').createIndex(
+                { "content": "text" },
+                { weights: { "content": 10 } }
+            ),
+            this.database.collection('cache').createIndex(
+                { expiresAt: 1 },
+                { expireAfterSeconds: 0 }
+            )
+        ]);
+
+        try {
+            // Enable sharding for better performance. Both commands must run
+            // against the admin database and require a sharded cluster.
+            const admin = this.db.db('admin');
+            await admin.command({
+                enableSharding: this.databaseName
+            });
+            await admin.command({
+                shardCollection: `${this.databaseName}.memories`,
+                key: { roomId: "hashed" }
+            });
+        } catch (error) {
+            console.log("Sharding may already be enabled or insufficient permissions", error);
+        }
+    }
+
+    async close() {
+        await this.db.close();
+    }
+
+    async getRoom(roomId: UUID): Promise<UUID | null> {
+        const room = await this.database.collection('rooms').findOne({ id: roomId });
+        return room ? room.id : null;
+    }
+
+    async getParticipantsForAccount(userId: UUID): Promise<Participant[]> {
+        return await this.database.collection('participants')
+            .find({ userId })
+            .toArray();
+    }
+
+    async getParticipantsForRoom(roomId: UUID): Promise<UUID[]> {
+        const participants = await this.database.collection('participants')
+            .find({ roomId })
+            .toArray();
+        return participants.map(p => p.userId);
+    }
+
+    async getParticipantUserState(
+        roomId: UUID,
+        userId: UUID
+    ): Promise<"FOLLOWED" | "MUTED" | null> {
+        const participant = await this.database.collection('participants')
+            .findOne({ roomId, userId });
+        return participant?.userState ?? null;
+    }
+
+    async setParticipantUserState(
+        roomId: UUID,
+        userId: UUID,
+        state: "FOLLOWED" | "MUTED" | null
+    ): Promise<void> {
+        await this.database.collection('participants').updateOne(
+            { roomId, userId },
+            { $set: { userState: state } }
+        );
+    }
+
+    async getAccountById(userId: UUID): Promise<Account | null> {
+        const account = await this.database.collection('accounts').findOne({ id: userId });
+        if (!account) return null;
+        return {
+            ...account,
+            details: typeof account.details === 'string' ?
+                JSON.parse(account.details) : account.details
+        };
+    }
+
+    async createAccount(account: Account): Promise<boolean> {
+        try {
+            await this.database.collection('accounts').insertOne({
+                ...account,
+                id: account.id ?? v4(),
+                details: JSON.stringify(account.details),
+                createdAt: new Date()
+            });
+            return true;
+        } catch (error) {
+            console.log("Error creating account", error);
+            return false;
+        }
+    }
+
+    async getActorDetails(params: { roomId: UUID }): Promise<Actor[]> {
+        const actors = await this.database.collection('participants')
+            .aggregate([
+                { $match: { roomId: params.roomId } },
+                {
+                    $lookup: {
+                        from: 'accounts',
+                        localField: 'userId',
+                        foreignField: 'id',
+                        as: 'account'
+                    }
+                },
+                { $unwind: '$account' },
+                {
+                    $project: {
+                        id: '$account.id',
+                        name: '$account.name',
+                        username: '$account.username',
+                        details: '$account.details'
+                    }
+                }
+            ]).toArray();
+
+        return actors
+            .map(actor => ({
+                ...actor,
+                details: typeof actor.details === 'string' ?
+                    JSON.parse(actor.details) : actor.details
+            }))
+            .filter((actor): actor is Actor => actor !== null);
+    }
+
+    async getMemoriesByRoomIds(params: {
+        agentId: UUID;
+        roomIds: UUID[];
+        tableName: string;
+    }): Promise<Memory[]> {
+        if (!params.tableName) {
+            params.tableName = "messages";
+        }
+
+        const memories = await this.database.collection('memories')
+            .find({
+                type: params.tableName,
+                agentId: params.agentId,
+                roomId: { $in: params.roomIds }
+            })
+            .toArray();
+
+        return memories.map(memory => ({
+            ...memory,
+            content: typeof memory.content === 'string' ?
+                JSON.parse(memory.content) : memory.content
+        }));
+    }
+
+    async getMemoryById(memoryId: UUID): Promise<Memory | null> {
+        const memory = await this.database.collection('memories').findOne({ id: memoryId });
+        if (!memory) return null;
+
+        return {
+            ...memory,
+            content: typeof memory.content === 'string' ?
+                JSON.parse(memory.content) : memory.content
+        };
+    }
+
+    async createMemory(memory: Memory, tableName: string): Promise<void> {
+        let isUnique = true;
+
+        if (memory.embedding) {
+            const similarMemories = await this.searchMemoriesByEmbedding(
+                memory.embedding,
+                {
+                    tableName,
+                    agentId: memory.agentId,
+                    roomId: memory.roomId,
+                    match_threshold: 0.95,
+                    count: 1
+                }
+            );
+            isUnique = similarMemories.length === 0;
+        }
+
+        const content = JSON.stringify(memory.content);
+        const createdAt = memory.createdAt ?? Date.now();
+
+        await this.database.collection('memories').insertOne({
+            id: memory.id ?? v4(),
+            type: tableName,
+            content,
+            // Store the embedding as a plain number array so it can be read
+            // back by both $vectorSearch and the cosine-similarity fallback
+            embedding: memory.embedding ? Array.from(memory.embedding) : null,
+            userId: memory.userId,
+            roomId: memory.roomId,
+            agentId: memory.agentId,
+            unique: isUnique,
+            createdAt: new Date(createdAt)
+        });
+    }
+
+    private async searchMemoriesFallback(params: {
+        embedding: number[];
+        query: any;
+        limit?: number;
+    }): Promise<Memory[]> {
+        // Implement a basic similarity search using standard MongoDB operations
+        const memories = await this.database.collection('memories')
+            .find(params.query)
+            .limit(params.limit || 10)
+            .toArray();
+
+        // Sort by cosine similarity computed in application
+        return memories
+            .map(memory => ({
+                ...memory,
+                similarity: memory.embedding
+                    ? this.cosineSimilarity(params.embedding, memory.embedding)
+                    : 0
+            }))
+            .sort((a, b) => b.similarity - a.similarity)
+            .map(memory => ({
+                ...memory,
+                createdAt: typeof memory.createdAt === "string" ?
+                    Date.parse(memory.createdAt) : memory.createdAt,
+                content: typeof memory.content === 'string' ?
+                    JSON.parse(memory.content) : memory.content
+            }));
+    }
+
+    private cosineSimilarity(a: number[], b: number[]): number {
+        const dotProduct = a.reduce((sum, val, i) => sum + val * b[i], 0);
+        const magnitudeA = Math.sqrt(a.reduce((sum, val) => sum + val * val, 0));
+        const magnitudeB = Math.sqrt(b.reduce((sum, val) => sum + val * val, 0));
+        return dotProduct / (magnitudeA * magnitudeB);
+    }
+
+    async searchMemories(params: {
+        tableName: string;
+        roomId: UUID;
+        agentId?: UUID;
+        embedding: number[];
+        match_threshold: number;
+        match_count: number;
+        unique: boolean;
+    }): Promise<Memory[]> {
+        const query = {
+            type: params.tableName,
+            roomId: params.roomId,
+            ...(params.unique && { unique: true }),
+            ...(params.agentId && { agentId: params.agentId })
+        };
+
+        if (this.hasVectorSearch) {
+            // $vectorSearch must be the first stage of the pipeline
+            const pipeline = [
+                {
+                    $vectorSearch: {
+                        index: "vector_index",
+                        path: "embedding",
+                        queryVector: params.embedding,
+                        numCandidates: params.match_count * 2,
+                        limit: params.match_count
+                    }
+                },
+                { $match: query }
+            ];
+
+            try {
+                const memories = await this.database.collection('memories')
+                    .aggregate(pipeline)
+                    .toArray();
+
+                return memories.map(memory => ({
+                    ...memory,
+                    createdAt: typeof memory.createdAt === "string" ?
+                        Date.parse(memory.createdAt) : memory.createdAt,
+                    content: typeof memory.content === 'string' ?
+                        JSON.parse(memory.content) : memory.content
+                }));
+            } catch (error) {
+                console.log("Vector search failed, falling back to standard search", error);
+                return this.searchMemoriesFallback({
+                    embedding: params.embedding,
+                    query,
+                    limit: params.match_count
+                });
+            }
+        }
+
+        return this.searchMemoriesFallback({
+            embedding: params.embedding,
+            query,
+            limit: params.match_count
+        });
+    }
+
+    async searchMemoriesByEmbedding(
+        embedding: number[],
+        params: {
+            match_threshold?: number;
+            count?: number;
+            roomId?: UUID;
+            agentId: UUID;
+            unique?: boolean;
+            tableName: string;
+        }
+    ): Promise<Memory[]> {
+        const query = {
+            type: params.tableName,
+            agentId: params.agentId,
+            ...(params.unique && { unique: true }),
+            ...(params.roomId && { roomId: params.roomId })
+        };
+
+        // Without an Atlas vector index the $vectorSearch stage would throw,
+        // so use the cosine-similarity fallback instead
+        if (!this.hasVectorSearch) {
+            return this.searchMemoriesFallback({
+                embedding,
+                query,
+                limit: params.count
+            });
+        }
+
+        const pipeline = [
+            {
+                $vectorSearch: {
+                    index: "vector_index",
+                    path: "embedding",
+                    queryVector: embedding,
+                    numCandidates: (params.count ?? 10) * 2,
+                    limit: params.count ?? 10
+                }
+            },
+            { $match: query }
+        ];
+
+        const memories = await this.database.collection('memories')
+            .aggregate(pipeline)
+            .toArray();
+
+        return memories.map(memory => ({
+            ...memory,
+            createdAt: typeof memory.createdAt === "string" ?
+                Date.parse(memory.createdAt) : memory.createdAt,
+            content: typeof memory.content === 'string' ?
+                JSON.parse(memory.content) : memory.content
+        }));
+    }
+
+    async getCachedEmbeddings(opts: {
+        query_table_name: string;
+        query_threshold: number;
+        query_input: string;
+        query_field_name: string;
+        query_field_sub_name: string;
+        query_match_count: number;
+    }): Promise<{ embedding: number[]; levenshtein_score: number }[]> {
+        const BATCH_SIZE = 1000; // Process in chunks of 1000 documents
+        let results: { embedding: number[]; levenshtein_score: number }[] = [];
+
+        try {
+            // Get total count for progress tracking
+            const totalCount = await this.database.collection('memories').countDocuments({
+                type: opts.query_table_name,
+                [`content.${opts.query_field_name}.${opts.query_field_sub_name}`]: { $exists: true }
+            });
+
+            let processed = 0;
+
+            while (processed < totalCount) {
+                // Fetch batch of documents
+                const memories = await this.database.collection('memories')
+                    .find({
+                        type: opts.query_table_name,
+                        [`content.${opts.query_field_name}.${opts.query_field_sub_name}`]: { $exists: true }
+                    })
+                    .skip(processed)
+                    .limit(BATCH_SIZE)
+                    .toArray();
+
+                // Guard against an empty batch (e.g. concurrent deletes) so
+                // the loop cannot spin forever
+                if (memories.length === 0) break;
+
+                // Process batch
+                const batchResults = memories
+                    .map(memory => {
+                        try {
+                            const content = memory.content[opts.query_field_name][opts.query_field_sub_name];
+                            if (!content || typeof content !== 'string') {
+                                return null;
+                            }
+
+                            return {
+                                embedding: Array.from(memory.embedding),
+                                levenshtein_score: this.calculateLevenshteinDistanceOptimized(
+                                    content.toLowerCase(),
+                                    opts.query_input.toLowerCase()
+                                )
+                            };
+                        } catch (error) {
+                            console.warn(`Error processing memory document: ${error}`);
+                            return null;
+                        }
+                    })
+                    .filter((result): result is { embedding: number[]; levenshtein_score: number } =>
+                        result !== null);
+
+                // Merge batch results
+                results = this.mergeAndSortResults(results, batchResults, opts.query_match_count);
+                processed += memories.length;
+
+                // Log progress for long operations
+                if (totalCount > BATCH_SIZE) {
+                    console.log(`Processed ${processed}/${totalCount} documents`);
+                }
+            }
+
+            return results;
+
+        } catch (error) {
+            console.error("Error in getCachedEmbeddings:", error);
+            if (results.length > 0) {
+                console.log("Returning partial results");
+                return results;
+            }
+            return [];
+        }
+    }
+
+    /**
+     * Optimized Levenshtein distance calculation with early termination
+     * and matrix reuse for better performance
+     */
+    private calculateLevenshteinDistanceOptimized(str1: string, str2: string): number {
+        // Early termination for identical strings
+        if (str1 === str2) return 0;
+
+        // Early termination for empty strings
+        if (str1.length === 0) return str2.length;
+        if (str2.length === 0) return str1.length;
+
+        // Use shorter string as inner loop for better performance
+        if (str1.length > str2.length) {
+            [str1, str2] = [str2, str1];
+        }
+
+        // Reuse matrix to avoid garbage collection
+        const matrix = this.getLevenshteinMatrix(str1.length + 1, str2.length + 1);
+
+        // Initialize first row and column
+        for (let i = 0; i <= str1.length; i++) matrix[i][0] = i;
+        for (let j = 0; j <= str2.length; j++) matrix[0][j] = j;
+
+        // Calculate minimum edit distance
+        for (let i = 1; i <= str1.length; i++) {
+            for (let j = 1; j <= str2.length; j++) {
+                if (str1[i-1] === str2[j-1]) {
+                    matrix[i][j] = matrix[i-1][j-1];
+                } else {
+                    matrix[i][j] = Math.min(
+                        matrix[i-1][j-1] + 1, // substitution
+                        matrix[i][j-1] + 1,   // insertion
+                        matrix[i-1][j] + 1    // deletion
+                    );
+                }
+            }
+        }
+
+        return matrix[str1.length][str2.length];
+    }
+
+    // Cache for reusing the Levenshtein distance matrix
+    private levenshteinMatrix: number[][] = [];
+    private maxRows = 0;
+    private maxCols = 0;
+
+    private getLevenshteinMatrix(rows: number, cols: number): number[][] {
+        // Grow when either dimension is too small; comparing only the total
+        // cell count could hand back a matrix with enough cells but too few rows
+        if (rows > this.maxRows || cols > this.maxCols) {
+            this.maxRows = Math.max(rows, this.maxRows);
+            this.maxCols = Math.max(cols, this.maxCols);
+            this.levenshteinMatrix = Array.from({ length: this.maxRows }, () =>
+                new Array(this.maxCols).fill(0)
+            );
+        }
+        return this.levenshteinMatrix;
+    }
+
+    /**
+     * Efficiently merge and sort two arrays of results while maintaining top K items
+     */
+    private mergeAndSortResults(
+        existing: { embedding: number[]; levenshtein_score: number }[],
+        newResults: { embedding: number[]; levenshtein_score: number }[],
+        limit: number
+    ): { embedding: number[]; levenshtein_score: number }[] {
+        const merged = [...existing, ...newResults];
+
+        // Use quick select algorithm if array is large
+        if (merged.length > 1000) {
+            return this.quickSelectTopK(merged, limit);
+        }
+
+        // Use simple sort for smaller arrays
+        return merged
+            .sort((a, b) => a.levenshtein_score - b.levenshtein_score)
+            .slice(0, limit);
+    }
+
+    /**
+     * Quick select algorithm to efficiently find top K items
+     */
+    private quickSelectTopK(
+        arr: { embedding: number[]; levenshtein_score: number }[],
+        k: number
+    ): { embedding: number[]; levenshtein_score: number }[] {
+        if (arr.length <= k) return arr.sort((a, b) => a.levenshtein_score - b.levenshtein_score);
+
+        const pivot = arr[Math.floor(Math.random() * arr.length)].levenshtein_score;
+        const left = arr.filter(x => x.levenshtein_score < pivot);
+        const equal = arr.filter(x => x.levenshtein_score === pivot);
+        const right = arr.filter(x => x.levenshtein_score > pivot);
+
+        if (k <= left.length) {
+            return this.quickSelectTopK(left, k);
+        }
+        if (k <= left.length + equal.length) {
+            return [...left, ...equal.slice(0, k - left.length)]
+                .sort((a, b) => a.levenshtein_score - b.levenshtein_score);
+        }
+        return [...left, ...equal, ...this.quickSelectTopK(right, k - left.length - equal.length)]
+            .sort((a, b) => a.levenshtein_score - b.levenshtein_score);
+    }
+
+    async updateGoalStatus(params: {
+        goalId: UUID;
+        status: GoalStatus;
+    }): Promise<void> {
+        await this.database.collection('goals').updateOne(
+            { id: params.goalId },
+            { $set: { status: params.status } }
+        );
+    }
+
+    async log(params: {
+        body: { [key: string]: unknown };
+        userId: UUID;
+        roomId: UUID;
+        type: string;
+    }): Promise<void> {
+        await this.database.collection('logs').insertOne({
+            id: v4(),
+            body: JSON.stringify(params.body),
+            userId: params.userId,
+            roomId: params.roomId,
+            type: params.type,
+            createdAt: new Date()
+        });
+    }
+
+    async getMemories(params: {
+        roomId: UUID;
+        count?: number;
+        unique?: boolean;
+        tableName: string;
+        agentId: UUID;
+        start?: number;
+        end?: number;
+    }): Promise<Memory[]> {
+        if (!params.tableName) {
+            throw new Error("tableName is required");
+        }
+        if (!params.roomId) {
+            throw new Error("roomId is required");
+        }
+
+        const query: any = {
+            type: params.tableName,
+            agentId: params.agentId,
+            roomId: params.roomId
+        };
+
+        if (params.unique) {
+            query.unique = true;
+        }
+
+        if (params.start || params.end) {
+            query.createdAt = {};
+            if (params.start) query.createdAt.$gte = new Date(params.start);
+            if (params.end) query.createdAt.$lte = new Date(params.end);
+        }
+
+        const memories = await this.database.collection('memories')
+            .find(query)
+            .sort({ createdAt: -1 })
+            .limit(params.count || 0)
+            .toArray();
+
+        return memories.map(memory => ({
+            ...memory,
+            createdAt: new Date(memory.createdAt).getTime(),
+            content: typeof memory.content === 'string' ?
+                JSON.parse(memory.content) : memory.content
+        }));
+    }
+
+    async removeMemory(memoryId: UUID, tableName: string): Promise<void> {
+        await this.database.collection('memories').deleteOne({
+            id: memoryId,
+            type: tableName
+        });
+    }
+
+    async removeAllMemories(roomId: UUID, tableName: string): Promise<void> {
+        await this.database.collection('memories').deleteMany({
+            roomId,
+            type: tableName
+        });
+    }
+
+    async countMemories(
+        roomId: UUID,
+        unique = true,
+        tableName = ""
+    ): Promise<number> {
+        if (!tableName) {
+            throw new Error("tableName is required");
+        }
+
+        const query: any = {
+            type: tableName,
+            roomId
+        };
+
+        if (unique) {
+            query.unique = true;
+        }
+
+        return await this.database.collection('memories').countDocuments(query);
+    }
+
+    async getGoals(params: {
+        roomId: UUID;
+        userId?: UUID | null;
+        onlyInProgress?: boolean;
+        count?: number;
+    }): Promise<Goal[]> {
+        const query: any = { roomId: params.roomId };
+
+        if (params.userId) {
+            query.userId = params.userId;
+        }
+
+        if (params.onlyInProgress) {
+            query.status = 'IN_PROGRESS';
+        }
+
+        const goals = await this.database.collection('goals')
+            .find(query)
+            .limit(params.count || 0)
+            .toArray();
+
+        return goals.map(goal => ({
+            ...goal,
+            objectives: typeof goal.objectives === 'string' ?
+                JSON.parse(goal.objectives) : goal.objectives
+        }));
+    }
+
+    async updateGoal(goal: Goal): Promise<void> {
+        await this.database.collection('goals').updateOne(
+            { id: goal.id },
+            {
+                $set: {
+                    name: goal.name,
+                    status: goal.status,
+                    objectives: JSON.stringify(goal.objectives)
+                }
+            }
+        );
+    }
+
+    async createGoal(goal: Goal): Promise<void> {
+        await this.database.collection('goals').insertOne({
+            ...goal,
+            id: goal.id ?? v4(),
+            objectives: JSON.stringify(goal.objectives),
+            createdAt: new Date()
+        });
+    }
+
+    async removeGoal(goalId: UUID): Promise<void> {
+        await this.database.collection('goals').deleteOne({ id: goalId });
+    }
+
+    async removeAllGoals(roomId: UUID): Promise<void> {
+        await this.database.collection('goals').deleteMany({ roomId });
+    }
+
+    async createRoom(roomId?: UUID): Promise<UUID> {
+        const newRoomId = roomId || v4() as UUID;
+        try {
+            await this.database.collection('rooms').insertOne({
+                id: newRoomId,
+                createdAt: new Date()
+            });
+        } catch (error) {
+            console.log("Error creating room", error);
+        }
+        return newRoomId;
+    }
+
+    async removeRoom(roomId: UUID): Promise<void> {
+        await this.database.collection('rooms').deleteOne({ id: roomId });
+    }
+
+    async getRoomsForParticipant(userId: UUID): Promise<UUID[]> {
+        const rooms = await this.database.collection('participants')
+            .find({ userId })
+            .project({ roomId: 1 })
+            .toArray();
+        return rooms.map(r => r.roomId);
+    }
+
+    async getRoomsForParticipants(userIds: UUID[]): Promise<UUID[]> {
+        const rooms = await this.database.collection('participants')
+            .distinct('roomId', { userId: { $in: userIds } });
+        return rooms;
+    }
+
+    async addParticipant(userId: UUID, roomId: UUID): Promise<boolean> {
+        try {
+            await this.database.collection('participants').insertOne({
+                id: v4(),
+                userId,
+                roomId,
+                createdAt: new Date()
+            });
+            return true;
+        } catch (error) {
+            console.log("Error adding participant", error);
+            return false;
+        }
+    }
+
+    async removeParticipant(userId: UUID, roomId: UUID): Promise<boolean> {
+        try {
+            await this.database.collection('participants').deleteOne({
+                userId,
+                roomId
+            });
+            return true;
+        } catch (error) {
+            console.log("Error removing participant", error);
+            return false;
+        }
+    }
+
+    async createRelationship(params: {
+        userA: UUID;
+        userB: UUID;
+    }): Promise<boolean> {
+        if (!params.userA || !params.userB) {
+            throw new Error("userA and userB are required");
+        }
+
+        try {
+            await this.database.collection('relationships').insertOne({
+                id: v4(),
+                userA: params.userA,
+                userB: params.userB,
+                userId: params.userA,
+                createdAt: new Date()
+            });
+            return true;
+        } catch (error) {
+            console.log("Error creating relationship", error);
+            return false;
+        }
+    }
+
+    async getRelationship(params: {
+        userA: UUID;
+        userB: UUID;
+    }): Promise<Relationship | null> {
+        return await this.database.collection('relationships').findOne({
+            $or: [
+                { userA: params.userA, userB: params.userB },
+                { userA: params.userB, userB: params.userA }
+            ]
+        });
+    }
+
+    async getRelationships(params: { userId: UUID }): Promise<Relationship[]> {
+        return await this.database.collection('relationships')
+            .find({
+                $or: [
+                    { userA: params.userId },
+                    { userB: params.userId }
+                ]
+            })
+            .toArray();
+    }
+
+    async getCache(params: {
+        key: string;
+        agentId: UUID;
+    }): Promise<string | undefined> {
+        const cached = await this.database.collection('cache')
+            .findOne({
+                key: params.key,
+                agentId: params.agentId,
+                expiresAt: { $gt: new Date() }
+            });
+        return cached?.value;
+    }
+
+    async setCache(params: {
+        key: string;
+        agentId: UUID;
+        value: string;
+    }): Promise<boolean> {
+        try {
+            await this.database.collection('cache').updateOne(
+                { key: params.key, agentId: params.agentId },
+                {
+                    $set: {
+                        value: params.value,
+                        createdAt: new Date(),
+                        expiresAt: new Date(Date.now() + 24 * 60 * 60 * 1000) // 24 hours expiry
+                    }
+                },
+                { upsert: true }
+            );
+            return true;
+        } catch (error) {
+            console.log("Error setting cache", error);
+            return false;
+        }
+    }
+
+    async deleteCache(params: {
+        key: string;
+        agentId: UUID;
+    }): Promise<boolean> {
+        try {
+            await this.database.collection('cache').deleteOne({
+                key: params.key,
+                agentId: params.agentId
+            });
+            return true;
+        } catch (error) {
+            console.log("Error removing cache", error);
+            return false;
+        }
+    }
+}
diff --git a/packages/adapter-mongodb/tsconfig.json b/packages/adapter-mongodb/tsconfig.json
new file mode 100644
index 0000000000..7dddb9a641
--- /dev/null
+++ b/packages/adapter-mongodb/tsconfig.json
@@ -0,0 +1,23 @@
+{
+    "extends": "../core/tsconfig.json",
+    "compilerOptions": {
+        "outDir": "dist",
+        "rootDir": "src",
+        "strict": true,
+        "esModuleInterop": true,
+        "skipLibCheck": true,
+        "forceConsistentCasingInFileNames": true,
+        "moduleResolution": "node",
+        "resolveJsonModule": true,
+        "isolatedModules": true,
+        "lib": ["ES2021", "DOM"],
+        "target": "ES2021"
+    },
+    "include": [
+        "src/**/*.ts"
+    ],
+    "exclude": [
+        "node_modules",
+        "dist"
+    ]
+}
diff --git a/packages/adapter-mongodb/tsup.config.ts b/packages/adapter-mongodb/tsup.config.ts
new file mode 100644
index 0000000000..fe2cf357e1
--- /dev/null
+++ b/packages/adapter-mongodb/tsup.config.ts
@@ -0,0 +1,28 @@
+import { defineConfig } from "tsup";
+
+export default defineConfig({
+    entry: ["src/index.ts"],
+    outDir: "dist",
+    sourcemap: true,
+    clean: true,
+    format: ["esm"],
+    dts: true,
+    target: "node16",
+    external: [
+        "mongodb",
+        "uuid",
+        "@ai16z/eliza",
+        "dotenv",
+        "fs",
+        "path",
+        "@reflink/reflink",
+        "@node-llama-cpp",
+        "https",
+        "http",
+        "agentkeepalive",
+        "@anush008/tokenizers"
+    ],
+    esbuildOptions(options) {
+        options.conditions = ["module"];
+    },
+});
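
---
Usage notes

A minimal sketch of wiring the adapter up on its own, assuming the
MONGODB_CONNECTION_STRING and MONGODB_DATABASE variables from .env.example
are set. The fallback database name and the script shape are illustrative,
not part of the adapter:

    import { MongoClient } from "mongodb";
    import { MongoDBDatabaseAdapter } from "@ai16z/adapter-mongodb";

    async function main() {
        const client = new MongoClient(process.env.MONGODB_CONNECTION_STRING!);
        const adapter = new MongoDBDatabaseAdapter(
            client,
            process.env.MONGODB_DATABASE || "elizaAgent"
        );

        // Connects, creates indexes, and probes for Atlas Vector Search
        await adapter.init();

        const roomId = await adapter.createRoom();
        console.log("Created room:", roomId);

        await adapter.close();
    }

    main().catch(console.error);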
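
A sketch of the memory round-trip built on the methods above. On an Atlas
tier with vector search it exercises the $vectorSearch path; elsewhere the
adapter's cosine-similarity fallback is used. The ids, the content shape,
and the 1536-dimension placeholder vector are assumptions for illustration:

    import { v4 } from "uuid";
    import type { Memory, UUID } from "@ai16z/eliza";
    import { MongoDBDatabaseAdapter } from "@ai16z/adapter-mongodb";

    async function memoryRoundTrip(adapter: MongoDBDatabaseAdapter) {
        const roomId = v4() as UUID;
        const agentId = v4() as UUID;
        // Placeholder embedding; in practice this comes from an embedding model
        const embedding = new Array(1536).fill(0.1);

        const memory: Memory = {
            id: v4() as UUID,
            userId: v4() as UUID,
            agentId,
            roomId,
            content: { text: "hello from MongoDB Atlas" },
            embedding,
        };
        await adapter.createMemory(memory, "messages");

        const matches = await adapter.searchMemories({
            tableName: "messages",
            roomId,
            agentId,
            embedding,
            match_threshold: 0.8,
            match_count: 5,
            unique: false,
        });
        console.log(`Found ${matches.length} matching memories`);
    }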