diff --git a/src/index.ts b/src/index.ts index 464460c..8195332 100644 --- a/src/index.ts +++ b/src/index.ts @@ -348,27 +348,234 @@ export default function nativeFilePlugin( return null; } + // Helper function to resolve an npm package and find a .node file + // Returns the path to the .node file if found, null otherwise + function resolveNpmPackageNodeFile( + packageName: string, + fromDir: string + ): string | null { + // Walk up directories looking for node_modules + let currentDir = fromDir; + const root = path.parse(fromDir).root; + + while (currentDir !== root && currentDir !== path.dirname(currentDir)) { + const nodeModulesDir = path.join(currentDir, "node_modules"); + + if (fs.existsSync(nodeModulesDir)) { + // Handle scoped packages (@scope/name) and regular packages + const packageDir = path.join(nodeModulesDir, packageName); + + if (fs.existsSync(packageDir)) { + // Try to read package.json to find the main entry + const packageJsonPath = path.join(packageDir, "package.json"); + + if (fs.existsSync(packageJsonPath)) { + try { + const packageJson = JSON.parse( + fs.readFileSync(packageJsonPath, "utf-8") + ); + + // Check if main points to a .node file + if (packageJson.main && packageJson.main.endsWith(".node")) { + const mainPath = path.join(packageDir, packageJson.main); + if (fs.existsSync(mainPath)) { + return mainPath; + } + } + } catch { + // Ignore JSON parse errors + } + } + + // Check for index.node as fallback + const indexNodePath = path.join(packageDir, "index.node"); + if (fs.existsSync(indexNodePath)) { + return indexNodePath; + } + + // Check for any .node file directly in the package directory + try { + const files = fs.readdirSync(packageDir); + const nodeFile = files.find((f) => f.endsWith(".node")); + if (nodeFile) { + return path.join(packageDir, nodeFile); + } + } catch { + // Ignore read errors + } + } + } + + currentDir = path.dirname(currentDir); + } + + return null; + } + + // Helper function to find platform-specific native 
packages matching a scope pattern + // Used for template literal requires like require(`@libsql/${target}`) + // Returns the path to the .node file for the current platform, or null + function findPlatformSpecificNativePackage( + scopePrefix: string, // e.g., "@libsql/" or "@scope/prefix-" + fromDir: string + ): { packageName: string; nodeFilePath: string } | null { + // Common platform/arch combinations for native modules + const platform = process.platform; + const arch = process.arch; + + // Common naming patterns for platform-specific packages + const platformPatterns = [ + `${platform}-${arch}`, // darwin-arm64, linux-x64 + `${platform}-${arch}-gnu`, // linux-x64-gnu + `${platform}-${arch}-musl`, // linux-x64-musl + `${platform}${arch === "x64" ? "64" : arch === "ia32" ? "32" : arch}`, // darwin64, linux64 + ]; + + // Walk up directories looking for node_modules + let currentDir = fromDir; + const root = path.parse(fromDir).root; + + while (currentDir !== root && currentDir !== path.dirname(currentDir)) { + const nodeModulesDir = path.join(currentDir, "node_modules"); + + if (fs.existsSync(nodeModulesDir)) { + // Try each platform pattern + for (const platformPattern of platformPatterns) { + const packageName = `${scopePrefix}${platformPattern}`; + const result = resolveNpmPackageNodeFile(packageName, currentDir); + if (result) { + return { packageName, nodeFilePath: result }; + } + } + + // If scope prefix starts with @, also try scanning the scope directory + if (scopePrefix.startsWith("@")) { + const scopeName = scopePrefix.split("/")[0]; // @libsql + const scopeDir = path.join(nodeModulesDir, scopeName); + + if (fs.existsSync(scopeDir)) { + try { + const packages = fs.readdirSync(scopeDir); + for (const pkg of packages) { + // Check if this package matches current platform + const lowerPkg = pkg.toLowerCase(); + const lowerPlatform = platform.toLowerCase(); + const lowerArch = arch.toLowerCase(); + + if ( + lowerPkg.includes(lowerPlatform) && + 
lowerPkg.includes(lowerArch) + ) { + const packageName = `${scopeName}/${pkg}`; + const result = resolveNpmPackageNodeFile( + packageName, + currentDir + ); + if (result) { + return { packageName, nodeFilePath: result }; + } + } + } + } catch { + // Ignore read errors + } + } + } + } + + currentDir = path.dirname(currentDir); + } + + return null; + } + + // Helper function to extract package name from a file path + // For paths like /node_modules/@libsql/darwin-arm64/index.node -> libsql-darwin-arm64 + // For paths like /node_modules/sql/native.node -> sql + function extractPackageName(filePath: string): string | null { + const nodeModulesMatch = filePath.match( + /node_modules[/\\](@[^/\\]+[/\\][^/\\]+|[^/\\]+)/ + ); + if (nodeModulesMatch) { + // Convert to file-safe format: @scope/package -> scope-package (remove @ and replace slashes) + return nodeModulesMatch[1].replace(/^@/, "").replace(/[/\\]/g, "-"); + } + return null; + } + // Helper function to generate hashed filename based on format option + // originalPath is optional - when provided, we can extract package name for prefix function generateHashedFilename( originalFilename: string, - hash: string + hash: string, + originalPath?: string ): string { const lastDotIndex = originalFilename.lastIndexOf("."); const extension = lastDotIndex > 0 ? originalFilename.slice(lastDotIndex) : ""; + const baseName = + lastDotIndex > 0 ? originalFilename.slice(0, lastDotIndex) : originalFilename; if (options.filenameFormat === "hash-only") { // Hash-only format: HASH.node return `${hash.toUpperCase()}${extension}`; } else { - // Preserve format (default): filename-HASH.node - return lastDotIndex > 0 - ? 
`${originalFilename.slice( - 0, - lastDotIndex - )}-${hash.toUpperCase()}${extension}` - : `${originalFilename}-${hash.toUpperCase()}`; + // Preserve format (default): packagename-filename-HASH.node + // Extract package name if we have the original path + let prefix = ""; + if (originalPath) { + const packageName = extractPackageName(originalPath); + if (packageName) { + prefix = `${packageName}-`; + } + } + return `${prefix}${baseName}-${hash.toUpperCase()}${extension}`; + } + } + + // Helper to register a native file and return its info + // Centralizes the hash generation, storage, and reverse mapping logic + function registerNativeFile(absolutePath: string): NativeFileInfo { + let info = nativeFiles.get(absolutePath); + if (!info) { + const content = fs.readFileSync(absolutePath); + const hash = crypto + .createHash("md5") + .update(content) + .digest("hex") + .slice(0, 8); + const filename = path.basename(absolutePath); + const hashedFilename = generateHashedFilename(filename, hash, absolutePath); + info = { + content, + hashedFilename, + originalPath: absolutePath, + }; + nativeFiles.set(absolutePath, info); + hashedFilenameToPath.set(hashedFilename, absolutePath); } + return info; + } + + // Helper to detect module type using Rollup context if available, with fallback + // Centralizes the try/catch pattern used in multiple places + function detectModuleTypeWithContext( + context: { getModuleInfo?: (id: string) => unknown }, + fileId: string, + code?: string + ): boolean { + try { + if (typeof context.getModuleInfo === "function") { + const moduleInfo = context.getModuleInfo(fileId); + const format = (moduleInfo as { format?: string })?.format; + if (format) { + return format === "es"; + } + } + } catch { + // Fall through to fallback + } + return detectModuleType(fileId, code); } return { @@ -426,15 +633,19 @@ export default function nativeFilePlugin( // Return proxy code that requires the hashed file // The hashed file will be in the same directory as the 
output bundle + // + // We use syntheticNamedExports (set in resolveId) to tell Rollup to resolve + // any named export requests from the default export's properties. + // This way, `const { databaseOpen } = require(...)` works correctly + // because Rollup gets databaseOpen from default.databaseOpen. if (isESModule) { - // ES module syntax return ` import { createRequire } from 'node:module'; const createRequireLocal = createRequire(import.meta.url); - export default createRequireLocal('./${info.hashedFilename}'); + const nativeModule = createRequireLocal('./${info.hashedFilename}'); + export default nativeModule; `; } else { - // CommonJS syntax - use require directly since we're in CommonJS context return ` module.exports = require('./${info.hashedFilename}'); `; @@ -462,32 +673,17 @@ export default function nativeFilePlugin( // Check if this matches a hashed filename we've generated if (hashedFilenameToPath.has(basename)) { const originalPath = hashedFilenameToPath.get(basename)!; - // Detect module type of the importing file using Rollup's getModuleInfo if available - let importingModuleType = false; - try { - if (typeof this.getModuleInfo === "function" && importer) { - const moduleInfo = this.getModuleInfo(importer); - // ModuleInfo may have format property at runtime even if TypeScript types don't include it - const format = (moduleInfo as { format?: string }).format; - if (moduleInfo && format) { - importingModuleType = format === "es"; - } else { - // Fallback to detectModuleType if format is not available - importingModuleType = detectModuleType(importer); - } - } else { - // Fallback to detectModuleType if getModuleInfo is not available - importingModuleType = detectModuleType(importer); - } - } catch { - // Fallback to detectModuleType if getModuleInfo throws - importingModuleType = detectModuleType(importer); - } + const importingModuleType = detectModuleTypeWithContext(this, importer); const virtualId = `\0native:${originalPath}`; - // Always track 
module type for this virtual module (even if false/CommonJS) virtualModuleTypes.set(virtualId, importingModuleType); - // Return virtual module ID so load hook can handle it - return virtualId; + // Return virtual module ID with syntheticNamedExports enabled + // This tells Rollup to resolve named exports from the default export's properties, + // which fixes the getAugmentedNamespace issue where destructuring fails + // because properties like databaseOpen aren't copied to the namespace. + return { + id: virtualId, + syntheticNamedExports: true, + }; } // Check if this file should be processed @@ -499,53 +695,14 @@ export default function nativeFilePlugin( // Check if file exists if (!fs.existsSync(resolved)) return null; - // Generate hash from file content - const content = fs.readFileSync(resolved); - const hash = crypto - .createHash("md5") - .update(content) - .digest("hex") - .slice(0, 8); + // Register the native file (generates hash, stores mapping) + registerNativeFile(resolved); - // Generate hashed filename - const filename = path.basename(source); - const hashedFilename = generateHashedFilename(filename, hash); - - // Store the mapping - nativeFiles.set(resolved, { - content, - hashedFilename, - originalPath: resolved, - }); - // Track reverse mapping for resolveId hook - hashedFilenameToPath.set(hashedFilename, resolved); - - // Detect module type of the importing file using Rollup's getModuleInfo if available - let importingModuleType = false; - try { - if (typeof this.getModuleInfo === "function" && importer) { - const moduleInfo = this.getModuleInfo(importer); - // ModuleInfo may have format property at runtime even if TypeScript types don't include it - const format = (moduleInfo as { format?: string }).format; - if (moduleInfo && format) { - importingModuleType = format === "es"; - } else { - // Fallback to detectModuleType if format is not available - importingModuleType = detectModuleType(importer); - } - } else { - // Fallback to 
detectModuleType if getModuleInfo is not available - importingModuleType = detectModuleType(importer); - } - } catch { - // Fallback to detectModuleType if getModuleInfo throws - importingModuleType = detectModuleType(importer); - } + // Track module type and return virtual module ID + const importingModuleType = detectModuleTypeWithContext(this, importer); const virtualId = `\0native:${resolved}`; - // Always track module type for this virtual module (even if false/CommonJS) virtualModuleTypes.set(virtualId, importingModuleType); - // Return a virtual module ID return virtualId; }, @@ -555,7 +712,7 @@ export default function nativeFilePlugin( if (!enabled) return null; - // Only process files that mention .node, node-gyp-build, or bindings + // Only process files that mention .node, node-gyp-build, bindings, or native platform packages // For bindings, we check for the exact package name patterns to avoid false positives const hasBindingsPackage = code.includes("require('bindings')") || @@ -563,10 +720,17 @@ export default function nativeFilePlugin( code.includes("from 'bindings'") || code.includes('from "bindings"'); + // Check for template literal requires that might be platform-specific native packages + // These patterns are used by NAPI-RS/neon-rs for platform-specific native modules + // e.g., require(`@libsql/${target}`) or require(`@scope/${platform}`) + const hasTemplateLiteralNativePackage = + /require\s*\(\s*`@[a-z0-9-]+\//.test(code); + if ( !code.includes(".node") && !code.includes("node-gyp-build") && - !hasBindingsPackage + !hasBindingsPackage && + !hasTemplateLiteralNativePackage ) return null; @@ -602,33 +766,9 @@ export default function nativeFilePlugin( const fileURLToPathVars = new Set(); // Variables that reference 'fileURLToPath' // Detect if this is an ES6 module (vs CommonJS) - // Try to use Rollup's built-in module info first (most reliable) - let isESModule = false; + let isESModule = detectModuleTypeWithContext(this, id, code); let 
hasCreateRequireImport = false; - // Use Rollup's getModuleInfo if available (most reliable) - // getModuleInfo returns module metadata including format - try { - if (typeof this.getModuleInfo === "function") { - const moduleInfo = this.getModuleInfo(id); - // ModuleInfo may have format property at runtime even if TypeScript types don't include it - const format = (moduleInfo as { format?: string }).format; - if (moduleInfo && format) { - // format indicates the module format: 'es' = ES module, 'cjs' = CommonJS - isESModule = format === "es"; - } else { - // Fallback to our detection if format is not available - isESModule = detectModuleType(id, code); - } - } else { - // Fallback to our detection if getModuleInfo is not available - isESModule = detectModuleType(id, code); - } - } catch { - // Fallback to our detection if getModuleInfo throws - isESModule = detectModuleType(id, code); - } - // Also check AST for ImportDeclaration/ExportDeclaration nodes (most reliable) // This will override other detection if we find import/export statements @@ -1062,6 +1202,7 @@ export default function nativeFilePlugin( } } // Pattern 5: Regular require('./addon.node') calls + // Note: Using nested if instead of early return to allow Pattern 7 to run else if ( node.arguments.length === 1 && isLiteral(node.arguments[0]) && @@ -1071,48 +1212,171 @@ export default function nativeFilePlugin( const relativePath = literalNode.value as string; // Check if this file should be processed (either .node or package-specific) - if (!shouldProcessFile(relativePath, id)) return; - - // Resolve the actual path - const absolutePath = path.resolve(path.dirname(id), relativePath); - - if (!fs.existsSync(absolutePath)) return; - - // Check if we already processed this file - let info = nativeFiles.get(absolutePath); - - if (!info) { - // Generate hash and store - const content = fs.readFileSync(absolutePath); - const hash = crypto - .createHash("md5") - .update(content) - .digest("hex") - .slice(0, 
8); - - // Generate hashed filename - // e.g., addon.node -> addon-HASH.node (or HASH.node if hash-only) - // native-file.node-macos -> native-file-HASH.node-macos (or HASH.node-macos if hash-only) - const filename = path.basename(relativePath); - const hashedFilename = generateHashedFilename(filename, hash); - - info = { - content, - hashedFilename, - originalPath: absolutePath, - }; - nativeFiles.set(absolutePath, info); - // Track reverse mapping for resolveId hook - hashedFilenameToPath.set(hashedFilename, absolutePath); + // Only process relative paths with .node extension here + // Non-relative paths will be handled by Pattern 7 + if (shouldProcessFile(relativePath, id)) { + const absolutePath = path.resolve(path.dirname(id), relativePath); + + if (fs.existsSync(absolutePath)) { + const info = registerNativeFile(absolutePath); + replacements.push({ + start: literalNode.start, + end: literalNode.end, + value: `"./${info.hashedFilename}"`, + }); + modified = true; + } } + } - // Record the replacement - replacements.push({ - start: literalNode.start, - end: literalNode.end, - value: `"./${info.hashedFilename}"`, - }); - modified = true; + // Pattern 6 & 6b: NAPI-RS style path.join/__dirname patterns + // Pattern 6: path.join(__dirname, 'xxx.node') or pathAlias.join(__dirname, 'xxx.node') + // Pattern 6b: join(__dirname, 'xxx.node') (destructured) + // Used by NAPI-RS loaders like libsql-js: existsSync(join(__dirname, 'libsql.darwin-arm64.node')) + const isPathJoinCall = + (isMemberExpression(calleeNode) && + isIdentifier(calleeNode.object) && + (pathModuleVars.has(calleeNode.object.name) || + calleeNode.object.name === "path") && + isIdentifier(calleeNode.property) && + (calleeNode.property.name === "join" || + calleeNode.property.name === "resolve")) || + (isIdentifier(calleeNode) && calleeNode.name === "join"); + + if (isPathJoinCall && node.arguments.length >= 2) { + // Resolve base directory from first argument + const firstArg = node.arguments[0]; + let 
baseDir: string | null = null; + + if (isIdentifier(firstArg) && firstArg.name === "__dirname") { + baseDir = path.dirname(id); + } else if ( + isIdentifier(firstArg) && + directoryVars.has(firstArg.name) + ) { + baseDir = directoryVars.get(firstArg.name)!; + } + + if (baseDir) { + // Check if last argument is a .node file string literal + const lastArg = node.arguments[node.arguments.length - 1]; + if ( + isLiteral(lastArg) && + typeof lastArg.value === "string" && + lastArg.value.endsWith(".node") + ) { + // Resolve the full path from all arguments + const parts: string[] = [baseDir]; + for (let i = 1; i < node.arguments.length; i++) { + const arg = node.arguments[i]; + if (isLiteral(arg) && typeof arg.value === "string") { + parts.push(arg.value); + } + } + const absolutePath = path.join(...parts); + + if (fs.existsSync(absolutePath)) { + const info = registerNativeFile(absolutePath); + replacements.push({ + start: lastArg.start, + end: lastArg.end, + value: `'${info.hashedFilename}'`, + }); + modified = true; + } + } + } + } + + // Pattern 7: npm package require that resolves to a .node file + // Handles: require('@libsql/darwin-arm64') or require('native-addon') + // where the package's main entry is a .node file + if ( + isIdentifier(calleeNode) && + (calleeNode.name === "require" || + customRequireVars.has(calleeNode.name)) && + node.arguments.length === 1 && + isLiteral(node.arguments[0]) && + typeof node.arguments[0].value === "string" + ) { + const packageName = node.arguments[0].value as string; + + // Skip relative paths (already handled by Pattern 5) + // Skip Node.js built-ins + if ( + !packageName.startsWith(".") && + !packageName.startsWith("/") && + !packageName.startsWith("node:") + ) { + // Try to resolve the package and find a .node file + const nodeFilePath = resolveNpmPackageNodeFile( + packageName, + path.dirname(id) + ); + + if (nodeFilePath) { + const info = registerNativeFile(nodeFilePath); + const literalNode = node.arguments[0] as 
 LiteralNode; + replacements.push({ + start: literalNode.start, + end: literalNode.end, + value: `"./${info.hashedFilename}"`, + }); + modified = true; + } + } + } + + // Pattern 8: Template literal require with platform-specific packages + // Handles: require(`@libsql/${target}`) or require(`@scope/${variable}`) + // where the package name is dynamically constructed but follows platform patterns + if ( + isIdentifier(calleeNode) && + (calleeNode.name === "require" || + customRequireVars.has(calleeNode.name)) && + node.arguments.length === 1 && + node.arguments[0].type === "TemplateLiteral" + ) { + const templateLiteral = node.arguments[0] as BaseASTNode & { + quasis: Array<{ value: { raw: string; cooked: string } }>; + expressions: BaseASTNode[]; + }; + + // Check if this is a simple template like `@scope/${expr}` + // We need at least one quasi (the prefix) and at least one expression + if ( + templateLiteral.quasis.length >= 1 && + templateLiteral.expressions.length >= 1 + ) { + const prefix = templateLiteral.quasis[0].value.cooked; + + // Check if the prefix looks like a scoped package pattern + // e.g., "@libsql/", "@scope/prefix-" + if (prefix && prefix.startsWith("@") && prefix.includes("/")) { + // Try to find a matching platform-specific package + const result = findPlatformSpecificNativePackage( + prefix, + path.dirname(id) + ); + + if (result) { + const { nodeFilePath } = result; + const info = registerNativeFile(nodeFilePath); + const templateNode = node.arguments[0]; + if ( + templateNode.start !== undefined && + templateNode.end !== undefined + ) { + replacements.push({ + start: templateNode.start, + end: templateNode.end, + value: `"./${info.hashedFilename}"`, + }); + modified = true; + } + } + } + } } } @@ -1246,30 +1510,7 @@ export default function nativeFilePlugin( nodeFilePath: string, callNode: CallExpressionNode ): void { - // Check if we already processed this file - let info = nativeFiles.get(nodeFilePath); - - if (!info) { - // Generate hash 
and store - const content = fs.readFileSync(nodeFilePath); - const hash = crypto - .createHash("md5") - .update(content) - .digest("hex") - .slice(0, 8); - - const filename = path.basename(nodeFilePath); - const hashedFilename = generateHashedFilename(filename, hash); - - info = { - content, - hashedFilename, - originalPath: nodeFilePath, - }; - nativeFiles.set(nodeFilePath, info); - // Track reverse mapping for resolveId hook - hashedFilenameToPath.set(hashedFilename, nodeFilePath); - } + const info = registerNativeFile(nodeFilePath); // Determine how to generate the replacement code let replacementCode: string; diff --git a/test/bindings.test.ts b/test/bindings.test.ts index 2a070e3..1324b9d 100644 --- a/test/bindings.test.ts +++ b/test/bindings.test.ts @@ -656,7 +656,9 @@ module.exports = load('binding');`; ); expect(resolveResult).toBeDefined(); - expect(resolveResult).toMatch(/^\0native:/); + // resolveId now returns an object with { id, syntheticNamedExports } + const resolvedId = typeof resolveResult === "object" ? resolveResult.id : resolveResult; + expect(resolvedId).toMatch(/^\0native:/); }); it("should resolve hashed filenames with query parameters", async () => { @@ -692,7 +694,9 @@ module.exports = load('binding');`; ); expect(resolveResult).toBeDefined(); - expect(resolveResult).toMatch(/^\0native:/); + // resolveId now returns an object with { id, syntheticNamedExports } + const resolvedId = typeof resolveResult === "object" ? resolveResult.id : resolveResult; + expect(resolvedId).toMatch(/^\0native:/); }); it("should resolve hash-only format filenames", async () => { @@ -734,7 +738,9 @@ module.exports = load('binding');`; ); expect(resolveResult).toBeDefined(); - expect(resolveResult).toMatch(/^\0native:/); + // resolveId now returns an object with { id, syntheticNamedExports } + const resolvedId = typeof resolveResult === "object" ? 
resolveResult.id : resolveResult; + expect(resolvedId).toMatch(/^\0native:/); }); it("should resolve node-gyp-build transformed paths", async () => { @@ -777,7 +783,9 @@ module.exports = load('binding');`; ); expect(resolveResult).toBeDefined(); - expect(resolveResult).toMatch(/^\0native:/); + // resolveId now returns an object with { id, syntheticNamedExports } + const resolvedId = typeof resolveResult === "object" ? resolveResult.id : resolveResult; + expect(resolvedId).toMatch(/^\0native:/); }); }); @@ -862,13 +870,15 @@ module.exports = { addon };`; if (!match) return; const hashedFilename = match[1]; - const virtualId = await (plugin.resolveId as any).call( + const resolveResult = await (plugin.resolveId as any).call( {} as any, `./${hashedFilename}`, cjsFilePath, {} ); + // resolveId now returns an object with { id, syntheticNamedExports } + const virtualId = typeof resolveResult === "object" ? resolveResult.id : resolveResult; const loadResult = await (plugin.load as any).call({} as any, virtualId); expect(loadResult).toBeDefined(); expect(loadResult).toContain("module.exports"); diff --git a/test/module-format-detection.test.ts b/test/module-format-detection.test.ts index 1b7263e..d5dab5a 100644 --- a/test/module-format-detection.test.ts +++ b/test/module-format-detection.test.ts @@ -77,14 +77,16 @@ export { addon };`; const hashedFilename = match![1]; // Resolve the virtual module ID - const virtualId = await (plugin.resolveId as any).call( + const resolveResult = await (plugin.resolveId as any).call( {} as any, `./${hashedFilename}`, esmFilePath, {} ); - expect(virtualId).toBeDefined(); + expect(resolveResult).toBeDefined(); + // resolveId now returns an object with { id, syntheticNamedExports } + const virtualId = typeof resolveResult === "object" ? 
resolveResult.id : resolveResult; expect(virtualId).toContain("\0native:"); // Check load hook output @@ -151,16 +153,18 @@ export { addon };`; expect(match).not.toBeNull(); const hashedFilename = match![1]; - const virtualId = await (plugin.resolveId as any).call( + const resolveResult2 = await (plugin.resolveId as any).call( {} as any, `./${hashedFilename}`, esmFilePath, {} ); - const loadResult = await (plugin.load as any).call({} as any, virtualId); + // resolveId now returns an object with { id, syntheticNamedExports } + const virtualId2 = typeof resolveResult2 === "object" ? resolveResult2.id : resolveResult2; + const loadResult = await (plugin.load as any).call({} as any, virtualId2); expect(loadResult).toBeDefined(); - + // Should generate ES module syntax expect(loadResult).toContain("import { createRequire }"); expect(loadResult).toContain("export default"); @@ -208,20 +212,22 @@ module.exports = { addon };`; expect(match).not.toBeNull(); const hashedFilename = match![1]; - const virtualId = await (plugin.resolveId as any).call( + const resolveResult = await (plugin.resolveId as any).call( {} as any, `./${hashedFilename}`, cjsFilePath, {} ); + // resolveId now returns an object with { id, syntheticNamedExports } + const virtualId = typeof resolveResult === "object" ? 
resolveResult.id : resolveResult; const loadResult = await (plugin.load as any).call({} as any, virtualId); expect(loadResult).toBeDefined(); - + // Should generate CommonJS syntax expect(loadResult).toContain("module.exports"); expect(loadResult).toContain("require("); - + // Should NOT contain ES module syntax expect(loadResult).not.toContain("import { createRequire }"); expect(loadResult).not.toContain("export default"); @@ -279,16 +285,18 @@ const binding = nodeGypBuild(__dirname);`; expect(match).not.toBeNull(); const hashedFilename = match![1]; - const virtualId = await (plugin.resolveId as any).call( + const resolveResult = await (plugin.resolveId as any).call( {} as any, `./${hashedFilename}`, esmFilePath, {} ); + // resolveId now returns an object with { id, syntheticNamedExports } + const virtualId = typeof resolveResult === "object" ? resolveResult.id : resolveResult; const loadResult = await (plugin.load as any).call({} as any, virtualId); expect(loadResult).toBeDefined(); - + // Should generate ES module syntax expect(loadResult).toContain("import { createRequire }"); expect(loadResult).toContain("export default"); @@ -350,16 +358,18 @@ const binding = nodeGypBuild(__dirname);`; expect(match).not.toBeNull(); const hashedFilename = match![1]; - const virtualId = await (plugin.resolveId as any).call( + const resolveResult = await (plugin.resolveId as any).call( {} as any, `./${hashedFilename}`, esmFilePath, {} ); + // resolveId now returns an object with { id, syntheticNamedExports } + const virtualId = typeof resolveResult === "object" ? 
resolveResult.id : resolveResult; const loadResult = await (plugin.load as any).call({} as any, virtualId); expect(loadResult).toBeDefined(); - + // Should generate ES module syntax (detected from package.json type: module) expect(loadResult).toContain("import { createRequire }"); expect(loadResult).toContain("export default"); @@ -411,16 +421,18 @@ const binding = nodeGypBuild(__dirname);`; expect(match).not.toBeNull(); const hashedFilename = match![1]; - const virtualId = await (plugin.resolveId as any).call( + const resolveResult = await (plugin.resolveId as any).call( {} as any, `./${hashedFilename}`, cjsFilePath, {} ); + // resolveId now returns an object with { id, syntheticNamedExports } + const virtualId = typeof resolveResult === "object" ? resolveResult.id : resolveResult; const loadResult = await (plugin.load as any).call({} as any, virtualId); expect(loadResult).toBeDefined(); - + // Should generate CommonJS syntax expect(loadResult).toContain("module.exports"); expect(loadResult).toContain("require("); diff --git a/test/napi-rs.test.ts b/test/napi-rs.test.ts new file mode 100644 index 0000000..29afeb6 --- /dev/null +++ b/test/napi-rs.test.ts @@ -0,0 +1,920 @@ +import { describe, expect, it, beforeEach, afterEach } from "vitest"; +import nativeFilePlugin from "../src/index.js"; +import type { Plugin } from "vite"; +import fs from "node:fs"; +import path from "node:path"; +import os from "node:os"; +import { parse as acornParse } from "acorn"; + +// Wrapper to provide the same parse signature as Rollup +const parse = (code: string) => + acornParse(code, { ecmaVersion: "latest", sourceType: "module" }); + +/** + * Tests for NAPI-RS auto-generated loader support + * + * NAPI-RS generates native module loaders with a pattern like: + * const { existsSync } = require('fs') + * const { join } = require('path') + * localFileExisted = existsSync(join(__dirname, 'libsql.darwin-arm64.node')) + * if (localFileExisted) { + * nativeBinding = 
require('./libsql.darwin-arm64.node') + * } else { + * nativeBinding = require('@libsql/darwin-arm64') + * } + * + * This plugin detects these patterns and rewrites BOTH the existsSync path + * AND the require path to use the hashed filename. + */ +describe("NAPI-RS Support", () => { + let tempDir: string; + const platform = process.platform; + const arch = process.arch; + + beforeEach(() => { + tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "napi-rs-test-")); + }); + + afterEach(() => { + if (fs.existsSync(tempDir)) { + fs.rmSync(tempDir, { recursive: true, force: true }); + } + }); + + describe("join(__dirname, 'xxx.node') pattern", () => { + it("should rewrite .node path in join() calls", () => { + const plugin = nativeFilePlugin() as Plugin; + + (plugin.configResolved as any)({ + command: "build", + mode: "production", + }); + + // Create a .node file + const nodeFileName = `libsql.${platform}-${arch}.node`; + const nodeFilePath = path.join(tempDir, nodeFileName); + fs.writeFileSync(nodeFilePath, Buffer.from("native binding")); + + // Code that uses join(__dirname, 'xxx.node') + const jsFilePath = path.join(tempDir, "index.js"); + const code = ` + const { join } = require('path'); + const filePath = join(__dirname, '${nodeFileName}'); + `; + + const context = { parse }; + const result = (plugin.transform as any).call(context, code, jsFilePath); + + expect(result).toBeDefined(); + expect(result.code).toContain(".node"); + // Should contain hashed filename (uppercase hash) + // Format: libsql.{platform}-{arch}-{HASH}.node + expect(result.code).toMatch(/libsql\.[a-z]+-[a-z0-9]+-[A-F0-9]+\.node/); + // Should NOT contain original unhashed filename + expect(result.code).not.toContain(`'${nodeFileName}'`); + }); + + it("should rewrite path in existsSync(join(__dirname, 'xxx.node'))", () => { + const plugin = nativeFilePlugin() as Plugin; + + (plugin.configResolved as any)({ + command: "build", + mode: "production", + }); + + // Create a .node file + const 
nodeFileName = `libsql.${platform}-${arch}.node`; + const nodeFilePath = path.join(tempDir, nodeFileName); + fs.writeFileSync(nodeFilePath, Buffer.from("native binding")); + + // Code that uses existsSync with join + const jsFilePath = path.join(tempDir, "index.js"); + const code = ` + const { existsSync } = require('fs'); + const { join } = require('path'); + const exists = existsSync(join(__dirname, '${nodeFileName}')); + `; + + const context = { parse }; + const result = (plugin.transform as any).call(context, code, jsFilePath); + + expect(result).toBeDefined(); + expect(result.code).toContain(".node"); + // Should contain hashed filename + expect(result.code).toMatch(/libsql\.[a-z]+-[a-z0-9]+-[A-F0-9]+\.node/); + }); + + it("should use consistent hash for same file", () => { + const plugin = nativeFilePlugin() as Plugin; + + (plugin.configResolved as any)({ + command: "build", + mode: "production", + }); + + // Create a .node file + const nodeFileName = `libsql.${platform}-${arch}.node`; + const nodeFilePath = path.join(tempDir, nodeFileName); + fs.writeFileSync(nodeFilePath, Buffer.from("native binding")); + + // Code that references the same file twice + const jsFilePath = path.join(tempDir, "index.js"); + const code = ` + const { join } = require('path'); + const path1 = join(__dirname, '${nodeFileName}'); + const path2 = join(__dirname, '${nodeFileName}'); + `; + + const context = { parse }; + const result = (plugin.transform as any).call(context, code, jsFilePath); + + expect(result).toBeDefined(); + // Extract all hashed filenames from the result + const matches = result.code.match(/libsql\.[a-z]+-[a-z0-9]+-[A-F0-9]+\.node/g); + expect(matches).toBeDefined(); + expect(matches!.length).toBe(2); + // Both should have the same hash + expect(matches![0]).toBe(matches![1]); + }); + }); + + describe("Coordinated rewriting", () => { + it("should rewrite BOTH existsSync path AND require path with same hash", () => { + const plugin = nativeFilePlugin() as Plugin; 
+ + (plugin.configResolved as any)({ + command: "build", + mode: "production", + }); + + // Create a .node file + const nodeFileName = `libsql.${platform}-${arch}.node`; + const nodeFilePath = path.join(tempDir, nodeFileName); + fs.writeFileSync(nodeFilePath, Buffer.from("native binding")); + + // The actual NAPI-RS pattern + const jsFilePath = path.join(tempDir, "index.js"); + const code = ` + const { existsSync } = require('fs'); + const { join } = require('path'); + + let nativeBinding; + const localFileExisted = existsSync(join(__dirname, '${nodeFileName}')); + if (localFileExisted) { + nativeBinding = require('./${nodeFileName}'); + } else { + nativeBinding = require('@libsql/${platform}-${arch}'); + } + + module.exports = nativeBinding; + `; + + const context = { parse }; + const result = (plugin.transform as any).call(context, code, jsFilePath); + + expect(result).toBeDefined(); + + // Extract hashed filename from existsSync path (in join call) + const joinMatch = result.code.match(/join\(__dirname,\s*['"]([^'"]+)['"]\)/); + expect(joinMatch).toBeDefined(); + const joinFilename = joinMatch![1]; + + // Extract hashed filename from require call + const requireMatch = result.code.match(/require\(['"]\.\/([^'"]+)['"]\)/); + expect(requireMatch).toBeDefined(); + const requireFilename = requireMatch![1]; + + // CRITICAL: Both must use the SAME hashed filename + expect(joinFilename).toBe(requireFilename); + + // Both should be hashed (contain uppercase hex) + expect(joinFilename).toMatch(/[A-F0-9]{8}/); + expect(requireFilename).toMatch(/[A-F0-9]{8}/); + }); + }); + + describe("Real-world libsql structure", () => { + it("should handle complete NAPI-RS loader code", () => { + const plugin = nativeFilePlugin() as Plugin; + + (plugin.configResolved as any)({ + command: "build", + mode: "production", + }); + + // Create a .node file for the current platform + const nodeFileName = `libsql.${platform}-${arch}.node`; + const nodeFilePath = path.join(tempDir, 
nodeFileName); + fs.writeFileSync(nodeFilePath, Buffer.from("native binding content")); + + // Simplified NAPI-RS loader pattern (like libsql-js) + const jsFilePath = path.join(tempDir, "index.js"); + const code = ` + const { existsSync } = require('fs'); + const { join } = require('path'); + + let nativeBinding; + let loadError; + + switch (process.platform) { + case '${platform}': + switch (process.arch) { + case '${arch}': + const localFileExisted = existsSync(join(__dirname, '${nodeFileName}')); + try { + if (localFileExisted) { + nativeBinding = require('./${nodeFileName}'); + } else { + nativeBinding = require('@libsql/${platform}-${arch}'); + } + } catch (e) { + loadError = e; + } + break; + default: + throw new Error('Unsupported architecture'); + } + break; + default: + throw new Error('Unsupported platform'); + } + + if (!nativeBinding) { + throw loadError || new Error('Failed to load native binding'); + } + + module.exports.Database = nativeBinding.Database; + module.exports.Statement = nativeBinding.Statement; + `; + + const context = { parse }; + const result = (plugin.transform as any).call(context, code, jsFilePath); + + expect(result).toBeDefined(); + expect(result.code).toBeDefined(); + + // Should have rewritten the .node references + expect(result.code).toMatch(/[A-F0-9]{8}\.node/); + + // Original unhashed filename should not appear + expect(result.code).not.toContain(`'${nodeFileName}'`); + expect(result.code).not.toContain(`"./${nodeFileName}"`); + + // The npm package fallback should remain unchanged + expect(result.code).toContain(`@libsql/${platform}-${arch}`); + }); + + it("should only bundle .node files that exist for current platform", () => { + const plugin = nativeFilePlugin() as Plugin; + + (plugin.configResolved as any)({ + command: "build", + mode: "production", + }); + + // Create .node file ONLY for current platform + const currentPlatformFile = `libsql.${platform}-${arch}.node`; + fs.writeFileSync( + path.join(tempDir, 
currentPlatformFile), + Buffer.from("current platform binding") + ); + + // Also create a file for a different platform that should NOT be processed + const otherPlatformFile = "libsql.other-platform.node"; + // Don't create this file - it shouldn't exist + + // Code references current platform (which exists) and other platform (which doesn't) + const jsFilePath = path.join(tempDir, "index.js"); + const code = ` + const { existsSync } = require('fs'); + const { join } = require('path'); + + let nativeBinding; + + // Current platform - file exists + if (existsSync(join(__dirname, '${currentPlatformFile}'))) { + nativeBinding = require('./${currentPlatformFile}'); + } + // Other platform - file does NOT exist + else if (existsSync(join(__dirname, '${otherPlatformFile}'))) { + nativeBinding = require('./${otherPlatformFile}'); + } + + module.exports = nativeBinding; + `; + + const context = { parse }; + const result = (plugin.transform as any).call(context, code, jsFilePath); + + expect(result).toBeDefined(); + expect(result.code).toBeDefined(); + + // Only the current platform's file should be hashed (it exists) + // Other platforms' files should remain unchanged (they don't exist) + const hashedPattern = /[A-F0-9]{8}\.node/g; + const hashedMatches = result.code.match(hashedPattern) || []; + + // Should have hashed references for the file that exists (appears twice in code) + expect(hashedMatches.length).toBe(2); + + // The other platform file should remain unchanged (not hashed) + expect(result.code).toContain(otherPlatformFile); + }); + }); + + describe("ES module context", () => { + it("should handle NAPI-RS pattern in ESM files", () => { + const plugin = nativeFilePlugin() as Plugin; + + (plugin.configResolved as any)({ + command: "build", + mode: "production", + }); + + // Create a .node file + const nodeFileName = `libsql.${platform}-${arch}.node`; + const nodeFilePath = path.join(tempDir, nodeFileName); + fs.writeFileSync(nodeFilePath, Buffer.from("native 
binding")); + + // ESM version of NAPI-RS pattern + const jsFilePath = path.join(tempDir, "index.mjs"); + const code = ` + import { existsSync } from 'fs'; + import { join, dirname } from 'path'; + import { fileURLToPath } from 'url'; + import { createRequire } from 'module'; + + const __filename = fileURLToPath(import.meta.url); + const __dirname = dirname(__filename); + const require = createRequire(import.meta.url); + + let nativeBinding; + const localFileExisted = existsSync(join(__dirname, '${nodeFileName}')); + if (localFileExisted) { + nativeBinding = require('./${nodeFileName}'); + } + + export const Database = nativeBinding?.Database; + `; + + const context = { parse }; + const result = (plugin.transform as any).call(context, code, jsFilePath); + + expect(result).toBeDefined(); + + // Should have rewritten the .node references with hash + expect(result.code).toMatch(/[A-F0-9]{8}\.node/); + }); + }); + + describe("path.join variant patterns", () => { + it("should handle path.join(__dirname, 'xxx.node') pattern", () => { + const plugin = nativeFilePlugin() as Plugin; + + (plugin.configResolved as any)({ + command: "build", + mode: "production", + }); + + // Create a .node file + const nodeFileName = `binding.node`; + const nodeFilePath = path.join(tempDir, nodeFileName); + fs.writeFileSync(nodeFilePath, Buffer.from("native binding")); + + // Code using path.join (not destructured) + const jsFilePath = path.join(tempDir, "index.js"); + const code = ` + const path = require('path'); + const fs = require('fs'); + const exists = fs.existsSync(path.join(__dirname, '${nodeFileName}')); + `; + + const context = { parse }; + const result = (plugin.transform as any).call(context, code, jsFilePath); + + expect(result).toBeDefined(); + expect(result.code).toMatch(/[A-F0-9]{8}\.node/); + }); + + it("should handle imported path module with alias", () => { + const plugin = nativeFilePlugin() as Plugin; + + (plugin.configResolved as any)({ + command: "build", + mode: 
"production", + }); + + // Create a .node file + const nodeFileName = `binding.node`; + const nodeFilePath = path.join(tempDir, nodeFileName); + fs.writeFileSync(nodeFilePath, Buffer.from("native binding")); + + // Code using aliased path import + const jsFilePath = path.join(tempDir, "index.mjs"); + const code = ` + import nodePath from 'path'; + import { existsSync } from 'fs'; + const exists = existsSync(nodePath.join(__dirname, '${nodeFileName}')); + `; + + const context = { parse }; + const result = (plugin.transform as any).call(context, code, jsFilePath); + + expect(result).toBeDefined(); + expect(result.code).toMatch(/[A-F0-9]{8}\.node/); + }); + }); + + describe("Platform-specific npm package require pattern", () => { + it("should detect and rewrite require('@scope/platform-arch') with .node file", () => { + const plugin = nativeFilePlugin() as Plugin; + + (plugin.configResolved as any)({ + command: "build", + mode: "production", + }); + + // Create a mock npm package structure with a .node file + // node_modules/@libsql/darwin-arm64/libsql.darwin-arm64.node + const scopeDir = path.join(tempDir, "node_modules", "@libsql"); + const packageDir = path.join(scopeDir, `${platform}-${arch}`); + fs.mkdirSync(packageDir, { recursive: true }); + + const nodeFileName = `libsql.${platform}-${arch}.node`; + const nodeFilePath = path.join(packageDir, nodeFileName); + fs.writeFileSync(nodeFilePath, Buffer.from("native binding from npm")); + + // Create package.json pointing to the .node file + fs.writeFileSync( + path.join(packageDir, "package.json"), + JSON.stringify({ name: `@libsql/${platform}-${arch}`, main: nodeFileName }) + ); + + // The NAPI-RS loader code that uses the npm package fallback + const jsFilePath = path.join(tempDir, "node_modules", "libsql", "index.js"); + fs.mkdirSync(path.dirname(jsFilePath), { recursive: true }); + const code = ` + const { existsSync } = require('fs'); + const { join } = require('path'); + + let nativeBinding; + const 
localFileExisted = existsSync(join(__dirname, '${nodeFileName}')); + if (localFileExisted) { + nativeBinding = require('./${nodeFileName}'); + } else { + nativeBinding = require('@libsql/${platform}-${arch}'); + } + + module.exports = nativeBinding; + `; + + const context = { parse }; + const result = (plugin.transform as any).call(context, code, jsFilePath); + + expect(result).toBeDefined(); + expect(result.code).toBeDefined(); + + // The npm package require should be rewritten with hashed filename + expect(result.code).toMatch(/[A-F0-9]{8}\.node/); + // Original package name should be replaced + expect(result.code).not.toContain(`require('@libsql/${platform}-${arch}')`); + }); + + it("should handle require('@scope/package') that resolves to .node file", () => { + const plugin = nativeFilePlugin() as Plugin; + + (plugin.configResolved as any)({ + command: "build", + mode: "production", + }); + + // Create a simpler package structure + const packageDir = path.join(tempDir, "node_modules", "@test", "native"); + fs.mkdirSync(packageDir, { recursive: true }); + + const nodeFilePath = path.join(packageDir, "binding.node"); + fs.writeFileSync(nodeFilePath, Buffer.from("test native binding")); + + // Package.json with main pointing to the .node file + fs.writeFileSync( + path.join(packageDir, "package.json"), + JSON.stringify({ name: "@test/native", main: "binding.node" }) + ); + + // Code that requires the package (include .node reference to trigger transform) + const jsFilePath = path.join(tempDir, "index.js"); + const code = ` + // This package exports a .node binding + const native = require('@test/native'); + module.exports = native; + `; + + const context = { parse }; + const result = (plugin.transform as any).call(context, code, jsFilePath); + + expect(result).toBeDefined(); + expect(result.code).toBeDefined(); + + // Should be rewritten to use hashed filename + expect(result.code).toMatch(/[A-F0-9]{8}\.node/); + }); + + it("should handle package with index.node 
as entry point", () => { + const plugin = nativeFilePlugin() as Plugin; + + (plugin.configResolved as any)({ + command: "build", + mode: "production", + }); + + // Create package with index.node + const packageDir = path.join(tempDir, "node_modules", "native-addon"); + fs.mkdirSync(packageDir, { recursive: true }); + + fs.writeFileSync( + path.join(packageDir, "index.node"), + Buffer.from("index native binding") + ); + + fs.writeFileSync( + path.join(packageDir, "package.json"), + JSON.stringify({ name: "native-addon" }) + // No main field - should default to index.node + ); + + const jsFilePath = path.join(tempDir, "index.js"); + // Include .node reference to trigger transform + const code = ` + // Load native .node addon + const addon = require('native-addon'); + module.exports = addon; + `; + + const context = { parse }; + const result = (plugin.transform as any).call(context, code, jsFilePath); + + expect(result).toBeDefined(); + expect(result.code).toBeDefined(); + + // Should be rewritten to use hashed filename + expect(result.code).toMatch(/[A-F0-9]{8}\.node/); + }); + + it("should NOT rewrite require for packages without .node files", () => { + const plugin = nativeFilePlugin() as Plugin; + + (plugin.configResolved as any)({ + command: "build", + mode: "production", + }); + + // Create a normal JS package (no .node file) + const packageDir = path.join(tempDir, "node_modules", "regular-package"); + fs.mkdirSync(packageDir, { recursive: true }); + + fs.writeFileSync( + path.join(packageDir, "index.js"), + "module.exports = { foo: 'bar' };" + ); + + fs.writeFileSync( + path.join(packageDir, "package.json"), + JSON.stringify({ name: "regular-package", main: "index.js" }) + ); + + const jsFilePath = path.join(tempDir, "index.js"); + const code = ` + const pkg = require('regular-package'); + module.exports = pkg; + `; + + const context = { parse }; + const result = (plugin.transform as any).call(context, code, jsFilePath); + + // Should return null (no 
transformation) or code unchanged + if (result) { + expect(result.code).toContain("require('regular-package')"); + expect(result.code).not.toMatch(/[A-F0-9]{8}\.node/); + } + }); + }); + + describe("Template literal require patterns (Pattern 8)", () => { + it("should handle template literal require with scoped packages", () => { + const plugin = nativeFilePlugin() as Plugin; + + (plugin.configResolved as any)({ + command: "build", + mode: "production", + }); + + // Create a scoped package with a .node file for current platform + const nodeModulesDir = path.join(tempDir, "node_modules"); + const scopeDir = path.join(nodeModulesDir, "@libsql"); + const packageDir = path.join(scopeDir, `${platform}-${arch}`); + fs.mkdirSync(packageDir, { recursive: true }); + + // Create index.node file + const nodeFilePath = path.join(packageDir, "index.node"); + fs.writeFileSync(nodeFilePath, Buffer.from("platform native binding")); + + // Create package.json with main pointing to index.node + fs.writeFileSync( + path.join(packageDir, "package.json"), + JSON.stringify({ + name: `@libsql/${platform}-${arch}`, + main: "index.node", + }) + ); + + // Code uses template literal require like real libsql does + const jsFilePath = path.join(tempDir, "index.js"); + const code = ` + const { currentTarget } = require('@neon-rs/load'); + let target = currentTarget(); + const binding = require(\`@libsql/\${target}\`); + module.exports = binding; + `; + + const context = { parse }; + const result = (plugin.transform as any).call(context, code, jsFilePath); + + expect(result).toBeDefined(); + expect(result.code).toBeDefined(); + + // Should have transformed the template literal to a hashed path + expect(result.code).toMatch(/[A-F0-9]{8}\.node/); + + // Original template literal should be replaced + expect(result.code).not.toContain("`@libsql/"); + }); + + it("should find platform package by scanning scope directory", () => { + const plugin = nativeFilePlugin() as Plugin; + + (plugin.configResolved 
as any)({ + command: "build", + mode: "production", + }); + + // Create a package with slightly different naming convention + const nodeModulesDir = path.join(tempDir, "node_modules"); + const scopeDir = path.join(nodeModulesDir, "@nativelib"); + // Use platform name in a different format (matches platform scanning logic) + const packageDir = path.join(scopeDir, `${platform}-${arch}-binding`); + fs.mkdirSync(packageDir, { recursive: true }); + + // Create native.node file + const nodeFilePath = path.join(packageDir, "native.node"); + fs.writeFileSync(nodeFilePath, Buffer.from("native binding content")); + + fs.writeFileSync( + path.join(packageDir, "package.json"), + JSON.stringify({ + name: `@nativelib/${platform}-${arch}-binding`, + main: "native.node", + }) + ); + + const jsFilePath = path.join(tempDir, "index.js"); + const code = ` + const lib = require(\`@nativelib/\${process.platform}-\${process.arch}-binding\`); + module.exports = lib; + `; + + const context = { parse }; + const result = (plugin.transform as any).call(context, code, jsFilePath); + + expect(result).toBeDefined(); + expect(result.code).toBeDefined(); + + // Should have transformed the template literal + expect(result.code).toMatch(/[A-F0-9]{8}\.node/); + }); + + it("should NOT transform template literals that don't match platform packages", () => { + const plugin = nativeFilePlugin() as Plugin; + + (plugin.configResolved as any)({ + command: "build", + mode: "production", + }); + + // Don't create any platform packages + + const jsFilePath = path.join(tempDir, "index.js"); + const code = ` + const config = require(\`@myapp/\${env}\`); + module.exports = config; + `; + + const context = { parse }; + const result = (plugin.transform as any).call(context, code, jsFilePath); + + // Should not transform (no matching packages) + if (result) { + expect(result.code).toContain("`@myapp/"); + expect(result.code).not.toMatch(/[A-F0-9]{8}\.node/); + } + }); + }); + + describe("Rollup interop - 
syntheticNamedExports", () => { + /** + * This test suite covers the fix for the "databaseOpen is not a function" error. + * + * The issue: When Rollup bundles a native module, it wraps it with getAugmentedNamespace + * which creates { __esModule: true, default: nativeModule }. When code destructures + * like `const { databaseOpen } = require('@libsql/...')`, it fails because databaseOpen + * is on the default export, not the namespace object. + * + * The fix: resolveId returns { id, syntheticNamedExports: true } which tells Rollup + * to resolve named exports from the default export's properties. + */ + + it("should return syntheticNamedExports: true from resolveId for hashed .node files", async () => { + const plugin = nativeFilePlugin() as Plugin; + + (plugin.configResolved as any)({ + command: "build", + mode: "production", + }); + + // Create a .node file + const nodeFilePath = path.join(tempDir, "native.node"); + fs.writeFileSync(nodeFilePath, Buffer.from("native binary")); + + // Transform code to generate hashed filename + const jsFilePath = path.join(tempDir, "index.js"); + const code = `const native = require('./native.node');`; + + const context = { parse }; + const transformResult = (plugin.transform as any).call( + context, + code, + jsFilePath + ); + + expect(transformResult).toBeDefined(); + + // Extract hashed filename from transformed code + const match = transformResult.code.match(/require\("\.\/([^"]+\.node)"\)/); + expect(match).toBeDefined(); + const hashedFilename = match![1]; + + // Now test resolveId returns object with syntheticNamedExports + const resolveResult = await (plugin.resolveId as any).call( + {} as any, + `./${hashedFilename}`, + jsFilePath, + {} + ); + + expect(resolveResult).toBeDefined(); + expect(typeof resolveResult).toBe("object"); + expect(resolveResult.id).toContain("\0native:"); + expect(resolveResult.syntheticNamedExports).toBe(true); + }); + + it("should generate ES module code in load hook that enables destructuring", 
async () => { + const plugin = nativeFilePlugin() as Plugin; + + (plugin.configResolved as any)({ + command: "build", + mode: "production", + }); + + // Create a .node file + const nodeFilePath = path.join(tempDir, "native.node"); + fs.writeFileSync(nodeFilePath, Buffer.from("native binary")); + + // Transform code to generate hashed filename + const esmFilePath = path.join(tempDir, "index.mjs"); + const code = ` + import { createRequire } from 'module'; + const require = createRequire(import.meta.url); + const native = require('./native.node'); + `; + + const moduleAwareParse = (code: string) => + acornParse(code, { ecmaVersion: "latest", sourceType: "module" }); + const context = { parse: moduleAwareParse }; + const transformResult = (plugin.transform as any).call( + context, + code, + esmFilePath + ); + + expect(transformResult).toBeDefined(); + + // Extract hashed filename - try multiple patterns + let match = transformResult.code.match( + /createRequire\(import\.meta\.url\)\("\.\/([^"]+\.node)"\)/ + ); + if (!match) { + match = transformResult.code.match(/require\("\.\/([^"]+\.node)"\)/); + } + if (!match) { + match = transformResult.code.match(/require\('\.\/([^']+\.node)'\)/); + } + expect(match).not.toBeNull(); + const hashedFilename = match![1]; + + // Get virtual module ID + const resolveResult = await (plugin.resolveId as any).call( + {} as any, + `./${hashedFilename}`, + esmFilePath, + {} + ); + + const virtualId = + typeof resolveResult === "object" ? 
resolveResult.id : resolveResult; + + // Test load hook output + const loadResult = await (plugin.load as any).call({} as any, virtualId); + + expect(loadResult).toBeDefined(); + // Should use ES module syntax with default export + expect(loadResult).toContain("export default"); + expect(loadResult).toContain("createRequire"); + // The default export enables syntheticNamedExports to work + // When Rollup sees `import { foo } from 'virtual-module'` and syntheticNamedExports is true, + // it will look for `foo` on the default export + }); + + it("should work with libsql-style destructuring pattern", async () => { + const plugin = nativeFilePlugin() as Plugin; + + (plugin.configResolved as any)({ + command: "build", + mode: "production", + }); + + // Create platform package with .node file + const nodeModulesDir = path.join(tempDir, "node_modules"); + const scopeDir = path.join(nodeModulesDir, "@libsql"); + const packageDir = path.join(scopeDir, `${platform}-${arch}`); + fs.mkdirSync(packageDir, { recursive: true }); + + const nodeFilePath = path.join(packageDir, "index.node"); + fs.writeFileSync(nodeFilePath, Buffer.from("libsql native binding")); + + fs.writeFileSync( + path.join(packageDir, "package.json"), + JSON.stringify({ + name: `@libsql/${platform}-${arch}`, + main: "index.node", + }) + ); + + // Code that destructures named exports (like libsql does) + // This pattern was failing with "databaseOpen is not a function" + const jsFilePath = path.join(tempDir, "index.js"); + const code = ` + const { currentTarget } = require('@neon-rs/load'); + let target = currentTarget(); + + // This destructuring pattern requires syntheticNamedExports to work + const { + databaseOpen, + databaseClose, + databaseExecSync, + } = require(\`@libsql/\${target}\`); + + module.exports = { databaseOpen, databaseClose, databaseExecSync }; + `; + + const context = { parse }; + const transformResult = (plugin.transform as any).call( + context, + code, + jsFilePath + ); + + 
expect(transformResult).toBeDefined(); + expect(transformResult.code).toBeDefined(); + + // Should have transformed the template literal to use hashed .node file + expect(transformResult.code).toMatch(/[A-F0-9]{8}\.node/); + + // The require should be rewritten to a relative path + expect(transformResult.code).not.toContain("`@libsql/"); + + // Extract the hashed filename and verify resolveId returns syntheticNamedExports + const match = transformResult.code.match(/require\("\.\/([^"]+\.node)"\)/); + expect(match).toBeDefined(); + + const resolveResult = await (plugin.resolveId as any).call( + {} as any, + `./${match![1]}`, + jsFilePath, + {} + ); + + // This is the critical fix - syntheticNamedExports must be true + // so that destructuring like { databaseOpen, ... } works + expect(resolveResult.syntheticNamedExports).toBe(true); + }); + }); +}); diff --git a/test/node-gyp-build.test.ts b/test/node-gyp-build.test.ts index 4c2f0cf..9d0b256 100644 --- a/test/node-gyp-build.test.ts +++ b/test/node-gyp-build.test.ts @@ -1993,13 +1993,15 @@ const binding = nodeGypBuild(__dirname);`; if (!match) return; const hashedFilename = match[1]; - const virtualId = await (plugin.resolveId as any).call( + const resolveResult = await (plugin.resolveId as any).call( {} as any, `./${hashedFilename}`, esmFilePath, {} ); + // resolveId now returns an object with { id, syntheticNamedExports } + const virtualId = typeof resolveResult === "object" ? 
resolveResult.id : resolveResult; const loadResult = await (plugin.load as any).call({} as any, virtualId); expect(loadResult).toBeDefined(); expect(loadResult).toContain("import { createRequire }"); @@ -2041,13 +2043,15 @@ const binding = nodeGypBuild(__dirname);`; if (!match) return; const hashedFilename = match[1]; - const virtualId = await (plugin.resolveId as any).call( + const resolveResult = await (plugin.resolveId as any).call( {} as any, `./${hashedFilename}`, cjsFilePath, {} ); + // resolveId now returns an object with { id, syntheticNamedExports } + const virtualId = typeof resolveResult === "object" ? resolveResult.id : resolveResult; const loadResult = await (plugin.load as any).call({} as any, virtualId); expect(loadResult).toBeDefined(); expect(loadResult).toContain("module.exports");