diff --git a/README.md b/README.md index 2439615..9b15b72 100644 --- a/README.md +++ b/README.md @@ -7,6 +7,8 @@ Offical [Apache CouchDB](https://couchdb.apache.org/) library for [Node.js](http > Note: Nano >=11.0.0 is a **breaking change for Node.js versions 16 and older**. Nano 11 uses Node.js's built-in "fetch" HTTP client but this is only available in Node.js versions 18 or later. If you are using Node 16 or older then continue using Nano 10. > See our [migration guide](migration_guide_v10_to_v11.md) for moving from Nano 10 to Nano 11. +> Note: Nano >=12.0.0 is ESM-only, meaning that the `import` syntax is used to load the module instead of `require`. + Features: * **Minimalistic** - There is only a minimum of abstraction between you and @@ -26,7 +28,7 @@ or save `nano` as a dependency of your project with npm install --save nano -Note the minimum required version of Node.js is 10. +Note the minimum required version of Node.js is 20. ## Table of contents @@ -108,7 +110,8 @@ Note the minimum required version of Node.js is 10. To use `nano` you need to connect it to your CouchDB install, to do that: ```js -const nano = require('nano')('http://127.0.0.1:5984'); +import Nano from 'nano' +const nano = Nano('http://127.0.0.1:5984') ``` > Note: Supplying authentication credentials in the URL e.g. `http://admin:mypassword@localhost:5984` is deprecated. Use `nano.auth` instead. @@ -116,13 +119,13 @@ const nano = require('nano')('http://127.0.0.1:5984'); To create a new database: ```js -nano.db.create('alice'); +nano.db.create('alice') ``` and to use an existing database: ```js -const alice = nano.db.use('alice'); +const alice = nano.db.use('alice') ``` Under-the-hood, calls like `nano.db.create` are making HTTP API calls to the CouchDB service. Such operations are *asynchronous*. There are two ways to receive the asynchronous data back from the library @@ -184,15 +187,16 @@ You can also see your document in futon (http://127.0.0.1:5984/_utils). 
Configuring nano to use your database server is as simple as: ```js -const nano = require('nano')('http://127.0.0.1:5984') -const db = nano.use('foo'); +import Nano from 'nano' +const nano = Nano('http://127.0.0.1:5984') +const db = nano.use('foo') ``` If you don't need to instrument database objects you can simply: ```js // nano parses the URL and knows this is a database -const db = require('nano')('http://127.0.0.1:5984/foo'); +const db = Nano('http://127.0.0.1:5984/foo') ``` You can tell nano to not parse the URL (maybe the server is behind a proxy, is accessed through a rewrite rule or other): @@ -200,12 +204,12 @@ You can tell nano to not parse the URL (maybe the server is behind a proxy, is a ```js // nano does not parse the URL and return the server api // "http://127.0.0.1:5984/prefix" is the CouchDB server root -const couch = require('nano')( +const couch = Nano( { url : "http://127.0.0.1:5984/prefix" parseUrl : false - }); -const db = couch.use('foo'); + }) +const db = couch.use('foo') ``` ### Pool size and open sockets @@ -213,6 +217,8 @@ const db = couch.use('foo'); To specify the number of connections, timeouts and pool size, supply an `agentOptions` object when starting up Nano. 
```js +import Nano from 'nano' +import undici from 'undici' const agentOptions = { bodyTimeout: 30000, headersTimeout: 30000, @@ -229,7 +235,6 @@ const agentOptions = { connections: null, maxRedirections: 0 } -const undici = require('undici') const undiciOptions = new undici.Agent(agentOptions) const nano = Nano({ url: 'http://127.0.0.1:5984', undiciOptions }) ``` @@ -251,13 +256,13 @@ To supply customer headers with each request, supply a headers object when start ```js -const couch = require('nano')( +const couch = Nano( { url : "http://127.0.0.1:5984/prefix" headers: { mycustomheader: '42' } - }); + }) ``` ## TypeScript @@ -342,9 +347,9 @@ const dblist = await nano.db.list() Lists all the CouchDB databases as a stream: ```js -nano.db.listAsStream() +(await nano.db.listAsStream()) .on('error', (e) => console.error('error', e)) - .pipe(process.stdout); + .pipe(process.stdout) ``` ### nano.db.compact(name, [designname]) @@ -396,7 +401,7 @@ by the call to `replication.enable`: const r = await nano.db.replication.enable('alice', 'http://admin:password@otherhost.com:5984/alice', { create_target:true }) -await nano.db.replication.disable(r.id); +await nano.db.replication.disable(r.id) ``` ### nano.db.changes(name, [params]) @@ -413,7 +418,7 @@ const c = await nano.db.changes('alice') Same as `nano.db.changes` but returns a stream. ```js -nano.db.changes('alice').pipe(process.stdout); +( await nano.db.changes('alice')).pipe(process.stdout) ``` ### nano.db.info() @@ -429,7 +434,7 @@ const info = await nano.db.info() Returns a database object that allows you to perform operations against that database: ```js -const alice = nano.use('alice'); +const alice = nano.use('alice') await alice.insert({ happy: true }, 'rabbit') ``` @@ -505,7 +510,7 @@ The response is an object with [CouchDB cluster information](https://docs.couchd Inserts `doc` in the database with optional `params`. If params is a string, it's assumed it is the intended document `_id`. 
If params is an object, it's passed as query string parameters and `docName` is checked for defining the document `_id`: ```js -const alice = nano.use('alice'); +const alice = nano.use('alice') const response = await alice.insert({ happy: true }, 'rabbit') ``` @@ -570,7 +575,7 @@ Bulk operations(update/delete/insert) on the database, refer to the const documents = [ { a:1, b:2 }, { _id: 'tiger', striped: true} -]; +] const response = await alice.bulk({ docs: documents }) ``` @@ -581,9 +586,9 @@ List all the docs in the database . ```js const doclist = await alice.list().then((body)=>{ body.rows.forEach((doc) => { - console.log(doc); + console.log(doc) }) -}); +}) ``` or with optional query string additions `params`: @@ -597,7 +602,7 @@ const doclist = await alice.list({include_docs: true}) List all the docs in the database as a stream. ```js -alice.listAsStream() +(await alice.listAsStream()) .on('error', (e) => console.error('error', e)) .pipe(process.stdout) ``` @@ -610,7 +615,7 @@ additional query string `params` can be specified, `include_docs` is always set to `true`. 
```js -const keys = ['tiger', 'zebra', 'donkey']; +const keys = ['tiger', 'zebra', 'donkey'] const datat = await alice.fetch({keys: keys}) ``` @@ -632,7 +637,7 @@ Create index on database fields, as specified in const indexDef = { index: { fields: ['foo'] }, name: 'fooindex' -}; +} const response = await alice.createIndex(indexDef) ``` @@ -656,11 +661,11 @@ const db = nano.db.use('mydb') db.changesReader.start() .on('change', (change) => { console.log(change) }) .on('batch', (b) => { - console.log('a batch of', b.length, 'changes has arrived'); + console.log('a batch of', b.length, 'changes has arrived') }).on('seq', (s) => { - console.log('sequence token', s); + console.log('sequence token', s) }).on('error', (e) => { - console.error('error', e); + console.error('error', e) }) ``` @@ -671,7 +676,7 @@ If you want `changesReader` to hold off making the next `_changes` API call unti ```js db.changesReader.get({wait: true}) .on('batch', (b) => { - console.log('a batch of', b.length, 'changes has arrived'); + console.log('a batch of', b.length, 'changes has arrived') // do some asynchronous work here and call "changesReader.resume()" // when you're ready for the next API call to be dispatched. // In this case, wait 5s before the next changes feed request. 
@@ -679,8 +684,8 @@ db.changesReader.get({wait: true}) db.changesReader.resume() }, 5000) }).on('end', () => { - console.log('changes feed monitoring has stopped'); - }); + console.log('changes feed monitoring has stopped') + }) ``` You may supply a number of options when you start to listen to the changes feed: @@ -853,12 +858,12 @@ Fetch documents from a partition as a stream: ```js // fetch document id/revs from a partition -nano.db.partitionedListAsStream('canidae') +(await nano.db.partitionedListAsStream('canidae')) .on('error', (e) => console.error('error', e)) .pipe(process.stdout) // add document bodies but limit size of response -nano.db.partitionedListAsStream('canidae', { include_docs: true, limit: 5 }) +;(await nano.db.partitionedListAsStream('canidae', { include_docs: true, limit: 5 })) .on('error', (e) => console.error('error', e)) .pipe(process.stdout) ``` @@ -878,7 +883,7 @@ Query documents from a partition by supplying a Mango selector as a stream: ```js // find document whose name is 'wolf' in the 'canidae' partition -db.partitionedFindAsStream('canidae', { 'selector' : { 'name': 'Wolf' }}) +(await db.partitionedFindAsStream('canidae', { 'selector' : { 'name': 'Wolf' }})) .on('error', (e) => console.error('error', e)) .pipe(process.stdout) ``` @@ -903,7 +908,7 @@ Search documents from a partition by supplying a Lucene query as a stream: const params = { q: 'name:\'Wolf\'' } -db.partitionedSearchAsStream('canidae', 'search-ddoc', 'search-index', params) +;(await db.partitionedSearchAsStream('canidae', 'search-ddoc', 'search-index', params)) .on('error', (e) => console.error('error', e)) .pipe(process.stdout) // { total_rows: ... , bookmark: ..., rows: [ ...] 
} @@ -933,7 +938,7 @@ const params = { endkey: 'b', limit: 1 } -db.partitionedViewAsStream('canidae', 'view-ddoc', 'view-name', params) +;(await db.partitionedViewAsStream('canidae', 'view-ddoc', 'view-name', params)) .on('error', (e) => console.error('error', e)) .pipe(process.stdout) // { rows: [ { key: ... , value: [Object] } ] } @@ -947,13 +952,13 @@ Inserts a `doc` together with `attachments` and `params`. If params is a string, The `attachments` parameter must be an array of objects with `name`, `data` and `content_type` properties. ```js -const fs = require('fs'); +import fs from 'node:fs' fs.readFile('rabbit.png', (err, data) => { if (!err) { await alice.multipart.insert({ foo: 'bar' }, [{name: 'rabbit.png', data: data, content_type: 'image/png'}], 'mydoc') } -}); +}) ``` ### db.multipart.get(docname, [params]) @@ -973,7 +978,7 @@ Inserts an attachment `attname` to `docname`, in most cases [CouchDB doc](https://docs.couchdb.org/en/latest/api/document/attachments.html#db-doc-attachment) for more details. ```js -const fs = require('fs'); +import fs from 'node:fs' fs.readFile('rabbit.png', (err, data) => { if (!err) { @@ -983,13 +988,13 @@ fs.readFile('rabbit.png', (err, data) => { 'image/png', { rev: '12-150985a725ec88be471921a54ce91452' }) } -}); +}) ``` ### db.attachment.insertAsStream(docname, attname, att, contenttype, [params]) As of Nano 9.x, the function `db.attachment.insertAsStream` is now deprecated. Now simply pass -a readable stream to `db.attachment.insert` as the third paramseter. +a readable stream to `db.attachment.insert` as the third parameter. ### db.attachment.get(docname, attname, [params]) Get `docname`'s attachment `attname` with optional query string additions `params`. 
```js -const fs = require('fs'); +import fs from 'node:fs' const body = await alice.attachment.get('rabbit', 'rabbit.png') fs.writeFile('rabbit.png', body) @@ -1006,10 +1011,10 @@ fs.writeFile('rabbit.png', body) ### db.attachment.getAsStream(docname, attname, [params]) ```js -const fs = require('fs'); -alice.attachment.getAsStream('rabbit', 'rabbit.png') +import fs from 'node:fs' +(await alice.attachment.getAsStream('rabbit', 'rabbit.png')) .on('error', e => console.error) - .pipe(fs.createWriteStream('rabbit.png')); + .pipe(fs.createWriteStream('rabbit.png')) ``` ### db.attachment.destroy(docname, attname, [params]) @@ -1057,9 +1062,9 @@ const body = alice.view('characters', 'happy_ones', { include_docs: true }) Same as `db.view` but returns a stream: ```js -alice.viewAsStream('characters', 'happy_ones', {reduce: false}) +(await alice.viewAsStream('characters', 'happy_ones', {reduce: false})) .on('error', (e) => console.error('error', e)) - .pipe(process.stdout); + .pipe(process.stdout) ``` ### db.viewWithList(designname, viewname, listname, [params]) @@ -1075,9 +1080,9 @@ const body = await alice.viewWithList('characters', 'happy_ones', 'my_list') Calls a list function fed by the given view from the specified design document as a stream. ```js -alice.viewWithListAsStream('characters', 'happy_ones', 'my_list') +(await alice.viewWithListAsStream('characters', 'happy_ones', 'my_list')) .on('error', (e) => console.error('error', e)) - .pipe(process.stdout); + .pipe(process.stdout) ``` ### db.show(designname, showname, doc_id, [params]) @@ -1138,7 +1143,7 @@ Check out the tests for a fully functioning example. Calls a view of the specified design with optional query string additions `params`. Returns stream. 
```js -alice.search('characters', 'happy_ones', { q: 'cat' }).pipe(process.stdout); +(await alice.search('characters', 'happy_ones', { q: 'cat' })).pipe(process.stdout) ``` ### db.find(selector) @@ -1154,7 +1159,7 @@ const q = { }, fields: [ "name", "age", "tags", "url" ], limit:50 -}; +} const response = await alice.find(q) ``` @@ -1171,10 +1176,10 @@ const q = { }, fields: [ "name", "age", "tags", "url" ], limit:50 -}; -alice.findAsStream(q) +} +;(await alice.findAsStream(q)) .on('error', (e) => console.error('error', e)) - .pipe(process.stdout); + .pipe(process.stdout) ``` ## using cookie authentication @@ -1182,7 +1187,8 @@ alice.findAsStream(q) Nano supports making requests using CouchDB's [cookie authentication](http://guide.couchdb.org/editions/1/en/security.html#cookies) functionality. If you initialise *Nano* so that it is cookie-aware, you may call `nano.auth` first to get a session cookie. Nano will behave like a web browser, remembering your session cookie and refreshing it if a new one is received in a future HTTP response. ```js -const nano = require('nano')({ +import Nano from 'nano' +const nano = Nano({ url: 'http://127.0.0.1:5984' }) const username = 'user' @@ -1237,12 +1243,12 @@ function getrabbitrev(rev) { doc: 'rabbit', method: 'get', params: { rev: rev } - }); + }) } getrabbitrev('4-2e6cdc4c7e26b745c2881a24e0eeece2').then((body) => { - console.log(body); -}); + console.log(body) +}) ``` ### Pipes @@ -1250,12 +1256,13 @@ getrabbitrev('4-2e6cdc4c7e26b745c2881a24e0eeece2').then((body) => { You can pipe the return values of certain nano functions like other stream. 
For example if our `rabbit` document has an attachment with name `picture.png` you can pipe it to a `writable stream`: ```js -const fs = require('fs'); -const nano = require('nano')('http://127.0.0.1:5984/'); -const alice = nano.use('alice'); -alice.attachment.getAsStream('rabbit', 'picture.png') +import fs from 'node:fs' +import Nano from 'nano' +const nano = Nano('http://127.0.0.1:5984/') +const alice = nano.use('alice') +;(await alice.attachment.getAsStream('rabbit', 'picture.png')) .on('error', (e) => console.error('error', e)) - .pipe(fs.createWriteStream('/tmp/rabbit.png')); + .pipe(fs.createWriteStream('/tmp/rabbit.png')) ``` then open `/tmp/rabbit.png` and you will see the rabbit picture. @@ -1285,13 +1292,12 @@ const nano = Nano({ url: process.env.COUCH_URL, log: console.log }) You may supply your own logging function to format the data before output: ```js -const url = require('url') const logger = (data) => { // only output logging if there is an environment variable set if (process.env.LOG === 'nano') { // if this is a request if (typeof data.err === 'undefined') { - const u = new URL(data.uri) + const u = new URL(data.uri) console.log(data.method, u.pathname, data.qs) } else { // this is a response diff --git a/lib/changesreader.js b/lib/changesreader.js index 98ca422..68d93b9 100644 --- a/lib/changesreader.js +++ b/lib/changesreader.js @@ -1,13 +1,13 @@ -const EventEmitter = require('events').EventEmitter +import EventEmitter from 'node:events' +import stream from 'node:stream' const AbortController = global.AbortController -const stream = require('stream') const EVENT_BATCH = 'batch' const EVENT_CHANGE = 'change' const EVENT_SEQ = 'seq' const EVENT_ERROR = 'error' // streaming line breaker -const liner = () => { +function liner() { const liner = new stream.Transform({ objectMode: true }) liner._transform = function (chunk, encoding, done) { @@ -33,7 +33,7 @@ const liner = () => { } // streaming change processor -const changeProcessor = (ee, 
batchSize) => { +function changeProcessor(ee, batchSize) { const changeProcessor = new stream.Transform({ objectMode: true }) const buffer = [] changeProcessor.lastSeq = '0' @@ -81,7 +81,7 @@ const changeProcessor = (ee, batchSize) => { * @param {String} db - Name of the database. * @param {Function} request - Nano.relax */ -class ChangesReader { +export default class ChangesReader { // constructor constructor (db, request) { this.db = db @@ -278,21 +278,23 @@ class ChangesReader { } const lin = liner() const cp = changeProcessor(self.ee, self.batchSize) - self.request(req) - .on(EVENT_ERROR, (e) => { - self.ee.emit(EVENT_ERROR, e) - }) - .pipe(lin) - .pipe(cp) - .on('finish', (lastSeq) => { - // the 'end' event was triggering before the last data event - setTimeout(() => { - self.ee.emit('end', cp.lastSeq) - }, 10) - }) + setImmediate(async () => { + const s = await self.request(req) + s.on(EVENT_ERROR, (e) => { + self.ee.emit(EVENT_ERROR, e) + }) + .pipe(lin) + .pipe(cp) + .on('finish', (lastSeq) => { + // the 'end' event was triggering before the last data event + setTimeout(() => { + self.ee.emit('end', cp.lastSeq) + }, 10) + }) + }) + return self.ee } } -module.exports = ChangesReader diff --git a/lib/cookie.js b/lib/cookie.js index d18a4ce..534f345 100644 --- a/lib/cookie.js +++ b/lib/cookie.js @@ -1,7 +1,6 @@ -const { URL } = require('url') // a simple cookie jar -class CookieJar { +export default class CookieJar { // create new empty cookie jar constructor () { this.jar = [] @@ -126,4 +125,3 @@ class CookieJar { } } -module.exports = CookieJar diff --git a/lib/multipart.js b/lib/multipart.js index cf7f946..ee4e56b 100644 --- a/lib/multipart.js +++ b/lib/multipart.js @@ -4,7 +4,7 @@ const DASHES = '--' // generate the payload, boundary and header for a multipart/related request // to upload binary attachments to CouchDB. 
// https://www.w3.org/Protocols/rfc1341/7_2_Multipart.html -class MultiPartFactory { +export default class MultiPartFactory { // constructor constructor (parts) { // generate a unique id that forms the boundary between parts @@ -50,4 +50,3 @@ class MultiPartFactory { } } -module.exports = MultiPartFactory diff --git a/lib/nano.d.ts b/lib/nano.d.ts index d1809b9..6294abf 100644 --- a/lib/nano.d.ts +++ b/lib/nano.d.ts @@ -395,10 +395,10 @@ declare namespace nano { list(params: DocumentListParams): Promise>; /** List document from this database as a stream. * @see Docs: {@link http://docs.couchdb.org/en/latest/api/database/bulk-api.html#get--db-_all_docs} */ - listAsStream(): NodeJS.ReadStream; + listAsStream(): Promise; /** List document from this database as a stream with options. * @see Docs: {@link http://docs.couchdb.org/en/latest/api/database/bulk-api.html#get--db-_all_docs} */ - listAsStream(params: DocumentListParams): NodeJS.ReadStream; + listAsStream(params: DocumentListParams): Promise; /** Fetch a list of documents by _id. * @see Docs: {@link http://docs.couchdb.org/en/latest/api/database/bulk-api.html#post--db-_all_docs} */ fetch(docnames: BulkFetchDocsWrapper): Promise>; @@ -482,7 +482,7 @@ declare namespace nano { designname: string, searchname: string, params: DocumentSearchParams - ): NodeJS.ReadStream; + ): Promise; /** Low-level wrapper that executes a view from a Design Document. 
* @see Docs: {@link http://docs.couchdb.org/en/latest/api/ddoc/views.html#get--db-_design-ddoc-_view-view} */ baseView( @@ -509,14 +509,14 @@ declare namespace nano { viewAsStream( designname: string, viewname: string - ): NodeJS.ReadStream; + ): Promise; /** Executes a view from a Design Document, with options as a stream * @see Docs: {@link http://docs.couchdb.org/en/latest/api/ddoc/views.html#get--db-_design-ddoc-_view-view} */ viewAsStream( designname: string, viewname: string, params: DocumentViewParams - ): NodeJS.ReadStream; + ): Promise; /** Applies a list function to a view. * @see Docs: {@link http://docs.couchdb.org/en/latest/api/ddoc/render.html#db-design-design-doc-list-list-name-view-name} */ viewWithList( @@ -538,7 +538,7 @@ declare namespace nano { designname: string, viewname: string, listname: string - ): NodeJS.ReadStream; + ): Promise; /** Applies a list function to a view with options as a stream. * @see Docs: {@link http://docs.couchdb.org/en/latest/api/ddoc/render.html#db-design-design-doc-list-list-name-view-name} */ viewWithListAsStream( @@ -546,13 +546,13 @@ declare namespace nano { viewname: string, listname: string, params: DocumentViewParams - ): NodeJS.ReadStream; + ): Promise; /** Run Mango query. * @see Docs: {@link http://docs.couchdb.org/en/latest/api/database/find.html#db-find} */ find(query: MangoQuery): Promise >; /** Run Mango query as a stream. * @see Docs: {@link http://docs.couchdb.org/en/latest/api/database/find.html#db-find} */ - findAsStream(query: MangoQuery): NodeJS.ReadStream; + findAsStream(query: MangoQuery): Promise; /** Server scope */ server: ServerScope; /** Fetch information about a single partition in this database. @@ -563,13 +563,13 @@ declare namespace nano { partitionedList(partitionKey: string, params?: DocumentFetchParams): Promise>; /** List documents in a single partition in this database as a stream. 
* @see Docs: {@link https://docs.couchdb.org/en/latest/api/partitioned-dbs.html#db-partition-partition-all-docs} */ - partitionedListAsStream(partitionKey: string, params?: DocumentFetchParams): NodeJS.ReadStream; + partitionedListAsStream(partitionKey: string, params?: DocumentFetchParams): Promise; /** Run Mango query a single partition in this database. * @see Docs: {@link https://docs.couchdb.org/en/latest/api/partitioned-dbs.html#db-partition-partition-id-find} */ partitionedFind(partitionKey: string, query: MangoQuery): Promise >; /** Run Mango query a single partition in this database, as a stream. * @see Docs: {@link https://docs.couchdb.org/en/latest/api/partitioned-dbs.html#db-partition-partition-id-find} */ - partitionedFindAsStream(partitionKey: string, query: MangoQuery): NodeJS.ReadStream; + partitionedFindAsStream(partitionKey: string, query: MangoQuery): Promise; /** Run a full-text search in a single partition in this database. */ partitionedSearch( partitionKey: string, @@ -583,7 +583,7 @@ declare namespace nano { designname: string, searchname: string, params: DocumentSearchParams - ): NodeJS.ReadStream; + ): Promise; /** Executes the specified view function in a single partition from the specified design document. * @see Docs: {@link https://docs.couchdb.org/en/latest/api/partitioned-dbs.html#db-partition-partition-design-design-doc-view-view-name} */ partitionedView( @@ -599,7 +599,7 @@ declare namespace nano { designname: string, viewname: string, params: DocumentViewParams - ): NodeJS.ReadStream; + ): Promise; } /** attachment data */ @@ -649,7 +649,7 @@ declare namespace nano { get(docname: string, attname: string): Promise; /** Get an attachment as a stream. * @see Docs: {@link https://docs.couchdb.org/en/latest/api/document/attachments.html#get--db-docid-attname} */ - getAsStream(docname: string, attname: string): NodeJS.ReadStream; + getAsStream(docname: string, attname: string): Promise; /** Get an attachment with options. 
* @see Docs: {@link https://docs.couchdb.org/en/latest/api/document/attachments.html#get--db-docid-attname} */ get( diff --git a/lib/nano.js b/lib/nano.js index f21389a..845b603 100644 --- a/lib/nano.js +++ b/lib/nano.js @@ -10,31 +10,29 @@ // License for the specific language governing permissions and limitations under // the License. -const { URL } = require('url') -const assert = require('assert') -const stream = require('stream') -const Readable = stream.Readable -const ChangesReader = require('./changesreader.js') -const CookieJar = require('./cookie.js') -const MultiPartFactory = require('./multipart.js') -const pkg = require('../package.json') - +import assert from 'node:assert' +import { Readable } from 'node:stream' +import ChangesReader from './changesreader.js' +import CookieJar from './cookie.js' +import MultiPartFactory from './multipart.js' +import pkg from '../package.json' with { type: 'json' } + +const PARAMS_TO_ENCODE = ['startkey', 'endkey', 'key', 'keys', 'start_key', 'end_key'] +const MIME_JSON = 'application/json' +const CONTENT_TYPE = 'content-type' +const SET_COOKIE = 'set-cookie' const SCRUBBED_STR = 'XXXXXX' -function isEmpty (val) { - return val == null || !(Object.keys(val) || val).length -} - // feed this any number of arguments, it will return true if // any of them are missing (falsey) -function missing (...params) { +function missing(...params) { return params.some(param => !param) } // the stock error returned when a call has missing or invalid parameters const invalidParametersError = new Error('Invalid parameters') -module.exports = exports = function dbScope (cfg) { +export default function Nano(cfg) { let serverScope = {} if (typeof cfg === 'string') { @@ -47,14 +45,17 @@ module.exports = exports = function dbScope (cfg) { cfg = Object.assign({}, cfg) serverScope.config = cfg - const dummyLogger = () => {} - const log = typeof cfg.log === 'function' ? cfg.log : dummyLogger + const log = typeof cfg.log === 'function' ? 
cfg.log : () => { } const parseUrl = 'parseUrl' in cfg ? cfg.parseUrl : true // pre-parse the URL to extract URL without auth details cfg.parsedURL = new URL(cfg.url) cfg.plainURL = `${cfg.parsedURL.origin}${cfg.parsedURL.pathname}` - cfg.headers = cfg.headers || {} + cfg.headers = Object.assign({ + 'content-type': 'application/json', + 'user-agent': `${pkg.name}/${pkg.version} (Node.js ${process.version})`, + 'accept-encoding': 'deflate, gzip' + }, cfg.headers || {}) if (cfg.parsedURL.username && cfg.parsedURL.password) { cfg.headers.Authorization = 'Basic ' + Buffer.from(`${cfg.parsedURL.username}:${cfg.parsedURL.password}`).toString('base64') } @@ -75,7 +76,7 @@ module.exports = exports = function dbScope (cfg) { // create cookieJar for this Nano cfg.cookieJar = new CookieJar() - function maybeExtractDatabaseComponent () { + function maybeExtractDatabaseComponent() { if (!parseUrl) { return } @@ -91,357 +92,132 @@ module.exports = exports = function dbScope (cfg) { } } - function scrubURL (str) { + function scrubURL(str) { if (str) { str = str.replace(/\/\/(.*)@/, `//${SCRUBBED_STR}:${SCRUBBED_STR}@`) } return str } - function scrubRequest (req, cloned) { - // scrub credentials - req.url = scrubURL(req.url) + function scrubRequest(req, cloned) { + // scrub + if (req.url) { + req.url = scrubURL(req.url) + } if (req.headers.cookie) { - req.headers.cookie = 'XXXXXXX' + req.headers.cookie = SCRUBBED_STR } if (req.headers.Authorization) { - req.headers.Authorization = 'XXXXXXX' - } - } - - const responseHandler = async function (response, req, opts, resolve, reject) { - const statusCode = response.status || 500 - let body = response.body - response.statusCode = statusCode - - // cookie parsing - if (response.headers) { - const h = response.headers.get('set-cookie') - if (h) { - cfg.cookieJar.parse(h, req.url) - } + req.headers.Authorization = SCRUBBED_STR } - - const responseHeaders = { - uri: scrubURL(req.url), - statusCode, - ...(response.headers ?? 
{}) - }; - - if (!response.status) { - log({ err: 'socket', body, headers: responseHeaders }) - if (reject) { - // since #relax might have sent Error rather than Response: - const statusText = response.cause?.toString() ?? response.message - reject(new Error(`error happened in your connection. Reason: ${statusText}`)) - } - return - } - - delete responseHeaders.server - delete responseHeaders['content-length'] - - if (statusCode >= 200 && statusCode < 400) { - // collect response - const contentType = response.headers.get('content-type') - let retval = '' - // when doing head requests, we return the response headers, not the response body - if (req.method === 'head') { - retval = Object.fromEntries(response.headers) - } else if (contentType === 'application/json') { - try { - retval = await response.json() - } catch { - // do nothing - } - } else if (contentType && (contentType.startsWith('text/') || contentType.startsWith('multipart/related'))) { - retval = await response.text() - } else { - const ab = await response.arrayBuffer() - retval = Buffer.from(ab) - } - - // log - log({ err: null, retval, headers: responseHeaders }) - - // promisey - if (resolve) { - resolve(retval) - } - return - } - - // cloudant stacktrace - try { - body = await response.json() - } catch (e) { - // if we were expecting a JSON response but didn't get one, set the body to a blank string - // rather than throw an error. This happens when - // - we do HEAD /db/docid - // - or we get a 500 from CouchDB with no body - // In these cases we expect a rejected promise. 
- body = '' - } - - if (typeof body === 'string') { - body = { message: body } - } - - if (body && !body.message && (body.reason || body.error)) { - body.message = (body.reason || body.error) - } - - // fix cloudant issues where they give an erlang stacktrace as js - delete body.stack - - // scrub credentials - scrubRequest(req) - - log({ err: 'couch', body, headers: responseHeaders }) - - const message = body.message || 'couch returned ' + statusCode - const errors = new Error(message) - errors.scope = 'couch' - errors.statusCode = statusCode - errors.request = req - errors.headers = responseHeaders - errors.errid = 'non_200' - errors.name = 'Error' - errors.description = message - // add any attributes from the HTTP response into the - // Error object (except message, which would overwrite - // the text message of the Error) - delete body.message - Object.assign(errors, body) - - if (reject) { - reject(errors) + if (req.headers['set-cookie']) { + req.headers['set-cookie'] = SCRUBBED_STR } } - const streamResponseHandler = function (response, req, stream) { - const statusCode = response.status || (response.response && response.response.status) || 500 - const message = response.statusText - scrubRequest(req) - const responseHeaders = Object.assign({ - uri: req.url, - statusCode - }, response.headers ? 
Object.fromEntries(response.headers) : {}) - - const error = new Error(message) - error.scope = 'couch' - error.statusCode = statusCode - error.request = req - error.headers = responseHeaders - error.errid = 'non_200' - error.name = 'Error' - error.description = message - error.reason = message - - log({ err: 'couch', body: message, headers: responseHeaders }) - - setTimeout(() => { - stream.emit('error', error) - }, 10) - } - function relax (opts) { - if (typeof opts === 'function') { - opts = { path: '' } - } - if (typeof opts === 'string') { - opts = { path: opts } - } - if (!opts) { - opts = { path: '' } + async function relax(relaxOpts) { + if (typeof relaxOpts === 'string') { + relaxOpts = { path: relaxOpts } } - // the building blocks of the request - let body, uri + // create new set of opts based on our defaults, overridden by those passed in + const opts = { ...cfg, ...(relaxOpts || {}) } - // construct headers object to be passed in the request - const headers = { - accept: 'application/json', - 'user-agent': `${pkg.name}/${pkg.version} (Node.js ${process.version})`, - 'Accept-Encoding': 'deflate, gzip' - } - Object.assign(headers, cfg.headers ? 
cfg.headers : {}) - if (!headers['content-type']) { - headers['content-type'] = 'application/json' - } + // form a new URL from path from our proxy, appended to the origin + if (opts.doc && !/^_design|_local/.test(opts.doc)) opts.doc = encodeURIComponent(opts.doc) + opts.path = [opts.db, opts.path || opts.doc, opts.att] + .filter(Boolean) + .join('/') || '/' + const url = new URL(opts.path, cfg.plainURL) - // prevent bugs where people set encoding when piping - if (opts.encoding !== undefined) { - delete headers['content-type'] - delete headers.accept - } - - if (opts.contentType) { - headers['content-type'] = opts.contentType - delete headers.accept + // if there's a query string object + if (typeof opts.qs === 'object') { + for (const [key, value] of Object.entries(opts.qs)) { + // add each k/v to the URL's seachParams, taking care to JSON.stringify certain items + url.searchParams.set(key, PARAMS_TO_ENCODE.includes(key) ? JSON.stringify(value) : value) + } } - if (opts.accept) { - headers.accept = opts.accept - } + // if we've been given a JavaScript object, it needs stringifying + opts.body = typeof opts.body === 'object' && opts.headers[CONTENT_TYPE].startsWith(MIME_JSON) ? 
JSON.stringify(opts.body) : opts.body - // http://guide.couchdb.org/draft/security.html#cookies - if (cfg.cookie) { - headers['X-CouchDB-WWW-Authenticate'] = 'Cookie' - headers.cookie = cfg.cookie - } - - // form submission - if (opts.form) { - headers['content-type'] = - 'application/x-www-form-urlencoded; charset=utf-8' - body = new URLSearchParams(opts.form) + // add any cookies for this domain + const urlStr = url.toString() + const cookie = cfg.cookieJar.getCookieString(urlStr) + if (cookie) { + opts.headers.cookie = cookie } - // multipart requests if (opts.multipart) { - // generate the multipart/related body, header and boundary to + // generate the multipart/related body, header and boundary to // upload multiple binary attachments in one request const mp = new MultiPartFactory(opts.multipart) - headers['content-type'] = mp.header - body = mp.data + opts.headers['content-type'] = mp.header + opts.body = mp.data } - // construct the URL - const method = opts.method ? opts.method.toLowerCase() : 'get' - uri = cfg.plainURL - if (opts.db) { - uri = urlResolveFix(uri, encodeURIComponent(opts.db)) + // if the body is a readable stream + if (opts.body && opts.body instanceof Readable) { + opts.duplex = 'half' + opts.keepalive = false } - // http://wiki.apache.org/couchdb/HTTP_database_API#Naming_and_Addressing - if (opts.path) { - if (!uri.endsWith('/')) { - uri += '/' - } - uri += opts.path - } else if (opts.doc) { - if (!/^_design|_local/.test(opts.doc)) { - // http://wiki.apache.org/couchdb/HTTP_Document_API#Naming.2FAddressing - uri += '/' + encodeURIComponent(opts.doc) - } else { - // http://wiki.apache.org/couchdb/HTTP_Document_API#Document_IDs - uri += '/' + opts.doc - } - - // http://wiki.apache.org/couchdb/HTTP_Document_API#Attachments - if (opts.att) { - uri += '/' + opts.att - } - } - - // http://wiki.apache.org/couchdb/HTTP_view_API#Querying_Options - const qs = Object.assign({}, opts.qs) - if (typeof qs === 'object' && !isEmpty(qs)) { - ['startkey', 
'endkey', 'key', 'keys', 'start_key', 'end_key'].forEach(function (key) { - if (key in qs) { - qs[key] = JSON.stringify(qs[key]) - } - }) - } - - // HTTP request body - if (opts.body) { - if (Buffer.isBuffer(opts.body) || opts.dontStringify) { - body = opts.body - } else { - body = JSON.stringify(opts.body, function (key, value) { - // don't encode functions - if (typeof (value) === 'function') { - return value.toString() - } else { - return value - } - }) - } - } + // make the HTTP request + const response = await fetch(urlStr, opts) // scrub and log const scrubbedReq = { - method, - headers: JSON.parse(JSON.stringify(headers)), - url: uri + method: opts.method, + headers: JSON.parse(JSON.stringify(opts.headers)), + url: urlStr } scrubRequest(scrubbedReq, true) log(scrubbedReq) - // insert basic auth headers, if present - Object.assign(headers, cfg.headers) - - // build the options we send to undici.fetch - const fetchOptions = { - url: uri, - method, - headers, - credentials: 'include', - body: method !== 'get' && body ? body : undefined, - redirect: 'error', - signal: opts.signal, - bodyTimeout: 0 - } - - // add custom agent if present - if (cfg.agent) { - fetchOptions.dispatcher = cfg.agent - } - - // add querystring params - const searchParams = new URLSearchParams(qs) - const queryString = searchParams.toString() - if (queryString.length > 0) { - fetchOptions.url += '?' 
+ queryString - } - - // if the body is readable stream - if (fetchOptions.body && fetchOptions.body instanceof stream.Readable) { - fetchOptions.duplex = 'half' - fetchOptions.keepalive = false + // parse cookies + const cookieHeader = response.headers.get(SET_COOKIE) || '' + if (cookieHeader) cfg.cookieJar.parse(cookieHeader, urlStr) + + // extract the mime type from the response + const contentType = response.headers.get(CONTENT_TYPE) || '' + let output = '' + if (opts.method && opts.method.toLowerCase() === 'head') { + // for HEAD method, we actually output the headers + output = Object.fromEntries(response.headers) + } else if (opts.stream) { + // for streamed output + return Readable.fromWeb(response.body) + } else if (contentType === MIME_JSON) { + // json is json + output = await response.json() + } else if (contentType.startsWith('text/') || contentType.startsWith('multipart/related')) { + // any text mime type is text + output = await response.text() + } else { + // everything else is a Buffer + output = Buffer.from(await response.arrayBuffer()) } - // add any cookies for this domain - const cookie = cfg.cookieJar.getCookieString(uri) - if (cookie) { - fetchOptions.headers.cookie = cookie - } + // either return the output + const loggedObj = { err: null, body: output, headers: Object.fromEntries(response.headers) } + scrubRequest(loggedObj) + log(loggedObj) + if (response.ok) return output - // actually do the HTTP request - if (opts.stream) { - // return the Request object for streaming - const outStream = new stream.PassThrough() - fetch(fetchOptions.url, fetchOptions).then((response) => { - const readableWebStream = response.body - const readableNodeStream = Readable.fromWeb ? 
Readable.fromWeb(readableWebStream) : Readable.from(readableWebStream) - if (response.status > 300) { - streamResponseHandler(response, fetchOptions, outStream) - } else { - readableNodeStream.pipe(outStream) - } - }).catch((e) => { - streamResponseHandler(e, fetchOptions, outStream) - }) - return outStream - } else { - return new Promise((resolve, reject) => { - fetch(fetchOptions.url, fetchOptions).then((response) => { - responseHandler(response, fetchOptions, opts, resolve, reject) - }).catch((e) => { - responseHandler(e, fetchOptions, opts, resolve, reject) - }) - }) - } + // or throw an Error + const e = new Error(output?.reason || output?.error || `couch returned ${response.status}`) + e.statusCode = response.status + log({ err: 'couch', body: output, headers: response.headers }) + throw e } // http://docs.couchdb.org/en/latest/api/server/authn.html#cookie-authentication - function auth (username, password) { + function auth(username, password) { return relax({ method: 'POST', db: '_session', - form: { + body: { name: username, password } @@ -449,17 +225,17 @@ module.exports = exports = function dbScope (cfg) { } // http://docs.couchdb.org/en/latest/api/server/authn.html#post--_session - function session () { + function session() { return relax({ db: '_session' }) } // https://docs.couchdb.org/en/latest/api/server/common.html#api-server-root - function info () { + async function info() { return relax({ path: '' }) } // http://docs.couchdb.org/en/latest/api/server/common.html#get--_db_updates - function updates (qs) { + async function updates(qs) { return relax({ db: '_db_updates', qs @@ -467,7 +243,7 @@ module.exports = exports = function dbScope (cfg) { } // http://docs.couchdb.org/en/latest/api/database/common.html#put--db - function createDb (dbName, qs) { + async function createDb(dbName, qs) { if (missing(dbName)) { return Promise.reject(invalidParametersError) } @@ -475,7 +251,7 @@ module.exports = exports = function dbScope (cfg) { } // 
http://docs.couchdb.org/en/latest/api/database/common.html#delete--db - function destroyDb (dbName) { + async function destroyDb(dbName) { if (missing(dbName)) { return Promise.reject(invalidParametersError) } @@ -483,7 +259,7 @@ module.exports = exports = function dbScope (cfg) { } // http://docs.couchdb.org/en/latest/api/database/common.html#get--db - function getDb (dbName) { + async function getDb(dbName) { if (missing(dbName)) { return Promise.reject(invalidParametersError) } @@ -491,17 +267,17 @@ module.exports = exports = function dbScope (cfg) { } // http://docs.couchdb.org/en/latest/api/server/common.html#get--_all_dbs - function listDbs () { + async function listDbs() { return relax({ db: '_all_dbs' }) } // http://docs.couchdb.org/en/latest/api/server/common.html#get--_all_dbs - function listDbsAsStream () { + async function listDbsAsStream() { return relax({ db: '_all_dbs', stream: true }) } // http://docs.couchdb.org/en/latest/api/database/compact.html#post--db-_compact - function compactDb (dbName, ddoc) { + async function compactDb(dbName, ddoc) { if (missing(dbName)) { return Promise.reject(invalidParametersError) } @@ -514,18 +290,18 @@ module.exports = exports = function dbScope (cfg) { } // http://docs.couchdb.org/en/latest/api/database/changes.html#get--db-_changes - function changesDb (dbName, qs) { + async function changesDb(dbName, qs) { if (missing(dbName)) { return Promise.reject(invalidParametersError) } return relax({ db: dbName, path: '_changes', qs }) } - function changesDbAsStream (dbName, qs) { + async function changesDbAsStream(dbName, qs) { return relax({ db: dbName, path: '_changes', stream: true, qs }) } - function _serializeAsUrl (db) { + function _serializeAsUrl(db) { if (typeof db === 'object' && db.config && db.config.url && db.config.db) { return urlResolveFix(db.config.url, encodeURIComponent(db.config.db)) } else { @@ -541,7 +317,7 @@ module.exports = exports = function dbScope (cfg) { } // 
http://docs.couchdb.org/en/latest/api/server/common.html#post--_replicate - function replicateDb (source, target, opts) { + async function replicateDb(source, target, opts) { if (missing(source, target)) { return Promise.reject(invalidParametersError) } @@ -555,13 +331,13 @@ module.exports = exports = function dbScope (cfg) { } // http://docs.couchdb.org/en/latest/api/server/common.html#uuids - function uuids (count) { + async function uuids(count) { count = count || 1 return relax({ method: 'GET', path: '_uuids', qs: { count } }) } // http://guide.couchdb.org/draft/replication.html - function enableReplication (source, target, opts) { + async function enableReplication(source, target, opts) { if (missing(source, target)) { return Promise.reject(invalidParametersError) @@ -576,7 +352,7 @@ module.exports = exports = function dbScope (cfg) { } // http://guide.couchdb.org/draft/replication.html - function queryReplication (id, qs) { + async function queryReplication(id, qs) { if (missing(id)) { return Promise.reject(invalidParametersError) } @@ -584,7 +360,7 @@ module.exports = exports = function dbScope (cfg) { } // http://guide.couchdb.org/draft/replication.html - function disableReplication (id, rev, opts) { + async function disableReplication(id, rev, opts) { if (missing(id, rev)) { return Promise.reject(invalidParametersError) } @@ -598,13 +374,13 @@ module.exports = exports = function dbScope (cfg) { return relax(req) } - function docModule (dbName) { + function docModule(dbName) { let docScope = {} dbName = decodeURIComponent(dbName) // http://docs.couchdb.org/en/latest/api/document/common.html#put--db-docid // http://docs.couchdb.org/en/latest/api/database/common.html#post--db - function insertDoc (doc, qs ) { + async function insertDoc(doc, qs) { const req = { db: dbName, body: doc, method: 'POST' } if (typeof qs === 'string') { @@ -624,7 +400,7 @@ module.exports = exports = function dbScope (cfg) { } // 
http://docs.couchdb.org/en/latest/api/document/common.html#delete--db-docid - function destroyDoc (docName, rev) { + async function destroyDoc(docName, rev) { if (missing(docName)) { return Promise.reject(invalidParametersError) } @@ -638,7 +414,7 @@ module.exports = exports = function dbScope (cfg) { } // http://docs.couchdb.org/en/latest/api/document/common.html#get--db-docid - function getDoc (docName, qs) { + async function getDoc(docName, qs) { if (missing(docName)) { return Promise.reject(invalidParametersError) } @@ -647,7 +423,7 @@ module.exports = exports = function dbScope (cfg) { } // http://docs.couchdb.org/en/latest/api/document/common.html#head--db-docid - function headDoc (docName) { + async function headDoc(docName) { if (missing(docName)) { return Promise.reject(invalidParametersError) } @@ -659,23 +435,23 @@ module.exports = exports = function dbScope (cfg) { } // http://docs.couchdb.org/en/latest/api/database/bulk-api.html#get--db-_all_docs - function listDoc (qs) { + async function listDoc(qs) { return relax({ db: dbName, path: '_all_docs', qs }) } // http://docs.couchdb.org/en/latest/api/database/bulk-api.html#get--db-_all_docs - function listDocAsStream (qs) { + async function listDocAsStream(qs) { return relax({ db: dbName, path: '_all_docs', qs, stream: true }) } // http://docs.couchdb.org/en/latest/api/database/bulk-api.html#post--db-_all_docs - function fetchDocs (docNames, qs) { + async function fetchDocs(docNames, qs) { const opts = qs || {} opts.include_docs = true if (missing(docNames) || typeof docNames !== 'object' || - !docNames.keys || !Array.isArray(docNames.keys) || - docNames.keys.length === 0) { + !docNames.keys || !Array.isArray(docNames.keys) || + docNames.keys.length === 0) { return Promise.reject(invalidParametersError) } @@ -688,11 +464,11 @@ module.exports = exports = function dbScope (cfg) { }) } - function fetchRevs (docNames, qs) { + async function fetchRevs(docNames, qs) { if (missing(docNames) || typeof docNames !== 
'object' || - !docNames.keys || !Array.isArray(docNames.keys) || - docNames.keys.length === 0) { + !docNames.keys || !Array.isArray(docNames.keys) || + docNames.keys.length === 0) { return Promise.reject(invalidParametersError) } @@ -705,7 +481,7 @@ module.exports = exports = function dbScope (cfg) { }) } - function view (ddoc, viewName, meta, qs) { + async function view(ddoc, viewName, meta, qs) { if (missing(ddoc, viewName) && !meta.viewPath) { return Promise.reject(invalidParametersError) } @@ -718,7 +494,7 @@ module.exports = exports = function dbScope (cfg) { const qs1 = Object.assign({}, qs) const viewPath = meta.viewPath || '_design/' + ddoc + '/_' + meta.type + - '/' + viewName + '/' + viewName if (meta.type === 'search') { return relax({ @@ -767,27 +543,27 @@ module.exports = exports = function dbScope (cfg) { } // http://docs.couchdb.org/en/latest/api/ddoc/views.html#post--db-_design-ddoc-_view-view - function viewDocs (ddoc, viewName, qs) { + async function viewDocs(ddoc, viewName, qs) { return view(ddoc, viewName, { type: 'view' }, qs) } // http://docs.couchdb.org/en/latest/api/ddoc/views.html#post--db-_design-ddoc-_view-view - function viewDocsAsStream (ddoc, viewName, qs) { + async function viewDocsAsStream(ddoc, viewName, qs) { return view(ddoc, viewName, { type: 'view', stream: true }, qs) } // cloudant - function viewSearch (ddoc, viewName, qs) { + async function viewSearch(ddoc, viewName, qs) { return view(ddoc, viewName, { type: 'search' }, qs) } // cloudant - function viewSearchAsStream (ddoc, viewName, qs) { + async function viewSearchAsStream(ddoc, viewName, qs) { return view(ddoc, viewName, { type: 'search', stream: true }, qs) } // http://docs.couchdb.org/en/latest/api/ddoc/render.html#get--db-_design-ddoc-_show-func - function showDoc (ddoc, viewName, docName, qs) { + async function showDoc(ddoc, viewName, docName, qs) { if (missing(ddoc, viewName, docName)) { return Promise.reject(invalidParametersError) } @@ -796,7 +572,7 @@ 
module.exports = exports = function dbScope (cfg) { } // http://docs.couchdb.org/en/latest/api/ddoc/render.html#put--db-_design-ddoc-_update-func-docid - function updateWithHandler (ddoc, viewName, docName, body) { + async function updateWithHandler(ddoc, viewName, docName, body) { if (missing(ddoc, viewName, docName)) { return Promise.reject(invalidParametersError) } @@ -807,13 +583,13 @@ module.exports = exports = function dbScope (cfg) { }) } - function viewWithList (ddoc, viewName, listName, qs) { + async function viewWithList(ddoc, viewName, listName, qs) { return view(ddoc, listName + '/' + viewName, { type: 'list' }, qs) } - function viewWithListAsStream (ddoc, viewName, listName, qs) { + async function viewWithListAsStream(ddoc, viewName, listName, qs) { return view(ddoc, listName + '/' + viewName, { type: 'list', stream: true @@ -821,7 +597,7 @@ module.exports = exports = function dbScope (cfg) { } // http://docs.couchdb.org/en/latest/api/database/bulk-api.html#post--db-_bulksDoc - function bulksDoc (docs, qs) { + async function bulksDoc(docs, qs) { return relax({ db: dbName, path: '_bulk_docs', @@ -832,7 +608,7 @@ module.exports = exports = function dbScope (cfg) { } // http://docs.couchdb.org/en/latest/api/document/common.html#creating-multiple-attachments - function insertMultipart (doc, attachments, qs) { + async function insertMultipart(doc, attachments, qs) { if (typeof qs === 'string') { qs = { docName: qs } } @@ -868,14 +644,13 @@ module.exports = exports = function dbScope (cfg) { return relax({ db: dbName, method: 'PUT', - contentType: 'multipart/related', doc: docName, qs, multipart }) } - function getMultipart (docName, qs) { + async function getMultipart(docName, qs) { qs = qs || {} qs.attachments = true @@ -886,13 +661,14 @@ module.exports = exports = function dbScope (cfg) { return relax({ db: dbName, doc: docName, - encoding: null, - accept: 'multipart/related', + headers: { + accept: 'multipart/related' + }, qs }) } - function insertAtt 
(docName, attName, att, contentType, qs) { + async function insertAtt(docName, attName, att, contentType, qs) { if (missing(docName, attName, att, contentType)) { return Promise.reject(invalidParametersError) } @@ -901,7 +677,9 @@ module.exports = exports = function dbScope (cfg) { db: dbName, att: attName, method: 'PUT', - contentType, + headers: { + 'content-type': contentType + }, doc: docName, qs, body: att, @@ -909,8 +687,8 @@ module.exports = exports = function dbScope (cfg) { }) } - function getAtt (docName, attName, qs) { - + async function getAtt(docName, attName, qs) { + if (missing(docName, attName)) { return Promise.reject(invalidParametersError) } @@ -925,7 +703,7 @@ module.exports = exports = function dbScope (cfg) { }) } - function getAttAsStream (docName, attName, qs) { + async function getAttAsStream(docName, attName, qs) { return relax({ db: dbName, att: attName, @@ -937,7 +715,7 @@ module.exports = exports = function dbScope (cfg) { }) } - function destroyAtt (docName, attName, qs) { + function destroyAtt(docName, attName, qs) { if (missing(docName, attName)) { return Promise.reject(invalidParametersError) } @@ -951,7 +729,7 @@ module.exports = exports = function dbScope (cfg) { }) } - function find (query) { + async function find(query) { if (missing(query) || typeof query !== 'object') { return Promise.reject(invalidParametersError) } @@ -964,7 +742,7 @@ module.exports = exports = function dbScope (cfg) { }) } - function findAsStream (query) { + async function findAsStream(query) { return relax({ db: dbName, path: '_find', @@ -974,7 +752,7 @@ module.exports = exports = function dbScope (cfg) { }) } - function createIndex (indexDef) { + async function createIndex(indexDef) { if (missing(indexDef) || typeof indexDef !== 'object') { return Promise.reject(invalidParametersError) } @@ -987,7 +765,7 @@ module.exports = exports = function dbScope (cfg) { }) } - function partitionInfo (partitionKey) { + async function partitionInfo(partitionKey) { if 
(missing(partitionKey)) { return Promise.reject(invalidParametersError) } @@ -998,7 +776,7 @@ module.exports = exports = function dbScope (cfg) { }) } - function partitionedList (partitionKey, qs) { + async function partitionedList(partitionKey, qs) { if (missing(partitionKey)) { return Promise.reject(invalidParametersError) } @@ -1009,7 +787,7 @@ module.exports = exports = function dbScope (cfg) { }) } - function partitionedListAsStream (partitionKey, qs) { + async function partitionedListAsStream(partitionKey, qs) { return relax({ db: dbName, path: '_partition/' + encodeURIComponent(partitionKey) + '/_all_docs', @@ -1018,7 +796,7 @@ module.exports = exports = function dbScope (cfg) { }) } - function partitionedFind (partition, query) { + async function partitionedFind(partition, query) { if (missing(partition, query) || typeof query !== 'object') { return Promise.reject(invalidParametersError) } @@ -1031,7 +809,7 @@ module.exports = exports = function dbScope (cfg) { }) } - function partitionedFindAsStream (partition, query) { + async function partitionedFindAsStream(partition, query) { return relax({ db: dbName, path: '_partition/' + encodeURIComponent(partition) + '/_find', @@ -1041,7 +819,7 @@ module.exports = exports = function dbScope (cfg) { }) } - function partitionedSearch (partition, ddoc, searchName, qs) { + async function partitionedSearch(partition, ddoc, searchName, qs) { if (missing(partition, ddoc, searchName, qs) || typeof qs !== 'object') { return Promise.reject(invalidParametersError) } @@ -1052,7 +830,7 @@ module.exports = exports = function dbScope (cfg) { }) } - function partitionedSearchAsStream (partition, ddoc, searchName, qs) { + async function partitionedSearchAsStream(partition, ddoc, searchName, qs) { return relax({ db: dbName, path: '_partition/' + encodeURIComponent(partition) + '/_design/' + ddoc + '/_search/' + searchName, @@ -1061,7 +839,7 @@ module.exports = exports = function dbScope (cfg) { }) } - function partitionedView 
(partition, ddoc, viewName, qs) { + async function partitionedView(partition, ddoc, viewName, qs) { if (missing(partition, ddoc, viewName)) { return Promise.reject(invalidParametersError) } @@ -1072,7 +850,7 @@ module.exports = exports = function dbScope (cfg) { }) } - function partitionedViewAsStream (partition, ddoc, viewName, qs) { + async function partitionedViewAsStream(partition, ddoc, viewName, qs) { return relax({ db: dbName, path: '_partition/' + encodeURIComponent(partition) + '/_design/' + ddoc + '/_view/' + viewName, @@ -1083,19 +861,19 @@ module.exports = exports = function dbScope (cfg) { // db level exports docScope = { - info: function () { + info: async function () { return getDb(dbName) }, - replicate: function (target, opts) { + replicate: async function (target, opts) { return replicateDb(dbName, target, opts) }, - compact: function () { + compact: async function () { return compactDb(dbName) }, - changes: function (qs) { + changes: async function (qs) { return changesDb(dbName, qs) }, - changesAsStream: function (qs) { + changesAsStream: async function (qs) { return changesDbAsStream(dbName, qs) }, changesReader: new ChangesReader(dbName, relax), @@ -1136,13 +914,13 @@ module.exports = exports = function dbScope (cfg) { viewWithListAsStream, server: serverScope, replication: { - enable: function (target, opts) { + enable: async function (target, opts) { return enableReplication(dbName, target, opts) }, - disable: function (id, revision, opts) { + disable: async function (id, revision, opts) { return disableReplication(id, revision, opts) }, - query: function (id, opts) { + query: async function (id, opts) { return queryReplication(id, opts) } }, @@ -1157,7 +935,7 @@ module.exports = exports = function dbScope (cfg) { partitionedViewAsStream } - docScope.view.compact = function (ddoc) { + docScope.view.compact = async function (ddoc) { return compactDb(dbName, ddoc) } @@ -1212,7 +990,7 @@ module.exports = exports = function dbScope (cfg) { * 
thanks for visiting! come again! */ -function urlResolveFix (couchUrl, dbName) { +function urlResolveFix(couchUrl, dbName) { if (/[^/]$/.test(couchUrl)) { couchUrl += '/' } diff --git a/package-lock.json b/package-lock.json index 7987105..9b81b8d 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "nano", - "version": "11.0.3", + "version": "12.0.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "nano", - "version": "11.0.3", + "version": "12.0.0", "license": "Apache-2.0", "devDependencies": { "@types/node": "^24.4.0", diff --git a/package.json b/package.json index 483d1f8..1d3520b 100644 --- a/package.json +++ b/package.json @@ -1,10 +1,11 @@ { "name": "nano", "description": "The official CouchDB client for Node.js", + "type": "module", "license": "Apache-2.0", "homepage": "http://github.com/apache/couchdb-nano", "repository": "http://github.com/apache/couchdb-nano", - "version": "11.0.3", + "version": "12.0.0", "author": "Apache CouchDB (http://couchdb.apache.org)", "keywords": [ "couchdb", @@ -24,7 +25,7 @@ "typescript": "^5.9.2" }, "scripts": { - "test": "tsc lib/nano.d.ts && node --test ./test/*.test.js" + "test": "tsc lib/nano.d.ts && node --experimental-test-coverage --test ./test/*.test.js" }, "main": "./lib/nano.js", "types": "./lib/nano.d.ts", diff --git a/test/attachment.destroy.test.js b/test/attachment.destroy.test.js index bb5bbf2..6dbc9c0 100644 --- a/test/attachment.destroy.test.js +++ b/test/attachment.destroy.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. 
-const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) test('should be able to destroy an attachment - DELETE /db/id/attname - db.attachment.destroy', async () => { diff --git a/test/attachment.get.test.js b/test/attachment.get.test.js index fcb70c4..c2e1ed8 100644 --- a/test/attachment.get.test.js +++ b/test/attachment.get.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. -const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) const image = Buffer.from('R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7', 'base64') diff --git a/test/attachment.getAsStream.test.js b/test/attachment.getAsStream.test.js index 35909b8..90a8ea3 100644 --- a/test/attachment.getAsStream.test.js +++ b/test/attachment.getAsStream.test.js @@ -10,32 +10,29 @@ // License for the specific language governing permissions and limitations under // the License. 
-const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) const image = Buffer.from('R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7', 'base64') -test('should be able to get an attachment as a stream - GET /db/id/attname - db.attachment.getAsStream', () => { +test('should be able to get an attachment as a stream - GET /db/id/attname - db.attachment.getAsStream', async () => { // mocks mockPool .intercept({ path: '/db/id/transparent.gif' }) .reply(200, image, { headers: { 'content-type': 'image/gif' } }) // test GET /db/id/attname - return new Promise((resolve, reject) => { - const db = nano.db.use('db') - let response = Buffer.from('') - db.attachment.getAsStream('id', 'transparent.gif') - .on('data', (data) => { - response = Buffer.concat([response, data]) - }) - .on('end', () => { - assert.equal(response.toString('base64'), image.toString('base64')) - mockAgent.assertNoPendingInterceptors() - resolve() - }) + const db = nano.db.use('db') + let response = Buffer.from('') + const s = await db.attachment.getAsStream('id', 'transparent.gif') + s.on('data', (data) => { + response = Buffer.concat([response, data]) + }) + .on('end', () => { + assert.equal(response.toString('base64'), image.toString('base64')) + mockAgent.assertNoPendingInterceptors() }) }) @@ -46,12 +43,9 @@ test('should emit an error when stream attachment does not exist - GET /db/id/at .intercept({ path: '/db/id/notexist.gif' }) .reply(404, response, JSON_HEADERS) - await new Promise((resolve, reject) => { - const db = nano.db.use('db') - db.attachment.getAsStream('id', 'notexist.gif') - .on('error', (e) => { - assert.equal(e.statusCode, 404) - resolve() - }) 
+ const db = nano.db.use('db') + const s = await db.attachment.getAsStream('id', 'notexist.gif') + s.on('error', (e) => { + assert.equal(e.statusCode, 404) }) }) diff --git a/test/attachment.insert.test.js b/test/attachment.insert.test.js index af9c9ec..4de7aef 100644 --- a/test/attachment.insert.test.js +++ b/test/attachment.insert.test.js @@ -10,12 +10,13 @@ // License for the specific language governing permissions and limitations under // the License. -const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import fs from 'node:fs' +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' + const nano = Nano(COUCH_URL) -const fs = require('fs') const image = Buffer.from('R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7', 'base64') test('should be able to insert document attachment - PUT /db/docname/attachment - db.attachment.insert', async () => { diff --git a/test/cookie.test.js b/test/cookie.test.js index 32ba062..8a9fd0f 100644 --- a/test/cookie.test.js +++ b/test/cookie.test.js @@ -10,10 +10,9 @@ // License for the specific language governing permissions and limitations under // the License. -const test = require('node:test') -const assert = require('node:assert/strict') - -const CookieJar = require('../lib/cookie.js') +import test from 'node:test' +import assert from 'node:assert/strict' +import CookieJar from '../lib/cookie.js' test('should parse cookies correctly', () => { const cj = new CookieJar() diff --git a/test/database.changes.test.js b/test/database.changes.test.js index 44506b8..ea74f57 100644 --- a/test/database.changes.test.js +++ b/test/database.changes.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. 
-const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) const response = { diff --git a/test/database.changesAsStream.test.js b/test/database.changesAsStream.test.js index 149f3af..e72d8c5 100644 --- a/test/database.changesAsStream.test.js +++ b/test/database.changesAsStream.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. -const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) const response = { results: [ @@ -37,21 +37,18 @@ test('should get a streamed list of changes - GET /_changes - nano.db.changesAsS .intercept({ path: '/db/_changes' }) .reply(200, response, JSON_HEADERS) - await new Promise((resolve, reject) => { - // test GET /db/_changes - const db = nano.db.use('db') - const s = db.changesAsStream() - assert.equal(typeof s, 'object') - let buffer = '' - s.on('data', (chunk) => { - buffer += chunk.toString() - }) - s.on('end', () => { - assert.equal(buffer, JSON.stringify(response)) - mockAgent.assertNoPendingInterceptors() - resolve() - }) + // test GET /db/_changes + const db = nano.db.use('db') + const s = await db.changesAsStream() + let buffer = '' + s.on('data', (chunk) => { + buffer += chunk.toString() }) + s.on('end', () => { + assert.equal(buffer, JSON.stringify(response)) + 
mockAgent.assertNoPendingInterceptors() + }) + }) test('should get a streamed list of changes with opts - GET /_changes - nano.db.changesAsStream', async () => { @@ -61,19 +58,15 @@ test('should get a streamed list of changes with opts - GET /_changes - nano.db. .intercept({ path: '/db/_changes?include_docs=true' }) .reply(200, response, JSON_HEADERS) - await new Promise((resolve, reject) => { - // test GET /db/_changes - const db = nano.db.use('db') - const s = db.changesAsStream(opts) - assert.equal(typeof s, 'object') - let buffer = '' - s.on('data', (chunk) => { - buffer += chunk.toString() - }) - s.on('end', () => { - assert.equal(buffer, JSON.stringify(response)) - mockAgent.assertNoPendingInterceptors() - resolve() - }) + // test GET /db/_changes + const db = nano.db.use('db') + const s = await db.changesAsStream(opts) + let buffer = '' + s.on('data', (chunk) => { + buffer += chunk.toString() + }) + s.on('end', () => { + assert.equal(buffer, JSON.stringify(response)) + mockAgent.assertNoPendingInterceptors() }) }) diff --git a/test/database.compact.test.js b/test/database.compact.test.js index 38c7a8f..61663b8 100644 --- a/test/database.compact.test.js +++ b/test/database.compact.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. 
-const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) const response = { ok: true } diff --git a/test/database.create.test.js b/test/database.create.test.js index fef1795..d4c5b86 100644 --- a/test/database.create.test.js +++ b/test/database.create.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. -const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) const response = { ok: true } diff --git a/test/database.destroy.test.js b/test/database.destroy.test.js index 835d057..403c553 100644 --- a/test/database.destroy.test.js +++ b/test/database.destroy.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. 
-const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) const response = { ok: true } diff --git a/test/database.get.test.js b/test/database.get.test.js index 290834e..88f031a 100644 --- a/test/database.get.test.js +++ b/test/database.get.test.js @@ -10,11 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. -const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') - -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) const response = { db_name: 'db', diff --git a/test/database.list.test.js b/test/database.list.test.js index 6c670da..fb54c4b 100644 --- a/test/database.list.test.js +++ b/test/database.list.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. 
-const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) const response = ['rita', 'sue', 'bob'] diff --git a/test/database.listAsStream.test.js b/test/database.listAsStream.test.js index f8525d5..575573d 100644 --- a/test/database.listAsStream.test.js +++ b/test/database.listAsStream.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. -const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) const response = ['rita', 'sue', 'bob'] @@ -23,18 +23,14 @@ test('should get a streamed list of databases - GET /_all_dbs - nano.db.listAsSt .intercept({ path: '/_all_dbs' }) .reply(200, response, JSON_HEADERS) - await new Promise((resolve, reject) => { - // test GET /_all_dbs - const s = nano.db.listAsStream() - assert.equal(typeof s, 'object') - let buffer = '' - s.on('data', (chunk) => { - buffer += chunk.toString() - }) - s.on('end', () => { - assert.equal(buffer, JSON.stringify(response)) - mockAgent.assertNoPendingInterceptors() - resolve() - }) + // test GET /_all_dbs + const s = await nano.db.listAsStream() + let buffer = '' + s.on('data', (chunk) => { + buffer += chunk.toString() + }) + s.on('end', () => { + assert.equal(buffer, JSON.stringify(response)) + mockAgent.assertNoPendingInterceptors() }) }) diff --git 
a/test/database.replicate.test.js b/test/database.replicate.test.js index 14d332b..d287df6 100644 --- a/test/database.replicate.test.js +++ b/test/database.replicate.test.js @@ -10,11 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. -const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') - -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) const response = { history: [], diff --git a/test/database.replication.disable.test.js b/test/database.replication.disable.test.js index 04e1f9b..ca88df9 100644 --- a/test/database.replication.disable.test.js +++ b/test/database.replication.disable.test.js @@ -10,11 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. -const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') - -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) const response = { ok: true, id: 'rep1', rev: '2-123' } const errResponse = { diff --git a/test/database.replication.enable.test.js b/test/database.replication.enable.test.js index 38b64bc..f7f6c27 100644 --- a/test/database.replication.enable.test.js +++ b/test/database.replication.enable.test.js @@ -10,11 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. 
-const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') - -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) const response = { ok: true, id: 'abc', rev: '1-123' } diff --git a/test/database.replication.query.test.js b/test/database.replication.query.test.js index e15d016..d84150c 100644 --- a/test/database.replication.query.test.js +++ b/test/database.replication.query.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. -const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) const response = { _id: 'rep1', diff --git a/test/design.atomic.test.js b/test/design.atomic.test.js index a471b7b..f29ffbe 100644 --- a/test/design.atomic.test.js +++ b/test/design.atomic.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. 
-const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) test('should be able to use an update function - PUT /db/_design/ddoc/_update/updatename/docid - db.atomic', async () => { diff --git a/test/design.createIndex.test.js b/test/design.createIndex.test.js index 4802863..ba9dd21 100644 --- a/test/design.createIndex.test.js +++ b/test/design.createIndex.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. -const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) test('should be able to create an index - POST /db/_index - db.createIndex', async () => { diff --git a/test/design.find.test.js b/test/design.find.test.js index 0789582..563ea37 100644 --- a/test/design.find.test.js +++ b/test/design.find.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. 
-const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) test('should be able to query an index - POST /db/_find - db.find', async () => { diff --git a/test/design.findAsStream.test.js b/test/design.findAsStream.test.js index 3a317ce..e46dec5 100644 --- a/test/design.findAsStream.test.js +++ b/test/design.findAsStream.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. -const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) test('should be able to query an index as a stream- POST /db/_find - db.findAsStream', async () => { @@ -43,19 +43,15 @@ test('should be able to query an index as a stream- POST /db/_find - db.findAsSt }) .reply(200, response, JSON_HEADERS) - await new Promise((resolve, reject) => { - // test POST /db/_find - const db = nano.db.use('db') - const s = db.findAsStream(query) - assert.equal(typeof s, 'object') - let buffer = '' - s.on('data', (chunk) => { - buffer += chunk.toString() - }) - s.on('end', () => { - assert.equal(buffer, JSON.stringify(response)) - mockAgent.assertNoPendingInterceptors() - resolve() - }) + // test POST /db/_find + const db = nano.db.use('db') + const s = await db.findAsStream(query) + let buffer = '' + s.on('data', (chunk) => { + buffer += chunk.toString() + }) + s.on('end', () => 
{ + assert.equal(buffer, JSON.stringify(response)) + mockAgent.assertNoPendingInterceptors() }) }) diff --git a/test/design.search.test.js b/test/design.search.test.js index ac02f85..443241d 100644 --- a/test/design.search.test.js +++ b/test/design.search.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. -const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) test('should be able to access a search index - POST /db/_design/ddoc/_search/searchname - db.search', async () => { diff --git a/test/design.searchAsStream.test.js b/test/design.searchAsStream.test.js index 0d1348f..88156af 100644 --- a/test/design.searchAsStream.test.js +++ b/test/design.searchAsStream.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. 
-const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) test('should be able to access a search index as a stream - POST /db/_design/ddoc/_search/searchname - db.searchAsStream', async () => { @@ -34,18 +34,14 @@ test('should be able to access a search index as a stream - POST /db/_design/ddo }) .reply(200, response, JSON_HEADERS) - await new Promise((resolve, reject) => { - const db = nano.db.use('db') - const s = db.searchAsStream('ddoc', 'searchname', params) - assert.equal(typeof s, 'object') - let buffer = '' - s.on('data', (chunk) => { - buffer += chunk.toString() - }) - s.on('end', () => { - assert.equal(buffer, JSON.stringify(response)) - mockAgent.assertNoPendingInterceptors() - resolve() - }) + const db = nano.db.use('db') + const s = await db.searchAsStream('ddoc', 'searchname', params) + let buffer = '' + s.on('data', (chunk) => { + buffer += chunk.toString() + }) + s.on('end', () => { + assert.equal(buffer, JSON.stringify(response)) + mockAgent.assertNoPendingInterceptors() }) }) diff --git a/test/design.show.test.js b/test/design.show.test.js index 90d6847..2336825 100644 --- a/test/design.show.test.js +++ b/test/design.show.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. 
-const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) test('should be able to use a show function - GET /db/_design/ddoc/_show/showname/docid - db.show', async () => { diff --git a/test/design.view.test.js b/test/design.view.test.js index a36f24a..4dd347c 100644 --- a/test/design.view.test.js +++ b/test/design.view.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. -const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) test('should be able to access a MapReduce view - GET /db/_design/ddoc/_view/viewname - db.view', async () => { diff --git a/test/design.viewAsStream.test.js b/test/design.viewAsStream.test.js index 44ac2aa..2eb0213 100644 --- a/test/design.viewAsStream.test.js +++ b/test/design.viewAsStream.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. 
-const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) test('should be able to access a MapReduce view as a stream - GET /db/_design/ddoc/_view/viewname - db.viewAsStream', async () => { @@ -26,19 +26,15 @@ test('should be able to access a MapReduce view as a stream - GET /db/_design/dd mockPool .intercept({ path: '/db/_design/ddoc/_view/viewname' }) .reply(200, response, JSON_HEADERS) - - await new Promise((resolve, reject) => { - const db = nano.db.use('db') - const s = db.viewAsStream('ddoc', 'viewname') - assert.equal(typeof s, 'object') - let buffer = '' - s.on('data', (chunk) => { - buffer += chunk.toString() - }) - s.on('end', () => { - assert.equal(buffer, JSON.stringify(response)) - mockAgent.assertNoPendingInterceptors() - resolve() - }) + + const db = nano.db.use('db') + const s = await db.viewAsStream('ddoc', 'viewname') + let buffer = '' + s.on('data', (chunk) => { + buffer += chunk.toString() + }) + s.on('end', () => { + assert.equal(buffer, JSON.stringify(response)) + mockAgent.assertNoPendingInterceptors() }) }) diff --git a/test/design.viewWithList.test.js b/test/design.viewWithList.test.js index 07d48bc..9847ea0 100644 --- a/test/design.viewWithList.test.js +++ b/test/design.viewWithList.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. 
-const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) test('should be able to access a MapReduce view with a list - GET /db/_design/ddoc/_list/listname/viewname - db.viewWithList', async () => { diff --git a/test/design.viewWithListAsStream.test.js b/test/design.viewWithListAsStream.test.js index 053b739..6a2c9cc 100644 --- a/test/design.viewWithListAsStream.test.js +++ b/test/design.viewWithListAsStream.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. -const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) test('should be able to access a MapReduce view with a list as a stream - GET /db/_design/ddoc/_list/listname/viewname - db.viewWithListAsStream', async () => { @@ -23,18 +23,14 @@ test('should be able to access a MapReduce view with a list as a stream - GET /d .intercept({ path: '/db/_design/ddoc/_list/listname/viewname' }) .reply(200, response, { headers: { 'content-type': 'text/csv' } }) - await new Promise((resolve, reject) => { - const db = nano.db.use('db') - const s = db.viewWithListAsStream('ddoc', 'viewname', 'listname') - assert.equal(typeof s, 'object') - let buffer = '' - s.on('data', (chunk) => { - buffer += chunk.toString() - }) - s.on('end', () => { - assert.equal(buffer, response) - mockAgent.assertNoPendingInterceptors() - resolve() - }) + const db = 
nano.db.use('db') + const s = await db.viewWithListAsStream('ddoc', 'viewname', 'listname') + let buffer = '' + s.on('data', (chunk) => { + buffer += chunk.toString() + }) + s.on('end', () => { + assert.equal(buffer, response) + mockAgent.assertNoPendingInterceptors() }) }) diff --git a/test/document.bulk.test.js b/test/document.bulk.test.js index 237b395..217a693 100644 --- a/test/document.bulk.test.js +++ b/test/document.bulk.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. -const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) test('should be able to insert documents in bulk - POST /db/_bulk_docs - db.bulk', async () => { diff --git a/test/document.changesreader.test.js b/test/document.changesreader.test.js index f22a947..d454a8a 100644 --- a/test/document.changesreader.test.js +++ b/test/document.changesreader.test.js @@ -10,10 +10,11 @@ // License for the specific language governing permissions and limitations under // the License. 
-const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import fs from 'node:fs' +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) const DBNAME = 'db' @@ -206,7 +207,6 @@ test('should keep polling the changes feed (wait: true) - db.changesReader.start }) test('spooling changes - db.changesReader.spool', async () => { - const fs = require('fs') const reply = fs.readFileSync('./test/changes.json') const replyObj = JSON.parse(reply) mockPool @@ -229,7 +229,6 @@ test('spooling changes - db.changesReader.spool', async () => { }) test('spooling changes - numeric seq - db.changesReader.spool', async () => { - const fs = require('fs') const reply = fs.readFileSync('./test/changes_numeric.json') const replyObj = JSON.parse(reply) mockPool diff --git a/test/document.destroy.test.js b/test/document.destroy.test.js index 82f2d3f..d3c7d15 100644 --- a/test/document.destroy.test.js +++ b/test/document.destroy.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. 
-const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) test('should be able to destroy a document - DELETE /db/id - db.destroy', async () => { diff --git a/test/document.fetch.test.js b/test/document.fetch.test.js index 97df61f..d2ddfee 100644 --- a/test/document.fetch.test.js +++ b/test/document.fetch.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. -const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano({ url: COUCH_URL }) test('should be able to fetch a list of documents - POST /db/_all_docs - db.fetch', async () => { diff --git a/test/document.fetchRevs.test.js b/test/document.fetchRevs.test.js index 5d5771d..abd8b2e 100644 --- a/test/document.fetchRevs.test.js +++ b/test/document.fetchRevs.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. 
-const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) test('should be able to fetch a list of document revisions - POST /db/_all_docs - db.fetchRevs', async () => { diff --git a/test/document.get.test.js b/test/document.get.test.js index 1babced..6f87aa8 100644 --- a/test/document.get.test.js +++ b/test/document.get.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. -const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) test('should be able to get a document - GET /db/id - db.get', async () => { diff --git a/test/document.head.test.js b/test/document.head.test.js index 70941ce..595055c 100644 --- a/test/document.head.test.js +++ b/test/document.head.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. 
-const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) test('should be able to head a document - HEAD /db/id - db.head', async () => { diff --git a/test/document.insert.test.js b/test/document.insert.test.js index 6895a2f..d6a7a61 100644 --- a/test/document.insert.test.js +++ b/test/document.insert.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. -const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) test('should be able to insert document - POST /db - db.insert', async () => { diff --git a/test/document.list.test.js b/test/document.list.test.js index 175d460..04bbb2a 100644 --- a/test/document.list.test.js +++ b/test/document.list.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. 
-const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) test('should be able to get a list of documents - GET /db/_all_docs - db.list', async () => { diff --git a/test/document.listAsStream.test.js b/test/document.listAsStream.test.js index e3590f6..ec1a4f7 100644 --- a/test/document.listAsStream.test.js +++ b/test/document.listAsStream.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. -const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) test('should get a streamed list of documents - GET /db/_all_docs - db.listAsStream', async () => { @@ -49,20 +49,16 @@ test('should get a streamed list of documents - GET /db/_all_docs - db.listAsStr .intercept({ path: '/db/_all_docs' }) .reply(200, response, JSON_HEADERS) - await new Promise((resolve, reject) => { - // test GET /db/_all_docs - const db = nano.db.use('db') - const s = db.listAsStream() - assert.equal(typeof s, 'object') - let buffer = '' - s.on('data', (chunk) => { - buffer += chunk.toString() - }) - s.on('end', () => { - assert.equal(buffer, JSON.stringify(response)) - mockAgent.assertNoPendingInterceptors() - resolve() - }) + // test GET /db/_all_docs + const db = nano.db.use('db') + const s = await db.listAsStream() + let buffer = '' + s.on('data', (chunk) => { + 
buffer += chunk.toString() + }) + s.on('end', () => { + assert.equal(buffer, JSON.stringify(response)) + mockAgent.assertNoPendingInterceptors() }) }) @@ -91,19 +87,15 @@ test('should get a streamed list of documents with opts- GET /db/_all_docs - db. .intercept({ path: '/db/_all_docs?limit=1&include_docs=true' }) .reply(200, response, JSON_HEADERS) - await new Promise((resolve, reject) => { - // test GET /db/_all_docs - const db = nano.db.use('db') - const s = db.listAsStream({ limit: 1, include_docs: true }) - assert.equal(typeof s, 'object') - let buffer = '' - s.on('data', (chunk) => { - buffer += chunk.toString() - }) - s.on('end', () => { - assert.equal(buffer, JSON.stringify(response)) - mockAgent.assertNoPendingInterceptors() - resolve() - }) + // test GET /db/_all_docs + const db = nano.db.use('db') + const s = await db.listAsStream({ limit: 1, include_docs: true }) + let buffer = '' + s.on('data', (chunk) => { + buffer += chunk.toString() + }) + s.on('end', () => { + assert.equal(buffer, JSON.stringify(response)) + mockAgent.assertNoPendingInterceptors() }) }) diff --git a/test/mock.js b/test/mock.js index d793f95..3ef4e2c 100644 --- a/test/mock.js +++ b/test/mock.js @@ -1,14 +1,11 @@ -const COUCH_URL = 'http://127.0.0.1:5984' -const JSON_HEADERS = { headers: { 'content-type': 'application/json' } } -const { MockAgent, setGlobalDispatcher } = require('undici') -const mockAgent = new MockAgent() +import { MockAgent, setGlobalDispatcher } from 'undici' + +export const mockAgent = new MockAgent() mockAgent.disableNetConnect() -const mockPool = mockAgent.get(COUCH_URL) -setGlobalDispatcher(mockAgent) -module.exports = { - COUCH_URL, - JSON_HEADERS, - mockAgent, - mockPool -} + + +export const COUCH_URL = 'http://127.0.0.1:5984' +export const JSON_HEADERS = { headers: { 'content-type': 'application/json' } } +export const mockPool = mockAgent.get(COUCH_URL) +setGlobalDispatcher(mockAgent) diff --git a/test/multipart.get.test.js b/test/multipart.get.test.js 
index 5b6056a..31f0183 100644 --- a/test/multipart.get.test.js +++ b/test/multipart.get.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. -const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) const multipartResponse = ''.concat( diff --git a/test/multipart.insert.test.js b/test/multipart.insert.test.js index 38dce82..26777d5 100644 --- a/test/multipart.insert.test.js +++ b/test/multipart.insert.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. -const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) const image1 = Buffer.from(''.concat( 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAMAAAAoLQ9TAAAAsV', diff --git a/test/multipart.test.js b/test/multipart.test.js index 32d0870..a670603 100644 --- a/test/multipart.test.js +++ b/test/multipart.test.js @@ -1,6 +1,6 @@ -const test = require('node:test') -const assert = require('node:assert/strict') -const MultiPartFactory = require('../lib/multipart.js') +import test from 'node:test' +import assert from 'node:assert/strict' +import MultiPartFactory from '../lib/multipart.js' const textAttachment = { name: 'test.txt', data: 'Hello\r\nWorld!', content_type: 'text/plain' } const anotherTextAttachment = { name: 
'test2.txt', data: 'the quick brown fox', content_type: 'text/plain' } diff --git a/test/nano.agent.test.js b/test/nano.agent.test.js index a124b0d..29eb543 100644 --- a/test/nano.agent.test.js +++ b/test/nano.agent.test.js @@ -1,8 +1,8 @@ -const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent } = require('./mock.js') -const Nano = require('../lib/nano') -const undici = require('undici') +import undici from 'undici' +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent } from './mock.js' +import Nano from '../lib/nano.js' test('should be able to supply a custom agent parameters', async () => { const agentOptions = { diff --git a/test/nano.auth.test.js b/test/nano.auth.test.js index 2e5cd3b..bea1c57 100644 --- a/test/nano.auth.test.js +++ b/test/nano.auth.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. -const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano({ url: COUCH_URL }) test('should be able to authenticate - POST /_session - nano.auth', async () => { @@ -28,10 +28,7 @@ test('should be able to authenticate - POST /_session - nano.auth', async () => .intercept({ method: 'post', path: '/_session', - body: 'name=u&password=p', - headers: { - 'content-type': 'application/x-www-form-urlencoded; charset=utf-8' - } + body: JSON.stringify({ name: username, password }) }) .reply(200, response, { headers: { @@ -70,10 +67,7 @@ test('should be able to handle cookie refresh - POST /_session - nano.auth', asy .intercept({ method: 'post', path: '/_session', - body: 
'name=u&password=p', - headers: { - 'content-type': 'application/x-www-form-urlencoded; charset=utf-8' - } + body: JSON.stringify({ name: username, password }) }) .reply(200, response, { headers: { diff --git a/test/nano.basic.test.js b/test/nano.basic.test.js new file mode 100644 index 0000000..1942608 --- /dev/null +++ b/test/nano.basic.test.js @@ -0,0 +1,38 @@ +// Licensed under the Apache License, Version 2.0 (the 'License'); you may not +// use this file except in compliance with the License. You may obtain a copy of +// the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an 'AS IS' BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations under +// the License. + +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' +const username = 'username' +const password = 'password' +const nano = Nano(`http://${username}:${password}@127.0.0.1:5984`) + +test('should be able to authenticate with basic auth', async () => { + // mocks + const response = ['replicator'] + + mockPool + .intercept({ + path: '/_all_dbs', + headers: { + Authorization: 'Basic dXNlcm5hbWU6cGFzc3dvcmQ=' + } + }) + .reply(200, response, JSON_HEADERS) + + // test POST /_session + const p = await nano.db.list() + assert.deepEqual(p, response) + mockAgent.assertNoPendingInterceptors() +}) diff --git a/test/nano.config.test.js b/test/nano.config.test.js index 3209dd7..0dbb929 100644 --- a/test/nano.config.test.js +++ b/test/nano.config.test.js @@ -10,9 +10,9 @@ // License for the specific language governing permissions and limitations under // the License. 
-const Nano = require('..') -const test = require('node:test') -const assert = require('node:assert/strict') +import test from 'node:test' +import assert from 'node:assert/strict' +import Nano from '../lib/nano.js' test('should be able to supply HTTP url - nano.config', () => { const HTTP_URL = 'http://127.0.0.1:5984' diff --git a/test/nano.customheaders.test.js b/test/nano.customheaders.test.js index 38131f8..c307942 100644 --- a/test/nano.customheaders.test.js +++ b/test/nano.customheaders.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. -const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const CUSTOM_HEADER = 'thequickbrownfox' const nano = Nano({ url: COUCH_URL, diff --git a/test/nano.info.test.js b/test/nano.info.test.js index 1e97dff..cedfb58 100644 --- a/test/nano.info.test.js +++ b/test/nano.info.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. 
-const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('../lib/nano') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) test('should be able to get info - GET / - nano.info', async () => { diff --git a/test/nano.logger.test.js b/test/nano.logger.test.js index 1db53c7..941f02f 100644 --- a/test/nano.logger.test.js +++ b/test/nano.logger.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. -const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' test('should be able to log output with user-defined function', async () => { // setup Nano with custom logger @@ -38,3 +38,54 @@ test('should be able to log output with user-defined function', async () => { assert.equal(logs.length, 2) mockAgent.assertNoPendingInterceptors() }) + +test('should be able to log output with cookie auth', async () => { + // setup Nano with custom logger + const logs = [] + const nano = Nano({ + url: COUCH_URL, + log: (data) => { + logs.push(data) + } + }) + + // mocks + // mocks + const username = 'u' + const password = 'p' + const response = { ok: true, name: 'admin', roles: ['_admin', 'admin'] } + const c = 'AuthSession=YWRtaW46NUU0MTFBMDE6stHsxYnlDy4mYxwZEcnXHn4fm5w' + const cookie = `${c}; Version=1; Expires=Mon, 10-Feb-2050 09:03:21 GMT; Max-Age=600; Path=/; HttpOnly` + + mockPool + .intercept({ + method: 'post', + path: '/_session', + 
body: JSON.stringify({ name: username, password }) + }) + .reply(200, response, { + headers: { + 'content-type': 'application/json', + 'Set-Cookie': cookie + } + }) + mockPool + .intercept({ + path: '/_all_dbs', + headers: { + cookie: c + } + }) + .reply(200, ['a'], JSON_HEADERS) + + // test POST /_session + const p = await nano.auth(username, password) + assert.deepEqual(p, response) + const q = await nano.db.list() + assert.deepEqual(q, ['a']) + assert.equal(logs.length, 4) + // check set-cookie and cookie are scrubbed + assert.equal(logs[1].headers['set-cookie'], 'XXXXXX') + assert.equal(logs[2].headers['cookie'], 'XXXXXX') + mockAgent.assertNoPendingInterceptors() +}) diff --git a/test/nano.request.test.js b/test/nano.request.test.js index 02954a6..217d24a 100644 --- a/test/nano.request.test.js +++ b/test/nano.request.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. -const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano({ url: COUCH_URL }) test('check request can do GET requests - nano.request', async () => { @@ -321,10 +321,7 @@ test('check request handles cookies - nano.request', async () => { .intercept({ method: 'post', path: '/_session', - body: 'name=u&password=p', - headers: { - 'content-type': 'application/x-www-form-urlencoded; charset=utf-8' - } + body: JSON.stringify({ name: username, password }) }) .reply(200, response, { headers: { @@ -337,7 +334,7 @@ test('check request handles cookies - nano.request', async () => { const req = { method: 'post', path: '_session', - form: { + body: { name: username, password } diff --git a/test/nano.session.test.js 
b/test/nano.session.test.js index 7190834..e207c8b 100644 --- a/test/nano.session.test.js +++ b/test/nano.session.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. -const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) test('should be able to check your session - GET /_session - nano.auth', async () => { diff --git a/test/nano.updates.test.js b/test/nano.updates.test.js index 7058fa4..74f2e2f 100644 --- a/test/nano.updates.test.js +++ b/test/nano.updates.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. -const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) const response = { diff --git a/test/nano.use.test.js b/test/nano.use.test.js index d867bbd..cd9d397 100644 --- a/test/nano.use.test.js +++ b/test/nano.use.test.js @@ -10,9 +10,9 @@ // License for the specific language governing permissions and limitations under // the License. 
-const test = require('node:test') -const assert = require('node:assert/strict') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import Nano from '../lib/nano.js' const nano = Nano('http://myurl.com') test('should be able to use a database - nano.db.use', () => { diff --git a/test/nano.uuids.test.js b/test/nano.uuids.test.js index 26ffb2d..9eb4843 100644 --- a/test/nano.uuids.test.js +++ b/test/nano.uuids.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. -const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) test('should be able to fetch uuids - GET /_uuids - nano.uuids', async () => { diff --git a/test/notnocked.test.js b/test/notnocked.test.js index f017d2f..43444aa 100644 --- a/test/notnocked.test.js +++ b/test/notnocked.test.js @@ -10,9 +10,9 @@ // License for the specific language governing permissions and limitations under // the License. -const test = require('node:test') -const assert = require('node:assert/strict') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import Nano from '../lib/nano.js' const COUCH_URL = 'http://admin:admin@127.0.0.1:5984' const nano = Nano(COUCH_URL) const dbName = 'notnocked' + new Date().getTime() diff --git a/test/partition.find.test.js b/test/partition.find.test.js index 8604307..9a9a3f1 100644 --- a/test/partition.find.test.js +++ b/test/partition.find.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. 
-const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) test('should be able to query a partitioned index - POST /db/_partition/partition/_find - db.partitionedFind', async () => { diff --git a/test/partition.findAsStream.test.js b/test/partition.findAsStream.test.js index 4df1748..fc45604 100644 --- a/test/partition.findAsStream.test.js +++ b/test/partition.findAsStream.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. -const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) test('should get a queried streamed list of documents from a partition- POST /db/_partition/partition/_find - db.partitionedFindAsStream', async () => { @@ -43,19 +43,15 @@ test('should get a queried streamed list of documents from a partition- POST /db }) .reply(200, response, JSON_HEADERS) - await new Promise((resolve, reject) => { - // test /db/_partition/partition/_find - const db = nano.db.use('db') - const s = db.partitionedFindAsStream('partition', query) - assert.equal(typeof s, 'object') - let buffer = '' - s.on('data', (chunk) => { - buffer += chunk.toString() - }) - s.on('end', () => { - assert.equal(buffer, JSON.stringify(response)) - mockAgent.assertNoPendingInterceptors() - resolve() - }) + // test /db/_partition/partition/_find + const 
db = nano.db.use('db') + const s = await db.partitionedFindAsStream('partition', query) + let buffer = '' + s.on('data', (chunk) => { + buffer += chunk.toString() + }) + s.on('end', () => { + assert.equal(buffer, JSON.stringify(response)) + mockAgent.assertNoPendingInterceptors() }) }) diff --git a/test/partition.info.test.js b/test/partition.info.test.js index 5724aae..59160fe 100644 --- a/test/partition.info.test.js +++ b/test/partition.info.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. -const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) const db = nano.db.use('db') diff --git a/test/partition.list.test.js b/test/partition.list.test.js index 4bb52d3..05a31e8 100644 --- a/test/partition.list.test.js +++ b/test/partition.list.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. 
-const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) const db = nano.db.use('db') diff --git a/test/partition.listAsStream.test.js b/test/partition.listAsStream.test.js index 9313fa1..e5a27f5 100644 --- a/test/partition.listAsStream.test.js +++ b/test/partition.listAsStream.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. -const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) test('should get a streamed list of documents from a partition- GET /db/_partition/partition/_all_docs - db.partitionedListAsStream', async () => { @@ -49,20 +49,16 @@ test('should get a streamed list of documents from a partition- GET /db/_partiti .intercept({ path: '/db/_partition/partition/_all_docs' }) .reply(200, response, JSON_HEADERS) - await new Promise((resolve, reject) => { - // test GET /db/_partition/_all_docs - const db = nano.db.use('db') - const s = db.partitionedListAsStream('partition') - assert(typeof s, 'object') - let buffer = '' - s.on('data', (chunk) => { - buffer += chunk.toString() - }) - s.on('end', () => { - assert.equal(buffer, JSON.stringify(response)) - mockAgent.assertNoPendingInterceptors() - resolve() - }) + // test GET /db/_partition/_all_docs + const db = nano.db.use('db') + const s = await 
db.partitionedListAsStream('partition') + let buffer = '' + s.on('data', (chunk) => { + buffer += chunk.toString() + }) + s.on('end', () => { + assert.equal(buffer, JSON.stringify(response)) + mockAgent.assertNoPendingInterceptors() }) }) @@ -91,19 +87,15 @@ test('should get a streamed list of documents from a partition with opts- GET /d .intercept({ path: '/db/_partition/partition/_all_docs?limit=1&include_docs=true' }) .reply(200, response, JSON_HEADERS) - await new Promise((resolve, reject) => { - // test GET /db/_partition/_all_docs - const db = nano.db.use('db') - const s = db.partitionedListAsStream('partition', { limit: 1, include_docs: true }) - assert.equal(typeof s, 'object') - let buffer = '' - s.on('data', (chunk) => { - buffer += chunk.toString() - }) - s.on('end', () => { - assert.equal(buffer, JSON.stringify(response)) - mockAgent.assertNoPendingInterceptors() - resolve() - }) + // test GET /db/_partition/_all_docs + const db = nano.db.use('db') + const s = await db.partitionedListAsStream('partition', { limit: 1, include_docs: true }) + let buffer = '' + s.on('data', (chunk) => { + buffer += chunk.toString() + }) + s.on('end', () => { + assert.equal(buffer, JSON.stringify(response)) + mockAgent.assertNoPendingInterceptors() }) }) diff --git a/test/partition.search.test.js b/test/partition.search.test.js index 1de358a..00b2c54 100644 --- a/test/partition.search.test.js +++ b/test/partition.search.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. 
-const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) test('should be able to access a partitioned search index - GET /db/_partition/partition/_design/ddoc/_search/searchname - db.partitionedSearch', async () => { diff --git a/test/partition.searchAsStream.test.js b/test/partition.searchAsStream.test.js index f53a724..8fe2378 100644 --- a/test/partition.searchAsStream.test.js +++ b/test/partition.searchAsStream.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. -const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) test('should get a searched streamed list of documents from a partition- GET /db/_partition/partition/_design/ddoc/_search/searchname - db.partitionedSearchAsStream', async () => { @@ -30,19 +30,15 @@ test('should get a searched streamed list of documents from a partition- GET /db .intercept({ path: '/db/_partition/partition/_design/ddoc/_search/searchname?q=*:*' }) .reply(200, response, JSON_HEADERS) - await new Promise((resolve, reject) => { - // test GET /db/_partition/partition/_design/ddoc/_search/searchname - const db = nano.db.use('db') - const s = db.partitionedSearchAsStream('partition', 'ddoc', 'searchname', params) - assert.equal(typeof s, 'object') - let buffer = '' - s.on('data', (chunk) => { - 
buffer += chunk.toString() - }) - s.on('end', () => { - assert.equal(buffer, JSON.stringify(response)) - mockAgent.assertNoPendingInterceptors() - resolve() - }) + // test GET /db/_partition/partition/_design/ddoc/_search/searchname + const db = nano.db.use('db') + const s = await db.partitionedSearchAsStream('partition', 'ddoc', 'searchname', params) + let buffer = '' + s.on('data', (chunk) => { + buffer += chunk.toString() + }) + s.on('end', () => { + assert.equal(buffer, JSON.stringify(response)) + mockAgent.assertNoPendingInterceptors() }) }) diff --git a/test/partition.view.test.js b/test/partition.view.test.js index a6fd0c0..79bf6d9 100644 --- a/test/partition.view.test.js +++ b/test/partition.view.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. -const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) test('should be able to access a partitioned view index - GET /db/_partition/partition/_design/ddoc/_view/viewname - db.partitionedView', async () => { diff --git a/test/partition.viewAsStream.test.js b/test/partition.viewAsStream.test.js index 1b15805..b4dfebc 100644 --- a/test/partition.viewAsStream.test.js +++ b/test/partition.viewAsStream.test.js @@ -10,10 +10,10 @@ // License for the specific language governing permissions and limitations under // the License. 
-const test = require('node:test') -const assert = require('node:assert/strict') -const { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } = require('./mock.js') -const Nano = require('..') +import test from 'node:test' +import assert from 'node:assert/strict' +import { COUCH_URL, mockAgent, mockPool, JSON_HEADERS } from './mock.js' +import Nano from '../lib/nano.js' const nano = Nano(COUCH_URL) test('should get a streamed list of documents from a view from partition - GET /db/_partition/partition/_design/ddoc/_view/viewname - db.partitionedViewAsStream', async () => { @@ -33,19 +33,15 @@ test('should get a streamed list of documents from a view from partition - GET .intercept({ path: '/db/_partition/partition/_design/ddoc/_view/viewname?reduce=false&startkey=%22a%22&endkey=%22b%22&limit=1' }) .reply(200, response, JSON_HEADERS) - await new Promise((resolve, reject) => { - // test GET /db/_partition/partition/_design/ddoc/_view/viewnameGET /db/_all_docs - const db = nano.db.use('db') - const s = db.partitionedViewAsStream('partition', 'ddoc', 'viewname', params) - assert.equal(typeof s, 'object') - let buffer = '' - s.on('data', (chunk) => { - buffer += chunk.toString() - }) - s.on('end', () => { - assert.equal(buffer, JSON.stringify(response)) - mockAgent.assertNoPendingInterceptors() - resolve() - }) + // test GET /db/_partition/partition/_design/ddoc/_view/viewnameGET /db/_all_docs + const db = nano.db.use('db') + const s = await db.partitionedViewAsStream('partition', 'ddoc', 'viewname', params) + let buffer = '' + s.on('data', (chunk) => { + buffer += chunk.toString() + }) + s.on('end', () => { + assert.equal(buffer, JSON.stringify(response)) + mockAgent.assertNoPendingInterceptors() }) })