diff --git a/.github/dependabot.yml b/.github/dependabot.yml index d401a774..589a3341 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -1,11 +1,30 @@ version: 2 updates: - package-ecosystem: npm - directory: "/" + directories: + - "/" schedule: interval: daily time: "10:00" open-pull-requests-limit: 20 commit-message: prefix: "deps" - prefix-development: "deps(dev)" + prefix-development: "chore" + groups: + interplanetary-deps: # Helia/libp2p deps + patterns: + - "*helia*" + - "*libp2p*" + - "*multiformats*" + - "*blockstore*" + - "*datastore*" + kubo-deps: # kubo deps + patterns: + - "*kubo*" + - "ipfsd-ctl" +- package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "weekly" + commit-message: + prefix: chore diff --git a/.github/workflows/generated-pr.yml b/.github/workflows/generated-pr.yml new file mode 100644 index 00000000..b8c5cc63 --- /dev/null +++ b/.github/workflows/generated-pr.yml @@ -0,0 +1,14 @@ +name: Close Generated PRs + +on: + schedule: + - cron: '0 0 * * *' + workflow_dispatch: + +permissions: + issues: write + pull-requests: write + +jobs: + stale: + uses: ipdxco/unified-github-workflows/.github/workflows/reusable-generated-pr.yml@v1 diff --git a/.github/workflows/semantic-pull-request.yml b/.github/workflows/semantic-pull-request.yml index bd00f090..8c0b9eaf 100644 --- a/.github/workflows/semantic-pull-request.yml +++ b/.github/workflows/semantic-pull-request.yml @@ -9,4 +9,4 @@ on: jobs: main: - uses: pl-strflt/.github/.github/workflows/reusable-semantic-pull-request.yml@v0.3 + uses: ipdxco/unified-github-workflows/.github/workflows/reusable-semantic-pull-request.yml@v1 diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index 16d65d72..7c955c41 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -1,8 +1,9 @@ -name: Close and mark stale issue +name: Close Stale Issues on: schedule: - cron: '0 0 * * *' + workflow_dispatch: permissions: issues: write @@ -10,4 +11,4 @@ permissions: jobs: stale: - uses: pl-strflt/.github/.github/workflows/reusable-stale-issue.yml@v0.3 + uses: ipdxco/unified-github-workflows/.github/workflows/reusable-stale-issue.yml@v1 diff --git a/.gitignore b/.gitignore index 7ad9e674..f115d26b 100644 --- a/.gitignore +++ b/.gitignore @@ -7,3 +7,5 @@ node_modules package-lock.json yarn.lock .vscode +.tmp-compiled-docs +tsconfig-doc-check.aegir.json diff --git a/README.md b/README.md index 17c2080a..9ff2f886 100644 --- a/README.md +++ b/README.md @@ -7,14 +7,16 @@ > JS implementation of the IPFS UnixFS +The UnixFS spec can be found at [ipfs/specs/UNIXFS.md](https://github.com/ipfs/specs/blob/master/UNIXFS.md) + # Packages +- [`benchmarks/import`](https://github.com/ipfs/js-ipfs-unixfs/tree/main/benchmarks/import) Import benchmarks for ipfs-unixfs-importer +- [`benchmarks/memory`](https://github.com/ipfs/js-ipfs-unixfs/tree/main/benchmarks/memory) Memory benchmarks for ipfs-unixfs-importer - [`packages/ipfs-unixfs`](https://github.com/ipfs/js-ipfs-unixfs/tree/main/packages/ipfs-unixfs) JavaScript implementation of IPFS' unixfs (a Unix FileSystem representation on top of a MerkleDAG) - [`packages/ipfs-unixfs-exporter`](https://github.com/ipfs/js-ipfs-unixfs/tree/main/packages/ipfs-unixfs-exporter) JavaScript implementation of the UnixFs exporter used by IPFS - [`packages/ipfs-unixfs-importer`](https://github.com/ipfs/js-ipfs-unixfs/tree/main/packages/ipfs-unixfs-importer) JavaScript implementation of the UnixFs importer used by IPFS -The UnixFS spec can be found at 
[ipfs/specs/UNIXFS.md](https://github.com/ipfs/specs/blob/master/UNIXFS.md) - # API Docs - diff --git a/benchmarks/import/package.json b/benchmarks/import/package.json index fa9f06ab..2c380ca6 100644 --- a/benchmarks/import/package.json +++ b/benchmarks/import/package.json @@ -1,29 +1,9 @@ { - "name": "ipfs-unixfs-memory-benchmark", + "name": "ipfs-unixfs-import-benchmark", "version": "0.0.0", - "description": "Memory benchmarks for ipfs-unixfs-importer", + "description": "Import benchmarks for ipfs-unixfs-importer", "license": "Apache-2.0 OR MIT", - "private": true, "type": "module", - "types": "./dist/src/index.d.ts", - "files": [ - "src", - "dist", - "!dist/test", - "!**/*.tsbuildinfo" - ], - "exports": { - ".": { - "types": "./dist/src/index.d.ts", - "import": "./dist/src/index.js" - } - }, - "eslintConfig": { - "extends": "ipfs", - "parserOptions": { - "sourceType": "module" - } - }, "scripts": { "build": "aegir build --bundle false", "clean": "aegir clean", @@ -32,11 +12,11 @@ "start": "npm run build && node --expose-gc ./dist/test/index.spec.js" }, "devDependencies": { - "aegir": "^42.2.2", - "blockstore-core": "^4.0.1", - "blockstore-fs": "^1.0.0", - "ipfs-unixfs-importer": "../../packages/ipfs-unixfs-importer", - "it-buffer-stream": "^3.0.1", - "it-drain": "^3.0.5" - } + "aegir": "^47.0.16", + "blockstore-core": "^5.0.4", + "ipfs-unixfs-importer": "^15.0.0", + "it-buffer-stream": "^3.0.11", + "it-drain": "^3.0.10" + }, + "private": true } diff --git a/benchmarks/import/test/index.spec.ts b/benchmarks/import/test/index.spec.ts index dbd65ced..cb7a2df1 100644 --- a/benchmarks/import/test/index.spec.ts +++ b/benchmarks/import/test/index.spec.ts @@ -1,9 +1,10 @@ /* eslint-env mocha */ -import { importer, ImporterOptions } from 'ipfs-unixfs-importer' -import bufferStream from 'it-buffer-stream' import { MemoryBlockstore } from 'blockstore-core' +import { importer } from 'ipfs-unixfs-importer' +import bufferStream from 'it-buffer-stream' import drain from 'it-drain' +import type { ImporterOptions } from 'ipfs-unixfs-importer' const REPEATS = 10 const FILE_SIZE = Math.pow(2, 20) * 500 // 500MB diff --git a/benchmarks/import/tsconfig.json b/benchmarks/import/tsconfig.json index 13a35996..304093f2 100644 --- a/benchmarks/import/tsconfig.json +++ b/benchmarks/import/tsconfig.json @@ -6,5 +6,10 @@ "include": [ "src", "test" + ], + "references": [ + { + "path": "../../packages/ipfs-unixfs-importer" + } ] } diff --git a/benchmarks/memory/package.json b/benchmarks/memory/package.json index ae510405..70f46164 100644 --- a/benchmarks/memory/package.json +++ b/benchmarks/memory/package.json @@ -3,27 +3,7 @@ "version": "0.0.0", "description": "Memory benchmarks for ipfs-unixfs-importer", "license": "Apache-2.0 OR MIT", - "private": true, "type": "module", - "types": "./dist/src/index.d.ts", - "files": [ - "src", - "dist", - "!dist/test", - "!**/*.tsbuildinfo" - ], - "exports": { - ".": { - "types": "./dist/src/index.d.ts", - "import": "./dist/src/index.js" - } - }, - "eslintConfig": { - "extends": "ipfs", - "parserOptions": { - "sourceType": "module" - } - }, "scripts": { "build": "aegir build --bundle false", "clean": "aegir clean", @@ -32,9 +12,10 @@ "start": "npm run build && node --expose-gc ./dist/test/index.spec.js" }, "devDependencies": { - "aegir": "^42.2.2", - "blockstore-fs": "^1.0.0", - "ipfs-unixfs-importer": "../../packages/ipfs-unixfs-importer", - "it-drain": "^3.0.5" - } + "aegir": "^47.0.16", + "blockstore-fs": "^2.0.4", + "ipfs-unixfs-importer": "^15.0.0", + "it-drain": "^3.0.10" + 
}, + "private": true } diff --git a/benchmarks/memory/test/index.spec.ts b/benchmarks/memory/test/index.spec.ts index 6a02873a..a42603da 100644 --- a/benchmarks/memory/test/index.spec.ts +++ b/benchmarks/memory/test/index.spec.ts @@ -1,11 +1,11 @@ /* eslint-disable no-console */ -import { importer } from 'ipfs-unixfs-importer' -import path from 'node:path' -import os from 'node:os' import fs from 'node:fs' -import drain from 'it-drain' +import os from 'node:os' +import path from 'node:path' import { FsBlockstore } from 'blockstore-fs' +import { importer } from 'ipfs-unixfs-importer' +import drain from 'it-drain' const ONE_MEG = 1024 * 1024 @@ -43,6 +43,6 @@ async function main (): Promise { } main().catch(err => { - console.error(err) // eslint-disable-line no-console + console.error(err) process.exit(1) }) diff --git a/benchmarks/memory/tsconfig.json b/benchmarks/memory/tsconfig.json index 13a35996..304093f2 100644 --- a/benchmarks/memory/tsconfig.json +++ b/benchmarks/memory/tsconfig.json @@ -6,5 +6,10 @@ "include": [ "src", "test" + ], + "references": [ + { + "path": "../../packages/ipfs-unixfs-importer" + } ] } diff --git a/package.json b/package.json index e8139b20..ab2f7c5d 100644 --- a/package.json +++ b/package.json @@ -27,16 +27,18 @@ "build": "aegir run build", "lint": "aegir run lint", "dep-check": "aegir run dep-check", + "doc-check": "aegir run doc-check", "release": "run-s build docs:no-publish npm:release docs", "npm:release": "aegir run release --concurrency 1", "docs": "aegir docs", "docs:no-publish": "aegir docs --publish false" }, "devDependencies": { - "aegir": "^44.1.1", + "aegir": "^47.0.16", "npm-run-all": "^4.1.5" }, "workspaces": [ + "benchmarks/*", "packages/*" ] } diff --git a/packages/ipfs-unixfs-exporter/CHANGELOG.md b/packages/ipfs-unixfs-exporter/CHANGELOG.md index db0ca47b..463832da 100644 --- a/packages/ipfs-unixfs-exporter/CHANGELOG.md +++ b/packages/ipfs-unixfs-exporter/CHANGELOG.md @@ -1,3 +1,84 @@ +## [ipfs-unixfs-exporter-v13.7.3](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-exporter-13.7.2...ipfs-unixfs-exporter-13.7.3) (2025-08-12) + +### Bug Fixes + +* export basic file from dir or shard ([#440](https://github.com/ipfs/js-ipfs-unixfs/issues/440)) ([b8d33de](https://github.com/ipfs/js-ipfs-unixfs/commit/b8d33deb0dfc76cc53eb82e31a67748a8da24eae)) + +## [ipfs-unixfs-exporter-v13.7.2](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-exporter-13.7.1...ipfs-unixfs-exporter-13.7.2) (2025-07-31) + +### Bug Fixes + +* add extended to default exporter options ([#439](https://github.com/ipfs/js-ipfs-unixfs/issues/439)) ([278aea4](https://github.com/ipfs/js-ipfs-unixfs/commit/278aea4c2ed76a8d890a0d2d3a079b03a9c00334)) + +## [ipfs-unixfs-exporter-v13.7.1](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-exporter-13.7.0...ipfs-unixfs-exporter-13.7.1) (2025-07-31) + +### Bug Fixes + +* add option to export non-extended unixfs ([#438](https://github.com/ipfs/js-ipfs-unixfs/issues/438)) ([c9a9bf4](https://github.com/ipfs/js-ipfs-unixfs/commit/c9a9bf45a5c8a779ed73cc2238a58c01e090edb7)) + +## [ipfs-unixfs-exporter-v13.7.0](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-exporter-13.6.6...ipfs-unixfs-exporter-13.7.0) (2025-07-30) + +### Features + +* add 'extended' option to exporter ([#437](https://github.com/ipfs/js-ipfs-unixfs/issues/437)) ([332a794](https://github.com/ipfs/js-ipfs-unixfs/commit/332a794227f7792e1ddee1b1e47d01fd510d6cf4)) + +## 
[ipfs-unixfs-exporter-v13.6.6](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-exporter-13.6.5...ipfs-unixfs-exporter-13.6.6) (2025-06-18) + +### Bug Fixes + +* constrain the unixfs type ([#435](https://github.com/ipfs/js-ipfs-unixfs/issues/435)) ([7663b87](https://github.com/ipfs/js-ipfs-unixfs/commit/7663b87ed2e3e8cd4da1484ca601638740ea0ae7)) + +## [ipfs-unixfs-exporter-v13.6.5](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-exporter-13.6.4...ipfs-unixfs-exporter-13.6.5) (2025-06-18) + +### Documentation + +* convert examples to ts, run doc verifier ([#434](https://github.com/ipfs/js-ipfs-unixfs/issues/434)) ([95e0b47](https://github.com/ipfs/js-ipfs-unixfs/commit/95e0b47de62c57b29bd10d48503cef4f208caae1)) + +## [ipfs-unixfs-exporter-v13.6.4](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-exporter-13.6.3...ipfs-unixfs-exporter-13.6.4) (2025-06-18) + +### Dependencies + +* bump aegir from 45.2.1 to 47.0.16 ([#431](https://github.com/ipfs/js-ipfs-unixfs/issues/431)) ([1fb2db3](https://github.com/ipfs/js-ipfs-unixfs/commit/1fb2db37f33674eb7a0e00aa88d5312a7644536d)) + +## [ipfs-unixfs-exporter-v13.6.3](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-exporter-13.6.2...ipfs-unixfs-exporter-13.6.3) (2025-06-16) + +### Dependencies + +* **dev:** bump sinon from 19.0.5 to 21.0.0 ([#432](https://github.com/ipfs/js-ipfs-unixfs/issues/432)) ([14d7e1c](https://github.com/ipfs/js-ipfs-unixfs/commit/14d7e1cacfa61fad6df86c0875ad52ce6b203457)) + +## [ipfs-unixfs-exporter-v13.6.2](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-exporter-13.6.1...ipfs-unixfs-exporter-13.6.2) (2025-03-06) + +### Dependencies + +* **dev:** bump aegir from 44.1.4 to 45.0.8 ([#420](https://github.com/ipfs/js-ipfs-unixfs/issues/420)) ([6eb1064](https://github.com/ipfs/js-ipfs-unixfs/commit/6eb1064ceaf3bbbdadc639e9641f3d9fad8ab23b)) + +## [ipfs-unixfs-exporter-v13.6.1](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-exporter-13.6.0...ipfs-unixfs-exporter-13.6.1) (2024-09-13) + +### Dependencies + +* **dev:** bump blockstore-core from 4.4.1 to 5.0.0 ([#411](https://github.com/ipfs/js-ipfs-unixfs/issues/411)) ([6663007](https://github.com/ipfs/js-ipfs-unixfs/commit/6663007b46fb18e31157bce7bba4bfcf7b0ef5b5)) +* **dev:** bump sinon from 17.0.2 to 19.0.2 ([#415](https://github.com/ipfs/js-ipfs-unixfs/issues/415)) ([f0a0b37](https://github.com/ipfs/js-ipfs-unixfs/commit/f0a0b37a57754ec7c36913e8ab06072aca7df41d)) + +## [ipfs-unixfs-exporter-v13.6.0](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-exporter-13.5.1...ipfs-unixfs-exporter-13.6.0) (2024-09-13) + +### Features + +* add name property to errors ([#414](https://github.com/ipfs/js-ipfs-unixfs/issues/414)) ([70145c7](https://github.com/ipfs/js-ipfs-unixfs/commit/70145c7c1e78ede3c481100151a9491c639671be)) + +## [ipfs-unixfs-exporter-v13.5.1](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-exporter-13.5.0...ipfs-unixfs-exporter-13.5.1) (2024-09-13) + +### Bug Fixes + +* add sideEffects false to package.json to enable tree shaking ([#402](https://github.com/ipfs/js-ipfs-unixfs/issues/402)) ([aea58c4](https://github.com/ipfs/js-ipfs-unixfs/commit/aea58c40a4a2457ddf44454befa1eb25d4caa016)) + +### Trivial Changes + +* rename master to main ([0cdfcd6](https://github.com/ipfs/js-ipfs-unixfs/commit/0cdfcd674513b21aab7e27b446a6f2181c9ba842)) + +### Dependencies + +* **dev:** bump aegir from 42.2.11 to 44.1.1 ([#412](https://github.com/ipfs/js-ipfs-unixfs/issues/412)) 
([f94d1ad](https://github.com/ipfs/js-ipfs-unixfs/commit/f94d1ad0a507a0b37dd601490bba22224192f5a3)) + ## ipfs-unixfs-exporter [13.5.0](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-exporter-13.4.0...ipfs-unixfs-exporter-13.5.0) (2024-02-02) diff --git a/packages/ipfs-unixfs-exporter/CODE_OF_CONDUCT.md b/packages/ipfs-unixfs-exporter/CODE_OF_CONDUCT.md new file mode 100644 index 00000000..6b0fa54c --- /dev/null +++ b/packages/ipfs-unixfs-exporter/CODE_OF_CONDUCT.md @@ -0,0 +1,3 @@ +# Contributor Code of Conduct + +This project follows the [`IPFS Community Code of Conduct`](https://github.com/ipfs/community/blob/master/code-of-conduct.md) diff --git a/packages/ipfs-unixfs-exporter/LICENSE b/packages/ipfs-unixfs-exporter/LICENSE deleted file mode 100644 index 20ce483c..00000000 --- a/packages/ipfs-unixfs-exporter/LICENSE +++ /dev/null @@ -1,4 +0,0 @@ -This project is dual licensed under MIT and Apache-2.0. - -MIT: https://www.opensource.org/licenses/mit -Apache-2.0: https://www.apache.org/licenses/license-2.0 diff --git a/packages/ipfs-unixfs-exporter/LICENSE-APACHE b/packages/ipfs-unixfs-exporter/LICENSE-APACHE index 14478a3b..b09cd785 100644 --- a/packages/ipfs-unixfs-exporter/LICENSE-APACHE +++ b/packages/ipfs-unixfs-exporter/LICENSE-APACHE @@ -1,5 +1,201 @@ -Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ -http://www.apache.org/licenses/LICENSE-2.0 + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/ipfs-unixfs-exporter/README.md b/packages/ipfs-unixfs-exporter/README.md index 899c2d81..19949f4a 100644 --- a/packages/ipfs-unixfs-exporter/README.md +++ b/packages/ipfs-unixfs-exporter/README.md @@ -28,7 +28,7 @@ The UnixFS Exporter provides a means to read DAGs from a blockstore given a CID. 
## Example -```js +```TypeScript // import a file and export it again import { importer } from 'ipfs-unixfs-importer' import { exporter } from 'ipfs-unixfs-exporter' @@ -49,6 +49,10 @@ console.info(files[0].cid) // Qmbaz const entry = await exporter(files[0].cid, blockstore) +if (entry.type !== 'file') { + throw new Error('Unexpected entry type') +} + console.info(entry.cid) // Qmqux console.info(entry.path) // Qmbaz/foo/bar.txt console.info(entry.name) // bar.txt @@ -56,12 +60,12 @@ console.info(entry.unixfs.fileSize()) // 4 // stream content from unixfs node const size = entry.unixfs.fileSize() -const bytes = new Uint8Array(size) +const bytes = new Uint8Array(Number(size)) let offset = 0 for await (const buf of entry.content()) { bytes.set(buf, offset) - offset += chunk.length + offset += buf.byteLength } console.info(bytes) // 0, 1, 2, 3 diff --git a/packages/ipfs-unixfs-exporter/package.json b/packages/ipfs-unixfs-exporter/package.json index 0f9c27f4..1f3a4177 100644 --- a/packages/ipfs-unixfs-exporter/package.json +++ b/packages/ipfs-unixfs-exporter/package.json @@ -1,6 +1,6 @@ { "name": "ipfs-unixfs-exporter", - "version": "13.5.0", + "version": "13.7.3", "description": "JavaScript implementation of the UnixFs exporter used by IPFS", "license": "Apache-2.0 OR MIT", "homepage": "https://github.com/ipfs/js-ipfs-unixfs/tree/main/packages/ipfs-unixfs-exporter#readme", @@ -32,13 +32,6 @@ "import": "./dist/src/index.js" } }, - "eslintConfig": { - "extends": "ipfs", - "parserOptions": { - "project": true, - "sourceType": "module" - } - }, "release": { "branches": [ "main" @@ -121,7 +114,15 @@ "@semantic-release/changelog", "@semantic-release/npm", "@semantic-release/github", - "@semantic-release/git" + [ + "@semantic-release/git", + { + "assets": [ + "CHANGELOG.md", + "package.json" + ] + } + ] ] }, "scripts": { @@ -133,43 +134,43 @@ "clean": "aegir clean", "lint": "aegir lint", "dep-check": "aegir dep-check", + "doc-check": "aegir doc-check", "release": "aegir release" }, "dependencies": { - "@ipld/dag-cbor": "^9.2.1", - "@ipld/dag-json": "^10.2.2", - "@ipld/dag-pb": "^4.1.2", + "@ipld/dag-cbor": "^9.2.4", + "@ipld/dag-json": "^10.2.5", + "@ipld/dag-pb": "^4.1.5", "@multiformats/murmur3": "^2.1.8", - "err-code": "^3.0.1", "hamt-sharding": "^3.0.6", - "interface-blockstore": "^5.3.0", + "interface-blockstore": "^5.3.2", "ipfs-unixfs": "^11.0.0", - "it-filter": "^3.1.1", - "it-last": "^3.0.6", - "it-map": "^3.1.1", - "it-parallel": "^3.0.8", + "it-filter": "^3.1.4", + "it-last": "^3.0.9", + "it-map": "^3.1.4", + "it-parallel": "^3.0.13", "it-pipe": "^3.0.1", "it-pushable": "^3.2.3", - "multiformats": "^13.2.3", - "p-queue": "^8.0.1", + "multiformats": "^13.3.7", + "p-queue": "^8.1.0", "progress-events": "^1.0.1" }, "devDependencies": { - "@types/readable-stream": "^4.0.15", - "@types/sinon": "^17.0.3", - "aegir": "^44.1.1", - "blockstore-core": "^4.4.1", + "@types/readable-stream": "^4.0.21", + "@types/sinon": "^17.0.4", + "aegir": "^47.0.16", + "blockstore-core": "^5.0.4", "delay": "^6.0.0", "ipfs-unixfs-importer": "^15.0.0", "iso-random-stream": "^2.0.2", - "it-all": "^3.0.6", - "it-buffer-stream": "^3.0.8", - "it-drain": "^3.0.7", - "it-first": "^3.0.6", - "it-to-buffer": "^4.0.7", + "it-all": "^3.0.9", + "it-buffer-stream": "^3.0.11", + "it-drain": "^3.0.10", + "it-first": "^3.0.9", + "it-to-buffer": "^4.0.10", "merge-options": "^3.0.4", - "readable-stream": "^4.5.2", - "sinon": "^17.0.1", + "readable-stream": "^4.7.0", + "sinon": "^21.0.0", "uint8arrays": "^5.1.0", "wherearewe": 
"^2.0.1" }, diff --git a/packages/ipfs-unixfs-exporter/src/errors.ts b/packages/ipfs-unixfs-exporter/src/errors.ts new file mode 100644 index 00000000..33ff7ee2 --- /dev/null +++ b/packages/ipfs-unixfs-exporter/src/errors.ts @@ -0,0 +1,87 @@ +export class BadPathError extends Error { + static name = 'BadPathError' + static code = 'ERR_BAD_PATH' + name = BadPathError.name + code = BadPathError.code + + constructor (message = 'Bad path') { + super(message) + } +} + +export class NotFoundError extends Error { + static name = 'NotFoundError' + static code = 'ERR_NOT_FOUND' + name = NotFoundError.name + code = NotFoundError.code + + constructor (message = 'Not found') { + super(message) + } +} + +export class NoResolverError extends Error { + static name = 'NoResolverError' + static code = 'ERR_NO_RESOLVER' + name = NoResolverError.name + code = NoResolverError.code + + constructor (message = 'No resolver') { + super(message) + } +} + +export class NotUnixFSError extends Error { + static name = 'NotUnixFSError' + static code = 'ERR_NOT_UNIXFS' + name = NotUnixFSError.name + code = NotUnixFSError.code + + constructor (message = 'Not UnixFS') { + super(message) + } +} + +export class OverReadError extends Error { + static name = 'OverReadError' + static code = 'ERR_OVER_READ' + name = OverReadError.name + code = OverReadError.code + + constructor (message = 'Over read') { + super(message) + } +} + +export class UnderReadError extends Error { + static name = 'UnderReadError' + static code = 'ERR_UNDER_READ' + name = UnderReadError.name + code = UnderReadError.code + + constructor (message = 'Under read') { + super(message) + } +} + +export class NoPropError extends Error { + static name = 'NoPropError' + static code = 'ERR_NO_PROP' + name = NoPropError.name + code = NoPropError.code + + constructor (message = 'No Property found') { + super(message) + } +} + +export class InvalidParametersError extends Error { + static name = 'InvalidParametersError' + static code = 'ERR_INVALID_PARAMS' + name = InvalidParametersError.name + code = InvalidParametersError.code + + constructor (message = 'Invalid parameters') { + super(message) + } +} diff --git a/packages/ipfs-unixfs-exporter/src/index.ts b/packages/ipfs-unixfs-exporter/src/index.ts index b1adc319..35e92277 100644 --- a/packages/ipfs-unixfs-exporter/src/index.ts +++ b/packages/ipfs-unixfs-exporter/src/index.ts @@ -5,7 +5,7 @@ * * @example * - * ```js + * ```TypeScript * // import a file and export it again * import { importer } from 'ipfs-unixfs-importer' * import { exporter } from 'ipfs-unixfs-exporter' @@ -26,6 +26,10 @@ * * const entry = await exporter(files[0].cid, blockstore) * + * if (entry.type !== 'file') { + * throw new Error('Unexpected entry type') + * } + * * console.info(entry.cid) // Qmqux * console.info(entry.path) // Qmbaz/foo/bar.txt * console.info(entry.name) // bar.txt @@ -33,21 +37,21 @@ * * // stream content from unixfs node * const size = entry.unixfs.fileSize() - * const bytes = new Uint8Array(size) + * const bytes = new Uint8Array(Number(size)) * let offset = 0 * * for await (const buf of entry.content()) { * bytes.set(buf, offset) - * offset += chunk.length + * offset += buf.byteLength * } * * console.info(bytes) // 0, 1, 2, 3 * ``` */ -import errCode from 'err-code' import last from 'it-last' import { CID } from 'multiformats/cid' +import { BadPathError, NotFoundError } from './errors.js' import resolve from './resolvers/index.js' import type { PBNode } from '@ipld/dag-pb' import type { Bucket } from 'hamt-sharding' @@ 
-55,6 +59,8 @@ import type { Blockstore } from 'interface-blockstore' import type { UnixFS } from 'ipfs-unixfs' import type { ProgressOptions, ProgressEvent } from 'progress-events' +export * from './errors.js' + export interface ExportProgress { /** * How many bytes of the file have been read */ @@ -128,6 +134,34 @@ export interface ExporterOptions extends ProgressOptions<ExporterProgressEvents> * (default: undefined) */ blockReadConcurrency?: number + + /** + * When directory contents are listed, by default the root node of each entry + * is fetched to decode the UnixFS metadata and know if the entry is a file or + * a directory. This can result in fetching extra data which may not be + * desirable, depending on your application. + * + * Pass false here to only return the CID and the name of the entry and not + * any extended metadata. + * + * @default true + */ + extended?: boolean +} + +export interface BasicExporterOptions extends ExporterOptions { + /** + * When directory contents are listed, by default the root node of each entry + * is fetched to decode the UnixFS metadata and know if the entry is a file or + * a directory. This can result in fetching extra data which may not be + * desirable, depending on your application. + * + * Pass false here to only return the CID and the name of the entry and not + * any extended metadata. + * + * @default true + */ + extended: false } export interface Exportable<T> { @@ -178,7 +212,7 @@ * * When `entry` is a file or a `raw` node, `offset` and/or `length` arguments can be passed to `entry.content()` to return slices of data: * - * ```javascript + * ```TypeScript * const length = 5 * const data = new Uint8Array(length) * let offset = 0 @@ -199,7 +233,7 @@ * * If `entry` is a directory, passing `offset` and/or `length` to `entry.content()` will limit the number of files returned from the directory.
* - ```javascript + ```TypeScript * const entries = [] * * for await (const entry of dir.content({ @@ -212,13 +246,13 @@ * // `entries` contains the first 5 files/directories in the directory * ``` */ - content(options?: ExporterOptions): AsyncGenerator<T, void, unknown> + content(options?: ExporterOptions | BasicExporterOptions): AsyncGenerator<T, void, unknown> } /** * If the entry is a file, `entry.content()` returns an async iterator that yields one or more Uint8Arrays containing the file content: * - ```javascript + ```TypeScript * if (entry.type === 'file') { * for await (const chunk of entry.content()) { * // chunk is a Buffer @@ -235,7 +269,7 @@ /** * If the entry is a directory, `entry.content()` returns further `entry` objects: * - ```javascript + ```TypeScript * if (entry.type === 'directory') { * for await (const entry of dir.content()) { * console.info(entry.name) @@ -262,7 +296,7 @@ export interface ObjectNode extends Exportable<any> { * * `entry.content()` returns an async iterator that yields a buffer containing the node content: * - ```javascript + ```TypeScript * for await (const chunk of entry.content()) { * // chunk is a Buffer * } @@ -292,6 +326,23 @@ */ export type UnixFSEntry = UnixFSFile | UnixFSDirectory | ObjectNode | RawNode | IdentityNode +export interface UnixFSBasicEntry { + /** + * The name of the entry + */ + name: string + + /** + * The path of the entry within the DAG in which it was encountered + */ + path: string + + /** + * The CID of the entry + */ + cid: CID +} + export interface NextResult { cid: CID name: string @@ -305,12 +356,20 @@ } export interface ResolveResult { } export interface Resolve { (cid: CID, name: string, path: string, toResolve: string[], depth: number, blockstore: ReadableStorage, options: ExporterOptions): Promise<ResolveResult> } -export interface Resolver { (cid: CID, name: string, path: string, toResolve: string[], resolve: Resolve, depth: number, blockstore: ReadableStorage, options: ExporterOptions): Promise<ResolveResult> } +export interface Resolver { (cid: CID, name: string, path: string, toResolve: string[], resolve: Resolve, depth: number, blockstore: ReadableStorage, options: ExporterOptions | BasicExporterOptions): Promise<ResolveResult> } export type UnixfsV1FileContent = AsyncIterable<Uint8Array> | Iterable<Uint8Array> export type UnixfsV1DirectoryContent = AsyncIterable<UnixFSEntry> | Iterable<UnixFSEntry> export type UnixfsV1Content = UnixfsV1FileContent | UnixfsV1DirectoryContent -export interface UnixfsV1Resolver { (cid: CID, node: PBNode, unixfs: UnixFS, path: string, resolve: Resolve, depth: number, blockstore: ReadableStorage): (options: ExporterOptions) => UnixfsV1Content } + +export interface UnixFsV1ContentResolver { + (options: ExporterOptions): UnixfsV1Content + (options: BasicExporterOptions): UnixFSBasicEntry +} + +export interface UnixfsV1Resolver { + (cid: CID, node: PBNode, unixfs: UnixFS, path: string, resolve: Resolve, depth: number, blockstore: ReadableStorage): (options: ExporterOptions) => UnixfsV1Content +} export interface ShardTraversalContext { hamtDepth: number @@ -361,7 +420,7 @@ const cidAndRest = (path: string | Uint8Array | CID): { cid: CID, toResolve: str } } - throw errCode(new Error(`Unknown path type ${path}`), 'ERR_BAD_PATH') + throw new BadPathError(`Unknown path type ${path}`) } /** @@ -369,7 +428,7 @@ const cidAndRest = (path: string | Uint8Array | CID): { cid: CID, toResolve: str * * @example * * - ```javascript + ```TypeScript * import { walkPath } from
'ipfs-unixfs-exporter' * * const entries = [] * * for await (const entry of walkPath('Qmfoo/foo/bar/baz.txt', blockstore)) { * entries.push(entry) * } * * // entries contains 4x `entry` objects * ``` */ +export function walkPath (path: string | CID, blockstore: ReadableStorage, options?: ExporterOptions): AsyncGenerator<UnixFSEntry, void, any> +export function walkPath (path: string | CID, blockstore: ReadableStorage, options: BasicExporterOptions): AsyncGenerator<UnixFSBasicEntry, void, any> export async function * walkPath (path: string | CID, blockstore: ReadableStorage, options: ExporterOptions = {}): AsyncGenerator<UnixFSEntry, void, any> { let { cid, @@ -394,7 +455,7 @@ export async function * walkPath (path: string | CID, blockstore: ReadableStorag const result = await resolve(cid, name, entryPath, toResolve, startingDepth, blockstore, options) if (result.entry == null && result.next == null) { - throw errCode(new Error(`Could not resolve ${path}`), 'ERR_NOT_FOUND') + throw new NotFoundError(`Could not resolve ${path}`) } if (result.entry != null) { @@ -437,11 +498,13 @@ export async function * walkPath (path: string | CID, blockstore: ReadableStorag * } * ``` */ +export async function exporter (path: string | CID, blockstore: ReadableStorage, options?: ExporterOptions): Promise<UnixFSEntry> +export async function exporter (path: string | CID, blockstore: ReadableStorage, options: BasicExporterOptions): Promise<UnixFSBasicEntry> export async function exporter (path: string | CID, blockstore: ReadableStorage, options: ExporterOptions = {}): Promise<UnixFSEntry> { const result = await last(walkPath(path, blockstore, options)) if (result == null) { - throw errCode(new Error(`Could not resolve ${path}`), 'ERR_NOT_FOUND') + throw new NotFoundError(`Could not resolve ${path}`) } return result @@ -465,6 +528,8 @@ export async function exporter (path: string | CID, blockstore: ReadableStorage, * // entries contains all children of the `Qmfoo/foo/bar` directory and it's children * ``` */ +export function recursive (path: string | CID, blockstore: ReadableStorage, options?: ExporterOptions): AsyncGenerator<UnixFSEntry, void, any> +export function recursive (path: string | CID, blockstore: ReadableStorage, options: BasicExporterOptions): AsyncGenerator<UnixFSBasicEntry, void, any> export async function * recursive (path: string | CID, blockstore: ReadableStorage, options: ExporterOptions = {}): AsyncGenerator<UnixFSEntry, void, any> { const node = await exporter(path, blockstore, options) diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/identity.ts b/packages/ipfs-unixfs-exporter/src/resolvers/identity.ts index 861e6dc7..0ce21223 100644 --- a/packages/ipfs-unixfs-exporter/src/resolvers/identity.ts +++ b/packages/ipfs-unixfs-exporter/src/resolvers/identity.ts @@ -1,6 +1,6 @@ -import errCode from 'err-code' import * as mh from 'multiformats/hashes/digest' import { CustomProgressEvent } from 'progress-events' +import { NotFoundError } from '../errors.js' import extractDataFromBlock from '../utils/extract-data-from-block.js' import validateOffsetAndLength from '../utils/validate-offset-and-length.js' import type { ExporterOptions, Resolver, ExportProgress } from '../index.js' @@ -28,7 +28,7 @@ const resolve: Resolver = async (cid, name, path, toResolve, resolve, depth, blockstore, options) => { if (toResolve.length > 0) { - throw errCode(new Error(`No link named ${path} found in raw node ${cid}`), 'ERR_NOT_FOUND') + throw new NotFoundError(`No link named ${path} found in raw node ${cid}`) } const buf = mh.decode(cid.multihash.bytes) diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/index.ts
b/packages/ipfs-unixfs-exporter/src/resolvers/index.ts index c314fa67..099b8444 100644 --- a/packages/ipfs-unixfs-exporter/src/resolvers/index.ts +++ b/packages/ipfs-unixfs-exporter/src/resolvers/index.ts @@ -1,10 +1,10 @@ import * as dagCbor from '@ipld/dag-cbor' import * as dagJson from '@ipld/dag-json' import * as dagPb from '@ipld/dag-pb' -import errCode from 'err-code' import * as json from 'multiformats/codecs/json' import * as raw from 'multiformats/codecs/raw' import { identity } from 'multiformats/hashes/identity' +import { NoResolverError } from '../errors.js' import dagCborResolver from './dag-cbor.js' import dagJsonResolver from './dag-json.js' import identifyResolver from './identity.js' @@ -26,7 +26,7 @@ const resolve: Resolve = async (cid, name, path, toResolve, depth, blockstore, o const resolver = resolvers[cid.code] if (resolver == null) { - throw errCode(new Error(`No resolver for code ${cid.code}`), 'ERR_NO_RESOLVER') + throw new NoResolverError(`No resolver for code ${cid.code}`) } return resolver(cid, name, path, toResolve, resolve, depth, blockstore, options) diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/raw.ts b/packages/ipfs-unixfs-exporter/src/resolvers/raw.ts index 7b5d2429..4082e388 100644 --- a/packages/ipfs-unixfs-exporter/src/resolvers/raw.ts +++ b/packages/ipfs-unixfs-exporter/src/resolvers/raw.ts @@ -1,5 +1,5 @@ -import errCode from 'err-code' import { CustomProgressEvent } from 'progress-events' +import { NotFoundError } from '../errors.js' import extractDataFromBlock from '../utils/extract-data-from-block.js' import validateOffsetAndLength from '../utils/validate-offset-and-length.js' import type { ExporterOptions, Resolver, ExportProgress } from '../index.js' @@ -27,7 +27,7 @@ const rawContent = (node: Uint8Array): ((options?: ExporterOptions) => AsyncGene const resolve: Resolver = async (cid, name, path, toResolve, resolve, depth, blockstore, options) => { if (toResolve.length > 0) { - throw errCode(new Error(`No link named ${path} found in raw node ${cid}`), 'ERR_NOT_FOUND') + throw new NotFoundError(`No link named ${path} found in raw node ${cid}`) } const block = await blockstore.get(cid, options) diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/directory.ts b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/directory.ts index afab2634..73dc5c06 100644 --- a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/directory.ts +++ b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/directory.ts @@ -3,10 +3,11 @@ import map from 'it-map' import parallel from 'it-parallel' import { pipe } from 'it-pipe' import { CustomProgressEvent } from 'progress-events' -import type { ExporterOptions, ExportWalk, UnixfsV1DirectoryContent, UnixfsV1Resolver } from '../../../index.js' +import { isBasicExporterOptions } from '../../../utils/is-basic-exporter-options.ts' +import type { BasicExporterOptions, ExporterOptions, ExportWalk, UnixFSBasicEntry, UnixfsV1Resolver } from '../../../index.js' const directoryContent: UnixfsV1Resolver = (cid, node, unixfs, path, resolve, depth, blockstore) => { - async function * yieldDirectoryContent (options: ExporterOptions = {}): UnixfsV1DirectoryContent { + async function * yieldDirectoryContent (options: ExporterOptions | BasicExporterOptions = {}): any { const offset = options.offset ?? 0 const length = options.length ?? 
node.Links.length const links = node.Links.slice(offset, length) @@ -21,6 +22,17 @@ const directoryContent: UnixfsV1Resolver = (cid, node, unixfs, path, resolve, de return async () => { const linkName = link.Name ?? '' const linkPath = `${path}/${linkName}` + + if (isBasicExporterOptions(options)) { + const basic: UnixFSBasicEntry = { + cid: link.Hash, + name: linkName, + path: linkPath + } + + return basic + } + const result = await resolve(link.Hash, linkName, linkPath, [], depth + 1, blockstore, options) return result.entry } diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/file.ts b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/file.ts index f65a449a..3d549048 100644 --- a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/file.ts +++ b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/file.ts @@ -1,16 +1,17 @@ import * as dagPb from '@ipld/dag-pb' -import errCode from 'err-code' import { UnixFS } from 'ipfs-unixfs' import map from 'it-map' import parallel from 'it-parallel' import { pipe } from 'it-pipe' -import { type Pushable, pushable } from 'it-pushable' +import { pushable } from 'it-pushable' import * as raw from 'multiformats/codecs/raw' import PQueue from 'p-queue' import { CustomProgressEvent } from 'progress-events' +import { NotUnixFSError, OverReadError, UnderReadError } from '../../../errors.js' import extractDataFromBlock from '../../../utils/extract-data-from-block.js' import validateOffsetAndLength from '../../../utils/validate-offset-and-length.js' import type { ExporterOptions, UnixfsV1FileContent, UnixfsV1Resolver, ReadableStorage, ExportProgress, ExportWalk } from '../../../index.js' +import type { Pushable } from 'it-pushable' async function walkDAG (blockstore: ReadableStorage, node: dagPb.PBNode | Uint8Array, queue: Pushable<Uint8Array>, streamPosition: bigint, start: bigint, end: bigint, options: ExporterOptions): Promise<void> { // a `raw` node @@ -23,7 +24,7 @@ async function walkDAG (blockstore: ReadableStorage, node: dagPb.PBNode | Uint8A } if (node.Data == null) { - throw errCode(new Error('no data in PBNode'), 'ERR_NOT_UNIXFS') + throw new NotUnixFSError('no data in PBNode') } let file: UnixFS @@ -31,7 +32,7 @@ async function walkDAG (blockstore: ReadableStorage, node: dagPb.PBNode | Uint8A try { file = UnixFS.unmarshal(node.Data) } catch (err: any) { - throw errCode(err, 'ERR_NOT_UNIXFS') + throw new NotUnixFSError(err.message) } // might be a unixfs `raw` node or have data on intermediate nodes @@ -47,7 +48,7 @@ async function walkDAG (blockstore: ReadableStorage, node: dagPb.PBNode | Uint8A const childOps: Array<{ link: dagPb.PBLink, blockStart: bigint }> = [] if (node.Links.length !== file.blockSizes.length) { - throw errCode(new Error('Inconsistent block sizes and dag links'), 'ERR_NOT_UNIXFS') + throw new NotUnixFSError('Inconsistent block sizes and dag links') } for (let i = 0; i < node.Links.length; i++) { @@ -98,7 +99,7 @@ async function walkDAG (blockstore: ReadableStorage, node: dagPb.PBNode | Uint8A child = block break default: - queue.end(errCode(new Error(`Unsupported codec: ${link.Hash.code}`), 'ERR_NOT_UNIXFS')) + queue.end(new NotUnixFSError(`Unsupported codec: ${link.Hash.code}`)) return } @@ -171,7 +172,7 @@ const fileContent: UnixfsV1Resolver = (cid, node, unixfs, path, resolve, depth, if (read > wanted) { queue.end() - throw new
OverReadError('Read too many bytes - the file size reported by the UnixFS data in the root node may be incorrect') } if (read === wanted) { @@ -188,7 +189,7 @@ } if (read < wanted) { - throw errCode(new Error('Traversed entire DAG but did not read enough bytes'), 'ERR_UNDER_READ') + throw new UnderReadError('Traversed entire DAG but did not read enough bytes') } } diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/hamt-sharded-directory.ts b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/hamt-sharded-directory.ts index 1c482c68..d191a688 100644 --- a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/hamt-sharded-directory.ts +++ b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/hamt-sharded-directory.ts @@ -1,14 +1,16 @@ -import { decode, type PBNode } from '@ipld/dag-pb' -import errCode from 'err-code' +import { decode } from '@ipld/dag-pb' import { UnixFS } from 'ipfs-unixfs' import map from 'it-map' import parallel from 'it-parallel' import { pipe } from 'it-pipe' import { CustomProgressEvent } from 'progress-events' -import type { ExporterOptions, Resolve, UnixfsV1DirectoryContent, UnixfsV1Resolver, ReadableStorage, ExportWalk } from '../../../index.js' +import { NotUnixFSError } from '../../../errors.js' +import { isBasicExporterOptions } from '../../../utils/is-basic-exporter-options.ts' +import type { ExporterOptions, Resolve, UnixfsV1DirectoryContent, UnixfsV1Resolver, ReadableStorage, ExportWalk, BasicExporterOptions, UnixFSBasicEntry } from '../../../index.js' +import type { PBNode } from '@ipld/dag-pb' const hamtShardedDirectoryContent: UnixfsV1Resolver = (cid, node, unixfs, path, resolve, depth, blockstore) => { - function yieldHamtDirectoryContent (options: ExporterOptions = {}): UnixfsV1DirectoryContent { + function yieldHamtDirectoryContent (options: ExporterOptions | BasicExporterOptions = {}): UnixfsV1DirectoryContent { options.onProgress?.(new CustomProgressEvent<ExportWalk>('unixfs:exporter:walk:hamt-sharded-directory', { cid })) @@ -19,22 +21,22 @@ const hamtShardedDirectoryContent: UnixfsV1Resolver = (cid, node, unixfs, path, return yieldHamtDirectoryContent } -async function * listDirectory (node: PBNode, path: string, resolve: Resolve, depth: number, blockstore: ReadableStorage, options: ExporterOptions): UnixfsV1DirectoryContent { +async function * listDirectory (node: PBNode, path: string, resolve: Resolve, depth: number, blockstore: ReadableStorage, options: ExporterOptions | BasicExporterOptions): any { const links = node.Links if (node.Data == null) { - throw errCode(new Error('no data in PBNode'), 'ERR_NOT_UNIXFS') + throw new NotUnixFSError('no data in PBNode') } let dir: UnixFS try { dir = UnixFS.unmarshal(node.Data) } catch (err: any) { - throw errCode(err, 'ERR_NOT_UNIXFS') + throw new NotUnixFSError(err.message) } if (dir.fanout == null) { - throw errCode(new Error('missing fanout'), 'ERR_NOT_UNIXFS') + throw new NotUnixFSError('missing fanout') } const padLength = (dir.fanout - 1n).toString(16).length @@ -46,9 +48,29 @@ async function * listDirectory (node: PBNode, path: string, resolve: Resolve, de const name = link.Name != null ? link.Name.substring(padLength) : null if (name != null && name !== '') { - const result = await resolve(link.Hash, name, `${path}/${name}`, [], depth + 1, blockstore, options) + const linkPath = `${path}/${name}` - return { entries: result.entry == null ?
[] : [result.entry] } + + if (isBasicExporterOptions(options)) { + const basic: UnixFSBasicEntry = { + cid: link.Hash, + name, + path: linkPath + } + + return { + entries: [ + basic + ] + } + } + + const result = await resolve(link.Hash, name, linkPath, [], depth + 1, blockstore, options) + + return { + entries: [ + result.entry + ].filter(Boolean) + } } else { // descend into subshard const block = await blockstore.get(link.Hash, options) @@ -58,7 +80,9 @@ async function * listDirectory (node: PBNode, path: string, resolve: Resolve, de cid: link.Hash })) - return { entries: listDirectory(node, path, resolve, depth, blockstore, options) } + return { + entries: listDirectory(node, path, resolve, depth, blockstore, options) + } } } }), diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/index.ts b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/index.ts index 148ac81f..4fd337f3 100644 --- a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/index.ts +++ b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/index.ts @@ -1,11 +1,13 @@ -import { decode, type PBNode } from '@ipld/dag-pb' -import errCode from 'err-code' +import { decode } from '@ipld/dag-pb' import { UnixFS } from 'ipfs-unixfs' +import { NotFoundError, NotUnixFSError } from '../../errors.js' import findShardCid from '../../utils/find-cid-in-shard.js' +import { isBasicExporterOptions } from '../../utils/is-basic-exporter-options.ts' import contentDirectory from './content/directory.js' import contentFile from './content/file.js' import contentHamtShardedDirectory from './content/hamt-sharded-directory.js' -import type { Resolver, UnixfsV1Resolver } from '../../index.js' +import type { Resolver, UnixFSBasicEntry, UnixfsV1Resolver } from '../../index.js' +import type { PBNode } from '@ipld/dag-pb' import type { CID } from 'multiformats/cid' const findLinkCid = (node: PBNode, name: string): CID | undefined => { @@ -29,6 +31,18 @@ const contentExporters: Record<string, UnixfsV1Resolver> = { // @ts-expect-error types are wrong const unixFsResolver: Resolver = async (cid, name, path, toResolve, resolve, depth, blockstore, options) => { + if (isBasicExporterOptions(options) && toResolve.length === 0) { + const basic: UnixFSBasicEntry = { + cid, + name, + path + } + + return { + entry: basic + } + } + const block = await blockstore.get(cid, options) const node = decode(block) let unixfs @@ -39,14 +53,14 @@ const unixFsResolver: Resolver = async (cid, name, path, toResolve, resolve, dep } if (node.Data == null) { - throw errCode(new Error('no data in PBNode'), 'ERR_NOT_UNIXFS') + throw new NotUnixFSError('no data in PBNode') } try { unixfs = UnixFS.unmarshal(node.Data) } catch (err: any) { // non-UnixFS dag-pb node? It could happen.
- throw errCode(err, 'ERR_NOT_UNIXFS') + throw new NotUnixFSError(err.message) } if (path == null) { @@ -64,7 +78,7 @@ const unixFsResolver: Resolver = async (cid, name, path, toResolve, resolve, dep } if (linkCid == null) { - throw errCode(new Error('file does not exist'), 'ERR_NOT_FOUND') + throw new NotFoundError('file does not exist') } // remove the path component we have resolved @@ -82,7 +96,7 @@ const unixFsResolver: Resolver = async (cid, name, path, toResolve, resolve, dep const content = contentExporters[unixfs.type](cid, node, unixfs, path, resolve, depth, blockstore) if (content == null) { - throw errCode(new Error('could not find content exporter'), 'ERR_NOT_FOUND') + throw new NotFoundError('could not find content exporter') } if (unixfs.isDirectory()) { diff --git a/packages/ipfs-unixfs-exporter/src/utils/find-cid-in-shard.ts b/packages/ipfs-unixfs-exporter/src/utils/find-cid-in-shard.ts index 13346bf7..d604ff28 100644 --- a/packages/ipfs-unixfs-exporter/src/utils/find-cid-in-shard.ts +++ b/packages/ipfs-unixfs-exporter/src/utils/find-cid-in-shard.ts @@ -1,9 +1,11 @@ -import { decode, type PBLink, type PBNode } from '@ipld/dag-pb' +import { decode } from '@ipld/dag-pb' import { murmur3128 } from '@multiformats/murmur3' -import errCode from 'err-code' -import { Bucket, type BucketPosition, createHAMT } from 'hamt-sharding' +import { Bucket, createHAMT } from 'hamt-sharding' import { UnixFS } from 'ipfs-unixfs' +import { NotUnixFSError } from '../errors.js' import type { ExporterOptions, ShardTraversalContext, ReadableStorage } from '../index.js' +import type { PBLink, PBNode } from '@ipld/dag-pb' +import type { BucketPosition } from 'hamt-sharding' import type { CID } from 'multiformats/cid' // FIXME: this is copy/pasted from ipfs-unixfs-importer/src/options.js @@ -66,21 +68,21 @@ const toBucketPath = (position: BucketPosition): Array<Bucket<boolean>> const findShardCid = async (node: PBNode, name: string, blockstore: ReadableStorage, context?: ShardTraversalContext, options?: ExporterOptions): Promise<CID | undefined> => { if (context == null) { if (node.Data == null) { - throw errCode(new Error('no data in PBNode'), 'ERR_NOT_UNIXFS') + throw new NotUnixFSError('no data in PBNode') } let dir: UnixFS try { dir = UnixFS.unmarshal(node.Data) } catch (err: any) { - throw errCode(err, 'ERR_NOT_UNIXFS') + throw new NotUnixFSError(err.message) } if (dir.type !== 'hamt-sharded-directory') { - throw errCode(new Error('not a HAMT'), 'ERR_NOT_UNIXFS') + throw new NotUnixFSError('not a HAMT') } if (dir.fanout == null) { - throw errCode(new Error('missing fanout'), 'ERR_NOT_UNIXFS') + throw new NotUnixFSError('missing fanout') } const rootBucket = createHAMT<boolean>({ diff --git a/packages/ipfs-unixfs-exporter/src/utils/is-basic-exporter-options.ts b/packages/ipfs-unixfs-exporter/src/utils/is-basic-exporter-options.ts new file mode 100644 index 00000000..95190ea5 --- /dev/null +++ b/packages/ipfs-unixfs-exporter/src/utils/is-basic-exporter-options.ts @@ -0,0 +1,5 @@ +import type { BasicExporterOptions } from '../index.js' + +export function isBasicExporterOptions (obj?: any): obj is BasicExporterOptions { + return obj?.extended === false +} diff --git a/packages/ipfs-unixfs-exporter/src/utils/resolve-object-path.ts b/packages/ipfs-unixfs-exporter/src/utils/resolve-object-path.ts index addb7066..b17f1ebc 100644 --- a/packages/ipfs-unixfs-exporter/src/utils/resolve-object-path.ts +++ b/packages/ipfs-unixfs-exporter/src/utils/resolve-object-path.ts @@ -1,5 +1,5 @@ -import errCode from 'err-code' import { CID } from
'multiformats/cid' +import { NoPropError } from '../errors.js' import type { ResolveResult } from '../index.js' export function resolveObjectPath (object: any, block: Uint8Array, cid: CID, name: string, path: string, toResolve: string[], depth: number): ResolveResult { @@ -41,7 +41,7 @@ export function resolveObjectPath (object: any, block: Uint8Array, cid: CID, nam subObject = subObject[prop] } else { // cannot resolve further - throw errCode(new Error(`No property named ${prop} found in node ${cid}`), 'ERR_NO_PROP') + throw new NoPropError(`No property named ${prop} found in node ${cid}`) } } diff --git a/packages/ipfs-unixfs-exporter/src/utils/validate-offset-and-length.ts b/packages/ipfs-unixfs-exporter/src/utils/validate-offset-and-length.ts index da6d9427..ca3e073a 100644 --- a/packages/ipfs-unixfs-exporter/src/utils/validate-offset-and-length.ts +++ b/packages/ipfs-unixfs-exporter/src/utils/validate-offset-and-length.ts @@ -1,4 +1,4 @@ -import errCode from 'err-code' +import { InvalidParametersError } from '../errors.js' const validateOffsetAndLength = (size: number | bigint, offset: number | bigint = 0, length: number | bigint = size): { start: bigint, end: bigint } => { const fileSize = BigInt(size) @@ -14,19 +14,19 @@ const validateOffsetAndLength = (size: number | bigint, offset: number | bigint } if (start < 0n) { - throw errCode(new Error('Offset must be greater than or equal to 0'), 'ERR_INVALID_PARAMS') + throw new InvalidParametersError('Offset must be greater than or equal to 0') } if (start > fileSize) { - throw errCode(new Error('Offset must be less than the file size'), 'ERR_INVALID_PARAMS') + throw new InvalidParametersError('Offset must be less than the file size') } if (end < 0n) { - throw errCode(new Error('Length must be greater than or equal to 0'), 'ERR_INVALID_PARAMS') + throw new InvalidParametersError('Length must be greater than or equal to 0') } if (end > fileSize) { - throw errCode(new Error('Length must be less than the file size'), 'ERR_INVALID_PARAMS') + throw new InvalidParametersError('Length must be less than the file size') } return { diff --git a/packages/ipfs-unixfs-exporter/test/exporter-sharded.spec.ts b/packages/ipfs-unixfs-exporter/test/exporter-sharded.spec.ts index 00a3f1a8..a6abbc85 100644 --- a/packages/ipfs-unixfs-exporter/test/exporter-sharded.spec.ts +++ b/packages/ipfs-unixfs-exporter/test/exporter-sharded.spec.ts @@ -4,7 +4,7 @@ import * as dagPb from '@ipld/dag-pb' import { expect } from 'aegir/chai' import { MemoryBlockstore } from 'blockstore-core' import { UnixFS } from 'ipfs-unixfs' -import { importer, type ImportCandidate } from 'ipfs-unixfs-importer' +import { importer } from 'ipfs-unixfs-importer' import all from 'it-all' import randomBytes from 'it-buffer-stream' import last from 'it-last' @@ -13,6 +13,7 @@ import { sha256 } from 'multiformats/hashes/sha2' import { concat as uint8ArrayConcat } from 'uint8arrays/concat' import { exporter, walkPath } from '../src/index.js' import asAsyncIterable from './helpers/as-async-iterable.js' +import type { ImportCandidate } from 'ipfs-unixfs-importer' const SHARD_SPLIT_THRESHOLD = 10 @@ -362,4 +363,88 @@ describe('exporter sharded', function () { content: file?.node }]).to.deep.equal(files) }) + + it('exports basic sharded directory', async () => { + const files: Record = {} + + // needs to result in a block that is larger than SHARD_SPLIT_THRESHOLD bytes + for (let i = 0; i < 100; i++) { + files[`file-${Math.random()}.txt`] = { + content: uint8ArrayConcat(await all(randomBytes(100))) + 
} + } + + const imported = await all(importer(Object.keys(files).map(path => ({ + path, + content: asAsyncIterable(files[path].content) + })), block, { + wrapWithDirectory: true, + shardSplitThresholdBytes: SHARD_SPLIT_THRESHOLD, + rawLeaves: false + })) + + const dirCid = imported.pop()?.cid + + if (dirCid == null) { + throw new Error('No directory CID found') + } + + const exported = await exporter(dirCid, block) + const dirFiles = await all(exported.content()) + + // delete shard contents + for (const entry of dirFiles) { + await block.delete(entry.cid) + } + + // list the contents again, this time just the basic version + const basicDirFiles = await all(exported.content({ + extended: false + })) + expect(basicDirFiles.length).to.equal(dirFiles.length) + + for (let i = 0; i < basicDirFiles.length; i++) { + const dirFile = basicDirFiles[i] + + expect(dirFile).to.have.property('name') + expect(dirFile).to.have.property('path') + expect(dirFile).to.have.property('cid') + + // should fail because we have deleted this block + await expect(exporter(dirFile.cid, block)).to.eventually.be.rejected() + } + }) + + it('exports basic file from sharded directory', async () => { + const files: Record = {} + + // needs to result in a block that is larger than SHARD_SPLIT_THRESHOLD bytes + for (let i = 0; i < 100; i++) { + files[`file-${Math.random()}.txt`] = { + content: uint8ArrayConcat(await all(randomBytes(100))) + } + } + + const imported = await all(importer(Object.keys(files).map(path => ({ + path, + content: asAsyncIterable(files[path].content) + })), block, { + wrapWithDirectory: true, + shardSplitThresholdBytes: SHARD_SPLIT_THRESHOLD, + rawLeaves: false + })) + + const file = imported[0] + const dir = imported[imported.length - 1] + + const basicfile = await exporter(`/ipfs/${dir.cid}/${file.path}`, block, { + extended: false + }) + + expect(basicfile).to.have.property('name', file.path) + expect(basicfile).to.have.property('path', `${dir.cid}/${file.path}`) + expect(basicfile).to.have.deep.property('cid', file.cid) + expect(basicfile).to.not.have.property('unixfs') + expect(basicfile).to.not.have.property('content') + }) }) diff --git a/packages/ipfs-unixfs-exporter/test/exporter.spec.ts b/packages/ipfs-unixfs-exporter/test/exporter.spec.ts index c1bce7d9..7d4de993 100644 --- a/packages/ipfs-unixfs-exporter/test/exporter.spec.ts +++ b/packages/ipfs-unixfs-exporter/test/exporter.spec.ts @@ -9,7 +9,7 @@ import delay from 'delay' import { UnixFS } from 'ipfs-unixfs' import { importer } from 'ipfs-unixfs-importer' import { fixedSize } from 'ipfs-unixfs-importer/chunker' -import { balanced, type FileLayout, flat, trickle } from 'ipfs-unixfs-importer/layout' +import { balanced, flat, trickle } from 'ipfs-unixfs-importer/layout' import all from 'it-all' import randomBytes from 'it-buffer-stream' import drain from 'it-drain' @@ -31,7 +31,9 @@ import { exporter, recursive } from '../src/index.js' import asAsyncIterable from './helpers/as-async-iterable.js' import type { PBNode } from '@ipld/dag-pb' import type { Blockstore } from 'interface-blockstore' +import type { UnixFSType } from 'ipfs-unixfs' import type { Chunker } from 'ipfs-unixfs-importer/chunker' +import type { FileLayout } from 'ipfs-unixfs-importer/layout' const ONE_MEG = Math.pow(1024, 2) @@ -45,7 +47,7 @@ describe('exporter', () => { smallFile = uint8ArrayConcat(await all(randomBytes(200))) }) - async function dagPut (options: { type?: string, content?: Uint8Array, links?: dagPb.PBLink[] } = {}): Promise<{ file: UnixFS, node: PBNode, 
cid: CID }> { + async function dagPut (options: { type?: UnixFSType, content?: Uint8Array, links?: dagPb.PBLink[] } = {}): Promise<{ file: UnixFS, node: PBNode, cid: CID }> { options.type = options.type ?? 'file' options.content = options.content ?? Uint8Array.from([0x01, 0x02, 0x03]) options.links = options.links ?? [] @@ -1603,4 +1605,136 @@ describe('exporter', () => { expect(actualInvocations).to.deep.equal(expectedInvocations) }) + + it('exports basic directory contents', async () => { + const files: Record = {} + + for (let i = 0; i < 10; i++) { + files[`file-${Math.random()}.txt`] = { + content: uint8ArrayConcat(await all(randomBytes(100))) + } + } + + const imported = await all(importer(Object.keys(files).map(path => ({ + path, + content: asAsyncIterable(files[path].content) + })), block, { + wrapWithDirectory: true, + rawLeaves: false + })) + + const dirCid = imported.pop()?.cid + + if (dirCid == null) { + throw new Error('No directory CID found') + } + + const exported = await exporter(dirCid, block) + const dirFiles = await all(exported.content()) + + // delete shard contents + for (const entry of dirFiles) { + await block.delete(entry.cid) + } + + // list the contents again, this time just the basic version + const basicDirFiles = await all(exported.content({ + extended: false + })) + expect(basicDirFiles.length).to.equal(dirFiles.length) + + for (let i = 0; i < basicDirFiles.length; i++) { + const dirFile = basicDirFiles[i] + + expect(dirFile).to.have.property('name') + expect(dirFile).to.have.property('path') + expect(dirFile).to.have.property('cid') + + // should fail because we have deleted this block + await expect(exporter(dirFile.cid, block)).to.eventually.be.rejected() + } + }) + + it('exports basic file', async () => { + const imported = await all(importer([{ + content: uint8ArrayFromString('hello') + }], block, { + rawLeaves: false + })) + + const regularFile = await exporter(imported[0].cid, block) + expect(regularFile).to.have.property('unixfs') + + const basicFile = await exporter(imported[0].cid, block, { + extended: false + }) + + expect(basicFile).to.have.property('name') + expect(basicFile).to.have.property('path') + expect(basicFile).to.have.property('cid') + expect(basicFile).to.not.have.property('unixfs') + }) + + it('exports basic directory', async () => { + const files: Record = {} + + for (let i = 0; i < 10; i++) { + files[`file-${Math.random()}.txt`] = { + content: uint8ArrayConcat(await all(randomBytes(100))) + } + } + + const imported = await all(importer(Object.keys(files).map(path => ({ + path, + content: asAsyncIterable(files[path].content) + })), block, { + wrapWithDirectory: true, + rawLeaves: false + })) + + const dirCid = imported.pop()?.cid + + if (dirCid == null) { + throw new Error('No directory CID found') + } + + const basicDir = await exporter(dirCid, block, { + extended: false + }) + + expect(basicDir).to.have.property('name') + expect(basicDir).to.have.property('path') + expect(basicDir).to.have.property('cid') + expect(basicDir).to.not.have.property('unixfs') + expect(basicDir).to.not.have.property('content') + }) + + it('exports basic file from directory', async () => { + const files: Record = { + 'file.txt': { + content: uint8ArrayConcat(await all(randomBytes(100))) + } + } + + const imported = await all(importer(Object.keys(files).map(path => ({ + path, + content: asAsyncIterable(files[path].content) + })), block, { + wrapWithDirectory: true, + rawLeaves: false + })) + + const file = imported[0] + const dir = imported[imported.length 
- 1] + + const basicfile = await exporter(`/ipfs/${dir.cid}/${file.path}`, block, { + extended: false + }) + + expect(basicfile).to.have.property('name', file.path) + expect(basicfile).to.have.property('path', `${dir.cid}/${file.path}`) + expect(basicfile).to.have.deep.property('cid', file.cid) + expect(basicfile).to.not.have.property('unixfs') + expect(basicfile).to.not.have.property('content') + }) }) diff --git a/packages/ipfs-unixfs-exporter/test/import-export-dir-sharding.spec.ts b/packages/ipfs-unixfs-exporter/test/import-export-dir-sharding.spec.ts index 2f4d5451..7140ea52 100644 --- a/packages/ipfs-unixfs-exporter/test/import-export-dir-sharding.spec.ts +++ b/packages/ipfs-unixfs-exporter/test/import-export-dir-sharding.spec.ts @@ -8,8 +8,9 @@ import last from 'it-last' import { concat as uint8ArrayConcat } from 'uint8arrays/concat' import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' import { toString as uint8ArrayToString } from 'uint8arrays/to-string' -import { exporter, type UnixFSDirectory, type UnixFSEntry } from '../src/index.js' +import { exporter } from '../src/index.js' import asAsyncIterable from './helpers/as-async-iterable.js' +import type { UnixFSDirectory, UnixFSEntry } from '../src/index.js' import type { CID } from 'multiformats/cid' describe('builder: directory sharding', () => { diff --git a/packages/ipfs-unixfs-exporter/test/import-export-nested-dir.spec.ts b/packages/ipfs-unixfs-exporter/test/import-export-nested-dir.spec.ts index e1ef084a..46dd856c 100644 --- a/packages/ipfs-unixfs-exporter/test/import-export-nested-dir.spec.ts +++ b/packages/ipfs-unixfs-exporter/test/import-export-nested-dir.spec.ts @@ -7,8 +7,9 @@ import all from 'it-all' import { concat as uint8ArrayConcat } from 'uint8arrays/concat' import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' import { toString as uint8ArrayToString } from 'uint8arrays/to-string' -import { exporter, type UnixFSEntry } from '../src/index.js' +import { exporter } from '../src/index.js' import asAsyncIterable from './helpers/as-async-iterable.js' +import type { UnixFSEntry } from '../src/index.js' import type { CID } from 'multiformats/cid' describe('import and export: directory', () => { @@ -113,7 +114,7 @@ function normalizeNode (node: { path?: string, cid: CID }): { path: string, mult } function byPath (a: { path: string }, b: { path: string }): number { - if (a.path > b.path) return -1 - if (a.path < b.path) return 1 + if (a.path > b.path) { return -1 } + if (a.path < b.path) { return 1 } return 0 } diff --git a/packages/ipfs-unixfs-exporter/test/import-export.spec.ts b/packages/ipfs-unixfs-exporter/test/import-export.spec.ts index de7e6479..82b8c24e 100644 --- a/packages/ipfs-unixfs-exporter/test/import-export.spec.ts +++ b/packages/ipfs-unixfs-exporter/test/import-export.spec.ts @@ -4,10 +4,12 @@ import { expect } from 'aegir/chai' import loadFixture from 'aegir/fixtures' import { MemoryBlockstore } from 'blockstore-core' -import { importer, type ImporterOptions } from 'ipfs-unixfs-importer' -import { flat, balanced, trickle, type FileLayout } from 'ipfs-unixfs-importer/layout' +import { importer } from 'ipfs-unixfs-importer' +import { flat, balanced, trickle } from 'ipfs-unixfs-importer/layout' import { exporter } from '../src/index.js' import asAsyncIterable from './helpers/as-async-iterable.js' +import type { ImporterOptions } from 'ipfs-unixfs-importer' +import type { FileLayout } from 'ipfs-unixfs-importer/layout' const bigFile = loadFixture(('test') + 
'/fixtures/1.2MiB.txt') diff --git a/packages/ipfs-unixfs-exporter/test/importer.spec.ts b/packages/ipfs-unixfs-exporter/test/importer.spec.ts index 8a354159..2684468b 100644 --- a/packages/ipfs-unixfs-exporter/test/importer.spec.ts +++ b/packages/ipfs-unixfs-exporter/test/importer.spec.ts @@ -4,13 +4,14 @@ import { decode } from '@ipld/dag-pb' import { expect } from 'aegir/chai' import loadFixture from 'aegir/fixtures' import { MemoryBlockstore } from 'blockstore-core' -import { type Mtime, UnixFS } from 'ipfs-unixfs' -import { importer, type ImporterOptions } from 'ipfs-unixfs-importer' +import { UnixFS } from 'ipfs-unixfs' +import { importer } from 'ipfs-unixfs-importer' import { fixedSize } from 'ipfs-unixfs-importer/chunker' -import { balanced, type FileLayout, flat, trickle } from 'ipfs-unixfs-importer/layout' +import { balanced, flat, trickle } from 'ipfs-unixfs-importer/layout' import all from 'it-all' import first from 'it-first' import last from 'it-last' +// @ts-expect-error https://github.com/schnittstabil/merge-options/pull/28 import extend from 'merge-options' import { base58btc } from 'multiformats/bases/base58' import { CID } from 'multiformats/cid' @@ -21,6 +22,9 @@ import { exporter, recursive } from '../src/index.js' import asAsyncIterable from './helpers/as-async-iterable.js' import collectLeafCids from './helpers/collect-leaf-cids.js' import type { Blockstore } from 'interface-blockstore' +import type { Mtime } from 'ipfs-unixfs' +import type { ImporterOptions } from 'ipfs-unixfs-importer' +import type { FileLayout } from 'ipfs-unixfs-importer/layout' const bigFile = loadFixture('test/fixtures/1.2MiB.txt') const smallFile = loadFixture('test/fixtures/200Bytes.txt') @@ -1074,7 +1078,7 @@ describe('configuration', () => { content: 'content' }], block, { /** @type {import('ipfs-unixfs-importer').DAGBuilder} */ - dagBuilder: async function * (source, block) { // eslint-disable-line require-await + dagBuilder: async function * (source, block) { yield async function () { return Promise.resolve({ cid, @@ -1085,7 +1089,7 @@ describe('configuration', () => { } }, /** @type {import('ipfs-unixfs-importer').TreeBuilder} */ - treeBuilder: async function * (source, block) { // eslint-disable-line require-await + treeBuilder: async function * (source, block) { builtTree = true yield * source } @@ -1106,7 +1110,7 @@ describe('configuration', () => { path: 'path', content: asAsyncIterable(uint8ArrayFromString('content')) }], block, { - chunkValidator: async function * (source) { // eslint-disable-line require-await + chunkValidator: async function * (source) { validated = true for await (const str of source) { @@ -1117,7 +1121,7 @@ describe('configuration', () => { } } }, - chunker: async function * (source) { // eslint-disable-line require-await + chunker: async function * (source) { chunked = true yield * source } diff --git a/packages/ipfs-unixfs-exporter/typedoc.json b/packages/ipfs-unixfs-exporter/typedoc.json index f599dc72..db0b0747 100644 --- a/packages/ipfs-unixfs-exporter/typedoc.json +++ b/packages/ipfs-unixfs-exporter/typedoc.json @@ -1,4 +1,5 @@ { + "readme": "none", "entryPoints": [ "./src/index.ts" ] diff --git a/packages/ipfs-unixfs-importer/CHANGELOG.md b/packages/ipfs-unixfs-importer/CHANGELOG.md index 99b97f89..98e3b825 100644 --- a/packages/ipfs-unixfs-importer/CHANGELOG.md +++ b/packages/ipfs-unixfs-importer/CHANGELOG.md @@ -1,3 +1,46 @@ +## 
[ipfs-unixfs-importer-v15.4.0](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-importer-15.3.4...ipfs-unixfs-importer-15.4.0) (2025-06-18) + +### Features + +* enable custom file/directory builders ([#413](https://github.com/ipfs/js-ipfs-unixfs/issues/413)) ([d06d9fe](https://github.com/ipfs/js-ipfs-unixfs/commit/d06d9fe5aa4c0e7f82f3265bf09b0db064d2b563)) + +## [ipfs-unixfs-importer-v15.3.4](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-importer-15.3.3...ipfs-unixfs-importer-15.3.4) (2025-06-18) + +### Documentation + +* convert examples to ts, run doc verifier ([#434](https://github.com/ipfs/js-ipfs-unixfs/issues/434)) ([95e0b47](https://github.com/ipfs/js-ipfs-unixfs/commit/95e0b47de62c57b29bd10d48503cef4f208caae1)) + +## [ipfs-unixfs-importer-v15.3.3](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-importer-15.3.2...ipfs-unixfs-importer-15.3.3) (2025-06-18) + +### Dependencies + +* bump aegir from 45.2.1 to 47.0.16 ([#431](https://github.com/ipfs/js-ipfs-unixfs/issues/431)) ([1fb2db3](https://github.com/ipfs/js-ipfs-unixfs/commit/1fb2db37f33674eb7a0e00aa88d5312a7644536d)) + +## [ipfs-unixfs-importer-v15.3.2](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-importer-15.3.1...ipfs-unixfs-importer-15.3.2) (2025-03-06) + +### Dependencies + +* **dev:** bump aegir from 44.1.4 to 45.0.8 ([#420](https://github.com/ipfs/js-ipfs-unixfs/issues/420)) ([6eb1064](https://github.com/ipfs/js-ipfs-unixfs/commit/6eb1064ceaf3bbbdadc639e9641f3d9fad8ab23b)) + +## [ipfs-unixfs-importer-v15.3.1](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-importer-15.3.0...ipfs-unixfs-importer-15.3.1) (2024-09-13) + +### Dependencies + +* bump interface-store from 5.1.8 to 6.0.0 ([#410](https://github.com/ipfs/js-ipfs-unixfs/issues/410)) ([e3939ec](https://github.com/ipfs/js-ipfs-unixfs/commit/e3939ec99dd5e1a66a00fc249e0bd242724c43d0)) +* **dev:** bump blockstore-core from 4.4.1 to 5.0.0 ([#411](https://github.com/ipfs/js-ipfs-unixfs/issues/411)) ([6663007](https://github.com/ipfs/js-ipfs-unixfs/commit/6663007b46fb18e31157bce7bba4bfcf7b0ef5b5)) + +## [ipfs-unixfs-importer-v15.3.0](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-importer-15.2.6...ipfs-unixfs-importer-15.3.0) (2024-09-13) + +### Features + +* add name property to errors ([#414](https://github.com/ipfs/js-ipfs-unixfs/issues/414)) ([70145c7](https://github.com/ipfs/js-ipfs-unixfs/commit/70145c7c1e78ede3c481100151a9491c639671be)) + +## [ipfs-unixfs-importer-v15.2.6](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-importer-15.2.5...ipfs-unixfs-importer-15.2.6) (2024-09-13) + +### Dependencies + +* **dev:** bump aegir from 42.2.11 to 44.1.1 ([#412](https://github.com/ipfs/js-ipfs-unixfs/issues/412)) ([f94d1ad](https://github.com/ipfs/js-ipfs-unixfs/commit/f94d1ad0a507a0b37dd601490bba22224192f5a3)) + ## ipfs-unixfs-importer [15.2.5](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-importer-15.2.4...ipfs-unixfs-importer-15.2.5) (2024-04-05) diff --git a/packages/ipfs-unixfs-importer/CODE_OF_CONDUCT.md b/packages/ipfs-unixfs-importer/CODE_OF_CONDUCT.md new file mode 100644 index 00000000..6b0fa54c --- /dev/null +++ b/packages/ipfs-unixfs-importer/CODE_OF_CONDUCT.md @@ -0,0 +1,3 @@ +# Contributor Code of Conduct + +This project follows the [`IPFS Community Code of Conduct`](https://github.com/ipfs/community/blob/master/code-of-conduct.md) diff --git a/packages/ipfs-unixfs-importer/LICENSE b/packages/ipfs-unixfs-importer/LICENSE deleted file mode 100644 index 20ce483c..00000000 
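The "add name property to errors" entry above (15.3.0) corresponds to the `err-code` removal that runs through this diff: the replacement classes in the new `src/errors.ts` further down carry both a `name` and the legacy `code`, so callers can migrate from string-code checks without breaking existing ones. A minimal consumer-side sketch, assuming only what this diff shows (`importFile` and the `export * from './errors.js'` added to the package entry point):

```TypeScript
import { MemoryBlockstore } from 'blockstore-core'
import { importFile } from 'ipfs-unixfs-importer'

try {
  const { cid } = await importFile({ content: Uint8Array.from([0, 1, 2]) }, new MemoryBlockstore())
  console.info('imported', cid.toString())
} catch (err: any) {
  // new-style check via the name property added in 15.3.0...
  if (err.name === 'InvalidParametersError') {
    // ...while the legacy string code survives for older call sites
    console.error(err.code) // 'ERR_INVALID_PARAMS'
  }
}
```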
--- a/packages/ipfs-unixfs-importer/LICENSE +++ /dev/null @@ -1,4 +0,0 @@ -This project is dual licensed under MIT and Apache-2.0. - -MIT: https://www.opensource.org/licenses/mit -Apache-2.0: https://www.apache.org/licenses/license-2.0 diff --git a/packages/ipfs-unixfs-importer/LICENSE-APACHE b/packages/ipfs-unixfs-importer/LICENSE-APACHE index 14478a3b..b09cd785 100644 --- a/packages/ipfs-unixfs-importer/LICENSE-APACHE +++ b/packages/ipfs-unixfs-importer/LICENSE-APACHE @@ -1,5 +1,201 @@ -Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ -http://www.apache.org/licenses/LICENSE-2.0 + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/ipfs-unixfs-importer/README.md b/packages/ipfs-unixfs-importer/README.md index c0f69b4a..05c5c174 100644 --- a/packages/ipfs-unixfs-importer/README.md +++ b/packages/ipfs-unixfs-importer/README.md @@ -37,7 +37,7 @@ Let's create a little directory to import: And write the importing logic: -```js +```TypeScript import { importer } from 'ipfs-unixfs-importer' import { MemoryBlockstore } from 'blockstore-core/memory' import * as fs from 'node:fs' @@ -61,7 +61,7 @@ for await (const entry of importer(source, blockstore)) { When run, metadata about DAGNodes in the created tree is printed until the root: -```js +``` { cid: CID, // see https://github.com/multiformats/js-cid path: 'tmp/foo/bar', diff --git a/packages/ipfs-unixfs-importer/package.json b/packages/ipfs-unixfs-importer/package.json index 48795894..ac50610b 100644 --- a/packages/ipfs-unixfs-importer/package.json +++ b/packages/ipfs-unixfs-importer/package.json @@ -1,6 +1,6 @@ { "name": "ipfs-unixfs-importer", - "version": "15.2.5", + "version": "15.4.0", "description": "JavaScript implementation of the UnixFs importer used by IPFS", "license": "Apache-2.0 OR MIT", "homepage": "https://github.com/ipfs/js-ipfs-unixfs/tree/main/packages/ipfs-unixfs-importer#readme", @@ -56,13 +56,6 @@ "import": "./dist/src/layout/index.js" } }, - "eslintConfig": { - "extends": "ipfs", - "parserOptions": { - "project": true, - "sourceType": "module" - } - }, "release": { "branches": [ "main" @@ -145,7 +138,15 @@ "@semantic-release/changelog", "@semantic-release/npm", "@semantic-release/github", - "@semantic-release/git" + [ + "@semantic-release/git", + { + "assets": [ + "CHANGELOG.md", + "package.json" + ] + } + ] ] }, "scripts": { @@ -157,30 +158,30 @@ "clean": "aegir clean", "lint": "aegir lint", "dep-check": "aegir dep-check", + "doc-check": "aegir doc-check", "release": "aegir release" }, "dependencies": { - "@ipld/dag-pb": "^4.1.2", + "@ipld/dag-pb": "^4.1.5", "@multiformats/murmur3": "^2.1.8", - "err-code": "^3.0.1", "hamt-sharding": "^3.0.6", - "interface-blockstore": "^5.3.0", - "interface-store": "^5.1.8", + "interface-blockstore": "^5.3.2", + "interface-store": "^6.0.3", "ipfs-unixfs": "^11.0.0", - "it-all": "^3.0.6", - "it-batch": "^3.0.6", - "it-first": "^3.0.6", - "it-parallel-batch": "^3.0.6", - "multiformats": "^13.2.3", + "it-all": "^3.0.9", + "it-batch": "^3.0.9", + 
"it-first": "^3.0.9", + "it-parallel-batch": "^3.0.9", + "multiformats": "^13.3.7", "progress-events": "^1.0.1", "rabin-wasm": "^0.1.5", "uint8arraylist": "^2.4.8", "uint8arrays": "^5.1.0" }, "devDependencies": { - "aegir": "^44.1.1", - "blockstore-core": "^4.4.1", - "it-last": "^3.0.6", + "aegir": "^47.0.16", + "blockstore-core": "^5.0.4", + "it-last": "^3.0.9", "wherearewe": "^2.0.1" }, "browser": { diff --git a/packages/ipfs-unixfs-importer/src/chunker/rabin.ts b/packages/ipfs-unixfs-importer/src/chunker/rabin.ts index a64defdb..d491f04e 100644 --- a/packages/ipfs-unixfs-importer/src/chunker/rabin.ts +++ b/packages/ipfs-unixfs-importer/src/chunker/rabin.ts @@ -1,7 +1,7 @@ -import errcode from 'err-code' // @ts-expect-error no types import { create } from 'rabin-wasm' import { Uint8ArrayList } from 'uint8arraylist' +import { InvalidAvgChunkSizeError, InvalidChunkSizeError, InvalidMinChunkSizeError } from '../errors.js' import type { Chunker } from './index.js' const DEFAULT_MIN_CHUNK_SIZE = 262144 @@ -54,15 +54,15 @@ export const rabin = (options: RabinOptions = {}): Chunker => { if (isInvalidChunkSizes) { if (options.avgChunkSize != null) { - throw errcode(new Error('please specify a valid average chunk size number'), 'ERR_INVALID_AVG_CHUNK_SIZE') + throw new InvalidAvgChunkSizeError('please specify a valid average chunk size number') } - throw errcode(new Error('please specify valid numbers for (min|max|avg)ChunkSize'), 'ERR_INVALID_CHUNK_SIZE') + throw new InvalidChunkSizeError('please specify valid numbers for (min|max|avg)ChunkSize') } // validate min/max/avg in the same way as go if (min < 16) { - throw errcode(new Error('rabin min must be greater than 16'), 'ERR_INVALID_MIN_CHUNK_SIZE') + throw new InvalidMinChunkSizeError('rabin min must be greater than 16') } if (max < min) { diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/buffer-importer.ts b/packages/ipfs-unixfs-importer/src/dag-builder/buffer-importer.ts index c52b7285..b4f033ae 100644 --- a/packages/ipfs-unixfs-importer/src/dag-builder/buffer-importer.ts +++ b/packages/ipfs-unixfs-importer/src/dag-builder/buffer-importer.ts @@ -2,8 +2,9 @@ import * as dagPb from '@ipld/dag-pb' import { UnixFS } from 'ipfs-unixfs' import * as raw from 'multiformats/codecs/raw' import { CustomProgressEvent } from 'progress-events' -import { persist, type PersistOptions } from '../utils/persist.js' +import { persist } from '../utils/persist.js' import type { BufferImporter } from '../index.js' +import type { PersistOptions } from '../utils/persist.js' import type { CID, Version } from 'multiformats/cid' import type { ProgressOptions, ProgressEvent } from 'progress-events' diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/dir.ts b/packages/ipfs-unixfs-importer/src/dag-builder/dir.ts index a29675f6..06996f46 100644 --- a/packages/ipfs-unixfs-importer/src/dag-builder/dir.ts +++ b/packages/ipfs-unixfs-importer/src/dag-builder/dir.ts @@ -9,7 +9,11 @@ export interface DirBuilderOptions { signal?: AbortSignal } -export const dirBuilder = async (dir: Directory, blockstore: WritableStorage, options: DirBuilderOptions): Promise => { +export interface DirBuilder { + (dir: Directory, blockstore: WritableStorage, options: DirBuilderOptions): Promise +} + +export const defaultDirBuilder: DirBuilder = async (dir: Directory, blockstore: WritableStorage, options: DirBuilderOptions): Promise => { const unixfs = new UnixFS({ type: 'directory', mtime: dir.mtime, diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/file.ts 
b/packages/ipfs-unixfs-importer/src/dag-builder/file.ts index 3c3b19af..db359606 100644 --- a/packages/ipfs-unixfs-importer/src/dag-builder/file.ts +++ b/packages/ipfs-unixfs-importer/src/dag-builder/file.ts @@ -1,4 +1,4 @@ -import { encode, type PBLink, type PBNode, prepare } from '@ipld/dag-pb' +import { encode, prepare } from '@ipld/dag-pb' import { UnixFS } from 'ipfs-unixfs' import parallelBatch from 'it-parallel-batch' import * as rawCodec from 'multiformats/codecs/raw' @@ -6,6 +6,7 @@ import { CustomProgressEvent } from 'progress-events' import { persist } from '../utils/persist.js' import type { BufferImporter, File, InProgressImportResult, WritableStorage, SingleBlockImportResult, ImporterProgressEvents } from '../index.js' import type { FileLayout, Reducer } from '../layout/index.js' +import type { PBLink, PBNode } from '@ipld/dag-pb' import type { CID, Version } from 'multiformats/cid' import type { ProgressOptions, ProgressEvent } from 'progress-events' @@ -29,7 +30,7 @@ async function * buildFileBatch (file: File, blockstore: WritableStorage, option } continue - } else if (count === 1 && (previous != null)) { + } else if (count === 1 && previous != null) { // we have the second block of a multiple block import so yield the first yield { ...previous, @@ -130,7 +131,7 @@ const reduce = (file: File, blockstore: WritableStorage, options: ReduceOptions) return true } - if ((leaf.unixfs != null) && (leaf.unixfs.data == null) && leaf.unixfs.fileSize() > 0n) { + if (leaf.unixfs != null && leaf.unixfs.data == null && leaf.unixfs.fileSize() > 0n) { return true } @@ -188,10 +189,17 @@ const reduce = (file: File, blockstore: WritableStorage, options: ReduceOptions) return reducer } +export interface FileBuilder { + (file: File, blockstore: WritableStorage, options: FileBuilderOptions): Promise +} + export interface FileBuilderOptions extends BuildFileBatchOptions, ReduceOptions { layout: FileLayout } -export const fileBuilder = async (file: File, block: WritableStorage, options: FileBuilderOptions): Promise => { - return options.layout(buildFileBatch(file, block, options), reduce(file, block, options)) +export const defaultFileBuilder: FileBuilder = async (file: File, block: WritableStorage, options: FileBuilderOptions): Promise => { + return options.layout( + buildFileBatch(file, block, options), + reduce(file, block, options) + ) } diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/index.ts b/packages/ipfs-unixfs-importer/src/dag-builder/index.ts index 1559b86a..475c3515 100644 --- a/packages/ipfs-unixfs-importer/src/dag-builder/index.ts +++ b/packages/ipfs-unixfs-importer/src/dag-builder/index.ts @@ -1,7 +1,9 @@ -import errCode from 'err-code' import { CustomProgressEvent } from 'progress-events' -import { dirBuilder, type DirBuilderOptions } from './dir.js' -import { fileBuilder, type FileBuilderOptions } from './file.js' +import { InvalidContentError } from '../errors.js' +import { defaultDirBuilder } from './dir.js' +import { defaultFileBuilder } from './file.js' +import type { DirBuilder, DirBuilderOptions } from './dir.js' +import type { FileBuilder, FileBuilderOptions } from './file.js' import type { ChunkValidator } from './validate-chunks.js' import type { Chunker } from '../chunker/index.js' import type { Directory, File, FileCandidate, ImportCandidate, ImporterProgressEvents, InProgressImportResult, WritableStorage } from '../index.js' @@ -43,25 +45,27 @@ function contentAsAsyncIterable (content: Uint8Array | AsyncIterable if (content instanceof Uint8Array) { return 
(async function * () { yield content - }()) + })() } else if (isIterable(content)) { return (async function * () { yield * content - }()) + })() } else if (isAsyncIterable(content)) { return content } } catch { - throw errCode(new Error('Content was invalid'), 'ERR_INVALID_CONTENT') + throw new InvalidContentError('Content was invalid') } - throw errCode(new Error('Content was invalid'), 'ERR_INVALID_CONTENT') + throw new InvalidContentError('Content was invalid') } export interface DagBuilderOptions extends FileBuilderOptions, DirBuilderOptions, ProgressOptions { chunker: Chunker chunkValidator: ChunkValidator wrapWithDirectory: boolean + dirBuilder?: DirBuilder + fileBuilder?: FileBuilder } export type ImporterSourceStream = AsyncIterable | Iterable @@ -107,6 +111,8 @@ export function defaultDagBuilder (options: DagBuilderOptions): DAGBuilder { originalPath } + const fileBuilder = options.fileBuilder ?? defaultFileBuilder + yield async () => fileBuilder(file, blockstore, options) } else if (entry.path != null) { const dir: Directory = { @@ -116,6 +122,8 @@ export function defaultDagBuilder (options: DagBuilderOptions): DAGBuilder { originalPath } + const dirBuilder = options.dirBuilder ?? defaultDirBuilder + yield async () => dirBuilder(dir, blockstore, options) } else { throw new Error('Import candidate must have content or path or both') diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/validate-chunks.ts b/packages/ipfs-unixfs-importer/src/dag-builder/validate-chunks.ts index d01e0628..c1e3ea01 100644 --- a/packages/ipfs-unixfs-importer/src/dag-builder/validate-chunks.ts +++ b/packages/ipfs-unixfs-importer/src/dag-builder/validate-chunks.ts @@ -1,5 +1,5 @@ -import errCode from 'err-code' import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' +import { InvalidContentError } from '../errors.js' export interface ChunkValidator { (source: AsyncIterable): AsyncIterable } @@ -7,7 +7,7 @@ export const defaultChunkValidator = (): ChunkValidator => { return async function * validateChunks (source) { for await (const content of source) { if (content.length === undefined) { - throw errCode(new Error('Content was invalid'), 'ERR_INVALID_CONTENT') + throw new InvalidContentError('Content was invalid') } if (typeof content === 'string' || content instanceof String) { @@ -17,7 +17,7 @@ export const defaultChunkValidator = (): ChunkValidator => { } else if (content instanceof Uint8Array) { yield content } else { - throw errCode(new Error('Content was invalid'), 'ERR_INVALID_CONTENT') + throw new InvalidContentError('Content was invalid') } } } diff --git a/packages/ipfs-unixfs-importer/src/dir-flat.ts b/packages/ipfs-unixfs-importer/src/dir-flat.ts index b649b430..d3548df4 100644 --- a/packages/ipfs-unixfs-importer/src/dir-flat.ts +++ b/packages/ipfs-unixfs-importer/src/dir-flat.ts @@ -1,8 +1,11 @@ -import { encode, type PBNode, prepare } from '@ipld/dag-pb' +import { encode, prepare } from '@ipld/dag-pb' import { UnixFS } from 'ipfs-unixfs' -import { Dir, CID_V0, CID_V1, type DirProps } from './dir.js' -import { persist, type PersistOptions } from './utils/persist.js' +import { Dir, CID_V0, CID_V1 } from './dir.js' +import { persist } from './utils/persist.js' +import type { DirProps } from './dir.js' import type { ImportResult, InProgressImportResult } from './index.js' +import type { PersistOptions } from './utils/persist.js' +import type { PBNode } from '@ipld/dag-pb' import type { Blockstore } from 'interface-blockstore' import type { CID } from 'multiformats/cid' 
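The `options.dirBuilder ?? defaultDirBuilder` and `options.fileBuilder ?? defaultFileBuilder` fallbacks just above are the whole mechanism behind the new importer options. A sketch of an instrumenting override in the style of the `custom-dag-builder-params.spec.ts` test added later in this diff (the deep `../src/dag-builder/*` imports mirror that in-repo test; the defaults are not re-exported from the package entry point):

```TypeScript
import { MemoryBlockstore } from 'blockstore-core'
import { defaultDirBuilder } from '../src/dag-builder/dir.js'
import { defaultFileBuilder } from '../src/dag-builder/file.js'
import { importer } from '../src/index.js'
import type { DirBuilder } from '../src/dag-builder/dir.js'
import type { FileBuilder } from '../src/dag-builder/file.js'

// delegate to the defaults, but record how many IPLD nodes of each kind get built
let dirs = 0
let files = 0

const countingDirBuilder: DirBuilder = async (dir, blockstore, options) => {
  dirs++
  return defaultDirBuilder(dir, blockstore, options)
}

const countingFileBuilder: FileBuilder = async (file, blockstore, options) => {
  files++
  return defaultFileBuilder(file, blockstore, options)
}

for await (const entry of importer([{
  path: './src/file.txt',
  content: Uint8Array.from([0x68, 0x69]) // 'hi'
}, {
  path: './src'
}], new MemoryBlockstore(), {
  dirBuilder: countingDirBuilder,
  fileBuilder: countingFileBuilder
})) {
  console.info('imported', entry.path, entry.cid.toString())
}

console.info({ dirs, files }) // e.g. { dirs: 1, files: 1 }
```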
@@ -35,11 +38,11 @@ export class DirFlat extends Dir { return this.childCount() } - onlyChild (): InProgressImportResult | Dir { + onlyChild (): InProgressImportResult | Dir | undefined { return this._children.values().next().value } - async * eachChildSeries (): AsyncGenerator<{ key: string, child: InProgressImportResult | Dir }, void, undefined> { + * eachChildSeries (): Generator<{ key: string, child: InProgressImportResult | Dir }, void, undefined> { for (const [key, child] of this._children.entries()) { yield { key, diff --git a/packages/ipfs-unixfs-importer/src/dir-sharded.ts b/packages/ipfs-unixfs-importer/src/dir-sharded.ts index 7bee99b0..4e65849c 100644 --- a/packages/ipfs-unixfs-importer/src/dir-sharded.ts +++ b/packages/ipfs-unixfs-importer/src/dir-sharded.ts @@ -1,10 +1,14 @@ -import { encode, type PBLink, prepare } from '@ipld/dag-pb' +import { encode, prepare } from '@ipld/dag-pb' import { murmur3128 } from '@multiformats/murmur3' -import { createHAMT, Bucket, type BucketChild } from 'hamt-sharding' +import { createHAMT, Bucket } from 'hamt-sharding' import { UnixFS } from 'ipfs-unixfs' -import { Dir, CID_V0, CID_V1, type DirProps } from './dir.js' -import { persist, type PersistOptions } from './utils/persist.js' +import { Dir, CID_V0, CID_V1 } from './dir.js' +import { persist } from './utils/persist.js' +import type { DirProps } from './dir.js' import type { ImportResult, InProgressImportResult } from './index.js' +import type { PersistOptions } from './utils/persist.js' +import type { PBLink } from '@ipld/dag-pb' +import type { BucketChild } from 'hamt-sharding' import type { Blockstore } from 'interface-blockstore' async function hamtHashFn (buf: Uint8Array): Promise { @@ -60,8 +64,8 @@ class DirSharded extends Dir { return this._bucket.onlyChild() } - async * eachChildSeries (): AsyncGenerator<{ key: string, child: InProgressImportResult | Dir }> { - for await (const { key, value } of this._bucket.eachLeafSeries()) { + * eachChildSeries (): Generator<{ key: string, child: InProgressImportResult | Dir }> { + for (const { key, value } of this._bucket.eachLeafSeries()) { yield { key, child: value diff --git a/packages/ipfs-unixfs-importer/src/dir.ts b/packages/ipfs-unixfs-importer/src/dir.ts index 3e1b2a4c..c13118ca 100644 --- a/packages/ipfs-unixfs-importer/src/dir.ts +++ b/packages/ipfs-unixfs-importer/src/dir.ts @@ -49,7 +49,7 @@ export abstract class Dir { abstract put (name: string, value: InProgressImportResult | Dir): Promise abstract get (name: string): Promise - abstract eachChildSeries (): AsyncIterable<{ key: string, child: InProgressImportResult | Dir }> + abstract eachChildSeries (): Iterable<{ key: string, child: InProgressImportResult | Dir }> abstract flush (blockstore: WritableStorage): AsyncGenerator abstract estimateNodeSize (): number abstract childCount (): number diff --git a/packages/ipfs-unixfs-importer/src/errors.ts b/packages/ipfs-unixfs-importer/src/errors.ts new file mode 100644 index 00000000..a8facda8 --- /dev/null +++ b/packages/ipfs-unixfs-importer/src/errors.ts @@ -0,0 +1,54 @@ +export class InvalidParametersError extends Error { + static name = 'InvalidParametersError' + static code = 'ERR_INVALID_PARAMS' + name = InvalidParametersError.name + code = InvalidParametersError.code + + constructor (message = 'Invalid parameters') { + super(message) + } +} + +export class InvalidAvgChunkSizeError extends Error { + static name = 'InvalidAvgChunkSizeError' + static code = 'ERR_INVALID_AVG_CHUNK_SIZE' + name = InvalidAvgChunkSizeError.name + code 
= InvalidAvgChunkSizeError.code + + constructor (message = 'Invalid avg chunk size') { + super(message) + } +} + +export class InvalidChunkSizeError extends Error { + static name = 'InvalidChunkSizeError' + static code = 'ERR_INVALID_CHUNK_SIZE' + name = InvalidChunkSizeError.name + code = InvalidChunkSizeError.code + + constructor (message = 'Invalid chunk size') { + super(message) + } +} + +export class InvalidMinChunkSizeError extends Error { + static name = 'InvalidMinChunkSizeError' + static code = 'ERR_INVALID_MIN_CHUNK_SIZE' + name = InvalidMinChunkSizeError.name + code = InvalidMinChunkSizeError.code + + constructor (message = 'Invalid min chunk size') { + super(message) + } +} + +export class InvalidContentError extends Error { + static name = 'InvalidContentError' + static code = 'ERR_INVALID_CONTENT' + name = InvalidContentError.name + code = InvalidContentError.code + + constructor (message = 'Invalid content') { + super(message) + } +} diff --git a/packages/ipfs-unixfs-importer/src/flat-to-shard.ts b/packages/ipfs-unixfs-importer/src/flat-to-shard.ts index f7e58959..fd028366 100644 --- a/packages/ipfs-unixfs-importer/src/flat-to-shard.ts +++ b/packages/ipfs-unixfs-importer/src/flat-to-shard.ts @@ -1,5 +1,6 @@ import { DirFlat } from './dir-flat.js' -import DirSharded, { type DirShardedOptions } from './dir-sharded.js' +import DirSharded from './dir-sharded.js' +import type { DirShardedOptions } from './dir-sharded.js' import type { Dir } from './dir.js' export async function flatToShard (child: Dir | null, dir: Dir, threshold: number, options: DirShardedOptions): Promise { @@ -43,7 +44,7 @@ async function convertToShard (oldDir: DirFlat, options: DirShardedOptions): Pro mode: oldDir.mode }, options) - for await (const { key, child } of oldDir.eachChildSeries()) { + for (const { key, child } of oldDir.eachChildSeries()) { await newDir.put(key, child) } diff --git a/packages/ipfs-unixfs-importer/src/index.ts b/packages/ipfs-unixfs-importer/src/index.ts index aa4c3243..2f00975f 100644 --- a/packages/ipfs-unixfs-importer/src/index.ts +++ b/packages/ipfs-unixfs-importer/src/index.ts @@ -14,7 +14,7 @@ * * And write the importing logic: * - * ```js + * ```TypeScript * import { importer } from 'ipfs-unixfs-importer' * import { MemoryBlockstore } from 'blockstore-core/memory' * import * as fs from 'node:fs' @@ -38,7 +38,7 @@ * * When run, metadata about DAGNodes in the created tree is printed until the root: * - * ```js + * ``` * { * cid: CID, // see https://github.com/multiformats/js-cid * path: 'tmp/foo/bar', @@ -62,23 +62,30 @@ * ``` */ -import errcode from 'err-code' import first from 'it-first' import parallelBatch from 'it-parallel-batch' import { fixedSize } from './chunker/fixed-size.js' -import { type BufferImportProgressEvents, defaultBufferImporter } from './dag-builder/buffer-importer.js' -import { type DAGBuilder, type DagBuilderProgressEvents, defaultDagBuilder } from './dag-builder/index.js' -import { type ChunkValidator, defaultChunkValidator } from './dag-builder/validate-chunks.js' -import { balanced, type FileLayout } from './layout/index.js' +import { defaultBufferImporter } from './dag-builder/buffer-importer.js' +import { defaultDagBuilder } from './dag-builder/index.js' +import { defaultChunkValidator } from './dag-builder/validate-chunks.js' +import { InvalidParametersError } from './errors.js' +import { balanced } from './layout/index.js' import { defaultTreeBuilder } from './tree-builder.js' import type { Chunker } from './chunker/index.js' -import type { 
ReducerProgressEvents } from './dag-builder/file.js' +import type { BufferImportProgressEvents } from './dag-builder/buffer-importer.js' +import type { DirBuilder } from './dag-builder/dir.js' +import type { FileBuilder, ReducerProgressEvents } from './dag-builder/file.js' +import type { DAGBuilder, DagBuilderProgressEvents } from './dag-builder/index.js' +import type { ChunkValidator } from './dag-builder/validate-chunks.js' +import type { FileLayout } from './layout/index.js' import type { Blockstore } from 'interface-blockstore' import type { AwaitIterable } from 'interface-store' import type { UnixFS, Mtime } from 'ipfs-unixfs' import type { CID, Version as CIDVersion } from 'multiformats/cid' import type { ProgressOptions } from 'progress-events' +export * from './errors.js' + export type ByteStream = AwaitIterable export type ImportContent = ByteStream | Uint8Array @@ -270,6 +277,20 @@ export interface ImporterOptions extends ProgressOptions * `Error` */ chunkValidator?: ChunkValidator + + /** + * This option can be used to override how a directory IPLD node is built. + * + * This function takes a `Directory` object and returns a `Promise` that resolves to an `InProgressImportResult`. + */ + dirBuilder?: DirBuilder + + /** + * This option can be used to override how a file IPLD node is built. + * + * This function takes a `File` object and returns a `Promise` that resolves to an `InProgressImportResult`. + */ + fileBuilder?: FileBuilder } export type ImportCandidateStream = AsyncIterable | Iterable @@ -336,7 +357,9 @@ export async function * importer (source: ImportCandidateStream, blockstore: Wri blockWriteConcurrency, reduceSingleLeafToSelf, cidVersion, - onProgress: options.onProgress + onProgress: options.onProgress, + dirBuilder: options.dirBuilder, + fileBuilder: options.fileBuilder }) const buildTree: TreeBuilder = options.treeBuilder ?? 
defaultTreeBuilder({ wrapWithDirectory, @@ -382,7 +405,7 @@ export async function importFile (content: FileCandidate, blockstore: WritableSt const result = await first(importer([content], blockstore, options)) if (result == null) { - throw errcode(new Error('Nothing imported'), 'ERR_INVALID_PARAMS') + throw new InvalidParametersError('Nothing imported') } return result @@ -413,7 +436,7 @@ export async function importDirectory (content: DirectoryCandidate, blockstore: const result = await first(importer([content], blockstore, options)) if (result == null) { - throw errcode(new Error('Nothing imported'), 'ERR_INVALID_PARAMS') + throw new InvalidParametersError('Nothing imported') } return result diff --git a/packages/ipfs-unixfs-importer/src/tree-builder.ts b/packages/ipfs-unixfs-importer/src/tree-builder.ts index 18e94eb0..9e9b4fcc 100644 --- a/packages/ipfs-unixfs-importer/src/tree-builder.ts +++ b/packages/ipfs-unixfs-importer/src/tree-builder.ts @@ -112,7 +112,7 @@ export function defaultTreeBuilder (options: TreeBuilderOptions): TreeBuilder { if (options.wrapWithDirectory || (singleRoot && tree.childCount() > 1)) { yield * flushAndYield(tree, block) } else { - for await (const unwrapped of tree.eachChildSeries()) { + for (const unwrapped of tree.eachChildSeries()) { if (unwrapped == null) { continue } diff --git a/packages/ipfs-unixfs-importer/test/custom-dag-builder-params.spec.ts b/packages/ipfs-unixfs-importer/test/custom-dag-builder-params.spec.ts new file mode 100644 index 00000000..605e26fc --- /dev/null +++ b/packages/ipfs-unixfs-importer/test/custom-dag-builder-params.spec.ts @@ -0,0 +1,40 @@ +import { expect } from 'aegir/chai' +import { MemoryBlockstore } from 'blockstore-core' +import { defaultDirBuilder } from '../src/dag-builder/dir.js' +import { defaultFileBuilder } from '../src/dag-builder/file.js' +import { importer } from '../src/index.js' +import type { DirBuilder } from '../src/dag-builder/dir.js' +import type { FileBuilder } from '../src/dag-builder/file.js' + +describe('CustomParamsDagBuilder', () => { + it('should build a dag with custom dir builder', async () => { + const counter = { dirCounter: 0, fileCounter: 0 } + const customDirBuilder: DirBuilder = async (...args) => { + counter.dirCounter++ + return defaultDirBuilder(...args) + } + + const customFileBuilder: FileBuilder = async (...args) => { + counter.fileCounter++ + return defaultFileBuilder(...args) + } + + const blockstore = new MemoryBlockstore() + const files = [] + for await (const file of importer([{ + path: './src/file.txt', + content: new Uint8Array( + 'hello world'.split('').map((char) => char.charCodeAt(0)) + ) + }, { + path: './src' + }], blockstore, { + dirBuilder: customDirBuilder, + fileBuilder: customFileBuilder + })) { + files.push(file) + } + + expect(counter.dirCounter).to.equal(1) + }) +}) diff --git a/packages/ipfs-unixfs-importer/test/hash-parity-with-go-ipfs.spec.ts b/packages/ipfs-unixfs-importer/test/hash-parity-with-go-ipfs.spec.ts index 04637910..0dda35f3 100644 --- a/packages/ipfs-unixfs-importer/test/hash-parity-with-go-ipfs.spec.ts +++ b/packages/ipfs-unixfs-importer/test/hash-parity-with-go-ipfs.spec.ts @@ -5,9 +5,11 @@ import { MemoryBlockstore } from 'blockstore-core' import first from 'it-first' import last from 'it-last' import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' -import { importer, type ImporterOptions } from '../src/index.js' -import { balanced, type FileLayout, flat, trickle } from '../src/layout/index.js' +import { importer } from 
'../src/index.js' +import { balanced, flat, trickle } from '../src/layout/index.js' import randomByteStream from './helpers/finite-pseudorandom-byte-stream.js' +import type { ImporterOptions } from '../src/index.js' +import type { FileLayout } from '../src/layout/index.js' const strategies: Record<'flat' | 'trickle' | 'balanced', FileLayout> = { flat: flat(), diff --git a/packages/ipfs-unixfs-importer/typedoc.json b/packages/ipfs-unixfs-importer/typedoc.json index 34037848..941072f4 100644 --- a/packages/ipfs-unixfs-importer/typedoc.json +++ b/packages/ipfs-unixfs-importer/typedoc.json @@ -1,4 +1,5 @@ { + "readme": "none", "entryPoints": [ "./src/index.ts", "./src/chunker/index.ts", diff --git a/packages/ipfs-unixfs/CHANGELOG.md b/packages/ipfs-unixfs/CHANGELOG.md index 4a3848f9..f5930d07 100644 --- a/packages/ipfs-unixfs/CHANGELOG.md +++ b/packages/ipfs-unixfs/CHANGELOG.md @@ -1,3 +1,39 @@ +## [ipfs-unixfs-v11.2.5](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-11.2.4...ipfs-unixfs-11.2.5) (2025-06-18) + +### Bug Fixes + +* constrain the unixfs type ([#435](https://github.com/ipfs/js-ipfs-unixfs/issues/435)) ([7663b87](https://github.com/ipfs/js-ipfs-unixfs/commit/7663b87ed2e3e8cd4da1484ca601638740ea0ae7)) + +## [ipfs-unixfs-v11.2.4](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-11.2.3...ipfs-unixfs-11.2.4) (2025-06-18) + +### Documentation + +* convert examples to ts, run doc verifier ([#434](https://github.com/ipfs/js-ipfs-unixfs/issues/434)) ([95e0b47](https://github.com/ipfs/js-ipfs-unixfs/commit/95e0b47de62c57b29bd10d48503cef4f208caae1)) + +## [ipfs-unixfs-v11.2.3](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-11.2.2...ipfs-unixfs-11.2.3) (2025-06-18) + +### Dependencies + +* bump aegir from 45.2.1 to 47.0.16 ([#431](https://github.com/ipfs/js-ipfs-unixfs/issues/431)) ([1fb2db3](https://github.com/ipfs/js-ipfs-unixfs/commit/1fb2db37f33674eb7a0e00aa88d5312a7644536d)) + +## [ipfs-unixfs-v11.2.2](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-11.2.1...ipfs-unixfs-11.2.2) (2025-06-16) + +### Bug Fixes + +* limit incoming hamt width ([#433](https://github.com/ipfs/js-ipfs-unixfs/issues/433)) ([8ca0144](https://github.com/ipfs/js-ipfs-unixfs/commit/8ca014420094be90b8bb765bb3f703a9ce7260b1)) + +## [ipfs-unixfs-v11.2.1](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-11.2.0...ipfs-unixfs-11.2.1) (2025-03-06) + +### Dependencies + +* **dev:** bump aegir from 44.1.4 to 45.0.8 ([#420](https://github.com/ipfs/js-ipfs-unixfs/issues/420)) ([6eb1064](https://github.com/ipfs/js-ipfs-unixfs/commit/6eb1064ceaf3bbbdadc639e9641f3d9fad8ab23b)) + +## [ipfs-unixfs-v11.2.0](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-11.1.5...ipfs-unixfs-11.2.0) (2024-09-13) + +### Features + +* add name property to errors ([#414](https://github.com/ipfs/js-ipfs-unixfs/issues/414)) ([70145c7](https://github.com/ipfs/js-ipfs-unixfs/commit/70145c7c1e78ede3c481100151a9491c639671be)) + ## [ipfs-unixfs-v11.1.5](https://github.com/ipfs/js-ipfs-unixfs/compare/ipfs-unixfs-11.1.4...ipfs-unixfs-11.1.5) (2024-09-13) ### Dependencies diff --git a/packages/ipfs-unixfs/CODE_OF_CONDUCT.md b/packages/ipfs-unixfs/CODE_OF_CONDUCT.md new file mode 100644 index 00000000..6b0fa54c --- /dev/null +++ b/packages/ipfs-unixfs/CODE_OF_CONDUCT.md @@ -0,0 +1,3 @@ +# Contributor Code of Conduct + +This project follows the [`IPFS Community Code of Conduct`](https://github.com/ipfs/community/blob/master/code-of-conduct.md) diff --git a/packages/ipfs-unixfs/LICENSE 
b/packages/ipfs-unixfs/LICENSE deleted file mode 100644 index 20ce483c..00000000 --- a/packages/ipfs-unixfs/LICENSE +++ /dev/null @@ -1,4 +0,0 @@ -This project is dual licensed under MIT and Apache-2.0. - -MIT: https://www.opensource.org/licenses/mit -Apache-2.0: https://www.apache.org/licenses/license-2.0 diff --git a/packages/ipfs-unixfs/LICENSE-APACHE b/packages/ipfs-unixfs/LICENSE-APACHE index 14478a3b..b09cd785 100644 --- a/packages/ipfs-unixfs/LICENSE-APACHE +++ b/packages/ipfs-unixfs/LICENSE-APACHE @@ -1,5 +1,201 @@ -Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ -http://www.apache.org/licenses/LICENSE-2.0 + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/ipfs-unixfs/README.md b/packages/ipfs-unixfs/README.md index 52198d1f..aa23bcb0 100644 --- a/packages/ipfs-unixfs/README.md +++ b/packages/ipfs-unixfs/README.md @@ -30,10 +30,12 @@ The UnixFS spec can be found in the [ipfs/specs repository](http://github.com/ip ## Example - Create a file composed of several blocks -```JavaScript +```TypeScript +import { UnixFS } from 'ipfs-unixfs' + const data = new UnixFS({ type: 'file' }) -data.addBlockSize(256) // add the size of each block -data.addBlockSize(256) +data.addBlockSize(256n) // add the size of each block +data.addBlockSize(256n) // ... ``` @@ -41,14 +43,20 @@ data.addBlockSize(256) Creating a directory that contains several files is achieved by creating a unixfs element that identifies a MerkleDAG node as a directory. The links of that MerkleDAG node are the files that are contained in this directory.
-```JavaScript +```TypeScript +import { UnixFS } from 'ipfs-unixfs' + const data = new UnixFS({ type: 'directory' }) ``` ## Example - Create an unixfs Data element -```JavaScript -const data = new UnixFS([options]) +```TypeScript +import { UnixFS } from 'ipfs-unixfs' + +const data = new UnixFS({ + // ...options +}) ``` `options` is an optional object argument that might include the following keys: @@ -67,34 +75,51 @@ const data = new UnixFS([options]) ## Example - Add and remove a block size to the block size list -```JavaScript -data.addBlockSize() +```TypeScript +import { UnixFS } from 'ipfs-unixfs' + +const data = new UnixFS({ type: 'file' }) +const sizeInBytes = 100n +data.addBlockSize(sizeInBytes) ``` -```JavaScript -data.removeBlockSize() +```TypeScript +import { UnixFS } from 'ipfs-unixfs' + +const data = new UnixFS({ type: 'file' }) + +const index = 0 +data.removeBlockSize(index) ``` ## Example - Get total fileSize -```JavaScript +```TypeScript +import { UnixFS } from 'ipfs-unixfs' + +const data = new UnixFS({ type: 'file' }) data.fileSize() // => size in bytes ``` ## Example - Marshal and unmarshal -```javascript +```TypeScript +import { UnixFS } from 'ipfs-unixfs' + +const data = new UnixFS({ type: 'file' }) const marshaled = data.marshal() -const unmarshaled = Unixfs.unmarshal(marshaled) +const unmarshaled = UnixFS.unmarshal(marshaled) ``` ## Example - Is this UnixFS entry a directory? -```JavaScript -const dir = new Data({ type: 'directory' }) +```TypeScript +import { UnixFS } from 'ipfs-unixfs' + +const dir = new UnixFS({ type: 'directory' }) dir.isDirectory() // true -const file = new Data({ type: 'file' }) +const file = new UnixFS({ type: 'file' }) file.isDirectory() // false ``` @@ -102,13 +127,15 @@ file.isDirectory() // false If no modification time has been set, no `mtime` property will be present on the `Data` instance: -```JavaScript -const file = new Data({ type: 'file' }) +```TypeScript +import { UnixFS } from 'ipfs-unixfs' + +const file = new UnixFS({ type: 'file' }) file.mtime // undefined Object.prototype.hasOwnProperty.call(file, 'mtime') // false -const dir = new Data({ type: 'dir', mtime: new Date() }) +const dir = new UnixFS({ type: 'directory', mtime: { secs: 5n } }) dir.mtime // { secs: Number, nsecs: Number } ``` diff --git a/packages/ipfs-unixfs/package.json b/packages/ipfs-unixfs/package.json index c344a864..fbace349 100644 --- a/packages/ipfs-unixfs/package.json +++ b/packages/ipfs-unixfs/package.json @@ -1,6 +1,6 @@ { "name": "ipfs-unixfs", - "version": "11.1.5", + "version": "11.2.5", "description": "JavaScript implementation of IPFS' unixfs (a Unix FileSystem representation on top of a MerkleDAG)", "license": "Apache-2.0 OR MIT", "homepage": "https://github.com/ipfs/js-ipfs-unixfs/tree/main/packages/ipfs-unixfs#readme", @@ -32,16 +32,6 @@ "import": "./dist/src/index.js" } }, - "eslintConfig": { - "extends": "ipfs", - "parserOptions": { - "project": true, - "sourceType": "module" - }, - "ignorePatterns": [ - "src/unixfs.d.ts" - ] - }, "release": { "branches": [ "main" @@ -124,7 +114,15 @@ "@semantic-release/changelog", "@semantic-release/npm", "@semantic-release/github", - "@semantic-release/git" + [ + "@semantic-release/git", + { + "assets": [ + "CHANGELOG.md", + "package.json" + ] + } + ] ] }, "scripts": { @@ -137,16 +135,16 @@ "clean": "aegir clean", "lint": "aegir lint", "dep-check": "aegir dep-check", + "doc-check": "aegir doc-check", "release": "aegir release" }, "dependencies": { - "err-code": "^3.0.1", "protons-runtime": "^5.5.0", 
"uint8arraylist": "^2.4.8" }, "devDependencies": { - "aegir": "^44.1.1", - "protons": "^7.6.0", + "aegir": "^47.0.16", + "protons": "^7.6.1", "uint8arrays": "^5.1.0" }, "browser": { diff --git a/packages/ipfs-unixfs/src/errors.ts b/packages/ipfs-unixfs/src/errors.ts new file mode 100644 index 00000000..6ac59fed --- /dev/null +++ b/packages/ipfs-unixfs/src/errors.ts @@ -0,0 +1,21 @@ +export class InvalidTypeError extends Error { + static name = 'InvalidTypeError' + static code = 'ERR_INVALID_TYPE' + name = InvalidTypeError.name + code = InvalidTypeError.code + + constructor (message = 'Invalid type') { + super(message) + } +} + +export class InvalidUnixFSMessageError extends Error { + static name = 'InvalidUnixFSMessageError' + static code = 'ERR_INVALID_MESSAGE' + name = InvalidUnixFSMessageError.name + code = InvalidUnixFSMessageError.code + + constructor (message = 'Invalid message') { + super(message) + } +} diff --git a/packages/ipfs-unixfs/src/index.ts b/packages/ipfs-unixfs/src/index.ts index b6263d64..04e03973 100644 --- a/packages/ipfs-unixfs/src/index.ts +++ b/packages/ipfs-unixfs/src/index.ts @@ -7,10 +7,12 @@ * * @example Create a file composed of several blocks * - * ```JavaScript + * ```TypeScript + * import { UnixFS } from 'ipfs-unixfs' + * * const data = new UnixFS({ type: 'file' }) - * data.addBlockSize(256) // add the size of each block - * data.addBlockSize(256) + * data.addBlockSize(256n) // add the size of each block + * data.addBlockSize(256n) * // ... * ``` * @@ -18,14 +20,20 @@ * * Creating a directory that contains several files is achieve by creating a unixfs element that identifies a MerkleDAG node as a directory. The links of that MerkleDAG node are the files that are contained in this directory. * - * ```JavaScript + * ```TypeScript + * import { UnixFS } from 'ipfs-unixfs' + * * const data = new UnixFS({ type: 'directory' }) * ``` * * @example Create an unixfs Data element * - * ```JavaScript - * const data = new UnixFS([options]) + * ```TypeScript + * import { UnixFS } from 'ipfs-unixfs' + * + * const data = new UnixFS({ + * // ...options + * }) * ``` * * `options` is an optional object argument that might include the following keys: @@ -44,34 +52,51 @@ * * @example Add and remove a block size to the block size list * - * ```JavaScript - * data.addBlockSize() + * ```TypeScript + * import { UnixFS } from 'ipfs-unixfs' + * + * const data = new UnixFS({ type: 'file' }) + * const sizeInBytes = 100n + * data.addBlockSize(sizeInBytes) * ``` * - * ```JavaScript - * data.removeBlockSize() + * ```TypeScript + * import { UnixFS } from 'ipfs-unixfs' + * + * const data = new UnixFS({ type: 'file' }) + * + * const index = 0 + * data.removeBlockSize(index) * ``` * * @example Get total fileSize * - * ```JavaScript + * ```TypeScript + * import { UnixFS } from 'ipfs-unixfs' + * + * const data = new UnixFS({ type: 'file' }) * data.fileSize() // => size in bytes * ``` * * @example Marshal and unmarshal * - * ```javascript + * ```TypeScript + * import { UnixFS } from 'ipfs-unixfs' + * + * const data = new UnixFS({ type: 'file' }) * const marshaled = data.marshal() - * const unmarshaled = Unixfs.unmarshal(marshaled) + * const unmarshaled = UnixFS.unmarshal(marshaled) * ``` * * @example Is this UnixFS entry a directory? 
* - * ```JavaScript - * const dir = new Data({ type: 'directory' }) + * ```TypeScript + * import { UnixFS } from 'ipfs-unixfs' + * + * const dir = new UnixFS({ type: 'directory' }) * dir.isDirectory() // true * - * const file = new Data({ type: 'file' }) + * const file = new UnixFS({ type: 'file' }) * file.isDirectory() // false * ``` * @@ -79,18 +104,20 @@ * * If no modification time has been set, no `mtime` property will be present on the `Data` instance: * - * ```JavaScript - * const file = new Data({ type: 'file' }) + * ```TypeScript + * import { UnixFS } from 'ipfs-unixfs' + * + * const file = new UnixFS({ type: 'file' }) * file.mtime // undefined * * Object.prototype.hasOwnProperty.call(file, 'mtime') // false * - * const dir = new Data({ type: 'dir', mtime: new Date() }) + * const dir = new UnixFS({ type: 'directory', mtime: { secs: 5n } }) * dir.mtime // { secs: Number, nsecs: Number } * ``` */ -import errcode from 'err-code' +import { InvalidTypeError, InvalidUnixFSMessageError } from './errors.js' import { Data as PBData } from './unixfs.js' export interface Mtime { @@ -100,7 +127,9 @@ } export type MtimeLike = Mtime | { Seconds: number, FractionalNanoseconds?: number } | [number, number] | Date -const types: Record<string, string> = { +export type UnixFSType = 'raw' | 'directory' | 'file' | 'metadata' | 'symlink' | 'hamt-sharded-directory' + +const types: Record<string, UnixFSType> = { Raw: 'raw', Directory: 'directory', File: 'file', @@ -117,8 +146,11 @@ const dirTypes = [ const DEFAULT_FILE_MODE = parseInt('0644', 8) const DEFAULT_DIRECTORY_MODE = parseInt('0755', 8) +// https://github.com/ipfs/boxo/blob/364c5040ec91ec8e2a61446e9921e9225704c34d/ipld/unixfs/hamt/hamt.go#L778 +const MAX_FANOUT = BigInt(1 << 10) + export interface UnixFSOptions { - type?: string + type?: UnixFSType data?: Uint8Array blockSizes?: bigint[] hashType?: bigint @@ -134,6 +166,10 @@ class UnixFS { static unmarshal (marshaled: Uint8Array): UnixFS { const message = PBData.decode(marshaled) + if (message.fanout != null && message.fanout > MAX_FANOUT) { + throw new InvalidUnixFSMessageError(`Fanout size was too large - ${message.fanout} > ${MAX_FANOUT}`) + } + const data = new UnixFS({ type: types[message.Type != null ? message.Type.toString() : 'File'], data: message.Data, @@ -178,7 +214,7 @@ } = options if (type != null && !Object.values(types).includes(type)) { - throw errcode(new Error('Type: ' + type + ' is not valid'), 'ERR_INVALID_TYPE') + throw new InvalidTypeError('Type: ' + type + ' is not valid') } this.type = type ??
'file' @@ -250,7 +286,7 @@ class UnixFS { case 'symlink': type = PBData.DataType.Symlink; break case 'hamt-sharded-directory': type = PBData.DataType.HAMTShard; break default: - throw errcode(new Error(`Type: ${type} is not valid`), 'ERR_INVALID_TYPE') + throw new InvalidTypeError(`Type: ${type} is not valid`) } let data = this.data @@ -296,3 +332,4 @@ class UnixFS { } export { UnixFS } +export * from './errors.js' diff --git a/packages/ipfs-unixfs/src/unixfs.ts b/packages/ipfs-unixfs/src/unixfs.ts index ceed9332..d6feac8d 100644 --- a/packages/ipfs-unixfs/src/unixfs.ts +++ b/packages/ipfs-unixfs/src/unixfs.ts @@ -1,9 +1,3 @@ -/* eslint-disable import/export */ -/* eslint-disable complexity */ -/* eslint-disable @typescript-eslint/no-namespace */ -/* eslint-disable @typescript-eslint/no-unnecessary-boolean-literal-compare */ -/* eslint-disable @typescript-eslint/no-empty-interface */ - import { enumeration, encodeMessage, decodeMessage, message } from 'protons-runtime' import type { Codec } from 'protons-runtime' import type { Uint8ArrayList } from 'uint8arraylist' diff --git a/packages/ipfs-unixfs/test/unixfs-format.spec.ts b/packages/ipfs-unixfs/test/unixfs-format.spec.ts index a15d0f2e..0aba16d3 100644 --- a/packages/ipfs-unixfs/test/unixfs-format.spec.ts +++ b/packages/ipfs-unixfs/test/unixfs-format.spec.ts @@ -3,8 +3,9 @@ import { expect } from 'aegir/chai' import loadFixture from 'aegir/fixtures' import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' -import { type Mtime, UnixFS } from '../src/index.js' +import { UnixFS } from '../src/index.js' import * as Pb from '../src/unixfs.js' +import type { Mtime } from '../src/index.js' const PBData = Pb.Data const raw = loadFixture('test/fixtures/raw.unixfs') @@ -369,6 +370,7 @@ describe('unixfs-format', () => { try { // eslint-disable-next-line no-new new UnixFS({ + // @ts-expect-error invalid type type: 'bananas' }) } catch (err: any) { @@ -431,4 +433,17 @@ describe('unixfs-format', () => { expect(marshaled).to.deep.equal(Uint8Array.from([0x08, 0x02, 0x18, 0x00])) }) + + it('should limit maximum fanout size', () => { + const data = new UnixFS({ + type: 'hamt-sharded-directory', + fanout: 1025n + }) + const marshaled = data.marshal() + + expect(() => { + UnixFS.unmarshal(marshaled) + }).to.throw() + .with.property('name', 'InvalidUnixFSMessageError') + }) }) diff --git a/packages/ipfs-unixfs/typedoc.json b/packages/ipfs-unixfs/typedoc.json index f599dc72..db0b0747 100644 --- a/packages/ipfs-unixfs/typedoc.json +++ b/packages/ipfs-unixfs/typedoc.json @@ -1,4 +1,5 @@ { + "readme": "none", "entryPoints": [ "./src/index.ts" ] diff --git a/typedoc.json b/typedoc.json index 481c04ce..a7a0c07b 100644 --- a/typedoc.json +++ b/typedoc.json @@ -1,7 +1,12 @@ { "$schema": "https://typedoc.org/schema.json", - "name": "UnixFS", + "name": "js-ipfs-unixfs", + "readme": "./README.md", + "headings": { + "readme": false, + "document": false + }, "exclude": [ - "packages/interop" + "benchmarks/**/*" ] }
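Taken together, the importer hunks above make DAG construction pluggable: the new `dirBuilder` and `fileBuilder` options are passed straight through to the default DAG builder. Below is a minimal sketch of wrapping the default builders, modelled on the new `custom-dag-builder-params.spec.ts`. Like that spec, it reaches `defaultDirBuilder` and `defaultFileBuilder` through the package source tree; this diff does not show them being re-exported from the package root, so treat the import paths as test-local assumptions.

```TypeScript
import { MemoryBlockstore } from 'blockstore-core'
import { defaultDirBuilder } from '../src/dag-builder/dir.js'
import { defaultFileBuilder } from '../src/dag-builder/file.js'
import { importer } from '../src/index.js'
import type { DirBuilder } from '../src/dag-builder/dir.js'
import type { FileBuilder } from '../src/dag-builder/file.js'

// Delegate to the default builders unchanged, but record how often each
// runs - a real override could add caching or custom metadata instead
let dirNodes = 0
let fileNodes = 0

const countingDirBuilder: DirBuilder = async (...args) => {
  dirNodes++
  return defaultDirBuilder(...args)
}

const countingFileBuilder: FileBuilder = async (...args) => {
  fileNodes++
  return defaultFileBuilder(...args)
}

const blockstore = new MemoryBlockstore()

for await (const entry of importer([{
  path: './src/file.txt',
  content: Uint8Array.from([0, 1, 2, 3])
}, {
  path: './src'
}], blockstore, {
  dirBuilder: countingDirBuilder,
  fileBuilder: countingFileBuilder
})) {
  console.info(entry.path, entry.cid.toString())
}

console.info({ dirNodes, fileNodes }) // e.g. { dirNodes: 1, fileNodes: 1 } for this input
```

Similarly, the new `MAX_FANOUT` guard in `UnixFS.unmarshal` and the named error classes from `src/errors.ts` can be exercised the same way the added `should limit maximum fanout size` test does. A sketch assuming only what the diff shows (`fanout` as a constructor option, and `name`/`code` properties on the thrown error):

```TypeScript
import { UnixFS } from 'ipfs-unixfs'

// 1025 exceeds MAX_FANOUT (1 << 10 = 1024), so unmarshal rejects the message
const dir = new UnixFS({ type: 'hamt-sharded-directory', fanout: 1025n })
const marshaled = dir.marshal()

try {
  UnixFS.unmarshal(marshaled)
} catch (err: any) {
  console.info(err.name) // 'InvalidUnixFSMessageError'
  console.info(err.code) // 'ERR_INVALID_MESSAGE' - kept for err-code compatibility
}
```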