diff --git a/README.md b/README.md index b5d54aa..ff7117a 100644 --- a/README.md +++ b/README.md @@ -271,26 +271,28 @@ The ABNF specification for onchfs URIs, as defined in [RFC 5234](https://datatra URI = "onchfs://" [ authority "/" ] cid [ "/" path ] [ "?" query ] [ "#" fragment ] -; while the authority is blockchain-specific as different -; blockchains will have different strategies to identify -; its resources with URI, this provides a generic pattern -; for the authority as reference: +; the authority is based on the CAIP-2 & CAIP-10 specifications +; for identifying blockchains & blockchain accounts; terms have +; been adapted to fit to onchfs context -generic-authority = [ contract-address "." ] blockchain-name - [ ":" chainid ] +generic-authority = namespace [ ":" chain-id ] + [ ":" contract-address] ; this defines how the authority is constructed for the ; ethereum and tezos blockchains, currently supported authority = authority-tez / authority-eth -authority-tez = [ tez-contract-addr "." ] - ( "tezos" / "tez" / "xtz" ) - [ ":" ( "mainnet" / "ghostnet" ) ] +authority-tez = "tezos" + [ ":" tez-chainid ] + [ ":" tez-contract-addr ] -authority-eth = [ eth-contract-addr "." ] - ( "ethereum" / "eth" ) +tez-chainid = "NetXdQprcVkpaWU" ; mainnet + / "NetXnHfVqm9iesp" ; ghostnet + +authority-eth = "eip155" [ ":" eth-chainid ] + [ ":" eth-contract-addr ] eth-chainid = 1*DIGIT ; ex: 1=mainnet, 5=goerli, 6=arbitrum diff --git a/doc/README.md b/doc/README.md index aaba2fa..0c6c2c2 100644 --- a/doc/README.md +++ b/doc/README.md @@ -1,6 +1,6 @@ # Website -This website is built using [Docusaurus 2](https://docusaurus.io/), a modern static website generator. +This website is built using [Docusaurus](https://docusaurus.io/), a modern static website generator. 
### Installation diff --git a/doc/babel.config.js b/doc/babel.config.js index e00595d..cf4260b 100644 --- a/doc/babel.config.js +++ b/doc/babel.config.js @@ -1,3 +1,3 @@ module.exports = { - presets: [require.resolve('@docusaurus/core/lib/babel/preset')], -}; + presets: [require.resolve("@docusaurus/core/lib/babel/preset")], +} diff --git a/doc/docs/concepts/http-proxy.md b/doc/docs/concepts/http-proxy.md index cea6ea4..23fef50 100644 --- a/doc/docs/concepts/http-proxy.md +++ b/doc/docs/concepts/http-proxy.md @@ -58,7 +58,7 @@ https://proxy-url.com/ ━━━━━┻━━━━━ ``` -:::info URI<->Proxy URL compatibility +:::info[URI Proxy to URL compatibility] Because of the design of its format, the _schema-specific part_ is fully compatible with URLs. Onchfs URIs also support URL path, search and fragment components, enabling as many use-cases as there are on the web for HTML documents. [Read more about the URI specification here](./uris) ::: diff --git a/doc/docs/concepts/uris.md b/doc/docs/concepts/uris.md index 0f45976..7239438 100644 --- a/doc/docs/concepts/uris.md +++ b/doc/docs/concepts/uris.md @@ -10,6 +10,12 @@ Simply put, URIs are constructed as following: onchfs://[ /][][? ][# ] ``` +- `authority`: the host of the resource (blockchain/contract). Optional and left out most often as implied by the context. Aligns on [CAIP-2](https://github.com/ChainAgnostic/CAIPs/blob/main/CAIPs/caip-2.md) & [CAIP-10](https://github.com/ChainAgnostic/CAIPs/blob/main/CAIPs/caip-10.md) specs for identifying blockchains & accounts. +- `cid`: unique identifier of the root of the resource +- `path`: optional path inside the folder if the resource identified by the cid is a directory +- `query`: some query parameters to pass to the document to load +- `fragment`: anchor/arbitrary data to pass to the document to load + See at the bottom of the document for the [ABNF definition of onchfs uris](#abnf). 
## Outside the protocol @@ -64,7 +70,7 @@ Smart contracts provide a generic `get_inode_at(cid, paths[])` view which can re onchfs://6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840 ``` -Point file object at `6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840` , where the context in which the URI was found defines the blockchain/network (for instance if a smart contract references this address, the resources will be found on the main file object smart contract of the ethereum mainnet) +Point file object at `6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840` , where the context in which the URI was found defines the blockchain/network (for instance if an ethereum smart contract references this address, the resources will be found on the main file object smart contract of the ethereum mainnet) --- @@ -77,15 +83,15 @@ Point inode folder at `6db0...6840` , in its `folder` directory, in which `index --- ``` -onchfs://ethereum:5/6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840 +onchfs://eip155:5/6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840 ``` -Point file object at `6db0...6840` on the `ethereum` blockchain, goerli (`:5`) chain +Point file object at `6db0...6840` on the ethereum (`eip155`) blockchain, goerli (`:5`) chain --- ``` -onchfs://68b75b4e8439a7099e53045bea850b3266e95906.eth/6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840 +onchfs://eip155:1:68b75b4e8439a7099e53045bea850b3266e95906/6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840 ``` Point file object `6db0...6840` on the contract `68b75b4e8439a7099e53045bea850b3266e95906` of the ethereum mainnet @@ -98,26 +104,28 @@ The ABNF specification for onchfs URIs, as defined in [RFC 5234](https://datatra URI = "onchfs://" [ authority "/" ] cid [ "/" path ] [ "?" 
query ] [ "#" fragment ] -; while the authority is blockchain-specific as different -; blockchains will have different strategies to identify -; its resources with URI, this provides a generic pattern -; for the authority as reference: +; the authority is based on the CAIP-2 & CAIP-10 specifications +; for identifying blockchains & blockchain accounts; terms have +; been adapted to fit to onchfs context -generic-authority = [ contract-address "." ] blockchain-name - [ ":" chainid ] +generic-authority = namespace [ ":" chain-id ] + [ ":" contract-address] ; this defines how the authority is constructed for the ; ethereum and tezos blockchains, currently supported authority = authority-tez / authority-eth -authority-tez = [ tez-contract-addr "." ] - ( "tezos" / "tez" / "xtz" ) - [ ":" ( "mainnet" / "ghostnet" ) ] +authority-tez = "tezos" + [ ":" tez-chainid ] + [ ":" tez-contract-addr ] + +tez-chainid = "NetXdQprcVkpaWU" ; mainnet + / "NetXnHfVqm9iesp" ; ghostnet -authority-eth = [ eth-contract-addr "." ] - ( "ethereum" / "eth" ) +authority-eth = "eip155" [ ":" eth-chainid ] + [ ":" eth-contract-addr ] eth-chainid = 1*DIGIT ; ex: 1=mainnet, 5=goerli, 6=arbitrum diff --git a/doc/docs/use-cases/generative-art.md b/doc/docs/use-cases/generative-art.md index e3e34d8..be60af0 100644 --- a/doc/docs/use-cases/generative-art.md +++ b/doc/docs/use-cases/generative-art.md @@ -121,7 +121,7 @@ While previous projects leave an opiniated footprint on how code data is handled ├── style.css ├── main.js └── libs/ - ├── fxhash.js + ├── fxhash.min.js ├── colors.js └── processing.min.js ``` @@ -131,7 +131,7 @@ While previous projects leave an opiniated footprint on how code data is handled ```html - + @@ -157,7 +157,7 @@ Onchfs also handles libraries elegantly, and so naturally by its design. Looking ├── style.css -> 0xaeaeaed2... ├── main.js -> 0xa2a2a2a9... └── libs/ -> 0xd5d5d5d5... - ├── fxhash.js -> 0xc6c6c6c6... + ├── fxhash.min.js -> 0xc6c6c6c6... ├── colors.js -> 0xabcdef12... 
└── processing.min.js -> 0x01010101... @@ -169,7 +169,7 @@ inscriptions: + ... + DIRECTORY libs (0xd5d5d5d5...) { - "fxhash.js": 0xc6c6c6c6..., + "fxhash.min.js": 0xc6c6c6c6..., "colors.js": 0xabcdef12..., "processing.min.js": 0x01010101..., } @@ -191,7 +191,7 @@ inscriptions: + ... + DIRECTORY libs (0xd5d5d5d5...) { - "fxhash.js": 0xc6c6c6c6..., + "fxhash.min.js": 0xc6c6c6c6..., "colors.js": 0xabcdef12..., "processing.min.js": 0x01010101..., <- points to existing resource } diff --git a/doc/docusaurus.config.js b/doc/docusaurus.config.js deleted file mode 100644 index 1920dd2..0000000 --- a/doc/docusaurus.config.js +++ /dev/null @@ -1,154 +0,0 @@ -// @ts-check -// Note: type annotations allow type checking and IDEs autocompletion - -const lightCodeTheme = require("prism-react-renderer/themes/dracula") -const darkCodeTheme = require("prism-react-renderer/themes/dracula") - -/** @type {import('@docusaurus/types').Config} */ -const config = { - title: "ONCHFS — On-Chain for Http File System", - tagline: "A file system for blockchains.", - favicon: "img/favicon.ico", - - // Set the production url of your site here - url: "https://onchfs.com", - // Set the // pathname under which your site is served - // For GitHub pages deployment, it is often '//' - baseUrl: "/", - - // GitHub pages deployment config. - // If you aren't using GitHub pages, you don't need these. - organizationName: "fxhash", - projectName: "onchfs", - - onBrokenLinks: "throw", - onBrokenMarkdownLinks: "warn", - - // Even if you don't use internalization, you can use this field to set useful - // metadata like html lang. For example, if your site is Chinese, you may want - // to replace "en" with "zh-Hans". 
- i18n: { - defaultLocale: "en", - locales: ["en"], - }, - - markdown: { - mermaid: true, - }, - themes: ["@docusaurus/theme-mermaid"], - - presets: [ - [ - "classic", - /** @type {import('@docusaurus/preset-classic').Options} */ - ({ - docs: { - sidebarPath: require.resolve("./sidebars.js"), - // Please change this to your repo. - // Remove this to remove the "edit this page" links. - editUrl: - "https://github.com/fxhash/onchfs/tree/main/doc/templates/shared/", - }, - theme: { - customCss: require.resolve("./src/css/custom.css"), - }, - blog: false, - }), - ], - ], - - themeConfig: - /** @type {import('@docusaurus/preset-classic').ThemeConfig} */ - ({ - // Replace with your project's social card - image: "img/docusaurus-social-card.jpg", - navbar: { - title: "ONCHFS", - // logo: { - // alt: "My Site Logo", - // src: "img/logo.svg", - // }, - items: [ - { - type: "docSidebar", - sidebarId: "getting-started", - position: "left", - label: "Getting started", - }, - { - type: "docSidebar", - sidebarId: "concepts", - position: "left", - label: "Concepts", - }, - { - type: "docSidebar", - sidebarId: "libraries", - position: "left", - label: "Libraries", - }, - { - href: "https://github.com/fxhash/onchfs", - label: "GitHub", - position: "right", - }, - ], - }, - footer: { - style: "dark", - links: [ - { - title: "Docs", - items: [ - { - label: "Getting started", - to: "/docs/intro", - }, - { - label: "Concepts", - to: "/docs/concepts/hashing", - }, - { - label: "Libraries", - to: "/docs/libraries/overview", - }, - ], - }, - { - title: "Community", - items: [ - { - label: "Stack Overflow", - href: "https://stackoverflow.com/questions/tagged/docusaurus", - }, - { - label: "Discord", - href: "https://discordapp.com/invite/docusaurus", - }, - { - label: "Twitter", - href: "https://twitter.com/docusaurus", - }, - ], - }, - { - title: "More", - items: [ - { - label: "GitHub", - href: "https://github.com/fxhash/onchfs", - }, - ], - }, - ], - copyright: `Copyright © ${new 
Date().getFullYear()} fxhash. Built with Docusaurus.`, - }, - prism: { - theme: lightCodeTheme, - darkTheme: darkCodeTheme, - additionalLanguages: ["solidity", "abnf"], - }, - }), -} - -module.exports = config diff --git a/doc/docusaurus.config.ts b/doc/docusaurus.config.ts new file mode 100644 index 0000000..c4f0ce5 --- /dev/null +++ b/doc/docusaurus.config.ts @@ -0,0 +1,143 @@ +import { themes as prismThemes } from "prism-react-renderer" +import type { Config } from "@docusaurus/types" +import type * as Preset from "@docusaurus/preset-classic" + +const config: Config = { + title: "ONCHFS — On-Chain for Http File System", + tagline: "A file system for blockchains.", + favicon: "img/favicon.ico", + + // Set the production url of your site here + url: "https://onchfs.com", + // Set the // pathname under which your site is served + // For GitHub pages deployment, it is often '//' + baseUrl: "/", + + // GitHub pages deployment config. + // If you aren't using GitHub pages, you don't need these. + organizationName: "fxhash", + projectName: "onchfs", + + onBrokenLinks: "throw", + onBrokenMarkdownLinks: "warn", + + // Even if you don't use internationalization, you can use this field to set + // useful metadata like html lang. For example, if your site is Chinese, you + // may want to replace "en" with "zh-Hans". + i18n: { + defaultLocale: "en", + locales: ["en"], + }, + + markdown: { + mermaid: true, + }, + themes: ["@docusaurus/theme-mermaid"], + + presets: [ + [ + "classic", + { + docs: { + sidebarPath: "./sidebars.ts", + // Please change this to your repo. + // Remove this to remove the "edit this page" links. 
+ editUrl: + "https://github.com/fxhash/onchfs/tree/main/doc/templates/shared/", + }, + blog: false, + theme: { + customCss: "./src/css/custom.css", + }, + } satisfies Preset.Options, + ], + ], + + themeConfig: { + image: "img/social-card.png", + navbar: { + title: "ONCHFS", + items: [ + { + type: "docSidebar", + sidebarId: "getting-started", + position: "left", + label: "Getting started", + }, + { + type: "docSidebar", + sidebarId: "concepts", + position: "left", + label: "Concepts", + }, + { + type: "docSidebar", + sidebarId: "libraries", + position: "left", + label: "Libraries", + }, + { + href: "https://github.com/fxhash/onchfs", + label: "GitHub", + position: "right", + }, + ], + }, + footer: { + style: "dark", + links: [ + { + title: "Docs", + items: [ + { + label: "Getting started", + to: "/docs/intro", + }, + { + label: "Concepts", + to: "/docs/concepts/hashing", + }, + { + label: "Libraries", + to: "/docs/libraries/overview", + }, + ], + }, + { + title: "Community", + items: [ + { + label: "Stack Overflow", + href: "https://stackoverflow.com/questions/tagged/docusaurus", + }, + { + label: "Discord", + href: "https://discordapp.com/invite/docusaurus", + }, + { + label: "Twitter", + href: "https://twitter.com/docusaurus", + }, + ], + }, + { + title: "More", + items: [ + { + label: "GitHub", + href: "https://github.com/fxhash/onchfs", + }, + ], + }, + ], + copyright: `Copyright © ${new Date().getFullYear()} fxhash. 
Built with Docusaurus.`, + }, + prism: { + theme: prismThemes.dracula, + darkTheme: prismThemes.dracula, + additionalLanguages: ["solidity", "abnf"], + }, + } satisfies Preset.ThemeConfig, +} + +export default config diff --git a/doc/package.json b/doc/package.json index 442b8db..cad8bb5 100644 --- a/doc/package.json +++ b/doc/package.json @@ -1,35 +1,6 @@ { "name": "onchfs-doc", "version": "0.0.0", - "private": true, - "scripts": { - "docusaurus": "docusaurus", - "start": "docusaurus start", - "build": "docusaurus build", - "swizzle": "docusaurus swizzle", - "deploy": "docusaurus deploy", - "clear": "docusaurus clear", - "serve": "docusaurus serve", - "write-translations": "docusaurus write-translations", - "write-heading-ids": "docusaurus write-heading-ids", - "typecheck": "tsc" - }, - "dependencies": { - "@docusaurus/core": "2.4.1", - "@docusaurus/preset-classic": "2.4.1", - "@docusaurus/theme-mermaid": "2.4.1", - "@mdx-js/react": "^1.6.22", - "@types/node": "^20.6.2", - "clsx": "^1.2.1", - "prism-react-renderer": "^1.3.5", - "react": "^17.0.2", - "react-dom": "^17.0.2" - }, - "devDependencies": { - "@docusaurus/module-type-aliases": "2.4.1", - "@tsconfig/docusaurus": "^1.0.5", - "typescript": "^4.7.4" - }, "browserslist": { "production": [ ">0.5%", @@ -37,12 +8,41 @@ "not op_mini all" ], "development": [ - "last 1 chrome version", - "last 1 firefox version", - "last 1 safari version" + "last 3 chrome version", + "last 3 firefox version", + "last 5 safari version" ] }, + "dependencies": { + "@docusaurus/core": "3.2.1", + "@docusaurus/preset-classic": "3.2.1", + "@docusaurus/theme-mermaid": "3.2.1", + "@mdx-js/react": "3.0.1", + "clsx": "2.1.1", + "prism-react-renderer": "2.3.1", + "react": "18.2.0", + "react-dom": "18.2.0" + }, + "devDependencies": { + "@docusaurus/module-type-aliases": "3.2.1", + "@docusaurus/tsconfig": "3.2.1", + "@docusaurus/types": "3.2.1", + "typescript": "5.3.3" + }, "engines": { - "node": ">=16.14" + "node": ">=18.0" + }, + "private": 
true, + "scripts": { + "build": "docusaurus build", + "clear": "docusaurus clear", + "deploy": "docusaurus deploy", + "docusaurus": "docusaurus", + "serve": "docusaurus serve", + "start": "docusaurus start", + "swizzle": "docusaurus swizzle", + "typecheck": "tsc", + "write-heading-ids": "docusaurus write-heading-ids", + "write-translations": "docusaurus write-translations" } -} +} \ No newline at end of file diff --git a/doc/sidebars.js b/doc/sidebars.ts similarity index 96% rename from doc/sidebars.js rename to doc/sidebars.ts index e01c627..8e62742 100644 --- a/doc/sidebars.js +++ b/doc/sidebars.ts @@ -1,3 +1,5 @@ +import type { SidebarsConfig } from "@docusaurus/plugin-content-docs" + /** * Creating a sidebar enables you to: - create an ordered group of docs @@ -8,11 +10,7 @@ Create as many sidebars as you want. */ - -// @ts-check - -/** @type {import('@docusaurus/plugin-content-docs').SidebarsConfig} */ -const sidebars = { +const sidebars: SidebarsConfig = { "getting-started": [ { type: "doc", @@ -174,4 +172,4 @@ const sidebars = { ], } -module.exports = sidebars +export default sidebars diff --git a/doc/src/components/HomepageFeatures/index.tsx b/doc/src/components/HomepageFeatures/index.tsx deleted file mode 100644 index 91ef460..0000000 --- a/doc/src/components/HomepageFeatures/index.tsx +++ /dev/null @@ -1,70 +0,0 @@ -import React from 'react'; -import clsx from 'clsx'; -import styles from './styles.module.css'; - -type FeatureItem = { - title: string; - Svg: React.ComponentType>; - description: JSX.Element; -}; - -const FeatureList: FeatureItem[] = [ - { - title: 'Easy to Use', - Svg: require('@site/static/img/undraw_docusaurus_mountain.svg').default, - description: ( - <> - Docusaurus was designed from the ground up to be easily installed and - used to get your website up and running quickly. 
- - ), - }, - { - title: 'Focus on What Matters', - Svg: require('@site/static/img/undraw_docusaurus_tree.svg').default, - description: ( - <> - Docusaurus lets you focus on your docs, and we'll do the chores. Go - ahead and move your docs into the docs directory. - - ), - }, - { - title: 'Powered by React', - Svg: require('@site/static/img/undraw_docusaurus_react.svg').default, - description: ( - <> - Extend or customize your website layout by reusing React. Docusaurus can - be extended while reusing the same header and footer. - - ), - }, -]; - -function Feature({title, Svg, description}: FeatureItem) { - return ( -
-
- -
-
-

{title}

-

{description}

-
-
- ); -} - -export default function HomepageFeatures(): JSX.Element { - return ( -
-
-
- {FeatureList.map((props, idx) => ( - - ))} -
-
-
- ); -} diff --git a/doc/src/components/HomepageFeatures/styles.module.css b/doc/src/components/HomepageFeatures/styles.module.css deleted file mode 100644 index b248eb2..0000000 --- a/doc/src/components/HomepageFeatures/styles.module.css +++ /dev/null @@ -1,11 +0,0 @@ -.features { - display: flex; - align-items: center; - padding: 2rem 0; - width: 100%; -} - -.featureSvg { - height: 200px; - width: 200px; -} diff --git a/doc/src/css/custom.css b/doc/src/css/custom.css index 58c28ef..fc06844 100644 --- a/doc/src/css/custom.css +++ b/doc/src/css/custom.css @@ -18,7 +18,7 @@ } /* For readability concerns, you should choose a lighter palette in dark mode. */ -[data-theme='dark'] { +[data-theme="dark"] { --ifm-color-primary: #25c2a0; --ifm-color-primary-dark: #21af90; --ifm-color-primary-darker: #1fa588; diff --git a/doc/src/pages/index.tsx b/doc/src/pages/index.tsx index ebb3fce..428d8fd 100644 --- a/doc/src/pages/index.tsx +++ b/doc/src/pages/index.tsx @@ -1,9 +1,8 @@ -import React from "react" import clsx from "clsx" import Link from "@docusaurus/Link" import useDocusaurusContext from "@docusaurus/useDocusaurusContext" import Layout from "@theme/Layout" -import HomepageFeatures from "@site/src/components/HomepageFeatures" +import Heading from "@theme/Heading" import styles from "./index.module.css" @@ -12,7 +11,9 @@ function HomepageHeader() { return (
-

{siteConfig.title}

+ + {siteConfig.title} +

{siteConfig.tagline}

{ if (inMemoryCache[req.path]) { console.log(`⚡️ cache hit for ${req.path}`) diff --git a/examples/test-project-next/package.json b/examples/test-project-next/package.json index c151d2a..9959e6d 100644 --- a/examples/test-project-next/package.json +++ b/examples/test-project-next/package.json @@ -1,23 +1,25 @@ { "name": "test-project-next", "version": "0.1.0", - "private": true, - "scripts": { - "dev": "next dev", - "build": "next build", - "start": "next start", - "lint": "next lint" - }, "dependencies": { - "@taquito/signer": "^17.3.0", - "@taquito/taquito": "^17.3.0", - "onchfs": "workspace:*", - "@types/node": "20.5.9", - "@types/react": "*", - "@types/react-dom": "*", + "@taquito/signer": "17.3.0", + "@taquito/taquito": "17.3.0", "next": "13.4.19", "react": "18.2.0", "react-dom": "18.2.0", - "typescript": "5.2.2" + "typescript": "5.2.2", + "onchfs": "0.0.0" + }, + "devDependencies": { + "@types/node": "20.5.9", + "@types/react": "18.3.3", + "@types/react-dom": "18.2.18" + }, + "private": true, + "scripts": { + "build": "next build", + "dev": "next dev", + "lint": "next lint", + "start": "next start" } -} +} \ No newline at end of file diff --git a/examples/test-project-next/src/pages/index.tsx b/examples/test-project-next/src/pages/index.tsx index eefd977..5e4496a 100644 --- a/examples/test-project-next/src/pages/index.tsx +++ b/examples/test-project-next/src/pages/index.tsx @@ -93,7 +93,7 @@ export default function Home() { const op = await kt.methodsObject .create_file({ chunk_pointers: ins.chunks.map(buf => uint8hex(buf)), - metadata: ins.metadata.map(buf => uint8hex(buf)), + metadata: uint8hex(ins.metadata), }) .send() await op.confirmation(1) @@ -110,16 +110,15 @@ export default function Home() { } const enc = new TextEncoder() - const inode = await Onchfs.prepareDirectory( + const inode = Onchfs.files.prepare( files.map(pt => { return { path: pt.name, content: enc.encode(pt.content), } - }), - 2048 + }) ) - const inscrs = 
Onchfs.generateInscriptions(inode) + const inscrs = Onchfs.inscriptions.prepare(inode) console.log(inscrs) for (const ins of inscrs) { await writeInscription(ins) diff --git a/examples/test-project/.gitignore b/examples/test-project/.gitignore new file mode 100644 index 0000000..db4c6d9 --- /dev/null +++ b/examples/test-project/.gitignore @@ -0,0 +1,2 @@ +dist +node_modules \ No newline at end of file diff --git a/examples/test-project/package.json b/examples/test-project/package.json index 6aff780..ff67765 100644 --- a/examples/test-project/package.json +++ b/examples/test-project/package.json @@ -1,31 +1,36 @@ { "name": "onchfs-tests", "version": "1.0.0", - "main": "dist/index.js", - "types": "dist/index.d.ts", - "files": [ - "dist/**/*" - ], - "license": "MIT", - "scripts": { - "clean": "rm -rf dist", - "build": "npm run clean && tsc --declaration", - "dev": "nodemon --watch \"src/**/*.ts\" --exec \"ts-node --transpile-only\" src/index.ts", - "test": "echo \"Error: no test specified\" && exit 1" - }, "dependencies": { "@taquito/signer": "17.3.0", "@taquito/taquito": "17.3.0", - "@types/node-dir": "0.0.34", + "axios": "1.5.1", "node-dir": "0.1.17", - "onchfs": "workspace:*", - "tslib": "2.6.0" + "tslib": "2.6.0", + "viem": "2.19.4", + "onchfs": "0.0.0", + "@fxhash/config": "0.0.9", + "@fxhash/eth": "0.0.9" }, "devDependencies": { "@types/node": "18.7.13", + "@types/node-dir": "0.0.34", "nodemon": "2.0.13", "ts-node": "10.9.1", "tsc-alias": "1.8.5", - "typescript": "4.9.5" + "tsup": "7.2.0", + "typescript": "5.3.3" + }, + "files": [ + "dist/**/*" + ], + "license": "MIT", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "scripts": { + "build": "tsup", + "dev": "tsup --watch --on-success 'node dist/index.js'", + "prod": "tsc && node dist/index.js", + "start": "nodemon --watch \"src/**/*.ts\" --exec \"ts-node --transpile-only\" src/index.ts" } -} +} \ No newline at end of file diff --git a/examples/test-project/src/index.ts 
b/examples/test-project/src/index.ts index 5cfa46e..f079227 100644 --- a/examples/test-project/src/index.ts +++ b/examples/test-project/src/index.ts @@ -1,9 +1,42 @@ +import axios from "axios" import Onchfs, { Inscription } from "onchfs" -import fs from "fs" -import path from "path" +import fs from "node:fs" +import path from "node:path" import dir from "node-dir" import { TezosToolkit, ContractAbstraction } from "@taquito/taquito" import { InMemorySigner } from "@taquito/signer" +import { bytesToHex, createPublicClient, createWalletClient, http } from "viem" +import { privateKeyToAccount } from "viem/accounts" +import { goerli } from "viem/chains" +import { config } from "@fxhash/config" +import { ONCHFS_FILE_SYSTEM_ABI, ONCHFS_CONTENT_STORE } from "@fxhash/eth" + +async function sleep(time: number) { + return new Promise(resolve => setTimeout(resolve, time)) +} + +var walk = function (dir, done) { + var results = [] + fs.readdir(dir, function (err, list) { + if (err) return done(err) + var pending = list.length + if (!pending) return done(null, results) + list.forEach(function (file) { + file = path.resolve(dir, file) + fs.stat(file, function (err, stat) { + if (stat && stat.isDirectory()) { + walk(file, function (err, res) { + results = results.concat(res) + if (!--pending) done(null, results) + }) + } else { + results.push(file) + if (!--pending) done(null, results) + } + }) + }) + }) +} // test with any folder/file at the root of the tests folder const files = fs.readdirSync("./tests") @@ -14,7 +47,9 @@ tezos.setProvider({ "edskSA3GU5AdocLoJtsE2cvrxPAGPZc8RouYhCDaJJ95amCVipeUHiQXiDM37RnKZXed4bobudR8QHmA3cxHNgYDpS5ZcH5XJA" ), }) +const KT_CHUNKS = "KT1TGsvdj2m3JA3RmMGekRYHnK7Ygkje7Xbt" const KT_FILES = "KT1FA8AGGcJha6S6MqfBUiibwTaYhK8u7s9Q" +const TZKT = "https://api.ghostnet.tzkt.io/v1" const kts: Record> = {} const KT = async (add: string) => { @@ -23,14 +58,28 @@ const KT = async (add: string) => { } return kts[add] } +const ethWalletClient = 
createWalletClient({ + chain: goerli, + account: privateKeyToAccount( + "0xc6ce7bd0af8af4d72dec91fd78c44e5579aac9907a4e22b5424bd903fbd521fd" + ), + transport: http( + "https://eth-goerli.g.alchemy.com/v2/eGEGqTf0cBekTDv0Ghy1kXPKhdNSLmn7" + ), +}) + +const ethPublicClient = createPublicClient({ + transport: http( + "https://eth-goerli.g.alchemy.com/v2/eGEGqTf0cBekTDv0Ghy1kXPKhdNSLmn7" + ), + chain: goerli, +}) function uint8hex(uint8: Uint8Array): string { return [...uint8].map(x => x.toString(16).padStart(2, "0")).join("") } async function writeInscription(ins: Inscription) { - console.log(`Inscription of ${ins.type}`) - console.log(ins) if (ins.type === "chunk") { const kt = await KT(KT_FILES) const op = await kt.methods.write_chunk(uint8hex(ins.content)).send() @@ -57,11 +106,65 @@ async function writeInscription(ins: Inscription) { console.log("OK") } +async function writeInscriptionEth(ins: Inscription) { + if (ins.type === "chunk") { + console.log("-------------------") + console.log("writing chunk:") + console.log(uint8hex(ins.content)) + //@ts-ignore + const { request } = await ethPublicClient.simulateContract({ + account: ethWalletClient.account, + address: config.eth.contracts!.onchfs_content_store, + abi: ONCHFS_CONTENT_STORE, + functionName: "addContent", + args: [bytesToHex(ins.content)], + }) + //@ts-ignore + const transaction = await ethWalletClient.writeContract(request) + console.log(transaction) + } else if (ins.type === "file") { + //@ts-ignore + const { request } = await ethPublicClient.simulateContract({ + account: ethWalletClient.account, + address: config.eth.contracts!.onchfs_file_system, + abi: ONCHFS_FILE_SYSTEM_ABI, + functionName: "createFile", + args: [ + bytesToHex(ins.metadata), + ins.chunks.map(chunk => bytesToHex(chunk)), + ], + }) + //@ts-ignore + const hash = await ethWalletClient.writeContract(request) + console.log(hash) + } else { + //@ts-ignore + const { request } = await ethPublicClient.simulateContract({ + account: 
ethWalletClient.account, + address: config.eth.contracts!.onchfs_file_system, + abi: ONCHFS_FILE_SYSTEM_ABI, + functionName: "createDirectory", + args: Object.entries(ins.files).reduce( + (acc, [name, content]) => [ + [...acc[0], name], + [...acc[1], bytesToHex(content)], + ], + [[], []] + ), + }) + //@ts-ignore + const hash = await ethWalletClient.writeContract(request) + console.log(hash) + } +} + async function main() { for (const f of files) { // to avoid some files if we want if (f.startsWith("_")) continue + let inscrs + console.log( "---------------------------------------------------------------" ) @@ -76,18 +179,25 @@ async function main() { content: content, }, { - chunkSize: 10, + fileHashingStrategy: "cheap", } ) - console.log(inode) - const inscrs = Onchfs.inscriptions.prepare(inode) + inscrs = Onchfs.inscriptions.prepare(inode) } // is durectory else { + console.log("is dir !!") + console.log(root) + walk(root, (err, res) => { + console.log("doooooooooone") + console.log({ res }) + }) + console.log(files) dir.files(root, async (err, files) => { + console.log("file found") if (err) throw err // for each file, get the content - const inode = await Onchfs.files.prepare( + const inode = Onchfs.files.prepare( files.map(pt => { const pts = pt.split("/").slice(2).join("/") return { @@ -97,14 +207,30 @@ async function main() { }), { chunkSize: 2048, + fileHashingStrategy: "cheap", } ) - const inscrs = Onchfs.inscriptions.prepare(inode) - console.log(inscrs) - for (const ins of inscrs) { - await writeInscription(ins) - } + inscrs = Onchfs.inscriptions.prepare(inode) + // const inscrs = await Onchfs.inscriptions.prepare(inode, { + // async inodeExists(cid) { + // const res = await axios.get( + // `${TZKT}/contracts/${KT_FILES}/bigmaps/inodes/keys/${cid}` + // ) + // return res.status === 200 + // }, + // async chunkExists(cid) { + // const res = await axios.get(`${TZKT}/bigmaps/354463/keys/${cid}`) + // return res.status === 200 + // }, + // }) }) + 
console.log("end read") + } + console.log("inscruptions") + console.log(inscrs) + for (const ins of inscrs) { + console.log("yooo") + await writeInscriptionEth(ins) } } } diff --git a/examples/test-project/tests/EM/index.html b/examples/test-project/tests/EM/index.html index e7922f9..1f571ee 100644 --- a/examples/test-project/tests/EM/index.html +++ b/examples/test-project/tests/EM/index.html @@ -1,47 +1,59 @@ - + Ethereal Microcosm - ciphrd - + - + - \ No newline at end of file + diff --git a/examples/test-project/tests/EM/style.css b/examples/test-project/tests/EM/style.css index 117f8c7..2cef9d1 100644 --- a/examples/test-project/tests/EM/style.css +++ b/examples/test-project/tests/EM/style.css @@ -13,4 +13,4 @@ body { canvas { max-height: min(100vh, 100vw); max-width: min(100vh, 100vw); -} \ No newline at end of file +} diff --git a/examples/test-project/tests/_EM/index.html b/examples/test-project/tests/_EM/index.html index e7922f9..1f571ee 100644 --- a/examples/test-project/tests/_EM/index.html +++ b/examples/test-project/tests/_EM/index.html @@ -1,47 +1,59 @@ - + Ethereal Microcosm - ciphrd - + - + - \ No newline at end of file + diff --git a/examples/test-project/tests/_EM/style.css b/examples/test-project/tests/_EM/style.css index 117f8c7..2cef9d1 100644 --- a/examples/test-project/tests/_EM/style.css +++ b/examples/test-project/tests/_EM/style.css @@ -13,4 +13,4 @@ body { canvas { max-height: min(100vh, 100vw); max-width: min(100vh, 100vw); -} \ No newline at end of file +} diff --git a/examples/test-project/tests/_boilerplate/index.html b/examples/test-project/tests/_boilerplate/index.html index 8bd9697..cf8e481 100644 --- a/examples/test-project/tests/_boilerplate/index.html +++ b/examples/test-project/tests/_boilerplate/index.html @@ -1,27 +1,43 @@ - + FXHASH project - + - + - + diff --git a/examples/test-project/tests/_boilerplate/scripts/index.js b/examples/test-project/tests/_boilerplate/scripts/index.js index 38be1c2..066af91 100644 --- 
a/examples/test-project/tests/_boilerplate/scripts/index.js +++ b/examples/test-project/tests/_boilerplate/scripts/index.js @@ -1,8 +1,8 @@ console.log(fxhash) console.log(fxrand()) -const sp = new URLSearchParams(window.location.search); -console.log(sp); +const sp = new URLSearchParams(window.location.search) +console.log(sp) // this is how to define parameters $fx.params([ @@ -24,7 +24,7 @@ $fx.params([ default: "pear", options: { options: ["apple", "orange", "pear"], - } + }, }, { id: "color_id", @@ -45,33 +45,33 @@ $fx.params([ default: "hello", options: { minLength: 1, - maxLength: 5 - } + maxLength: 5, + }, }, -]); +]) // this is how features can be defined $fx.features({ "A random feature": Math.floor($fx.rand() * 10), "A random boolean": $fx.rand() > 0.5, - "A random string": ["A", "B", "C", "D"].at(Math.floor($fx.rand()*4)), + "A random string": ["A", "B", "C", "D"].at(Math.floor($fx.rand() * 4)), "Feature from params, its a number": $fx.getParam("number_id"), }) // log the parameters, for debugging purposes, artists won't have to do that console.log("Current param values:") -// Raw deserialize param values +// Raw deserialize param values console.log($fx.getRawParams()) // Added addtional transformation to the parameter for easier usage -// e.g. color.hex.rgba, color.obj.rgba.r, color.arr.rgb[0] +// e.g. 
color.hex.rgba, color.obj.rgba.r, color.arr.rgb[0] console.log($fx.getParams()) // how to read a single raw parameter console.log("Single raw value:") -console.log($fx.getRawParam("color_id")); +console.log($fx.getRawParam("color_id")) // how to read a single transformed parameter console.log("Single transformed value:") -console.log($fx.getParam("color_id")); +console.log($fx.getParam("color_id")) // update the document based on the parameters document.body.style.background = $fx.getParam("color_id").hex.rgba diff --git a/examples/test-project/tests/heelo.js b/examples/test-project/tests/_heelo.js similarity index 100% rename from examples/test-project/tests/heelo.js rename to examples/test-project/tests/_heelo.js diff --git a/examples/test-project/tests/hello b/examples/test-project/tests/_hello similarity index 100% rename from examples/test-project/tests/hello rename to examples/test-project/tests/_hello diff --git a/examples/test-project/tsconfig.json b/examples/test-project/tsconfig.json index 670b1a1..59a31b6 100644 --- a/examples/test-project/tsconfig.json +++ b/examples/test-project/tsconfig.json @@ -1,25 +1,16 @@ { "compilerOptions": { - "target": "ES2022", - "module": "Node16", - "lib": ["ES2022"], - "moduleResolution": "Node16", - "rootDir": "src", - "baseUrl": "src", + "lib": ["es6"], + "target": "es6", + "module": "commonjs", + "moduleResolution": "node", "outDir": "dist", - "allowSyntheticDefaultImports": true, - "importHelpers": true, - "alwaysStrict": true, - "sourceMap": true, - "forceConsistentCasingInFileNames": true, - "noFallthroughCasesInSwitch": true, - "noImplicitReturns": true, - "noUnusedLocals": false, - "noUnusedParameters": true, - "noImplicitAny": false, - "noImplicitThis": false, - "strictNullChecks": false + "resolveJsonModule": true, + "emitDecoratorMetadata": true, + "esModuleInterop": true, + "experimentalDecorators": true, + "sourceMap": true }, - "include": ["src/**/*", "__tests__/**/*"], - "exclude": ["src/static/**"] + 
"include": ["src/**/*.ts"], + "exclude": ["node_modules", "**/*.spec.ts"] } diff --git a/examples/test-project/tsup.config.ts b/examples/test-project/tsup.config.ts new file mode 100644 index 0000000..b51d83e --- /dev/null +++ b/examples/test-project/tsup.config.ts @@ -0,0 +1,13 @@ +import { defineConfig, Options } from "tsup" + +export default defineConfig((options: Options) => ({ + entry: ["src/index.ts"], + outDir: "dist", + format: ["cjs"], + splitting: true, + sourcemap: true, + clean: !options.watch, + dts: true, + bundle: true, + cjsInterop: true, +})) diff --git a/packages/onchfs-js/README.md b/packages/onchfs-js/README.md index 9208a80..461ba7b 100644 --- a/packages/onchfs-js/README.md +++ b/packages/onchfs-js/README.md @@ -1,270 +1,15 @@ -# Package API - -As we are aiming for long-term adoption of the ONCHFS, we are looking at building a cohesive programming API for the official packages. - -There are 3 different approaches: - -## Exporting nested objects for grouping similar APIs - -Rational: we can granuarly define how we nest the different properties of the API to guide users in their usage (hierarchy defines level of abstractions). - -```ts -import ONCHFS from "onchfs" - -// preparing a file -const file = ONCHFS.files.prepareFile(...) - -// preparing a directory -const dir = ONCHFS.files.prepareDirectory(...) - -// generate the inscriptions from a directory -const inscriptions = ONCHFS.files.generateInscriptions(...) - -// create a proxy-resolver -const resolver = ONCHFS.proxy.createResolver(...) - -// resolve an URI -const components = ONCHFS.uri.parse(...) -const components = ONCHFS.uri.resolve(...) - -// UNCOMMON OPERATIONS - -// chunk bytes -const chunks = ONCHFS.files.utils.chunkBytes(...) - -// encode/decode some metadata -const encoded = ONCHFS.files.utils.metadata.encode(...) -const decoded = ONCHFS.files.utils.metadata.decode(...) 
+# 🐌 onchfs js library +```sh +$ npm install onchfs ``` -Alternatively, we can use a similar approach and yet provide a way to import 1 level deep: - -```ts -// gives access to the same API as above -import * as ONCHFS from "onchfs" - -// possibility to import 1-deep nested APIs -import { files, uri, proxy } from "onchfs" - -// ex - -// preparing a file -const file = files.prepareFile(...) - -// preparing a directory -const dir = files.prepareDirectory(...) - -// generate the inscriptions from a directory -const inscriptions = files.generateInscriptions(...) - -// create a proxy-resolver -const resolver = proxy.createResolver(...) - -// resolve an URI -const components = uri.parse(...) -const components = uri.resolve(...) - -// UNCOMMON OPERATIONS - -// chunk bytes -const chunks = files.utils.chunkBytes(...) - -// encode/decode some metadata -const encoded = files.utils.metadata.encode(...) -const decoded = files.utils.metadata.decode(...) +```js +import onchfs from "onchfs" +onchfs.files.prepare(...) ``` -Pros: - -- we have full control over the API exposed -- the file structure doesn't have to reflect the API, usage logic does - -Cons: - -- uncommon operations are deeply nested, and we don't provide a way to import an atomic operation; the dot notation must be used to access "hidden" features -- on VSCode, with autocomplete, everything shows as a property: it's hard to differenciate regular static properties from methods; _maybe it can be solved with some import/export voodoo, didn't investigate_. -- need to bundle all the package even if we just want the URI resolution - -## Exposing everything at the top level - -I won't describe more this strategy as I think it's not great for the DX. While a manual can be used to understand the different functions, there's no way developers understand which functions they are supposed to used, resulting in a mess of an API. 
- -## Multi-package architecture - -We would divide ONCHFS into smaller packages which themselves give access to a subset of the features in a smaller scope. - -```ts -import { - prepareFile, - prepareDirectory, - generateInscriptions, - metadata, - utils -} from "@onchfs/files" -// alternatively -import * as ONCHFSFiles from "@onchfs/files" - -// preparing a file -const file = prepareFile(...) - -// preparing a directory -const dir = prepareDirectory(...) - -// generate the inscriptions from a directory -const inscriptions = generateInscriptions(...) - -// encode/decode some metadata -const encoded = metadata.encode(...) -const decoded = metadata.decode(...) - - -import { createProxyResolver } from "@onchfs/proxy" +[📑 Extensive documentation](https://onchfs.com/docs/libraries/onchfs-js/overview) -// create a proxy resolver -const resolver = createProxyResolver(...) - - -import { parseURI } from "@onchfs/uri" - -const components = parseURI(...) - -``` - -Pros - -- clean API, it's easier to access the components we need in the app -- type friendly; functions are functions (and not object properties) -- bundle-optimized: consumers can only ship what they need for their app - -Cons - -- DX a bit tedious sometimes when building a fullstack single app (multiple onchfs packages have to be imported) -- harder to maintain for us: need to engineer finer solution for the deployment of the various modules (_can be mitigated with a strong strategy_) - -More ideas ? - -# TODOs - -## Improving the API of the onchfs package - -Consider various use-cases, readility, conciseness, etc.. - -Maybe divide into 2 APIs, accessible through a single core API ? 
If needed - -- files -- resolver -- - -## Publish strategy - -- from monorepo -- into different packages - -Improvements to the URI - -- Before the CID, any number of / is accepted, however the URI should be normalized so that there are no / before the CID - -# API Improvements - -```ts - -/** - * OUTPUT MODE - * Possibility to instanciate onchfs object instead of using the main one to - * get access to some top-level configuration, such as the output data type of - * the most common operations. - * - * * This is optional - * - * * tbd if good idea, not sure; maybe we just use hex everywhere as backend on - * node can easily work with it, and front-ends won't really manipulate bytes - * (if an application requires to do so they can use onchfs.utils) - */ -const onchfs = new Onchfs({ - outputsEncoding: "uint8array", // other: "hex" -}) - -/** - * PREPARING FILES - * Polymorphic API for preparing files & directories, makes it more - * straighforward and clear. - */ - -// preparing file (uint8array, string) -const file = onchfs.files.prepare(bytes, filename) - -// preparing a directory -const directory = onchfs.files.prepare([ - { path: "index.html", content: bytes0 }, - { path: "style.css", content: bytes1 }, - { path: "lib/main.js", content: bytes3 }, - { path: "lib/processing.min.js", content: bytes4 }, -]) - -/** - * Generating/optimizing inscriptions - */ - -// in any case the output of the prepare command can be fed into the -// inscriptions function -// this will create an optimised list of inscriptions -const inscriptions = await onchfs.inscriptions.prepare(file, { - // if an inode with such CID is found the optimizer will remove the relevant - // inscriptions. - getInode: async (cid) => { - return await blockchainNode.getInode(cid) - } -}) - -/** - * Working with metadata - */ -const encoded = onchfs.metadata.encode(...) -const decoded = onchfs.metadata.decode(...) 
- - -/** - * Writing a proxy - */ - -const resolver = onchfs.resolver.create( - // a list of resolver, order matters as if an URI without an authority has to - // be resolved, each network will be tested until the resource is found on one - [ - { - blockchain: "tezos:mainnet", - rpcs: ["https://rpc1.fxhash.xyz", "..."] - }, - { - blockchain: "tezos:ghostnet", - rpcs: ["https://rpc1.fxhash-dev.xyz", "..."], - // optional, the blockchain default one will be used by default - contract: "KT..." - }, - ] -) - -app.use(async (req, res, next) => { - const response = await resolver.resolve(req.path) - // ... -}) - -// also possible to create a custom resolver with low-level primitives -const resolver = onchfs.resolver.custom({ - getInode: async (cid, path) => { - // handle - }, - getFile: async (cid) => { - // handle - } -}) - -/** - * URI - */ - -const components = onchfs.uri.parse("onchfs://...") - -``` +_This readme was automatically generated by a lazy human_ diff --git a/packages/onchfs-js/package.json b/packages/onchfs-js/package.json index 804f043..722e2da 100644 --- a/packages/onchfs-js/package.json +++ b/packages/onchfs-js/package.json @@ -1,32 +1,14 @@ { "name": "onchfs", - "version": "1.0.0", - "exports": { - ".": { - "require": "./dist/index.js", - "import": "./dist/index.mjs", - "types": "./dist/index.d.ts" - } - }, - "main": "./dist/index.mjs", - "browser": "./dist/index.js", - "types": "./dist/index.d.ts", - "files": [ - "dist" - ], - "license": "MIT", - "scripts": { - "build": "tsup", - "dev": "tsup --watch", - "test": "jest" - }, + "version": "0.0.0", "dependencies": { "@taquito/taquito": "17.3.1", "file-type": "18.5.0", "hpack.js": "2.1.6", "js-sha3": "0.9.1", "mime-types": "2.1.35", - "pako": "2.1.0" + "pako": "2.1.0", + "viem": "2.19.4" }, "devDependencies": { "@babel/core": "7.22.19", @@ -42,7 +24,28 @@ "ts-jest": "29.1.1", "tsc-alias": "1.8.5", "tslib": "2.6.0", - "tsup": "6.6.0", - "typescript": "4.9.5" + "tsup": "8.0.1", + "typescript": "5.3.3", + 
"@fxhash/tsconfig": "1.0.0" + }, + "exports": { + ".": { + "types": "./dist/index.d.ts", + "import": "./dist/index.js", + "require": "./dist/index.cjs" + } + }, + "files": [ + "dist" + ], + "license": "MIT", + "main": "./dist/index.cjs", + "module": "./dist/index.js", + "type": "module", + "types": "./dist/index.d.ts", + "scripts": { + "build": "tsup", + "dev": "tsup --watch", + "test": "jest" } -} +} \ No newline at end of file diff --git a/packages/onchfs-js/src/cid/index.ts b/packages/onchfs-js/src/cid/index.ts new file mode 100644 index 0000000..92782b5 --- /dev/null +++ b/packages/onchfs-js/src/cid/index.ts @@ -0,0 +1,49 @@ +import { concatUint8Arrays, keccak } from "@/utils" +import { FileChunk, FileHashingStrategy } from ".." +import { INODE_BYTE_IDENTIFIER } from "@/config" + +/** + * Computes the CID of a file given its chunk parts, metadata, and the + * strategy which should be used for the computation. + * + * @param strategy The strategy with which a file CID is computed. Based on the + * blockchain, such strategy can be different for optimization purposes. 
+ * @param chunks The chunks of the file, in the right Order_By + * @param metadata The metadata associated with the file, already compressed in + * the right byte format + * + * @returns CID of the gile given the strategy + */ +export function computeFileCid( + chunks: FileChunk[], + metadata: Uint8Array, + strategy: FileHashingStrategy +): Uint8Array { + if (strategy === "consistent") { + // compute the file unique identifier, following the onchfs specifications: + // keccak( 0x01 , keccak( content ), keccak( metadata ) ) + const wholeBytes = concatUint8Arrays(...chunks.map(ch => ch.bytes)) + const contentHash = keccak(wholeBytes) + const metadataHash = keccak(metadata) + return keccak( + concatUint8Arrays(INODE_BYTE_IDENTIFIER.FILE, contentHash, metadataHash) + ) + } else if (strategy === "cheap") { + // compute the file unique identifier, following the onchfs specifications: + // keccak( 0x01 , keccak( checksums ), keccak( metadata ) ) + const chunksChecksumsHashed = keccak( + concatUint8Arrays(...chunks.map(chunk => chunk.hash)) + ) + const metadataHash = keccak(metadata) + + return keccak( + concatUint8Arrays( + INODE_BYTE_IDENTIFIER.FILE, + chunksChecksumsHashed, + metadataHash + ) + ) + } else { + throw new Error("Cannot compute file CID: strategy is missing") + } +} diff --git a/packages/onchfs-js/src/config.ts b/packages/onchfs-js/src/config.ts index e42b495..7be6847 100644 --- a/packages/onchfs-js/src/config.ts +++ b/packages/onchfs-js/src/config.ts @@ -13,11 +13,34 @@ export const INODE_BYTE_IDENTIFIER = { // improve storage being shared as much as possible depending on the use cases export const DEFAULT_CHUNK_SIZE = 16384 -// TODO: insert true values here. +export const CHAIN_IDS = { + tezos: { + mainnet: "NetXdQprcVkpaWU", + ghostnet: "NetXnHfVqm9iesp", + }, + eip155: { + mainnet: "1", + goerli: "5", + sepolia: "11155111", + baseMainnet: "8453", + baseSepolia: "84532", + }, +} as const + // A naive map of the "official" onchfs Smart Contracts. 
export const DEFAULT_CONTRACTS: Record = { - "tezos:mainnet": "KT1WvzYHCNBvDSdwafTHv7nJ1dWmZ8GCYuuC", - "tezos:ghostnet": "KT1FA8AGGcJha6S6MqfBUiibwTaYhK8u7s9Q", - "ethereum:1": "b0e58801d1b4d69179b7bc23fe54a37cee999b09", - "ethereum:5": "fcfdfa971803e1cc201f80d8e74de71fddea6551", + // tezos mainnet + "tezos:NetXdQprcVkpaWU": "KT1Ae7dT1gsLw2tRnUMXSCmEyF74KVkM6LUo", + // tezos ghostnet + "tezos:NetXnHfVqm9iesp": "KT1FA8AGGcJha6S6MqfBUiibwTaYhK8u7s9Q", + // eth mainnet + "eip155:1": "0x9e0f2864c6f125bbf599df6ca6e6c3774c5b2e04", + // eth goerli + "eip155:5": "0xc3f5ef1a0256b9ceb1452650db72344809bb3a85", + // eth sepolia + "eip155:11155111": "0x4f555d39e89f6d768f75831d610b3940fa94c6b1", + // base mainnet + "eip155:8453": "0x2983008f292a43f208bba0275afd7e9b3d39af3b", + // base sepolia + "eip155:84532": "0x3fb48e03291b2490f939c961a1ad088437129f71", } diff --git a/packages/onchfs-js/src/files/directory.ts b/packages/onchfs-js/src/files/directory.ts index ae8fbfd..6b7b4c0 100644 --- a/packages/onchfs-js/src/files/directory.ts +++ b/packages/onchfs-js/src/files/directory.ts @@ -4,6 +4,7 @@ import { DirectoryInode, IFile, INode, + OnchfsPrepareOptions, PrepareDirectoryDir, PrepareDirectoryFile, PrepareDirectoryNode, @@ -147,7 +148,7 @@ export function buildDirectoryGraph( */ export function prepareDirectory( files: IFile[], - chunkSize: number = DEFAULT_CHUNK_SIZE + options: Required ): DirectoryInode { const [graph, leaves] = buildDirectoryGraph(files) @@ -165,7 +166,7 @@ export function prepareDirectory( path: node.name, content: node.content, }, - chunkSize + options ) } else if (node.type === "directory") { // compute the inode associated with the directory diff --git a/packages/onchfs-js/src/files/file.ts b/packages/onchfs-js/src/files/file.ts index bbc1594..bb2dbc5 100644 --- a/packages/onchfs-js/src/files/file.ts +++ b/packages/onchfs-js/src/files/file.ts @@ -1,12 +1,33 @@ import { gzip } from "pako" -import { DEFAULT_CHUNK_SIZE, INODE_BYTE_IDENTIFIER } from 
"@/config" import { lookup as lookupMime } from "mime-types" import { chunkBytes } from "./chunks" -import { concatUint8Arrays, keccak } from "@/utils" import { FileMetadataEntries } from "@/types/metadata" import { encodeMetadata } from "@/metadata/encode" -import { FileInode, IFile } from "@/types/files" -// import { fileTypeFromBuffer } from "file-type" +import { FileInode, IFile, OnchfsPrepareOptions } from "@/types/files" +import { computeFileCid } from "@/cid" + +const MIME_LOOKUP = { + vert: "text/plain", +} + +/** + * Resolves the MIME type for a given filename. + * @param {string} filename - The name of the file. + * @returns {string|null} The determined MIME type or null if not found. + */ +function resolveMimeType(filename): string | null { + let mime = lookupMime(filename) + if (!mime) { + // fallback to extension lookup + const extension = filename.split(".").pop() + if (extension && MIME_LOOKUP[extension]) { + return MIME_LOOKUP[extension] + } + // return null if no MIME type is found + return null + } + return mime +} /** * Computes all the necessary data for the inscription of the file on-chain. @@ -25,19 +46,15 @@ import { FileInode, IFile } from "@/types/files" */ export function prepareFile( file: IFile, - chunkSize: number = DEFAULT_CHUNK_SIZE + options: Required ): FileInode { const { path: name, content } = file let metadata: FileMetadataEntries = {} let insertionBytes = content // we use file extension to get mime type - let mime = lookupMime(name) - // if no mime type can be mapped from filename, use magic number + const mime = resolveMimeType(name) + if (!mime) { - // const magicMime = await fileTypeFromBuffer(content) - // if (magicMime) { - // metadata["Content-Type"] = magicMime.mime - // } // if still no mime, we simply do not set the Content-Type in the metadata, // and let the browser handle it. 
// We could set it to "application/octet-stream" as RFC2046 states, however @@ -54,16 +71,14 @@ export function prepareFile( metadata["Content-Encoding"] = "gzip" } - // chunk the file - const chunks = chunkBytes(insertionBytes, chunkSize) - // encode the metadata + // chunk the file, encode its metadata and compute its CID based on provided + // hashing strategy + const chunks = chunkBytes(insertionBytes, options.chunkSize) const metadataEncoded = encodeMetadata(metadata) - // compute the file unique identifier, following the onchfs specifications: - // keccak( 0x01 , keccak( content ), keccak( metadata ) ) - const contentHash = keccak(insertionBytes) - const metadataHash = keccak(metadataEncoded) - const cid = keccak( - concatUint8Arrays(INODE_BYTE_IDENTIFIER.FILE, contentHash, metadataHash) + const cid = computeFileCid( + chunks, + metadataEncoded, + options.fileHashingStrategy ) return { @@ -71,5 +86,8 @@ export function prepareFile( cid, chunks, metadata: metadataEncoded, + source: { + content: file.content, + }, } } diff --git a/packages/onchfs-js/src/files/index.ts b/packages/onchfs-js/src/files/index.ts index ef843b3..df9c241 100644 --- a/packages/onchfs-js/src/files/index.ts +++ b/packages/onchfs-js/src/files/index.ts @@ -5,6 +5,7 @@ import { encodeFilename, } from "./directory" +export { directoryUploadSummary as uploadSummary } from "./summary" export { prepare } from "./prepare" export const utils = { chunkBytes, diff --git a/packages/onchfs-js/src/files/prepare.ts b/packages/onchfs-js/src/files/prepare.ts index 6c30d96..8289e32 100644 --- a/packages/onchfs-js/src/files/prepare.ts +++ b/packages/onchfs-js/src/files/prepare.ts @@ -10,6 +10,7 @@ import { prepareFile } from "./file" const defaultPrepareOptions: Required = { chunkSize: DEFAULT_CHUNK_SIZE, + fileHashingStrategy: "consistent", } /** @@ -93,8 +94,8 @@ export function prepare( ...(options || {}), } if (Array.isArray(files)) { - return prepareDirectory(files, _options.chunkSize) + return 
prepareDirectory(files, _options) } else { - return prepareFile(files, _options.chunkSize) + return prepareFile(files, _options) } } diff --git a/packages/onchfs-js/src/files/summary.ts b/packages/onchfs-js/src/files/summary.ts new file mode 100644 index 0000000..4f3ff50 --- /dev/null +++ b/packages/onchfs-js/src/files/summary.ts @@ -0,0 +1,134 @@ +import { + DirectoryInode, + FileChunk, + FileInode, + FileUploadProgress, + Inscription, + UploadProgress, + UploadSummary, +} from ".." + +/** + * Given a directory inode, computes an upload summary based on the inscriptions + * which are missing for the directory to be fully uploaded. + * @param node The Directory node for which the summary needs to be computed. + * @param missingInscriptions The inscriptions which need to be inscribed for + * the directory to be fully uploaded. + */ +export function directoryUploadSummary( + node: DirectoryInode, + missingInscriptions: Inscription[] +): UploadSummary { + const files: FileUploadProgress[] = [] + const extra: UploadProgress = { + total: 0, + left: 0, + } + const global: UploadProgress = { + total: 0, + left: 0, + } + + // recursively parse a directory (& sub-directories) while populating the + // upload summary as files are being traversed + function parse(node: DirectoryInode, path = "") { + for (const name in node.files) { + const N = node.files[name] + if (N.type === "file") { + const progress = fileUploadProgress(N, missingInscriptions) + files.push({ + path: path + name, + inode: N, + progress, + }) + global.total += progress.total + global.left += progress.left + } else if (N.type === "directory") { + parse(N, path + name + "/") + } else { + throw new Error("unsupported node type, this should never be reach!") + } + } + + if ( + missingInscriptions.find( + ins => ins.type === "directory" && ins.cid === node.cid + ) + ) { + //todo: this suppose node.files.name < 32 bytes, but it could be improved + // by computing the accurate size here + const dirSize = 
Object.keys(node.files).length * 32 * 2 + 32 + extra.total += dirSize + extra.left += dirSize + global.total += dirSize + global.left += dirSize + } + } + parse(node) + + return { + global, + files, + extraPayload: extra, + } +} + +/** + * Given a file node and some missing inscriptions, computes the progress of + * the upload on Onchfs. + * @param node File node + * @param missingInscriptions A list of inscriptions which are missing for the + * file to be fully uploaded. Inscriptions are not strictly constrained to being + * part of the file, they can be a bigger set of inscriptions wherein the file + * inscriptions are contained. + * @returns Progress details about the file upload. + */ +export function fileUploadProgress( + node: FileInode, + missingInscriptions: Inscription[] +): UploadProgress { + // chunk bytes, 2x the chunk hashes (for storing chunks + for referencing + // chunk in the file), the file hash + const total = + node.chunks.reduce((acc, chunk) => acc + chunk.bytes.length, 0) + + // add the size of storing 2x chunk hashes + node.chunks.length * 2 * 32 + + // the hash of the file + 32 + + // if the file exists in the inscriptions, we can just return full upload + if ( + !missingInscriptions.find( + ins => ins.type === "file" && ins.cid === node.cid + ) + ) { + return { + total, + left: 0, + } + } + + // find the chunks which are missing for the inscription + const missingChunks: FileChunk[] = [] + for (const chunk of node.chunks) { + if ( + missingInscriptions.find( + ins => ins.type === "chunk" && ins.hash === chunk.hash + ) + ) { + missingChunks.push(chunk) + } + } + + return { + total, + left: + missingChunks.reduce((acc, chunk) => acc + chunk.bytes.length, 0) + + // for each chunk, we need to store chunk hash + missingChunks.length * 32 + + // file node must reference all the chunks + node.chunks.length * 32 + + // file cid pointer + 32, + } +} diff --git a/packages/onchfs-js/src/inscriptions/estimate.ts 
b/packages/onchfs-js/src/inscriptions/estimate.ts index 5762a0d..2c1ed21 100644 --- a/packages/onchfs-js/src/inscriptions/estimate.ts +++ b/packages/onchfs-js/src/inscriptions/estimate.ts @@ -1,4 +1,8 @@ -import { Inscription } from "@/types/inscriptions" +import { + Inscription, + InscriptionDirectory, + InscriptionFile, +} from "@/types/inscriptions" /** * Compute the number of bytes an inscription will take on the storage. diff --git a/packages/onchfs-js/src/inscriptions/prepare.ts b/packages/onchfs-js/src/inscriptions/prepare.ts index 1ccb026..1de002c 100644 --- a/packages/onchfs-js/src/inscriptions/prepare.ts +++ b/packages/onchfs-js/src/inscriptions/prepare.ts @@ -1,5 +1,11 @@ -import { INode } from "@/types/files" +import { FileChunk, INode } from "@/types/files" import { Inscription } from "@/types/inscriptions" +import { u8hex } from "@/utils/uint8" + +interface InsPrepResolver { + inodeExists: (cid: string) => Promise + chunkExists: (cid: string) => Promise +} /** * Traverse the inverted tree starting by the root, creating inscriptions as @@ -36,38 +42,157 @@ import { Inscription } from "@/types/inscriptions" * * @returns A list of inscription objects ready to be turned into operations */ -export function prepareInscriptions(root: INode): Inscription[] { +export function prepareInscriptions(root: INode): Inscription[] + +/** + * Traverse the inverted tree starting by the root, creating inscriptions as + * it's being traversed. Before inscriptions are added to the final list, they + * are checked against the provided resolution function to see if they are + * already inscribed. If so, there is not need to inscribe them. At the end of + * the flow the inscriptions will be reversed to ensure they are written to the + * store in the right order (as the onchfs will reject inodes pointing to + * inexisting resources; let it be file chunks or directory files). 
+ * + * @example + * + * ```ts + * // first prepare the file(s) + * const F = onchfs.files.prepare({ + * content: [], + * path: "index.html" + * }) + * // ot + * const F = onchfs.files.prepare([ + * { + * content: [], + * path: "index.htmml" + * }, + * { + * content: [], + * path: "lib/main.js" + * } + * ]) + * + * // the prepare the inscriptions + * const inscriptions = await onchfs.inscriptions.prepare(F, { + * async inodeExists(cid) { + * // just an example, implementation will vary depending use-cases + * // this should return true|false depending on the existance of the inode + * return indexer.onchfs.hasInode(cid) + * }, + * async chunkExists(cid) { + * return indexer.onchfs.chunkExists(cid) + * } + * }) + * ``` + * + * @param root The root of the tree, can be either the root directory or a file + * + * @returns A list of inscription objects ready to be turned into operations + */ +export function prepareInscriptions( + root: INode, + resolver: InsPrepResolver +): Promise + +export function prepareInscriptions( + root: INode, + resolver?: InsPrepResolver +): Inscription[] | Promise { const inscriptions: Inscription[] = [] - const traverse = (node: INode) => { + + // no async resolution of the inodes/chunk; assume insert all + if (typeof resolver === "undefined") { + const traverse = (node: INode) => { + if (node.type === "directory") { + inscriptions.push(createInscription(node)) + // recursively traverse each inode of the directory + for (const name in node.files) { + traverse(node.files[name]) + } + } else if (node.type === "file") { + // create the file inscription first as it will be reversed in the end, + // so the chunk inscriptions will appear first + inscriptions.push(createInscription(node)) + for (const chunk of node.chunks) { + inscriptions.push(createInscription(chunk)) + } + } + } + traverse(root) + return inscriptions.reverse() + } + // resolver is defined, return a promise and check for inscriptions existing + else { + return new Promise(async 
resolve => { + const traverse = async (node: INode) => { + // check if inode exists, if so we are done with this segment of graph + if (node.type === "directory" || node.type === "file") { + let found = false + try { + found = await resolver.inodeExists(u8hex(node.cid)) + } catch (e) {} + if (found) return + } + + if (node.type === "directory") { + inscriptions.push(createInscription(node)) + // recursively traverse each inode of the directory + for (const name in node.files) { + await traverse(node.files[name]) + } + } else if (node.type === "file") { + inscriptions.push(createInscription(node)) + + // check all the chunks at once, then filter those which need insert + const results = await Promise.allSettled( + node.chunks.map(chunk => resolver.chunkExists(u8hex(chunk.hash))) + ) + const chunksToInsert = node.chunks.filter((_, i) => { + const res = results[i] + return res.status === "fulfilled" && !res.value + }) + + for (const chunk of chunksToInsert) { + inscriptions.push(createInscription(chunk)) + } + } + } + await traverse(root) + resolve(inscriptions.reverse()) + }) + } +} + +/** + * Creates an inscription matching with the given node/chunk. 
+ * @param node The node, either an INode or a file chunk + * @returns The inscription corresponding to the node + */ +function createInscription(node: INode | FileChunk): Inscription { + if ("type" in node) { if (node.type === "directory") { - inscriptions.push({ + return { type: "directory", files: Object.fromEntries( Object.keys(node.files).map(name => [name, node.files[name].cid]) ), cid: node.cid, - }) - // recursively traverse each inode of the directory - for (const name in node.files) { - traverse(node.files[name]) } } else if (node.type === "file") { - // create the file inscription first as it will be reversed in the end, - // so the chunk inscriptions will appear first - inscriptions.push({ + return { type: "file", chunks: node.chunks.map(chk => chk.hash), metadata: node.metadata, cid: node.cid, - }) - for (const chunk of node.chunks) { - inscriptions.push({ - type: "chunk", - content: chunk.bytes, - }) } } + } else { + return { + type: "chunk", + content: node.bytes, + hash: node.hash, + } } - traverse(root) - return inscriptions.reverse() + throw new Error("Unknown node type") } diff --git a/packages/onchfs-js/src/resolver/errors.ts b/packages/onchfs-js/src/resolver/errors.ts index 6ca4f67..e94e4b0 100644 --- a/packages/onchfs-js/src/resolver/errors.ts +++ b/packages/onchfs-js/src/resolver/errors.ts @@ -4,7 +4,10 @@ import { ProxyResolutionStatusErrors } from "@/types/resolver" * Error thrown during the resolution of a relative URI by the proxy. 
*/ export class OnchfsProxyResolutionError extends Error { - constructor(message: string, public status: ProxyResolutionStatusErrors) { + constructor( + message: string, + public status: ProxyResolutionStatusErrors + ) { super(message) this.name = "OnchfsProxyResolutionError" } diff --git a/packages/onchfs-js/src/resolver/proxy.ts b/packages/onchfs-js/src/resolver/proxy.ts index 7c8fbfe..5696239 100644 --- a/packages/onchfs-js/src/resolver/proxy.ts +++ b/packages/onchfs-js/src/resolver/proxy.ts @@ -15,19 +15,30 @@ import { ProxyResolutionStatusRedirect, ProxyResolutionStatusSuccess, Resolver, + chainAliases, } from "@/types/resolver" -import { URIAuthority, URISchemaSpecificParts } from "@/types/uri" +import { + BlockchainNetwork, + URIAuthority, + URISchemaSpecificParts, + blockchainNetworks, +} from "@/types/uri" import { parseAuthority, parseSchema, parseSchemaSpecificPart, } from "@/uri/parse" -import { - TezosToolkit, - ContractAbstraction, - ContractProvider, -} from "@taquito/taquito" import { DEFAULT_CONTRACTS } from "@/config" +import { + createPublicClient, + encodeFunctionData, + fallback, + hexToBytes, + http, +} from "viem" +import { ONCHFS_FILE_SYSTEM_ABI } from "@/utils/abi" +import { EthInode, EthInodeType } from "@/types/eth" +import { TezosService } from "@/services/tezos.service" const ResolutionErrors: Record = { [ProxyResolutionStatusErrors.BAD_REQUEST]: "Bad Request", @@ -82,27 +93,34 @@ export function createProxyResolver(controllers: BlockchainResolverCtrl[]) { // add default chain contract if missing const blockchainResolvers: BlockchainResolver[] = controllers.map(h => { - const blockchain = h.blockchain.split(":")[0] as "tezos" | "ethereum" + // find the base chain id by resolving aliases if needed + let baseChainId: BlockchainNetwork + if ((blockchainNetworks as readonly string[]).includes(h.blockchain)) { + baseChainId = h.blockchain as BlockchainNetwork + } else { + for (const [base, aliases] of
Object.entries(chainAliases)) { + if ((aliases as readonly string[]).includes(h.blockchain)) { + baseChainId = base as BlockchainNetwork + } + } + if (!baseChainId) { + throw new Error( + `The given blockchain identifier "${h.blockchain}" is unknown, it cannot be resolved by this resolver` + ) + } + } + + const blockchain = baseChainId.split(":")[0] as "tezos" | "eip155" switch (blockchain) { case "tezos": { - const Tezos = new TezosToolkit(h.rpcs[0]) - - const KTs: Record> = {} - - async function KT(address: string) { - if (KTs[address]) { - return KTs[address] - } - KTs[address] = await Tezos.contract.at(address) - return KTs[address] - } + const tezos = new TezosService(h.rpcs) return { blockchain: h.blockchain, resolverWithContract: (address?: string) => { // default blockchain address if not specified - address = address || DEFAULT_CONTRACTS[h.blockchain] + address = address || DEFAULT_CONTRACTS[baseChainId] if (!address) { throw new Error( `no contract address was found; neither can it be inferred from the context (${h.blockchain}) nor has it been provided during resolution.` @@ -110,15 +128,16 @@ export function createProxyResolver(controllers: BlockchainResolverCtrl[]) { } return { getInodeAtPath: async (cid, path) => { - const kt = await KT(address) - const out = await kt.contractViews - .get_inode_at({ - cid, - path, - }) - .executeView({ - viewCaller: "KT1Uktxf9dgGga6DRRNbGEDepxFGTwNtTg4y", - }) + const out = await tezos.call(address, kt => + kt.contractViews + .get_inode_at({ + cid, + path, + }) + .executeView({ + viewCaller: address, + }) + ) // if the contract has answered with a directory if (out.inode.directory) { @@ -140,18 +159,93 @@ export function createProxyResolver(controllers: BlockchainResolverCtrl[]) { } }, readFile: async cid => { - const kt = await KT(address) - const res = await kt.contractViews.read_file(cid).executeView({ - viewCaller: "KT1Uktxf9dgGga6DRRNbGEDepxFGTwNtTg4y", - }) + const res = await tezos.call(address, kt =>
kt.contractViews.read_file(cid).executeView({ + viewCaller: address, + }) + ) return hexStringToBytes(res.content) }, } }, } } - case "ethereum": { - throw new Error("Implement eth resolver!") + case "eip155": { + const publicClient = createPublicClient({ + transport: fallback(h.rpcs.map(rpc => http(rpc))), + }) + return { + blockchain: h.blockchain, + resolverWithContract: (address?: string) => { + // default blockchain address if not specified + address = address || DEFAULT_CONTRACTS[baseChainId] + if (!address) { + throw new Error( + `no contract address was found; neither can it be inferred from the context (${h.blockchain}) nor has it been provided during resolution.` + ) + } + + return { + getInodeAtPath: async (cid: string, path: string[]) => { + try { + //@ts-ignore + const out: [`0x${string}`, EthInode] = await ( + publicClient as any + ).readContract({ + address: address as `0x${string}`, + abi: ONCHFS_FILE_SYSTEM_ABI, + functionName: "getInodeAt", + args: [`0x${cid}`, path], + }) + if (out && (out as any)?.length === 2) { + const cid = out[0].replace("0x", "") + const inode = out[1] + + // if the contract has answered with a directory + if (inode.inodeType === EthInodeType.DIRECTORY) { + const dir = inode.directory + const files: Record = {} + for (let i = 0; i < dir.filenames.length; i++) { + files[dir.filenames[i]] = dir.fileChecksums[i].replace( + "0x", + "" + ) + } + return { + cid, + files, + } + } else { + const file = inode.file + // the contract has answered with a file + return { + cid, + chunkPointers: file.chunkChecksums.map(pt => + pt.replace("0x", "") + ), + metadata: file.metadata.replace("0x", ""), + } + } + } else { + throw new Error("wrong response from contract") + } + } catch (err) { + return null + } + }, + readFile: async (cid: string) => { + //@ts-ignore + const hexBytesString = await publicClient.readContract({ + address: address as `0x${string}`, + abi: ONCHFS_FILE_SYSTEM_ABI, + functionName: "readFile", + args: [`0x${cid}`], + })
+ return hexToBytes(hexBytesString as any) + }, + } + }, + } } } }) @@ -186,43 +280,47 @@ export function createProxyResolver(controllers: BlockchainResolverCtrl[]) { const resolvers = orderedResolversFromAuthority(authority) // try finding the resource on every resolver let res: InodeNativeFS | null = null - for (const resolver of resolvers) { - try { - res = await resolver - .resolverWithContract(authority?.contract) - .getInodeAtPath(cid, path) - break - } catch (err) { - continue - } - } - - if (res) return res - else + try { + res = await Promise.any( + resolvers.map(async resolver => { + const resp = await resolver + .resolverWithContract(authority?.contract) + .getInodeAtPath(cid, path) + if (resp) return resp + throw Error("file not found") + }) + ) + if (res) return res + throw null + } catch (err) { + console.log(err) throw new Error( "searched all available blockchains, resource not found." ) + } }, async readFile(cid, chunkPointers, authority) { const resolvers = orderedResolversFromAuthority(authority) // try finding the resource on every resolver let res: string | Uint8Array | null = null - for (const resolver of resolvers) { - try { - res = await resolver - .resolverWithContract(authority?.contract) - .readFile(cid, chunkPointers) - break - } catch (err) { - continue - } - } - - if (res) return res - else + try { + res = await Promise.any( + resolvers.map(async resolver => { + const resp = await resolver + .resolverWithContract(authority?.contract) + .readFile(cid, chunkPointers) + if (resp) return resp + throw Error("file not found") + }) + ) + if (res) return res + throw null + } catch (err) { + console.log(err) throw new Error( "searched all available blockchains, resource not found." 
) + } }, }) } diff --git a/packages/onchfs-js/src/services/tezos.service.ts b/packages/onchfs-js/src/services/tezos.service.ts new file mode 100644 index 0000000..f132ed0 --- /dev/null +++ b/packages/onchfs-js/src/services/tezos.service.ts @@ -0,0 +1,66 @@ +import { + ContractAbstraction, + ContractProvider, + TezosToolkit, +} from "@taquito/taquito" + +function shuffle(array: T[]): T[] { + const out = [...array] + for (let i = out.length - 1; i > 0; i--) { + const j = Math.floor(Math.random() * (i + 1)) + ;[out[i], out[j]] = [out[j], out[i]] + } + return out +} + +export class TezosService { + private tezosToolkit: TezosToolkit + private rpcNodes: string[] + private contractsCache: Record< + string, + ContractAbstraction + > = {} + + constructor(rpcs: string[]) { + this.tezosToolkit = new TezosToolkit(rpcs[0]) + this.rpcNodes = rpcs + } + + async call( + address: string, + callback: (contract: ContractAbstraction) => Promise + ): Promise { + for (const rpc of shuffle(this.rpcNodes)) { + this.tezosToolkit.setProvider({ rpc }) + try { + const contract = await this.getContract(address) + return await callback(contract) + } catch (err) { + if (!this.canErrorBeCycled(err)) throw err + console.error(`RPC ${rpc} failed: ${err}, trying next...`) + } + } + throw new Error("all RPCs failed") + } + + async getContract( + address: string + ): Promise> { + if (!this.contractsCache[address]) { + this.contractsCache[address] = + await this.tezosToolkit.contract.at(address) + } + return this.contractsCache[address] + } + + // given an error, returns true if request can be cycled to another RPC node + private canErrorBeCycled(err: any): boolean { + return ( + err && + (err.name === "HttpRequestFailed" || + err.status === 500 || + err.status === 408 || + err.status === 429) + ) + } +} diff --git a/packages/onchfs-js/src/types/eth.ts b/packages/onchfs-js/src/types/eth.ts new file mode 100644 index 0000000..c01123a --- /dev/null +++ b/packages/onchfs-js/src/types/eth.ts @@ -0,0 +1,22 
@@ +export enum EthInodeType { + DIRECTORY = 0, + FILE = 1, +} + +export type EthInodeFile = { + inodeType: EthInodeType.FILE + file: { + metadata: `0x${string}` + chunkChecksums: `0x${string}`[] + } +} + +export type EthInodeDirectory = { + inodeType: EthInodeType.DIRECTORY + directory: { + filenames: string[] + fileChecksums: `0x${string}`[] + } +} + +export type EthInode = EthInodeFile | EthInodeDirectory diff --git a/packages/onchfs-js/src/types/files.ts b/packages/onchfs-js/src/types/files.ts index 0fce4b6..1ceeb20 100644 --- a/packages/onchfs-js/src/types/files.ts +++ b/packages/onchfs-js/src/types/files.ts @@ -8,6 +8,9 @@ export type FileInode = { chunks: FileChunk[] cid: Uint8Array metadata: Uint8Array + source: { + content: Uint8Array + } } export type DirectoryInode = { @@ -50,6 +53,85 @@ export type PrepareDirectoryDir = { export type PrepareDirectoryNode = PrepareDirectoryFile | PrepareDirectoryDir +/** + * How the File cids are constructed. Some blockchains are expensive and as + * such having a consistent cid generation on the input bytes might be quite + * expensive, as such the file system supports 2 ways of hashing files: + * - consistent: hash the whole file content + * - cheap: hash the checksums of the file chunks + */ +export type FileHashingStrategy = "consistent" | "cheap" + export interface OnchfsPrepareOptions { + /** + * The size of the chunks. Leaving it as default will fallback to onchfs + * default chunk size, which is meant to be optimized for most purposes. + */ chunkSize?: number + + /** + * The strategy which will be used for computing file cids from their + * chunks. + */ + fileHashingStrategy?: FileHashingStrategy +} + +/** + * Describes the state of an upload, in terms of size. Such an object can be + * attached to any entity which can be stored in order to represent its storage + * state. + */ +export interface UploadProgress { + /** + * The total size required for storing the full object from scratch, in bytes.
+ */ + total: number + /** + * The number of bytes left. + */ + left: number +} + +/** + * Progress related to a file upload. + */ +export interface FileUploadProgress { + /** + * **Absolute path** of the file from the root of the object uploaded. + */ + path: string + + /** + * The file Inode which is associated to the given file. + */ + inode: FileInode + + /** + * Progress of the upload, includes the progress of the upload of the file + * chunks. + */ + progress: UploadProgress +} + +/** + * The summary of an upload of Inscriptions, defining the upload state of the + * various components of an upload on Onchfs + */ +export interface UploadSummary { + /** + * Global progress of the upload. It can be derived from the progress of the + * files & extra payload, but is given for convenience. + */ + global: UploadProgress + + /** + * A list of upload summaries, one for each file inside the directory. + */ + files: FileUploadProgress[] + + /** + * Some extra payload, mainly used as a reference for non-visible objects + * such as directories which can have a significant print sometimes.
+ */ + extraPayload: UploadProgress } diff --git a/packages/onchfs-js/src/types/inscriptions.ts b/packages/onchfs-js/src/types/inscriptions.ts index cd7b2c7..fe0b650 100644 --- a/packages/onchfs-js/src/types/inscriptions.ts +++ b/packages/onchfs-js/src/types/inscriptions.ts @@ -1,13 +1,14 @@ export type InscriptionChunk = { type: "chunk" content: DataEncoding + hash: DataEncoding } export type InscriptionFile = { type: "file" metadata: DataEncoding chunks: DataEncoding[] - cid: Uint8Array + cid: DataEncoding } export type InscriptionDirectory = { @@ -15,7 +16,7 @@ export type InscriptionDirectory = { files: { [name: string]: DataEncoding } - cid: Uint8Array + cid: DataEncoding } export type Inscription = diff --git a/packages/onchfs-js/src/types/resolver.ts b/packages/onchfs-js/src/types/resolver.ts index 2c5198d..4c234b9 100644 --- a/packages/onchfs-js/src/types/resolver.ts +++ b/packages/onchfs-js/src/types/resolver.ts @@ -1,8 +1,26 @@ import { FileMetadataEntries } from "./metadata" import { BlockchainNetwork, URIAuthority } from "./uri" +/** + * For every base blockchain supported, also provide a list of aliases to make + * it easier to build apps using onchfs (otherwise for tezos for instance it + * could be hard to know which chain id is mainnet). 
+ */ +export const chainAliases = { + "tezos:NetXdQprcVkpaWU": ["tezos:mainnet"] as const, + "tezos:NetXnHfVqm9iesp": ["tezos:ghostnet"] as const, + "eip155:1": ["ethereum:mainnet", "eth:mainnet"] as const, + "eip155:5": ["ethereum:goerli", "eth:goerli"] as const, + "eip155:11155111": ["ethereum:sepolia", "eth:sepolia"] as const, + "eip155:84532": ["ethereum:baseSepolia", "eth:baseSepolia"] as const, + "eip155:8453": ["ethereum:baseMainnet", "eth:baseMainnet"] as const, +} as const +export type ChainAliases = + | BlockchainNetwork + | (typeof chainAliases)[BlockchainNetwork][number] + export interface BlockchainResolverCtrl { - blockchain: BlockchainNetwork + blockchain: ChainAliases rpcs: string[] contract?: string } @@ -10,7 +28,7 @@ export interface BlockchainResolverCtrl { export type ResolverContractDecorator = (address?: string) => Resolver export interface BlockchainResolver { - blockchain: BlockchainNetwork + blockchain: ChainAliases resolverWithContract: ResolverContractDecorator } diff --git a/packages/onchfs-js/src/types/uri.ts b/packages/onchfs-js/src/types/uri.ts index ea27a82..f7d9eb9 100644 --- a/packages/onchfs-js/src/types/uri.ts +++ b/packages/onchfs-js/src/types/uri.ts @@ -1,19 +1,24 @@ +import { CHAIN_IDS } from "@/config" + /** * List of the blockchain supported officially. While the protocol can be * deployed anywhere, the URI resolution is more easily inferred from the * supported deployments. 
*/ -export const blockchainNames = ["tezos", "ethereum"] as const +export const blockchainNames = ["tezos", "eip155"] as const export type BlockchainNames = (typeof blockchainNames)[number] /** * Each blockchain has a list of supported networks */ export const blockchainNetworks = [ - "tezos:mainnet", - "tezos:ghostnet", - "ethereum:1", - "ethereum:5", + `tezos:${CHAIN_IDS.tezos.mainnet}`, + `tezos:${CHAIN_IDS.tezos.ghostnet}`, + `eip155:${CHAIN_IDS.eip155.mainnet}`, + `eip155:${CHAIN_IDS.eip155.goerli}`, + `eip155:${CHAIN_IDS.eip155.sepolia}`, + `eip155:${CHAIN_IDS.eip155.baseMainnet}`, + `eip155:${CHAIN_IDS.eip155.baseSepolia}`, ] as const export type BlockchainNetwork = (typeof blockchainNetworks)[number] diff --git a/packages/onchfs-js/src/uri/parse.ts b/packages/onchfs-js/src/uri/parse.ts index 0222b24..a8a9aad 100644 --- a/packages/onchfs-js/src/uri/parse.ts +++ b/packages/onchfs-js/src/uri/parse.ts @@ -2,7 +2,7 @@ * Proper charsets tightly following the spec */ -import { DEFAULT_CONTRACTS } from "@/config" +import { CHAIN_IDS, DEFAULT_CONTRACTS } from "@/config" import { BlockchainNames, URIAuthority, @@ -120,7 +120,10 @@ export function parseSchema(uri: string): string { export function parseSchemaSpecificPart( uriPart: string ): URISchemaSpecificParts { - const authorityReg = `([${AUTHORITY_CHARSET}]*)\\/` + // CAIP-2 Blockchain ID Specification: https://chainagnostic.org/CAIPs/caip-2 + // CAIP-10: Account ID Specification: https://chainagnostic.org/CAIPs/caip-10 + const authorityReg = `([-a-z0-9]{3,8}(?::[-_a-zA-Z0-9]{1,32}(?::[-.%a-zA-Z0-9]{1,128})?)?)\\/` + const cidReg = `[${HEX_CHARSET}]{64}` const pathReg = `${SEG_CHARSET}*(?:\\/${SEG_CHARSET}*)*` const queryReg = `\\?(${QUERY_CHARSET}*)` @@ -154,26 +157,18 @@ export function parseSchemaSpecificPart( const blockchainAuthorityParsers: Record RegExp> = { tezos: () => new RegExp( - `^(?:(KT(?:1|2|3|4)[${B58_CHARSET}]{33})\\.)?(tezos|tez|xtz)(?::(ghostnet|mainnet))?$` + 
`^(tezos)(?::(?:(Net[${B58_CHARSET}]{12}))(?::(KT(?:1|2|3|4)[${B58_CHARSET}]{33}))?)?$` ), - ethereum: () => - new RegExp(`^(?:([${HEX_CHARSET}]{40})\\.)?(ethereum|eth)(?::([0-9]+))?$`), -} - -type BlockchainNameVariants = { - [K in BlockchainNames]: [K, ...string[]] -} -const blockchainNameVariants: BlockchainNameVariants = { - tezos: ["tezos", "tez", "xtz"], - ethereum: ["ethereum", "eth"], + eip155: () => + new RegExp(`^(eip155)(?::([0-9]{1,})(?::([${HEX_CHARSET}]{40}))?)?$`), } type BlockchainDefaultNetwork = { [K in BlockchainNames]: string } const blockchainDefaultNetwork: BlockchainDefaultNetwork = { - tezos: "mainnet", - ethereum: "1", + tezos: CHAIN_IDS.tezos.mainnet, + eip155: CHAIN_IDS.eip155.mainnet, } /** @@ -221,7 +216,7 @@ export function parseAuthority( // no result; move to next blockchain if (!res) continue // results are in slots [1;3] - assign to temp object being parsed - const [contract, blockchainName, blockchainId] = res.splice(1, 3) + const [blockchainName, blockchainId, contract] = res.splice(1, 3) contract && (tmp.contract = contract) blockchainName && (tmp.blockchainName = blockchainName) blockchainId && (tmp.blockchainId = blockchainId) @@ -236,13 +231,6 @@ export function parseAuthority( "the blockchain could not be inferred when parsing the URI, if the URI doesn't have an authority segment (onchfs:////...), a context should be provided based on where the URI was observed. The blockchain needs to be resolved either through the URI or using the context." 
) } - // normalize blockchain name into its cleanest and most comprehensible form - for (const [name, values] of Object.entries(blockchainNameVariants)) { - if (values.includes(tmp.blockchainName)) { - tmp.blockchainName = name - break - } - } // if blockchain ID is missing, then assign the default blockchain ID // associated with the asset, which is mainnet diff --git a/packages/onchfs-js/src/utils/abi.ts b/packages/onchfs-js/src/utils/abi.ts new file mode 100644 index 0000000..7a53762 --- /dev/null +++ b/packages/onchfs-js/src/utils/abi.ts @@ -0,0 +1,232 @@ +export const ONCHFS_FILE_SYSTEM_ABI = [ + { + inputs: [ + { internalType: "address", name: "_contentStore", type: "address" }, + ], + stateMutability: "nonpayable", + type: "constructor", + }, + { inputs: [], name: "ChunkNotFound", type: "error" }, + { inputs: [], name: "DirectoryNotFound", type: "error" }, + { inputs: [], name: "FileNotFound", type: "error" }, + { inputs: [], name: "InodeNotFound", type: "error" }, + { inputs: [], name: "InvalidCharacter", type: "error" }, + { + inputs: [ + { internalType: "uint256", name: "_size", type: "uint256" }, + { internalType: "uint256", name: "_start", type: "uint256" }, + { internalType: "uint256", name: "_end", type: "uint256" }, + ], + name: "InvalidCodeAtRange", + type: "error", + }, + { inputs: [], name: "InvalidFileName", type: "error" }, + { inputs: [], name: "LengthMismatch", type: "error" }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: "bytes32", + name: "_checksum", + type: "bytes32", + }, + { + indexed: false, + internalType: "string[]", + name: "_names", + type: "string[]", + }, + { + indexed: false, + internalType: "bytes32[]", + name: "_inodeChecksums", + type: "bytes32[]", + }, + ], + name: "DirectoryCreated", + type: "event", + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: "bytes32", + name: "_checksum", + type: "bytes32", + }, + { + indexed: false, + internalType: "bytes", + name: "metadata", 
+ type: "bytes", + }, + { + indexed: false, + internalType: "bytes32[]", + name: "_chunkPointers", + type: "bytes32[]", + }, + ], + name: "FileCreated", + type: "event", + }, + { + inputs: [], + name: "CONTENT_STORE", + outputs: [{ internalType: "address", name: "", type: "address" }], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { internalType: "bytes32[]", name: "_pointers", type: "bytes32[]" }, + ], + name: "concatenateChunks", + outputs: [{ internalType: "bytes", name: "fileContent", type: "bytes" }], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { internalType: "string[]", name: "_fileNames", type: "string[]" }, + { internalType: "bytes32[]", name: "_filePointers", type: "bytes32[]" }, + ], + name: "concatenateFiles", + outputs: [ + { internalType: "bytes", name: "concatenatedFiles", type: "bytes" }, + ], + stateMutability: "pure", + type: "function", + }, + { + inputs: [ + { internalType: "string[]", name: "_fileNames", type: "string[]" }, + { internalType: "bytes32[]", name: "_inodeChecksums", type: "bytes32[]" }, + ], + name: "createDirectory", + outputs: [ + { internalType: "bytes32", name: "directoryChecksum", type: "bytes32" }, + ], + stateMutability: "nonpayable", + type: "function", + }, + { + inputs: [ + { internalType: "bytes", name: "_metadata", type: "bytes" }, + { internalType: "bytes32[]", name: "_chunkPointers", type: "bytes32[]" }, + ], + name: "createFile", + outputs: [ + { internalType: "bytes32", name: "fileChecksum", type: "bytes32" }, + ], + stateMutability: "nonpayable", + type: "function", + }, + { + inputs: [ + { internalType: "bytes32", name: "_inodeChecksum", type: "bytes32" }, + { internalType: "string[]", name: "_pathSegments", type: "string[]" }, + ], + name: "getInodeAt", + outputs: [ + { internalType: "bytes32", name: "", type: "bytes32" }, + { + components: [ + { internalType: "enum InodeType", name: "inodeType", type: "uint8" }, + { + components: [ + { internalType: "bytes", 
name: "metadata", type: "bytes" }, + { + internalType: "bytes32[]", + name: "chunkChecksums", + type: "bytes32[]", + }, + ], + internalType: "struct File", + name: "file", + type: "tuple", + }, + { + components: [ + { internalType: "string[]", name: "filenames", type: "string[]" }, + { + internalType: "bytes32[]", + name: "fileChecksums", + type: "bytes32[]", + }, + ], + internalType: "struct Directory", + name: "directory", + type: "tuple", + }, + ], + internalType: "struct Inode", + name: "", + type: "tuple", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [{ internalType: "bytes32", name: "_checksum", type: "bytes32" }], + name: "inodeExists", + outputs: [{ internalType: "bool", name: "", type: "bool" }], + stateMutability: "view", + type: "function", + }, + { + inputs: [{ internalType: "bytes32", name: "checksum", type: "bytes32" }], + name: "inodes", + outputs: [ + { internalType: "enum InodeType", name: "inodeType", type: "uint8" }, + { + components: [ + { internalType: "bytes", name: "metadata", type: "bytes" }, + { + internalType: "bytes32[]", + name: "chunkChecksums", + type: "bytes32[]", + }, + ], + internalType: "struct File", + name: "file", + type: "tuple", + }, + { + components: [ + { internalType: "string[]", name: "filenames", type: "string[]" }, + { + internalType: "bytes32[]", + name: "fileChecksums", + type: "bytes32[]", + }, + ], + internalType: "struct Directory", + name: "directory", + type: "tuple", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [{ internalType: "bytes32", name: "_checksum", type: "bytes32" }], + name: "readDirectory", + outputs: [ + { internalType: "string[]", name: "", type: "string[]" }, + { internalType: "bytes32[]", name: "", type: "bytes32[]" }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [{ internalType: "bytes32", name: "_checksum", type: "bytes32" }], + name: "readFile", + outputs: [{ internalType: "bytes", name: "", type: "bytes" }], 
+ stateMutability: "view", + type: "function", + }, +] diff --git a/packages/onchfs-js/src/utils/keccak.ts b/packages/onchfs-js/src/utils/keccak.ts index 1547f66..368ea4c 100644 --- a/packages/onchfs-js/src/utils/keccak.ts +++ b/packages/onchfs-js/src/utils/keccak.ts @@ -1,4 +1,5 @@ -import { keccak256 } from "js-sha3" +import sha3 from "js-sha3" +const { keccak256 } = sha3 /** * Hashes some bytes with keccak256. Simple typed wrapper to ease implementation diff --git a/packages/onchfs-js/src/utils/uint8.ts b/packages/onchfs-js/src/utils/uint8.ts index bfe63c2..79cef10 100644 --- a/packages/onchfs-js/src/utils/uint8.ts +++ b/packages/onchfs-js/src/utils/uint8.ts @@ -68,3 +68,12 @@ export function areUint8ArrayEqual(a: Uint8Array, b: Uint8Array): boolean { } return true } + +/** + * Outputs the hex string representation of the uint8array + * @param uint8 The uint8 array + * @returns The hex string representation of the uint8array + */ +export function u8hex(uint8: Uint8Array): string { + return [...uint8].map(x => x.toString(16).padStart(2, "0")).join("") +} diff --git a/packages/onchfs-js/test/resolution/uri.test.ts b/packages/onchfs-js/test/resolution/uri.test.ts index 133b50b..68192c5 100644 --- a/packages/onchfs-js/test/resolution/uri.test.ts +++ b/packages/onchfs-js/test/resolution/uri.test.ts @@ -12,6 +12,9 @@ import { } from "../../src/types/uri" import { DEFAULT_CONTRACTS } from "../../src/config" +const TEZOS_MAIN_ID = "NetXdQprcVkpaWU" +const TEZOS_GHOST_ID = "NetXnHfVqm9iesp" + const CHARSETS = (() => { const LOW_ALPHA = "abcdefghijklmnopqrstuvwxyz" const HI_ALPHA = "ABCDEFGHIJKLMNOPQRSTUVWXYZ" @@ -192,7 +195,8 @@ describe("fragment segment MUST only accept certain characters", () => { describe("tezos pattern constrains", () => { const KT_BASE = (a: string, c: string) => `KT${a}WvzYHCNBvDSdwafTHv7nJ1dWmZ8GCYuu${c}` - const BASE = (a: string, c: string) => `${KT_BASE(a, c)}.tezos` + const BASE = (a: string, c: string) => + 
`tezos:${TEZOS_MAIN_ID}:${KT_BASE(a, c)}` test("sanity check with known valid address", () => { expect(parseAuthority(BASE("1", "a"))).toHaveProperty( @@ -240,12 +244,12 @@ describe("parse URI", () => { expect(() => parseURI("./some/relative/path.txt")).toThrow() expect(() => parseURI( - "abonchfs://ethereum:5/6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840" + "abonchfs://eip155:5/6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840" ) ).toThrow() expect(() => parseURI( - "onchfs://ethereum:5/6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840°de" + "onchfs://eip155:5/6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840°de" ) ).toThrow() }) @@ -256,13 +260,13 @@ describe("parse URI", () => { { uri: "onchfs://6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840", context: { - blockchainName: "ethereum", + blockchainName: "eip155", }, output: { cid: "6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840", authority: { contract: "b0e58801d1b4d69179b7bc23fe54a37cee999b09", - blockchainName: "ethereum", + blockchainName: "eip155", blockchainId: "1", }, }, @@ -270,61 +274,61 @@ describe("parse URI", () => { { uri: "onchfs://6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840/folder/index.html", context: { - blockchainName: "ethereum", + blockchainName: "eip155", }, output: { cid: "6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840", authority: { contract: "b0e58801d1b4d69179b7bc23fe54a37cee999b09", - blockchainName: "ethereum", + blockchainName: "eip155", blockchainId: "1", }, path: "folder/index.html", }, }, { - uri: "onchfs://ethereum:5/6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840", + uri: "onchfs://eip155:5/6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840", output: { cid: "6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840", authority: { contract: "fcfdfa971803e1cc201f80d8e74de71fddea6551", - blockchainName: 
"ethereum", + blockchainName: "eip155", blockchainId: "5", }, }, }, { - uri: "onchfs://68b75b4e8439a7099e53045bea850b3266e95906.eth/6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840", + uri: "onchfs://eip155:1:68b75b4e8439a7099e53045bea850b3266e95906/6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840", output: { cid: "6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840", authority: { contract: "68b75b4e8439a7099e53045bea850b3266e95906", - blockchainName: "ethereum", + blockchainName: "eip155", blockchainId: "1", }, }, }, { - uri: "onchfs://KT1WvzYHCNBvDSdwafTHv7nJ1dWmZ8GCYuuC.tezos/6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840", + uri: "onchfs://tezos:NetXdQprcVkpaWU:KT1WvzYHCNBvDSdwafTHv7nJ1dWmZ8GCYuuC/6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840", output: { cid: "6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840", authority: { contract: "KT1WvzYHCNBvDSdwafTHv7nJ1dWmZ8GCYuuC", blockchainName: "tezos", - blockchainId: "mainnet", + blockchainId: "NetXdQprcVkpaWU", }, }, }, { uri: "onchfs://6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840/folder/index.html?param1=4¶m2=heyheyhey#a-fragment", context: { - blockchainName: "ethereum", + blockchainName: "eip155", }, output: { cid: "6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840", authority: { contract: "b0e58801d1b4d69179b7bc23fe54a37cee999b09", - blockchainName: "ethereum", + blockchainName: "eip155", blockchainId: "1", }, path: "folder/index.html", @@ -342,7 +346,7 @@ describe("parse URI", () => { authority: { contract: "KT1WvzYHCNBvDSdwafTHv7nJ1dWmZ8GCYuuC", blockchainName: "tezos", - blockchainId: "mainnet", + blockchainId: "NetXdQprcVkpaWU", }, query: "a-param=1234", }, @@ -414,11 +418,11 @@ describe("parse URI", () => { it("should normalize hexadecimal points (lower/upper)-case", () => { expect( parseURI( - 
"onchfs://ethereum:5/6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840" + "onchfs://eip155/6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840" ) ).toEqual( parseURI( - "onchfs://ethereum:5/6db0ff44176C6f1E9f471DC0c3f15194827D1129af94628a3a753c747f726840" + "onchfs://eip155/6db0ff44176C6f1E9f471DC0c3f15194827D1129af94628a3a753c747f726840" ) ) }) @@ -428,10 +432,10 @@ describe("parseSchema", () => { it("should capture 2 groups for valid URIs", () => { expect( parseSchema( - "onchfs://ethereum:5/6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840" + "onchfs://eip155/6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840" ) ).toEqual( - "ethereum:5/6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840" + "eip155/6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840" ) expect( parseSchema( @@ -506,24 +510,25 @@ describe("parse schema-specific components", () => { }, }, { - uri: "ethereum:5/6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840", + uri: "eip155:5/6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840", output: { cid: "6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840", - authority: "ethereum:5", + authority: "eip155:5", }, }, { - uri: "68b75b4e8439a7099e53045bea850b3266e95906.eth/6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840", + uri: "eip155:342-45e:68b75b4e8439a7099e53045bea850b3266e95906/6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840", output: { cid: "6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840", - authority: "68b75b4e8439a7099e53045bea850b3266e95906.eth", + authority: "eip155:342-45e:68b75b4e8439a7099e53045bea850b3266e95906", }, }, { - uri: "KT1WvzYHCNBvDSdwafTHv7nJ1dWmZ8GCYuuC.tezos/6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840", + // weird chain id "bleblebleble" + uri: 
"tezos:blebleble:KT1WvzYHCNBvDSdwafTHv7nJ1dWmZ8GCYuuC/6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840", output: { cid: "6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840", - authority: "KT1WvzYHCNBvDSdwafTHv7nJ1dWmZ8GCYuuC.tezos", + authority: "tezos:blebleble:KT1WvzYHCNBvDSdwafTHv7nJ1dWmZ8GCYuuC", }, }, { @@ -545,24 +550,25 @@ describe("parse schema-specific components", () => { it("should still parse semi-invalid authority segment", () => { const set: { uri: string; output: URISchemaSpecificParts }[] = [ { - uri: "aaaaaaaaaaaaaaaaaa:5/6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840", + uri: "aaaaaaa:5/6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840", output: { cid: "6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840", - authority: "aaaaaaaaaaaaaaaaaa:5", + authority: "aaaaaaa:5", }, }, { - uri: "68b75b4e8439a7099e53045bea850b3266e959.eth/6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840", + // eth address has 38 characters + uri: "eip155:1:68b75b4e8439a7099e53045bea850b3266e959/6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840", output: { cid: "6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840", - authority: "68b75b4e8439a7099e53045bea850b3266e959.eth", + authority: "eip155:1:68b75b4e8439a7099e53045bea850b3266e959", }, }, { - uri: "KT1Wvz.tezos/6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840", + uri: "tezos:NetXdQprcVkpaWU:KT1Wvz/6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840", output: { cid: "6db0ff44176c6f1e9f471dc0c3f15194827d1129af94628a3a753c747f726840", - authority: "KT1Wvz.tezos", + authority: "tezos:NetXdQprcVkpaWU:KT1Wvz", }, }, ] @@ -632,83 +638,43 @@ describe("URI authority parser", () => { out: URIAuthority }[] = [ { - authority: "ethereum:5", + authority: "eip155:5", out: { - blockchainName: "ethereum", + blockchainName: "eip155", blockchainId: "5", - contract: DEFAULT_CONTRACTS["ethereum:5"], 
+ contract: DEFAULT_CONTRACTS["eip155:5"], }, }, { - authority: "eth:5", + authority: "eip155:5", out: { - blockchainName: "ethereum", + blockchainName: "eip155", blockchainId: "5", - contract: DEFAULT_CONTRACTS["ethereum:5"], - }, - }, - { - authority: "eth", - out: { - blockchainName: "ethereum", - blockchainId: "1", - contract: DEFAULT_CONTRACTS["ethereum:1"], - }, - }, - { - authority: "eth:1", - out: { - blockchainName: "ethereum", - blockchainId: "1", - contract: DEFAULT_CONTRACTS["ethereum:1"], + contract: DEFAULT_CONTRACTS["eip155:5"], }, }, { - authority: "ethereum", + authority: "eip155", out: { - blockchainName: "ethereum", + blockchainName: "eip155", blockchainId: "1", - contract: DEFAULT_CONTRACTS["ethereum:1"], - }, - }, - { - authority: "tezos:ghostnet", - out: { - blockchainName: "tezos", - blockchainId: "ghostnet", - contract: DEFAULT_CONTRACTS["tezos:ghostnet"], - }, - }, - { - authority: "tez:ghostnet", - out: { - blockchainName: "tezos", - blockchainId: "ghostnet", - contract: DEFAULT_CONTRACTS["tezos:ghostnet"], + contract: DEFAULT_CONTRACTS["eip155:1"], }, }, { - authority: "tez", + authority: "tezos:NetXnHfVqm9iesp", out: { blockchainName: "tezos", - blockchainId: "mainnet", - contract: DEFAULT_CONTRACTS["tezos:mainnet"], - }, - }, - { - authority: "tez:mainnet", - out: { - blockchainName: "tezos", - blockchainId: "mainnet", - contract: DEFAULT_CONTRACTS["tezos:mainnet"], + blockchainId: "NetXnHfVqm9iesp", + contract: DEFAULT_CONTRACTS["tezos:NetXnHfVqm9iesp"], }, }, { authority: "tezos", out: { blockchainName: "tezos", - blockchainId: "mainnet", - contract: DEFAULT_CONTRACTS["tezos:mainnet"], + blockchainId: "NetXdQprcVkpaWU", + contract: DEFAULT_CONTRACTS["tezos:NetXdQprcVkpaWU"], }, }, ] @@ -722,46 +688,46 @@ describe("URI authority parser", () => { const goods: { context: URIContext; out: URIAuthority }[] = [ { context: { - blockchainName: "ethereum", + blockchainName: "eip155", }, out: { - blockchainName: "ethereum", + blockchainName: 
"eip155", blockchainId: "1", - contract: DEFAULT_CONTRACTS["ethereum:1"], + contract: DEFAULT_CONTRACTS["eip155:1"], }, }, { context: { - blockchainName: "ethereum", + blockchainName: "eip155", blockchainId: "1", - contract: DEFAULT_CONTRACTS["ethereum:1"], + contract: DEFAULT_CONTRACTS["eip155:1"], }, out: { - blockchainName: "ethereum", + blockchainName: "eip155", blockchainId: "1", - contract: DEFAULT_CONTRACTS["ethereum:1"], + contract: DEFAULT_CONTRACTS["eip155:1"], }, }, { context: { - blockchainName: "ethereum", + blockchainName: "eip155", blockchainId: "1", - contract: DEFAULT_CONTRACTS["ethereum:1"], + contract: DEFAULT_CONTRACTS["eip155:1"], }, out: { - blockchainName: "ethereum", + blockchainName: "eip155", blockchainId: "1", - contract: DEFAULT_CONTRACTS["ethereum:1"], + contract: DEFAULT_CONTRACTS["eip155:1"], }, }, { context: { - blockchainName: "ethereum", + blockchainName: "eip155", blockchainId: "1", contract: "abcde", }, out: { - blockchainName: "ethereum", + blockchainName: "eip155", blockchainId: "1", contract: "abcde", }, @@ -772,8 +738,8 @@ describe("URI authority parser", () => { }, out: { blockchainName: "tezos", - blockchainId: "mainnet", - contract: DEFAULT_CONTRACTS["tezos:mainnet"], + blockchainId: "NetXdQprcVkpaWU", + contract: DEFAULT_CONTRACTS["tezos:NetXdQprcVkpaWU"], }, }, ] @@ -784,12 +750,15 @@ describe("URI authority parser", () => { }) it("should fail if some segments are invalid", () => { - expect(() => parseAuthority("KT8LELE.tezos")).toThrow() + expect(() => parseAuthority("tezos:NetXdQprcVkpaWU:KT8LELE")).toThrow() + expect(() => + parseAuthority("eip155:1:1e9f471dc0c3f15194827d1129af94628a3a753K") + ).toThrow() expect(() => - parseAuthority("1e9f471dc0c3f15194827d1129af94628a3a753K.eth") + parseAuthority("eip155:1:1e9f471dc0c3f15194827d1129af94628a3a753f.ge") ).toThrow() expect(() => - parseAuthority("1e9f471dc0c3f15194827d1129af94628a3a753f.eth.ge") + parseAuthority("eip155:1:1e9f471dc0c3f15194827d1129af94628a3a753f:1a") 
).toThrow() }) diff --git a/packages/onchfs-js/tsconfig.json b/packages/onchfs-js/tsconfig.json index 5ca2688..b44ba8d 100644 --- a/packages/onchfs-js/tsconfig.json +++ b/packages/onchfs-js/tsconfig.json @@ -1,27 +1,18 @@ { + "extends": "@fxhash/tsconfig/base", + "include": ["src/**/*"], + "exclude": ["node_modules", "test/**/*"], "compilerOptions": { - "target": "ES2022", - "module": "CommonJS", - "lib": ["ES2022", "DOM"], - "moduleResolution": "node", "rootDir": "src", "outDir": "dist", - "allowSyntheticDefaultImports": true, - "importHelpers": true, - "alwaysStrict": true, - "sourceMap": true, - "forceConsistentCasingInFileNames": true, - "noFallthroughCasesInSwitch": true, - "noImplicitReturns": true, - "noUnusedLocals": false, - "noUnusedParameters": true, - "noImplicitAny": false, - "noImplicitThis": false, - "strictNullChecks": false, "paths": { "@/*": ["./src/*"] - } - }, - "include": ["src/**/*"], - "exclude": ["test/**/*"] + }, + "target": "es2022", + "lib": ["es2022", "DOM"], + // FIXME: resolve type errors when removing the following lines + "strict": false, + "noUnusedParameters": false, + "noUnusedLocals": false + } }