diff --git a/.vscode/settings.json b/.vscode/settings.json index 6ef8e7ba9..17448e0d7 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -8,7 +8,7 @@ "**/node_modules": true, "**/build": true, "**/coverage": true, - "**/dist": true, + "**/dist": true }, "eslint.validate": [ "javascript", @@ -32,5 +32,10 @@ "./svelte" ], "typescript.preferences.preferTypeOnlyAutoImports": true, - "rustTestExplorer.rootCargoManifestFilePath": "./Cargo.toml" + "rustTestExplorer.rootCargoManifestFilePath": "./Cargo.toml", + // This won't work in multi-root workspaces, could be fixed by using a rust-analyzer.toml once there is some more documentation on that. + // For now you need to set this in your own vscode settings file. + "rust-analyzer.cargo.extraEnv": { + "ATOMICSERVER_SKIP_JS_BUILD": "true" + } } diff --git a/CHANGELOG.md b/CHANGELOG.md index 796c7e7bb..fb5058550 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,9 +11,13 @@ See [STATUS.md](server/STATUS.md) to learn more about which features will remain - [#1056](https://github.com/atomicdata-dev/atomic-server/issues/1056) Switched from Earthly to Dagger for CI. Also made improvements to E2E test publishing and building docker images. - [#979](https://github.com/atomicdata-dev/atomic-server/issues/979) Fix nested resource deletion, use transactions - [#1057](https://github.com/atomicdata-dev/atomic-server/issues/1057) Fix double slashes in search bar -- CLI should use Agent in requests - get #986 -- Search endpoint throws error for websocket requests #1047 -- Fix search in CLI / atomic_lib #958 +- [#986](https://github.com/atomicdata-dev/atomic-server/issues/986) CLI should use Agent in requests - get +- [#1047](https://github.com/atomicdata-dev/atomic-server/issues/1047) Search endpoint throws error for websocket requests +- [#958](https://github.com/atomicdata-dev/atomic-server/issues/958) Fix search in CLI / atomic_lib +- [#658](https://github.com/atomicdata-dev/atomic-server/issues/658) Added JSON datatype. +- [#1024](https://github.com/atomicdata-dev/atomic-server/issues/1024) Added URI datatype. +BREAKING: [#1107](https://github.com/atomicdata-dev/atomic-server/issues/1107) Named nested resources are no longer supported. Value::Resource and SubResource::Resource have been removed. If you need to include multiple resources in a response use an array. +BREAKING: `store.get_resource_extended()` now returns a `ResourceResponse` instead of a `Resource` due to the removal of named nested resources. ## [v0.40.2] diff --git a/Cargo.lock b/Cargo.lock index f285cf475..8e2554912 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,6 +1,6 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. 
-version = 3 +version = 4 [[package]] name = "actix" @@ -640,6 +640,7 @@ dependencies = [ "ring 0.17.8", "rio_api", "rio_turtle", + "rmp-serde", "serde", "serde_jcs", "serde_json", @@ -3517,6 +3518,28 @@ dependencies = [ "rio_api", ] +[[package]] +name = "rmp" +version = "0.8.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "228ed7c16fa39782c3b3468e974aec2795e9089153cd08ee2e9aefb3613334c4" +dependencies = [ + "byteorder", + "num-traits", + "paste", +] + +[[package]] +name = "rmp-serde" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52e599a477cf9840e92f2cde9a7189e67b42c57532749bf90aea6ec10facd4db" +dependencies = [ + "byteorder", + "rmp", + "serde", +] + [[package]] name = "rust-stemmers" version = "1.2.0" diff --git a/browser/CHANGELOG.md b/browser/CHANGELOG.md index 6ae5c270f..d2b6d2ac6 100644 --- a/browser/CHANGELOG.md +++ b/browser/CHANGELOG.md @@ -43,6 +43,7 @@ This changelog covers all five packages, as they are (for now) updated as a whol - [#983](https://github.com/atomicdata-dev/atomic-server/issues/983) Give clear error when name collisions are found in an ontology. - Generates class definitions that enables doing: `resource.props.name = 'New Name'`; - [#1071](https://github.com/atomicdata-dev/atomic-server/issues/1071) Fix bug where classes and properties with 'name' props would lead to invalid generated typescript code. +- Generated ontologies now base import extensions on the tsconfig.json file. (moduleResolution: bundler will remove the .js extensions in imports) ### @tomic/svelte diff --git a/browser/cli/package.json b/browser/cli/package.json index 56e7a1d69..e1e33a1c8 100644 --- a/browser/cli/package.json +++ b/browser/cli/package.json @@ -12,10 +12,7 @@ "dependencies": { "@tomic/lib": "workspace:*", "chalk": "^5.3.0", - "get-tsconfig": "^4.8.1", - "prettier": "3.0.3" - }, - "devDependencies": { + "prettier": "3.0.3", "typescript": "^5.6.3" }, "description": "Generate types from Atomic Data ontologies", diff --git a/browser/cli/src/DatatypeToTSTypeMap.ts b/browser/cli/src/DatatypeToTSTypeMap.ts index 3c4a0b5f4..2974e0353 100644 --- a/browser/cli/src/DatatypeToTSTypeMap.ts +++ b/browser/cli/src/DatatypeToTSTypeMap.ts @@ -11,5 +11,7 @@ export const DatatypeToTSTypeMap = { [Datatype.STRING]: 'string', [Datatype.SLUG]: 'string', [Datatype.MARKDOWN]: 'string', + [Datatype.URI]: 'string', + [Datatype.JSON]: 'unknown', [Datatype.UNKNOWN]: 'JSONValue', }; diff --git a/browser/cli/src/PropertyRecord.ts b/browser/cli/src/PropertyRecord.ts index a5d9387ee..5634dbb47 100644 --- a/browser/cli/src/PropertyRecord.ts +++ b/browser/cli/src/PropertyRecord.ts @@ -13,7 +13,7 @@ export class PropertyRecord { ]); } - public repordPropertyDefined(subject: string) { + public reportPropertyDefined(subject: string) { this.knownProperties.add(subject); if (this.missingProperties.has(subject)) { diff --git a/browser/cli/src/generateOntology.ts b/browser/cli/src/generateOntology.ts index 0bf9ebe99..31b364cf9 100644 --- a/browser/cli/src/generateOntology.ts +++ b/browser/cli/src/generateOntology.ts @@ -52,7 +52,7 @@ export const generateOntology = async ( const properties = dedupe(ontology.props.properties ?? 
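// Sketch: how the tsconfig-based extension detection mentioned in the
// changelog entry above plays out when @tomic/cli emits import statements.
// getExtension() is the helper rewritten in browser/cli/src/utils.ts further
// down in this diff; buildImportLine is a hypothetical illustration, not part
// of the generator itself.
import { getExtension } from './utils.js';

const buildImportLine = (symbols: string[], moduleName: string): string =>
  `import { ${symbols.join(', ')} } from './${moduleName}${getExtension()}';`;

// With "moduleResolution": "bundler" in tsconfig.json:
//   buildImportLine(['core'], 'core') === "import { core } from './core';"
// With any other (or missing) moduleResolution:
//   buildImportLine(['core'], 'core') === "import { core } from './core.js';"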
[]); for (const prop of properties) { - propertyRecord.repordPropertyDefined(prop); + propertyRecord.reportPropertyDefined(prop); } const [baseObjStr, reverseMapping] = await generateBaseObject(ontology); diff --git a/browser/cli/src/utils.ts b/browser/cli/src/utils.ts index 1cace4c15..e20c9d209 100644 --- a/browser/cli/src/utils.ts +++ b/browser/cli/src/utils.ts @@ -1,4 +1,7 @@ -import { getTsconfig } from 'get-tsconfig'; +import { sys as tsSys, findConfigFile, readConfigFile } from 'typescript'; + +const NOT_FOUND = 'tsconfig.json not found'; +const COULD_NOT_READ = 'Could not read tsconfig.json'; export const camelCaseify = (str: string) => str.replace(/-([a-z0-9])/g, g => { @@ -11,12 +14,51 @@ export const dedupe = (array: T[]): T[] => { export const getExtension = () => { try { - return getTsconfig()?.config.compilerOptions?.moduleResolution === 'Bundler' - ? '' - : '.js'; - } catch (e) { - console.warn('Something went wrong getting TS Config / file extension', e); + const tsconfig = getTsconfig(); + const moduleResolution = tsconfig.config.compilerOptions?.moduleResolution; + + if (!moduleResolution) { + return '.js'; + } + + return moduleResolution.toLowerCase() === 'bundler' ? '' : '.js'; + } catch (error) { + if (error instanceof Error) { + if (error.message === NOT_FOUND) { + // eslint-disable-next-line no-console + console.log('tsconfig.json not found, defaulting to .js imports'); + + return '.js'; + } + + if (error.message === COULD_NOT_READ) { + // eslint-disable-next-line no-console + console.log('Could not read tsconfig.json, defaulting to .js imports'); + + return '.js'; + } - return '.js'; + throw error; + } else { + throw new Error(error); + } } }; + +const getTsconfig = () => { + // Find tsconfig.json file + const tsconfigPath = findConfigFile( + process.cwd(), + tsSys.fileExists, + 'tsconfig.json', + ); + + if (!tsconfigPath) throw new Error(NOT_FOUND); + + // Read tsconfig.json file + const tsconfigFile = readConfigFile(tsconfigPath, tsSys.readFile); + + if (!tsconfigFile.config) throw new Error(COULD_NOT_READ); + + return tsconfigFile; +}; diff --git a/browser/create-template/templates/sveltekit-site/tsconfig.json b/browser/create-template/templates/sveltekit-site/tsconfig.json index 45729f8ff..fc93cbd94 100644 --- a/browser/create-template/templates/sveltekit-site/tsconfig.json +++ b/browser/create-template/templates/sveltekit-site/tsconfig.json @@ -1,4 +1,5 @@ { + "extends": "./.svelte-kit/tsconfig.json", "compilerOptions": { "allowJs": true, "checkJs": true, diff --git a/browser/data-browser/package.json b/browser/data-browser/package.json index fcb16c444..fa6a4dc33 100644 --- a/browser/data-browser/package.json +++ b/browser/data-browser/package.json @@ -8,6 +8,8 @@ "@bugsnag/core": "^7.25.0", "@bugsnag/js": "^7.25.0", "@bugsnag/plugin-react": "^7.25.0", + "@codemirror/lang-json": "^6.0.2", + "@codemirror/lint": "^6.8.5", "@dagrejs/dagre": "^1.1.4", "@dnd-kit/core": "^6.1.0", "@dnd-kit/sortable": "^8.0.0", @@ -27,6 +29,9 @@ "@tiptap/starter-kit": "^2.9.1", "@tiptap/suggestion": "^2.9.1", "@tomic/react": "workspace:*", + "@uiw/codemirror-theme-github": "^4.24.1", + "@uiw/react-codemirror": "^4.24.1", + "clsx": "^2.1.1", "emoji-mart": "^5.6.0", "polished": "^4.3.1", "prismjs": "^1.29.0", diff --git a/browser/data-browser/src/chunks/CodeEditor/AsyncJSONEditor.tsx b/browser/data-browser/src/chunks/CodeEditor/AsyncJSONEditor.tsx new file mode 100644 index 000000000..2cb3c3ff1 --- /dev/null +++ b/browser/data-browser/src/chunks/CodeEditor/AsyncJSONEditor.tsx @@ -0,0 
+1,152 @@ +import CodeMirror, { + type BasicSetupOptions, + type EditorView, + type ReactCodeMirrorRef, +} from '@uiw/react-codemirror'; +import { githubLight, githubDark } from '@uiw/codemirror-theme-github'; +import { json, jsonParseLinter } from '@codemirror/lang-json'; +import { linter, type Diagnostic } from '@codemirror/lint'; +import { useCallback, useEffect, useMemo, useRef, useState } from 'react'; +import { styled, useTheme } from 'styled-components'; + +export interface JSONEditorProps { + labelId?: string; + initialValue?: string; + showErrorStyling?: boolean; + required?: boolean; + maxWidth?: string; + autoFocus?: boolean; + onChange: (value: string) => void; + onValidationChange?: (isValid: boolean) => void; + onBlur?: () => void; +} + +const basicSetup: BasicSetupOptions = { + lineNumbers: false, + foldGutter: false, + highlightActiveLine: true, + indentOnInput: true, +}; + +/** + * ASYNC COMPONENT DO NOT IMPORT DIRECTLY, USE {@link JSONEditor.tsx}. + */ +const AsyncJSONEditor: React.FC = ({ + labelId, + initialValue, + showErrorStyling, + required, + maxWidth, + autoFocus, + onChange, + onValidationChange, + onBlur, +}) => { + const editorRef = useRef(null); + const theme = useTheme(); + const [value, setValue] = useState(initialValue ?? ''); + const latestDiagnostics = useRef([]); + // We need to use callback because the compiler can't optimize the CodeMirror component. + const handleChange = useCallback( + (val: string) => { + setValue(val); + onChange(val); + }, + [onChange], + ); + + // Wrap jsonParseLinter so we can tap into diagnostics + const validationLinter = useCallback(() => { + const delegate = jsonParseLinter(); + + return (view: EditorView) => { + const isEmpty = view.state.doc.length === 0; + let diagnostics = delegate(view); + + if (!required && isEmpty) { + diagnostics = []; + } + + // Compare the diagnostics so we don't call the onValidationChange callback unnecessarily. + const prev = latestDiagnostics.current; + const changed = + diagnostics.length !== prev.length || + diagnostics.some( + (d, i) => d.from !== prev[i]?.from || d.message !== prev[i]?.message, + ); + + if (changed) { + latestDiagnostics.current = diagnostics; + onValidationChange?.(diagnostics.length === 0); + } + + return diagnostics; + }; + }, [onValidationChange]); + + const extensions = useMemo( + // eslint-disable-next-line react-compiler/react-compiler + () => [json(), linter(validationLinter())], + [validationLinter], + ); + + useEffect(() => { + // The actual editor is not mounted immediately so we need to wait a cycle. + requestAnimationFrame(() => { + if (editorRef.current?.editor && labelId) { + const realEditor = + editorRef.current.editor.querySelector('.cm-content'); + + if (!realEditor) { + return; + } + + realEditor.setAttribute('aria-labelledby', labelId); + } + }); + }, [labelId]); + + return ( + onBlur?.()} + className={showErrorStyling ? 
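// Sketch: consuming the onValidationChange callback wired up by the linter
// wrapper above. A hypothetical form keeps a local "valid" flag and blocks
// saving while the document has JSON diagnostics; InputJSON.tsx later in this
// diff does the real form integration. Component name and import path are
// illustrative.
import { useState } from 'react';
import { JSONEditor } from '../../components/JSONEditor';

export const JSONFieldSketch = ({
  onSave,
}: {
  onSave: (json: string) => void;
}) => {
  const [value, setValue] = useState('{}');
  const [valid, setValid] = useState(true);

  return (
    <>
      <JSONEditor
        initialValue={value}
        onChange={setValue}
        onValidationChange={setValid}
      />
      <button disabled={!valid} onClick={() => onSave(value)}>
        Save
      </button>
    </>
  );
};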
'json-editor__error' : ''} + > + + + ); +}; + +export default AsyncJSONEditor; + +const CodeEditorWrapper = styled.div` + display: contents; + + &.json-editor__error .cm-editor { + border-color: ${p => p.theme.colors.alert} !important; + } + + & .cm-editor { + border: 1px solid ${p => p.theme.colors.bg2}; + border-radius: ${p => p.theme.radius}; + /* padding: ${p => p.theme.size(2)}; */ + outline: none; + + &:focus-within { + border-color: ${p => p.theme.colors.main}; + } + } +`; diff --git a/browser/data-browser/src/chunks/MarkdownEditor/AsyncMarkdownEditor.tsx b/browser/data-browser/src/chunks/MarkdownEditor/AsyncMarkdownEditor.tsx index 5ce5d8298..645dfb5e9 100644 --- a/browser/data-browser/src/chunks/MarkdownEditor/AsyncMarkdownEditor.tsx +++ b/browser/data-browser/src/chunks/MarkdownEditor/AsyncMarkdownEditor.tsx @@ -11,13 +11,15 @@ import { useCallback, useState } from 'react'; import { BubbleMenu } from './BubbleMenu'; import { TiptapContextProvider } from './TiptapContext'; import { ToggleButton } from './ToggleButton'; -import { SlashCommands, suggestion } from './SlashMenu/CommandsExtension'; +import { SlashCommands, buildSuggestion } from './SlashMenu/CommandsExtension'; import { ExtendedImage } from './ImagePicker'; import { transition } from '../../helpers/transition'; +import { usePopoverContainer } from '../../components/Popover'; export type AsyncMarkdownEditorProps = { placeholder?: string; initialContent?: string; + autoFocus?: boolean; onChange?: (content: string) => void; id?: string; labelId?: string; @@ -31,11 +33,16 @@ const LINE_HEIGHT = 1.15; export default function AsyncMarkdownEditor({ placeholder, initialContent, + autoFocus, id, labelId, onChange, onBlur, }: AsyncMarkdownEditorProps): React.JSX.Element { + const containerRef = usePopoverContainer(); + + const container = containerRef.current ?? document.body; + const [extensions] = useState(() => [ StarterKit, Markdown, @@ -65,7 +72,7 @@ export default function AsyncMarkdownEditor({ placeholder: placeholder ?? 'Start typing...', }), SlashCommands.configure({ - suggestion, + suggestion: buildSuggestion(container), }), ]); @@ -76,6 +83,7 @@ export default function AsyncMarkdownEditor({ extensions, content: markdown, onBlur, + autofocus: !!autoFocus, editorProps: { attributes: { ...(id && { id }), diff --git a/browser/data-browser/src/chunks/MarkdownEditor/SlashMenu/CommandsExtension.ts b/browser/data-browser/src/chunks/MarkdownEditor/SlashMenu/CommandsExtension.ts index f400647be..cd1720518 100644 --- a/browser/data-browser/src/chunks/MarkdownEditor/SlashMenu/CommandsExtension.ts +++ b/browser/data-browser/src/chunks/MarkdownEditor/SlashMenu/CommandsExtension.ts @@ -39,7 +39,9 @@ export const SlashCommands = Extension.create({ }, }); -export const suggestion: Partial = { +export const buildSuggestion = ( + container: HTMLElement, +): Partial => ({ items: ({ query }: { query: string }): CommandItem[] => [ { @@ -157,7 +159,7 @@ export const suggestion: Partial = { popup = tippy('body', { getReferenceClientRect: props.clientRect! 
as () => DOMRect, - appendTo: () => document.body, + appendTo: () => container, content: component.element, showOnCreate: true, interactive: true, @@ -198,4 +200,4 @@ export const suggestion: Partial = { }, }; }, -}; +}); diff --git a/browser/data-browser/src/components/AtomicLink.tsx b/browser/data-browser/src/components/AtomicLink.tsx index cec0f1c8d..bc65d6f39 100644 --- a/browser/data-browser/src/components/AtomicLink.tsx +++ b/browser/data-browser/src/components/AtomicLink.tsx @@ -5,6 +5,7 @@ import { FaExternalLinkAlt } from 'react-icons/fa'; import { ErrorLook } from '../components/ErrorLook'; import { isRunningInTauri } from '../helpers/tauri'; import { useNavigateWithTransition } from '../hooks/useNavigateWithTransition'; +import clsx from 'clsx'; export interface AtomicLinkProps extends React.AnchorHTMLAttributes { @@ -79,7 +80,7 @@ export const AtomicLink = forwardRef( return ( ( ref={ref} > {children} - {href && !clean && } + {href && !clean && } ); }, @@ -121,4 +122,10 @@ export const LinkView = styled.a` &:active { color: ${props => props.theme.colors.mainDark}; } + + &.atomic-link_external { + display: inline-flex; + align-items: center; + gap: 0.6ch; + } `; diff --git a/browser/data-browser/src/components/Dialog/index.tsx b/browser/data-browser/src/components/Dialog/index.tsx index ef5bdb725..44bf38dd2 100644 --- a/browser/data-browser/src/components/Dialog/index.tsx +++ b/browser/data-browser/src/components/Dialog/index.tsx @@ -38,11 +38,9 @@ export const VAR_DIALOG_INNER_WIDTH = '--dialog-inner-width'; const ANIM_MS = 80; const ANIM_SPEED = `${ANIM_MS}ms`; -interface DialogSlotProps { - className?: string; -} - -type DialogSlotComponent = React.FC>; +type DialogSlotComponent = React.FC< + React.PropsWithChildren> +>; /** * Component to build a dialog. The content of this component are rendered in a @@ -179,18 +177,17 @@ const InnerDialog: React.FC> = ({ ); }; -export const DialogTitle: DialogSlotComponent = ({ children, className }) => ( - +export const DialogTitle: DialogSlotComponent = ({ children, ...props }) => ( + {children} ); /** - * Dialog section that is scrollable. Put your main content here. Should be no - * larger than 4rem + * Dialog section that is scrollable. Put your main content here. */ -export const DialogContent: DialogSlotComponent = ({ children, className }) => ( - +export const DialogContent: DialogSlotComponent = ({ children, ...props }) => ( + {children} ); @@ -199,16 +196,16 @@ export const DialogContent: DialogSlotComponent = ({ children, className }) => ( * Bottom part of the Dialog that is always visible. Place your buttons here. 
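// Sketch: why buildSuggestion now receives a container. Mounting the tippy
// popup into the surrounding popover container (see usePopoverContainer in
// Popover.tsx further down in this diff) keeps the slash menu inside the
// popover's DOM tree instead of document.body, so it stays visible and
// clickable when the markdown editor is rendered inside a popover.
// showSuggestionPopup is a hypothetical, simplified helper.
import tippy, { type Instance } from 'tippy.js';

export const showSuggestionPopup = (
  container: HTMLElement,
  content: HTMLElement,
  getRect: () => DOMRect,
): Instance =>
  tippy(container, {
    appendTo: () => container,
    content,
    getReferenceClientRect: getRect,
    showOnCreate: true,
    interactive: true,
    trigger: 'manual',
    placement: 'bottom-start',
  });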
* Should be no larger than 4rem */ -export const DialogActions: DialogSlotComponent = ({ children, className }) => ( - +export const DialogActions: DialogSlotComponent = ({ children, ...props }) => ( + {children} ); +Dialog.Title = DialogTitle; +Dialog.Content = DialogContent; +Dialog.Actions = DialogActions; + const CloseButtonSlot = styled(Slot)` justify-self: end; `; diff --git a/browser/data-browser/src/components/JSONEditor.tsx b/browser/data-browser/src/components/JSONEditor.tsx new file mode 100644 index 000000000..d3f53b44b --- /dev/null +++ b/browser/data-browser/src/components/JSONEditor.tsx @@ -0,0 +1,21 @@ +import { lazy, Suspense } from 'react'; +import type { JSONEditorProps } from '../chunks/CodeEditor/AsyncJSONEditor'; +import { styled } from 'styled-components'; + +const AsyncJSONEditor = lazy( + () => import('../chunks/CodeEditor/AsyncJSONEditor'), +); + +export const JSONEditor: React.FC = props => { + return ( + }> + + + ); +}; + +const Loader = styled.div` + background-color: ${p => p.theme.colors.bg}; + border: 1px solid ${p => p.theme.colors.bg2}; + height: 150px; +`; diff --git a/browser/data-browser/src/components/Popover.tsx b/browser/data-browser/src/components/Popover.tsx index cef298447..29f2b25d3 100644 --- a/browser/data-browser/src/components/Popover.tsx +++ b/browser/data-browser/src/components/Popover.tsx @@ -113,6 +113,10 @@ const Arrow = styled(RadixPopover.Arrow)` const PopoverContainerContext = createContext>(createRef()); +export const usePopoverContainer = () => { + return useContext(PopoverContainerContext); +}; + export const PopoverContainer: FC = ({ children }) => { const popoverContainerRef = useRef(null); diff --git a/browser/data-browser/src/components/PropVal.tsx b/browser/data-browser/src/components/PropVal.tsx index ca0f88c20..970a23f2c 100644 --- a/browser/data-browser/src/components/PropVal.tsx +++ b/browser/data-browser/src/components/PropVal.tsx @@ -9,6 +9,7 @@ import { ALL_PROPS_CONTAINER } from '../helpers/containers'; import { LoaderInline } from './Loader'; import type { JSX } from 'react'; +import { JSON_RENDERER_CLASS } from './datatypes/JSON'; type Props = { propertyURL: string; @@ -82,6 +83,11 @@ export const PropValRow = styled.div` grid-template-rows: auto 1fr; @container ${ALL_PROPS_CONTAINER} (min-width: 500px) { + &:has(.${JSON_RENDERER_CLASS}) { + grid-template-columns: 1fr; + gap: 0.5rem; + } + grid-template-columns: 23ch auto; grid-template-rows: 1fr; } diff --git a/browser/data-browser/src/components/SideBar/SideBarItem.ts b/browser/data-browser/src/components/SideBar/SideBarItem.ts index 234149481..7e3eae4c6 100644 --- a/browser/data-browser/src/components/SideBar/SideBarItem.ts +++ b/browser/data-browser/src/components/SideBar/SideBarItem.ts @@ -5,7 +5,6 @@ export interface SideBarItemProps { } /** SideBarItem should probably be wrapped in an AtomicLink for optimal behavior */ -// eslint-disable-next-line prettier/prettier export const SideBarItem = styled('span')` display: flex; min-height: ${props => props.theme.margin * 0.5 + 1}rem; diff --git a/browser/data-browser/src/components/TableEditor/Cell.tsx b/browser/data-browser/src/components/TableEditor/Cell.tsx index 984235cb0..d331c56f3 100644 --- a/browser/data-browser/src/components/TableEditor/Cell.tsx +++ b/browser/data-browser/src/components/TableEditor/Cell.tsx @@ -102,6 +102,10 @@ export function Cell({ const handleMouseDown = useCallback( (e: React.MouseEvent) => { + if (disabledKeyboardInteractions.has(KeyboardInteraction.ExitEditMode)) { + return; + } + 
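// Sketch: typical use of the compound components assigned above
// (Dialog.Title / Dialog.Content / Dialog.Actions), mirroring how useDialog is
// called by JSONCell and MarkdownCell later in this diff. Component name,
// props and import path are illustrative.
import type { FC } from 'react';
import { Dialog, useDialog } from '../../components/Dialog';

export const ConfirmSketch: FC<{ onConfirm: () => void }> = ({ onConfirm }) => {
  const [dialogProps, show, close, isOpen] = useDialog({
    onSuccess: onConfirm,
    onCancel: () => undefined,
  });

  return (
    <>
      <button onClick={show}>Delete item</button>
      {isOpen && (
        <Dialog {...dialogProps}>
          <Dialog.Title>
            <h1>Are you sure?</h1>
          </Dialog.Title>
          <Dialog.Content>This cannot be undone.</Dialog.Content>
          <Dialog.Actions>
            <button onClick={() => close(true)}>Confirm</button>
          </Dialog.Actions>
        </Dialog>
      )}
    </>
  );
};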
setMouseDown(true); // When Shift is pressed, enter multi-select mode @@ -126,10 +130,6 @@ export function Cell({ return; } - if (disabledKeyboardInteractions.has(KeyboardInteraction.ExitEditMode)) { - return; - } - if (isActive && cursorMode === CursorMode.Edit) { return; } @@ -148,6 +148,10 @@ export function Cell({ ); const handleClick = useCallback(() => { + if (disabledKeyboardInteractions.has(KeyboardInteraction.ExitEditMode)) { + return; + } + if (markEnterEditMode) { setMultiSelectCorner(undefined, undefined); setMouseDown(false); @@ -155,7 +159,7 @@ export function Cell({ setCursorMode(CursorMode.Edit); setMarkEnterEditMode(false); } - }, [markEnterEditMode]); + }, [markEnterEditMode, disabledKeyboardInteractions]); useLayoutEffect(() => { if (!ref.current) { diff --git a/browser/data-browser/src/components/ValueComp.tsx b/browser/data-browser/src/components/ValueComp.tsx index 0f1a57fb8..f07c999ef 100644 --- a/browser/data-browser/src/components/ValueComp.tsx +++ b/browser/data-browser/src/components/ValueComp.tsx @@ -14,6 +14,8 @@ import ResourceArray from './datatypes/ResourceArray'; import { ErrMessage } from './forms/InputStyles'; import type { JSX } from 'react'; +import { JSONRenderer } from './datatypes/JSON'; +import { AtomicLink } from './AtomicLink'; type Props = { value: JSONValue; @@ -40,6 +42,12 @@ function ValueComp({ value, datatype }: Props): JSX.Element { return ; case Datatype.RESOURCEARRAY: return ; + case Datatype.JSON: + return ; + case Datatype.URI: + return ( + {value as string} + ); default: return
{valToString(value)}
; } diff --git a/browser/data-browser/src/components/datatypes/JSON.tsx b/browser/data-browser/src/components/datatypes/JSON.tsx new file mode 100644 index 000000000..683335662 --- /dev/null +++ b/browser/data-browser/src/components/datatypes/JSON.tsx @@ -0,0 +1,27 @@ +import type { JSONValue } from '@tomic/react'; +import styled from 'styled-components'; +import { HighlightedCodeBlock } from '../HighlightedCodeBlock'; + +export const JSON_RENDERER_CLASS = 'json-renderer'; +interface JSONRendererProps { + value: JSONValue; +} + +export const JSONRenderer: React.FC = ({ value }) => { + return ( + + ); +}; + +const StyledHighlightedCodeBlock = styled(HighlightedCodeBlock)` + width: calc(100cqw - ${p => p.theme.size()}); + background-color: ${p => p.theme.colors.bgBody}; + + max-height: 40rem; + pre { + background-color: ${p => p.theme.colors.bgBody} !important; + } +`; diff --git a/browser/data-browser/src/components/forms/InputJSON.tsx b/browser/data-browser/src/components/forms/InputJSON.tsx new file mode 100644 index 000000000..c47c0b6bd --- /dev/null +++ b/browser/data-browser/src/components/forms/InputJSON.tsx @@ -0,0 +1,71 @@ +import { useValue } from '@tomic/react'; +import { InputProps } from './ResourceField'; +import { styled } from 'styled-components'; +import { ErrorChipInput } from './ErrorChip'; +import { + checkForInitialRequiredValue, + useValidation, +} from './formValidation/useValidation'; +import { JSONEditor } from '../JSONEditor'; +import { JSON_RENDERER_CLASS } from '../datatypes/JSON'; + +export const InputJSON: React.FC = ({ + labelId, + resource, + property, + commit, + commitDebounceInterval, + autoFocus, + ...props +}) => { + const [value, setValue] = useValue(resource, property.subject, { + commit, + commitDebounce: commitDebounceInterval, + validate: false, + }); + + const { error, setError, setTouched } = useValidation( + checkForInitialRequiredValue(value, props.required), + ); + + function handleUpdate(content: string): void { + if (content === '') { + setValue(undefined); + setError(undefined); + + return; + } + + try { + const parsed = JSON.parse(content); + setValue(parsed); + setError(undefined); + } catch (e) { + setError('Invalid JSON'); + } + } + + const initialValue = JSON.stringify(value, null, 2); + + return ( + + { + setError(valid ? 
undefined : 'Invalid JSON'); + }} + /> + {error && {error}} + + ); +}; + +const Wrapper = styled.div` + flex: 1; + position: relative; +`; diff --git a/browser/data-browser/src/components/forms/InputSwitcher.tsx b/browser/data-browser/src/components/forms/InputSwitcher.tsx index 9e58b45df..3a6ef363f 100644 --- a/browser/data-browser/src/components/forms/InputSwitcher.tsx +++ b/browser/data-browser/src/components/forms/InputSwitcher.tsx @@ -13,6 +13,8 @@ import { InputDate } from './InputDate'; import { FilePicker } from './FilePicker/FilePicker'; import type { JSX } from 'react'; +import { InputJSON } from './InputJSON'; +import InputURI from './InputURI'; /** Renders a fitting HTML input depending on the Datatype */ export default function InputSwitcher(props: InputProps): JSX.Element { @@ -61,6 +63,14 @@ export default function InputSwitcher(props: InputProps): JSX.Element { return ; } + case Datatype.JSON: { + return ; + } + + case Datatype.URI: { + return ; + } + default: { return ; } diff --git a/browser/data-browser/src/components/forms/InputURI.tsx b/browser/data-browser/src/components/forms/InputURI.tsx new file mode 100644 index 000000000..f1daf1480 --- /dev/null +++ b/browser/data-browser/src/components/forms/InputURI.tsx @@ -0,0 +1,69 @@ +import { useString, validateDatatype } from '@tomic/react'; +import { InputProps } from './ResourceField'; +import { InputStyled, InputWrapper } from './InputStyles'; +import { styled } from 'styled-components'; +import { ErrorChipInput } from './ErrorChip'; +import { + checkForInitialRequiredValue, + useValidation, +} from './formValidation/useValidation'; + +import type { JSX } from 'react'; + +export default function InputURI({ + resource, + property, + commit, + commitDebounceInterval, + ...props +}: InputProps): JSX.Element { + const [value, setValue] = useString(resource, property.subject, { + commit, + commitDebounce: commitDebounceInterval, + validate: false, + }); + + const { error, setError, setTouched } = useValidation( + checkForInitialRequiredValue(value, props.required), + ); + + function handleUpdate(event: React.ChangeEvent): void { + const newval = event.target.value ?? undefined; + setValue(newval); + + try { + validateDatatype(newval, property.datatype); + setError(undefined); + } catch (e) { + setError('Invalid URI'); + } + + if (props.required && newval === '') { + setError('Required'); + } + } + + return ( + + + + + {error && {error}} + + ); +} + +const Wrapper = styled.div` + flex: 1; + position: relative; +`; diff --git a/browser/data-browser/src/components/forms/NewForm/CustomCreateActions/CustomForms/NewDriveDialog.tsx b/browser/data-browser/src/components/forms/NewForm/CustomCreateActions/CustomForms/NewDriveDialog.tsx index 32a83aef9..6aaacc498 100644 --- a/browser/data-browser/src/components/forms/NewForm/CustomCreateActions/CustomForms/NewDriveDialog.tsx +++ b/browser/data-browser/src/components/forms/NewForm/CustomCreateActions/CustomForms/NewDriveDialog.tsx @@ -8,6 +8,7 @@ import { Dialog, DialogContent, DialogActions, + DialogTitle, } from '../../../../Dialog'; import Field from '../../../Field'; import { InputWrapper, InputStyled } from '../../../InputStyles'; @@ -53,7 +54,7 @@ export const NewDriveDialog: FC = ({ await agentResource.save(); // Create a default ontology. 
- const ontologyName = stringToSlug(name); + const ontologyName = stringToSlug(name.trim()); const ontology = await store.newResource({ subject: await store.buildUniqueSubjectFromParts( ['defaultOntology'], @@ -99,7 +100,9 @@ export const NewDriveDialog: FC = ({ return ( -
      <h1>New Drive</h1>
+      <DialogTitle>
+        <h1>New Drive</h1>
+      </DialogTitle>
{ diff --git a/browser/data-browser/src/components/forms/ResourceField.tsx b/browser/data-browser/src/components/forms/ResourceField.tsx index 4bc879279..88a201fb4 100644 --- a/browser/data-browser/src/components/forms/ResourceField.tsx +++ b/browser/data-browser/src/components/forms/ResourceField.tsx @@ -110,7 +110,7 @@ interface HelperTextProps { link: string; } -const HelperTextWraper = styled.div` +const HelperTextWrapper = styled.div` position: relative; margin-bottom: 0rem; `; @@ -122,10 +122,10 @@ const Extra = styled(Row)` function HelperText({ text, link }: HelperTextProps) { return ( - + {link} - + ); } diff --git a/browser/data-browser/src/components/forms/ValueForm/ValueForm.tsx b/browser/data-browser/src/components/forms/ValueForm/ValueForm.tsx index b42a508f8..1c6de1483 100644 --- a/browser/data-browser/src/components/forms/ValueForm/ValueForm.tsx +++ b/browser/data-browser/src/components/forms/ValueForm/ValueForm.tsx @@ -84,7 +84,7 @@ const ValueFormWrapper = styled.div` position: relative; flex: 1; word-wrap: break-word; - max-width: 100%; + width: 100%; `; const EditButton = styled.button` diff --git a/browser/data-browser/src/components/forms/ValueForm/ValueFormEdit.tsx b/browser/data-browser/src/components/forms/ValueForm/ValueFormEdit.tsx index 4c406225b..310a24b42 100644 --- a/browser/data-browser/src/components/forms/ValueForm/ValueFormEdit.tsx +++ b/browser/data-browser/src/components/forms/ValueForm/ValueFormEdit.tsx @@ -6,6 +6,7 @@ import { Column, Row } from '../../Row'; import { ErrMessage } from '../InputStyles'; import InputSwitcher from '../InputSwitcher'; import { useEffect, useState } from 'react'; +import { FormValidationContextProvider } from '../formValidation/FormValidationContextProvider'; interface ValueFormEditProps { resource: Resource; @@ -19,6 +20,7 @@ export function ValueFormEdit({ onClose, }: ValueFormEditProps): React.JSX.Element { const [err, setErr] = useState(undefined); + const [isFormValid, setIsFormValid] = useState(false); const save = async () => { try { @@ -44,23 +46,25 @@ export function ValueFormEdit({ }, []); return ( - - - {err && {err.message}} - - - - - + + + + {err && {err.message}} + + + + + + ); } diff --git a/browser/data-browser/src/helpers/iconMap.ts b/browser/data-browser/src/helpers/iconMap.ts index c68d6a773..9aabf109a 100644 --- a/browser/data-browser/src/helpers/iconMap.ts +++ b/browser/data-browser/src/helpers/iconMap.ts @@ -31,6 +31,8 @@ import { FaListUl, FaMarkdown, FaRegSquareCheck, + FaLink, + FaCode, } from 'react-icons/fa6'; const iconMap = new Map([ @@ -70,4 +72,6 @@ export const dataTypeIconMap = new Map([ [Datatype.BOOLEAN, FaRegSquareCheck], [Datatype.DATE, FaCalendar], [Datatype.TIMESTAMP, FaClock], + [Datatype.URI, FaLink], + [Datatype.JSON, FaCode], ]); diff --git a/browser/data-browser/src/routes/Search/SearchRoute.tsx b/browser/data-browser/src/routes/Search/SearchRoute.tsx index 4a2421f4c..b3631400a 100644 --- a/browser/data-browser/src/routes/Search/SearchRoute.tsx +++ b/browser/data-browser/src/routes/Search/SearchRoute.tsx @@ -17,6 +17,7 @@ import { pathNames } from '../paths'; import { appRoute } from '../RootRoutes'; import { base64StringToFilter } from './searchUtils'; import { InlineFormattedResourceList } from '../../components/InlineFormattedResourceList'; +import { ErrorBoundary } from '../../views/ErrorPage'; type SearchRouteQueryParams = { query?: string; @@ -163,16 +164,18 @@ export function Search(): JSX.Element { by adding tag:[name] to your search. 
)} - - {results.map((subject, index) => ( - - ))} - + + + {results.map((subject, index) => ( + + ))} + + )} diff --git a/browser/data-browser/src/styling.tsx b/browser/data-browser/src/styling.tsx index 7141aa0fc..019ad561b 100644 --- a/browser/data-browser/src/styling.tsx +++ b/browser/data-browser/src/styling.tsx @@ -242,25 +242,24 @@ export const GlobalStyle = createGlobalStyle` * { box-sizing: border-box; - scrollbar-color: ${p => p.theme.colors.bg2} ${p => p.theme.colors.bg}; - &::-webkit-scrollbar { - width: 10px; - height: 10px; - padding: 3px; - background-color: ${p => - p.theme.colors.bg}; /* color of the tracking area */ - } - &::-webkit-scrollbar-thumb { - width: 8px; - margin: auto; - background-color: ${p => - p.theme.colors.bg2}; /* color of the tracking area */ - border-radius: ${p => p.theme.radius}; - - &:hover { - background-color: ${p => darken(0.1)(p.theme.colors.bg2)}; + scrollbar-color: ${p => p.theme.colors.bg2} transparent; + &::-webkit-scrollbar { + width: 10px; + height: 10px; + padding: 3px; + background-color: transparent;/* color of the tracking area */ + } + &::-webkit-scrollbar-thumb { + width: 8px; + margin: auto; + background-color: ${p => + p.theme.colors.bg2}; /* color of the tracking area */ + border-radius: ${p => p.theme.radius}; + + &:hover { + background-color: ${p => darken(0.1)(p.theme.colors.bg2)}; + } } - } } body { diff --git a/browser/data-browser/src/views/ChatRoomPage.tsx b/browser/data-browser/src/views/ChatRoomPage.tsx index 5b7d7c388..9b47ac190 100644 --- a/browser/data-browser/src/views/ChatRoomPage.tsx +++ b/browser/data-browser/src/views/ChatRoomPage.tsx @@ -153,13 +153,13 @@ export function ChatRoomPage({ resource }: ResourcePageProps) { )} ): JSX.Element { + const [dialogProps, show, close, isOpen] = useDialog({ + onSuccess: () => { + tableRef.current?.focus(); + }, + onCancel: () => { + tableRef.current?.focus(); + }, + }); + const prop = useProperty(property); + + const { tableRef } = useTableEditorContext(); + + const options = useMemo( + () => ({ + disabledKeyboardInteractions: new Set([ + ...addIf( + isOpen, + KeyboardInteraction.ExitEditMode, + KeyboardInteraction.EditNextRow, + ), + ]), + }), + [isOpen], + ); + + useCellOptions(options); + + const openDialog = () => { + show(); + }; + + const displayValue = JSON.stringify(value); + + return ( + <> + + + +
{displayValue}
+ + {isOpen && ( + <> + +

Edit {prop.shortname}

+
+ { + if (e.key === 'Escape') { + e.preventDefault(); + close(true); + } + }} + > + + + + )} +
+ + ); +} + +function JSONCellDisplay({ value }: DisplayCellProps): JSX.Element { + const displayValue = JSON.stringify(value); + + return <>{displayValue}; +} + +export const JSONCell: CellContainer = { + Edit: JSONCellEdit, + Display: JSONCellDisplay, +}; + +const StyledDialogContent = styled(Dialog.Content)` + padding-top: 2px; +`; diff --git a/browser/data-browser/src/views/TablePage/EditorCells/MarkdownCell.tsx b/browser/data-browser/src/views/TablePage/EditorCells/MarkdownCell.tsx new file mode 100644 index 000000000..50f6bfe27 --- /dev/null +++ b/browser/data-browser/src/views/TablePage/EditorCells/MarkdownCell.tsx @@ -0,0 +1,94 @@ +import { JSONValue, useProperty } from '@tomic/react'; + +import { CellContainer, DisplayCellProps, EditCellProps } from './Type'; + +import { useMemo, type JSX } from 'react'; +import styled from 'styled-components'; +import { IconButton } from '../../../components/IconButton/IconButton'; +import { FaPencil } from 'react-icons/fa6'; +import { Dialog, useDialog } from '../../../components/Dialog'; +import { + KeyboardInteraction, + useCellOptions, +} from '../../../components/TableEditor'; +import { addIf } from '../../../helpers/addIf'; +import InputMarkdown from '../../../components/forms/InputMarkdown'; +import { useTableEditorContext } from '../../../components/TableEditor/TableEditorContext'; + +function MarkdownCellEdit({ + value, + property, + resource, +}: EditCellProps): JSX.Element { + const [dialogProps, show, _close, isOpen] = useDialog({ + onSuccess: () => { + tableRef.current?.focus(); + }, + onCancel: () => { + tableRef.current?.focus(); + }, + }); + const prop = useProperty(property); + + const { tableRef } = useTableEditorContext(); + + const options = useMemo( + () => ({ + disabledKeyboardInteractions: new Set([ + ...addIf( + isOpen, + KeyboardInteraction.ExitEditMode, + KeyboardInteraction.EditNextRow, + ), + ]), + }), + [isOpen], + ); + + useCellOptions(options); + + const openDialog = () => { + show(); + }; + + return ( + <> + + + +
{value as string}
+ + {isOpen && ( + <> + +

Edit {prop.shortname}

+
+ + + + + )} +
+ + ); +} + +function MarkdownCellDisplay({ + value, +}: DisplayCellProps): JSX.Element { + return <>{value}; +} + +export const MarkdownCell: CellContainer = { + Edit: MarkdownCellEdit, + Display: MarkdownCellDisplay, +}; + +const StyledDialogContent = styled(Dialog.Content)` + padding-top: 2px; +`; diff --git a/browser/data-browser/src/views/TablePage/EditorCells/SlugCell.tsx b/browser/data-browser/src/views/TablePage/EditorCells/SlugCell.tsx index 2562cbc6a..2e456504e 100644 --- a/browser/data-browser/src/views/TablePage/EditorCells/SlugCell.tsx +++ b/browser/data-browser/src/views/TablePage/EditorCells/SlugCell.tsx @@ -10,13 +10,24 @@ function SlugCellEdit({ const handleChange = useCallback( (e: React.ChangeEvent) => { const v = e.target.value.toLowerCase().replace(/\s/g, '-'); + + if (v === '') { + onChange(undefined); + + return; + } + onChange(v); }, [onChange], ); return ( - + ); } diff --git a/browser/data-browser/src/views/TablePage/EditorCells/URICell.tsx b/browser/data-browser/src/views/TablePage/EditorCells/URICell.tsx new file mode 100644 index 000000000..d64e00065 --- /dev/null +++ b/browser/data-browser/src/views/TablePage/EditorCells/URICell.tsx @@ -0,0 +1,48 @@ +import { JSONValue } from '@tomic/react'; +import { useCallback, type JSX } from 'react'; +import { InputBase } from './InputBase'; +import { CellContainer, DisplayCellProps, EditCellProps } from './Type'; +import { AtomicLink } from '../../../components/AtomicLink'; + +function URICellEdit({ + value, + onChange, +}: EditCellProps): JSX.Element { + const handleChange = useCallback( + (e: React.ChangeEvent) => { + const v = e.target.value; + onChange(v); + }, + [onChange], + ); + + return ( + + ); +} + +function URICellDisplay({ value }: DisplayCellProps): JSX.Element { + if (!value) { + return <>; + } + + return ( + + {value as string} + + ); +} + +export const URICell: CellContainer = { + Edit: URICellEdit, + Display: URICellDisplay, +}; diff --git a/browser/data-browser/src/views/TablePage/NewColumnButton.tsx b/browser/data-browser/src/views/TablePage/NewColumnButton.tsx index 65063f61b..0485e3e0f 100644 --- a/browser/data-browser/src/views/TablePage/NewColumnButton.tsx +++ b/browser/data-browser/src/views/TablePage/NewColumnButton.tsx @@ -7,6 +7,7 @@ import { NewPropertyDialog } from './PropertyForm/NewPropertyDialog'; import { TablePageContext } from './tablePageContext'; import { ExternalPropertyDialog } from './PropertyForm/ExternalPropertyDialog'; import { dataTypeIconMap } from '../../helpers/iconMap'; +import { FaCode } from 'react-icons/fa6'; const NewColumnTrigger = buildDefaultTrigger(, 'Add column'); @@ -72,6 +73,12 @@ export function NewColumnButton(): JSX.Element { onClick: openDialog('file'), icon: , }, + { + id: 'json', + label: 'JSON', + onClick: openDialog('json'), + icon: , + }, { id: 'relation', label: 'Relation', diff --git a/browser/data-browser/src/views/TablePage/PropertyForm/JSONPropertyForm.tsx b/browser/data-browser/src/views/TablePage/PropertyForm/JSONPropertyForm.tsx new file mode 100644 index 000000000..527e4b108 --- /dev/null +++ b/browser/data-browser/src/views/TablePage/PropertyForm/JSONPropertyForm.tsx @@ -0,0 +1,13 @@ +import { Datatype, core } from '@tomic/react'; +import { useEffect, type JSX } from 'react'; +import { PropertyCategoryFormProps } from './PropertyCategoryFormProps'; + +export function JSONPropertyForm({ + resource, +}: PropertyCategoryFormProps): JSX.Element { + useEffect(() => { + resource.set(core.properties.datatype, Datatype.JSON); + }, []); + + return <>; 
+} diff --git a/browser/data-browser/src/views/TablePage/PropertyForm/TextPropertyForm.tsx b/browser/data-browser/src/views/TablePage/PropertyForm/TextPropertyForm.tsx index c63c3a4bc..396c0c3d9 100644 --- a/browser/data-browser/src/views/TablePage/PropertyForm/TextPropertyForm.tsx +++ b/browser/data-browser/src/views/TablePage/PropertyForm/TextPropertyForm.tsx @@ -8,7 +8,9 @@ import { PropertyCategoryFormProps } from './PropertyCategoryFormProps'; export const TextPropertyForm = ({ resource, }: PropertyCategoryFormProps): JSX.Element => { - const [textFormat, setTextFormat] = useState(Datatype.STRING); + const [textFormat, setTextFormat] = useState( + resource.props.datatype as Datatype, + ); const handleTextFormatChange = async ( e: React.ChangeEvent, @@ -47,6 +49,14 @@ export const TextPropertyForm = ({ > Slug + + URI + Length ([ Datatype.STRING, Datatype.MARKDOWN, Datatype.SLUG, + Datatype.URI, ]); const NUMBER_TYPES = new Set([Datatype.INTEGER, Datatype.FLOAT]); const DATE_TYPES = new Set([Datatype.DATE, Datatype.TIMESTAMP]); @@ -57,6 +60,10 @@ export const getCategoryFromResource = ( return 'relation'; } + if (datatype === Datatype.JSON) { + return 'json'; + } + if (datatype === Datatype.ATOMIC_URL) { return 'relation'; } @@ -76,6 +83,7 @@ export const categoryFormFactory = buildComponentFactory( ['select', SelectPropertyForm], ['date', DatePropertyForm], ['file', FilePropertyForm], + ['json', JSONPropertyForm], ['relation', RelationPropertyForm], ]), NoCategorySelected, diff --git a/browser/data-browser/src/views/TablePage/TableCell.tsx b/browser/data-browser/src/views/TablePage/TableCell.tsx index d1d90bb98..526b46e83 100644 --- a/browser/data-browser/src/views/TablePage/TableCell.tsx +++ b/browser/data-browser/src/views/TablePage/TableCell.tsx @@ -47,6 +47,8 @@ const valueOpts = { validate: false, }; +const emptyFunc = () => undefined; + export function TableCell({ columnIndex, rowIndex, @@ -56,7 +58,8 @@ export function TableCell({ }: TableCell): JSX.Element { const { setActiveCell } = useTableEditorContext(); const { addItemsToHistoryStack } = useContext(TablePageContext); - const [save, savePending] = useDebouncedSave(resource, 200); + // We give an empty error handler to debouncedSave so it doesn't spam the user with error popups when the value is invalid. 
+ const [save, savePending] = useDebouncedSave(resource, 200, emptyFunc); const [value, setValue] = useValue(resource, property.subject, valueOpts); const [createdAt, setCreatedAt] = useValue( diff --git a/browser/data-browser/src/views/TablePage/TablePage.tsx b/browser/data-browser/src/views/TablePage/TablePage.tsx index de1c9dcfb..a1d271a58 100644 --- a/browser/data-browser/src/views/TablePage/TablePage.tsx +++ b/browser/data-browser/src/views/TablePage/TablePage.tsx @@ -133,7 +133,7 @@ export function TablePage({ resource }: ResourcePageProps): JSX.Element { setShowExportDialog(true)} > diff --git a/browser/data-browser/src/views/TablePage/dataTypeMaps.ts b/browser/data-browser/src/views/TablePage/dataTypeMaps.ts index af6f753d4..75cfc4431 100644 --- a/browser/data-browser/src/views/TablePage/dataTypeMaps.ts +++ b/browser/data-browser/src/views/TablePage/dataTypeMaps.ts @@ -10,10 +10,15 @@ import { ResourceArrayCell } from './EditorCells/ResourceArrayCell'; import { SlugCell } from './EditorCells/SlugCell'; import { StringCell } from './EditorCells/StringCell'; import { CellContainer } from './EditorCells/Type'; +import { URICell } from './EditorCells/URICell'; +import { MarkdownCell } from './EditorCells/MarkdownCell'; +import { JSONCell } from './EditorCells/JSONCell'; export const dataTypeCellMap = new Map>([ [Datatype.STRING, StringCell], [Datatype.SLUG, SlugCell], + [Datatype.MARKDOWN, MarkdownCell], + [Datatype.URI, URICell], [Datatype.ATOMIC_URL, AtomicURLCell], [Datatype.RESOURCEARRAY, ResourceArrayCell], [Datatype.INTEGER, IntegerCell], @@ -21,6 +26,7 @@ export const dataTypeCellMap = new Map>([ [Datatype.BOOLEAN, BooleanCell], [Datatype.TIMESTAMP, DateTimeCell], [Datatype.DATE, DateCell], + [Datatype.JSON, JSONCell], ]); export const dataTypeAlignmentMap = new Map([ @@ -50,6 +56,7 @@ export function appendStringToType( case Datatype.STRING: case Datatype.SLUG: case Datatype.MARKDOWN: + case Datatype.URI: return `${val}${append}` as T; case Datatype.INTEGER: return Number.parseInt( diff --git a/browser/e2e/package.json b/browser/e2e/package.json index 3b87bd381..d255a052a 100644 --- a/browser/e2e/package.json +++ b/browser/e2e/package.json @@ -24,8 +24,8 @@ "test-debug": "PWDEBUG=1 playwright test", "test-update": "playwright test --update-snapshots", "test-new": "playwright codegen http://localhost:5173", - "test-query": "PWDEBUG=1 DELETE_PREVIOUS_TEST_DRIVES=false playwright test -g" + "test-query": "PWDEBUG=1 DELETE_PREVIOUS_TEST_DRIVES=false playwright test -g", + "test-ui": "playwright test --ui" }, - "dependencies": {} } diff --git a/browser/e2e/tests/JSONProp.spec.ts b/browser/e2e/tests/JSONProp.spec.ts new file mode 100644 index 000000000..c7047853f --- /dev/null +++ b/browser/e2e/tests/JSONProp.spec.ts @@ -0,0 +1,43 @@ +import { test, expect } from '@playwright/test'; +import { before, newDrive, newResource, signIn } from './test-utils'; + +test.describe('JSON prop', () => { + test.beforeEach(before); + + test('create JSON prop', async ({ page }) => { + await signIn(page); + await newDrive(page); + + // A class with a JSON prop, made for this test. 
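// Sketch: resolving the editor cell for a column from the extended
// dataTypeCellMap above. The map's value type and the StringCell fallback are
// assumptions for illustration; import paths are relative to views/TablePage.
import { Datatype, type JSONValue } from '@tomic/react';
import { StringCell } from './EditorCells/StringCell';
import type { CellContainer } from './EditorCells/Type';
import { dataTypeCellMap } from './dataTypeMaps';

export const resolveCell = (datatype: Datatype): CellContainer<JSONValue> =>
  dataTypeCellMap.get(datatype) ?? StringCell;

// resolveCell(Datatype.JSON)     -> JSONCell (dialog with the new JSONEditor)
// resolveCell(Datatype.URI)      -> URICell (renders an AtomicLink)
// resolveCell(Datatype.MARKDOWN) -> MarkdownCell (dialog with InputMarkdown)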
+ await newResource( + 'https://atomicdata.dev/01k10mtpp8fkkmsd6tkm9qrqyw/defaultontology/class/test-class-with-json-prop', + page, + ); + + await expect( + page.getByRole('heading', { name: 'new test-class-with-json-prop' }), + ).toBeVisible(); + + const name = `Instance: ${Date.now()}`; + await page.getByLabel('Name').fill(name); + + const jsonEditor = page.getByLabel('Test-Json-Prop'); + await jsonEditor.fill('{"valid": false,}'); + + const saveButton = page.getByRole('button', { name: 'Save' }); + await expect(saveButton).toBeDisabled(); + + await jsonEditor.fill('{"valid": true}'); + await expect(saveButton).not.toBeDisabled(); + + await saveButton.click(); + + await expect( + page.getByRole('heading', { + name, + }), + ).toBeVisible(); + + await expect(page.getByText('{\n "valid": true\n }')).toHaveRole('code'); + }); +}); diff --git a/browser/e2e/tests/e2e.spec.ts b/browser/e2e/tests/e2e.spec.ts index 09f82288e..76c8a087b 100644 --- a/browser/e2e/tests/e2e.spec.ts +++ b/browser/e2e/tests/e2e.spec.ts @@ -1,6 +1,6 @@ // This file is copied from `atomic-data-browser` to `atomic-data-server` when `pnpm build-server` is run. // This is why the `testConfig` is imported. -import { test, expect } from '@playwright/test'; +import { test, expect, type Page } from '@playwright/test'; import { DEMO_INVITE_NAME, FRONTEND_URL, @@ -198,6 +198,9 @@ test.describe('data-browser', async () => { }); test('chatroom', async ({ page, browser }) => { + const inputLocator = (currentPage: Page) => + currentPage.getByLabel('Chat input'); + await signIn(page); await newDrive(page); const waiter = waitForCommitOnCurrentResource(page); @@ -207,11 +210,11 @@ test.describe('data-browser', async () => { page.getByRole('heading', { name: 'Untitled ChatRoom' }), ).toBeVisible(); const teststring = `My test: ${timestamp()}`; - await page.fill('[data-test="message-input"]', teststring); + await inputLocator(page).fill(teststring); await page.keyboard.press('Enter'); const chatRoomUrl = (await getCurrentSubject(page)) as string; await expect( - page.locator('[data-test="message-input"]'), + inputLocator(page), 'Text input not cleared on enter', ).toHaveText(''); await expect( @@ -228,7 +231,7 @@ test.describe('data-browser', async () => { await expect(page2.locator(`text=${teststring}`)).toBeVisible(); const teststring2 = `My reply: ${timestamp()}`; - await page2.fill('[data-test="message-input"]', teststring2); + await inputLocator(page2).fill(teststring2); await page2.keyboard.press('Enter'); // Both pages should see then new chat message await expect(page.locator(`text=${teststring2}`)).toBeVisible(); diff --git a/browser/e2e/tests/tables.spec.ts b/browser/e2e/tests/tables.spec.ts index deacd9311..a9250dde9 100644 --- a/browser/e2e/tests/tables.spec.ts +++ b/browser/e2e/tests/tables.spec.ts @@ -223,16 +223,6 @@ test.describe('tables', async () => { await fillRow(index + 1, row); } - // Disabled date tests until Playwright bug fixed - // await expect( - // page.getByRole('gridcell', { name: '4 March 2000' }), - // ).toBeVisible(); - // await expect( - // page.getByRole('gridcell', { name: '15 May 1980' }), - // ).toBeVisible(); - // await expect( - // page.getByRole('gridcell', { name: '13 May 1965' }), - // ).toBeVisible(); await expect( page.getByRole('gridcell', { name: '😵‍💫 dreamy' }), ).toBeVisible(); diff --git a/browser/e2e/tests/template.spec.ts b/browser/e2e/tests/template.spec.ts index 8cf31781e..7498c301c 100644 --- a/browser/e2e/tests/template.spec.ts +++ b/browser/e2e/tests/template.spec.ts @@ -14,16 
+14,31 @@ import { spawn, type ChildProcess } from 'node:child_process'; import path from 'node:path'; import kill from 'kill-port'; import { log } from 'node:console'; +import os from 'node:os'; -const execAsync = async ( - command: Parameters[0], - options?: Parameters[1], +const EXEC_DIR = path.join(os.tmpdir(), 'atomic-data-template-tests'); + +const pathToPackage = ( + libName: 'lib' | 'cli' | 'react' | 'svelte' | 'create-template', ) => { + return path.join(__dirname, '..', '..', libName); +}; + +const execAsync = async (command: Parameters[0], cwd?: string) => { return new Promise((resolve, reject) => { + const options = { + cwd: cwd ? path.join(EXEC_DIR, cwd) : EXEC_DIR, + }; + exec(command, options, (err, stdout, stderr) => { + // eslint-disable-next-line no-console console.log(stdout, stderr); if (err) { + // eslint-disable-next-line no-console + console.log( + `Encountered error while excecuting ${command} in ${options.cwd}`, + ); reject(new Error(err.message)); } @@ -36,46 +51,41 @@ const execAsync = async ( }); }; -const TEMPLATE_DIR_NAME = 'template-tests'; // test.describe.configure({ mode: 'serial' }); -async function setupTemplateSite( - serverUrl: string, - siteType: 'nextjs-site' | 'sveltekit-site', -) { - if (!fs.existsSync(TEMPLATE_DIR_NAME)) { - fs.mkdirSync(TEMPLATE_DIR_NAME); +async function setupTemplateSite(serverUrl: string, siteType: string) { + if (!fs.existsSync(EXEC_DIR)) { + fs.mkdirSync(EXEC_DIR); + await execAsync('pnpm init'); + await execAsync(`pnpm link ${pathToPackage('create-template')}`); } - await execAsync('pnpm link ../create-template'); await execAsync( - `pnpm exec create-template ${TEMPLATE_DIR_NAME}/${siteType} --template ${siteType} --server-url ${serverUrl}`, + `pnpm exec create-template ${siteType} --template ${siteType} --server-url ${serverUrl}`, ); - const sitePath = `${TEMPLATE_DIR_NAME}/${siteType}`; - await execAsync('pnpm install', { cwd: sitePath }); - await execAsync('pnpm link ../../../cli', { cwd: sitePath }); - await execAsync('pnpm link ../../../lib', { cwd: sitePath }); + await execAsync('pnpm install', siteType); + await execAsync(`pnpm link ${pathToPackage('cli')}`, siteType); + await execAsync(`pnpm link ${pathToPackage('lib')}`, siteType); if (siteType === 'nextjs-site') { - await execAsync('pnpm link ../../../react', { cwd: sitePath }); + await execAsync(`pnpm link ${pathToPackage('react')}`, siteType); } else if (siteType === 'sveltekit-site') { - await execAsync('pnpm link ../../../svelte', { cwd: sitePath }); - await execAsync('pnpm svelte-kit sync', { cwd: sitePath }); + await execAsync(`pnpm link ${pathToPackage('svelte')}`, siteType); } - await execAsync('pnpm update-ontologies', { cwd: sitePath }); + await execAsync('pnpm update-ontologies', siteType); } -function startServer(templateDir: string, siteType: string) { +function startServer(siteType: string) { // Adjust runtime commands per template const command = siteType === 'nextjs-site' - ? 'pnpm run build && pnpm start' + ? 
'pnpm build && pnpm start' : 'pnpm run build && NO_COLOR=1 pnpm preview'; return spawn(command, { - cwd: `${templateDir}/${siteType}`, + cwd: path.join(EXEC_DIR, siteType), shell: true, }); } @@ -121,7 +131,7 @@ const waitForServer = ( }); }; -test.describe('Create Next.js Template', () => { +test.describe('Test create-template package', () => { test.beforeEach(before); test('apply next-js template', async ({ page }) => { @@ -146,7 +156,7 @@ test.describe('Create Next.js Template', () => { try { //start server - const child = startServer(TEMPLATE_DIR_NAME, 'nextjs-site'); + const child = startServer('nextjs-site'); const url = await waitForServer(child); // check if the server is running @@ -184,25 +194,6 @@ test.describe('Create Next.js Template', () => { } }); - test.afterEach(async () => { - const dirPath = path.join( - __dirname, - '..', - TEMPLATE_DIR_NAME, - 'nextjs-site', - ); - - try { - await fs.promises.rm(dirPath, { recursive: true, force: true }); - } catch (error) { - console.error(`Failed to delete ${TEMPLATE_DIR_NAME}:`, error); - } - }); -}); - -test.describe('Create SvelteKit Template', () => { - test.beforeEach(before); - test('apply sveltekit template', async ({ page }) => { test.slow(); await signIn(page); @@ -224,7 +215,7 @@ test.describe('Create SvelteKit Template', () => { await setupTemplateSite(drive.driveURL, 'sveltekit-site'); try { - const child = startServer(TEMPLATE_DIR_NAME, 'sveltekit-site'); + const child = startServer('sveltekit-site'); //start server const url = await waitForServer(child); @@ -264,18 +255,18 @@ test.describe('Create SvelteKit Template', () => { } }); - test.afterEach(async () => { - const dirPath = path.join( - __dirname, - '..', - TEMPLATE_DIR_NAME, - 'sveltekit-site', - ); + test.afterAll(async () => { + if (!fs.existsSync(EXEC_DIR)) { + // eslint-disable-next-line no-console + console.log('No EXEC_DIR to delete, skipping...'); + + return; + } try { - await fs.promises.rm(dirPath, { recursive: true, force: true }); + await fs.promises.rm(EXEC_DIR, { recursive: true, force: true }); } catch (error) { - console.error(`Failed to delete ${TEMPLATE_DIR_NAME}:`, error); + console.error(`Failed to delete ${EXEC_DIR}:`, error); } }); }); diff --git a/browser/e2e/tests/test-utils.ts b/browser/e2e/tests/test-utils.ts index e909d4029..c7909c755 100644 --- a/browser/e2e/tests/test-utils.ts +++ b/browser/e2e/tests/test-utils.ts @@ -188,7 +188,7 @@ export async function openAtomic(page: Page) { export async function editProfileAndCommit(page: Page) { await openAgentPage(page); await page.click('text=Edit profile'); - const advancedButton = await page.getByRole('button', { name: 'advanced' }); + const advancedButton = page.getByRole('button', { name: 'advanced' }); await advancedButton.scrollIntoViewIfNeeded(); await advancedButton.click(); await expect(page.locator('text=add another property')).toBeVisible(); @@ -299,6 +299,7 @@ export async function changeDrive(subject: string, page: Page) { const driveTitleText = await currentDriveTitle(page).textContent(); // Get the domain from the subject to compare with the drive title const subjectDomain = new URL(subject).hostname; + if (driveTitleText && driveTitleText.trim().includes(subjectDomain)) { return; } @@ -351,6 +352,7 @@ export async function isCurrentDrive( // Remove trailing slashes const cleanUrl = urlString.replace(/\/$/, ''); const urlObj = new URL(cleanUrl); + // Compare only hostname and path, ignoring protocol return `${urlObj.hostname}${urlObj.pathname}`; } catch (e) { @@ -364,6 +366,7 @@ 
export async function isCurrentDrive( return normalizedCurrentUrl === normalizedUrl; } catch (error) { console.error('Error in isCurrentDrive:', error); + return false; } } diff --git a/browser/lib/src/agent.ts b/browser/lib/src/agent.ts index 55e73d72f..d5c053a84 100644 --- a/browser/lib/src/agent.ts +++ b/browser/lib/src/agent.ts @@ -56,7 +56,10 @@ export class Agent implements AgentInterface { * Key. Used for signing in with one string */ public buildSecret(): string { - const objJsonStr = JSON.stringify(this); + const objJsonStr = JSON.stringify({ + privateKey: this.privateKey, + subject: this.subject, + }); return btoa(objJsonStr); } diff --git a/browser/lib/src/client.ts b/browser/lib/src/client.ts index 60301c3f3..9e1177348 100644 --- a/browser/lib/src/client.ts +++ b/browser/lib/src/client.ts @@ -82,13 +82,13 @@ export class Client { } /** Returns true if the given subject is valid */ - public static isValidSubject(subject: string | undefined): boolean { + public static isValidSubject(subject: unknown): boolean { if (typeof subject !== 'string') return false; try { Client.tryValidSubject(subject); - return true; + return subject.startsWith('http'); } catch (e) { return false; } @@ -165,13 +165,16 @@ export class Client { if (opts.noNested) { resource = json; } else { - const [parsedResource, parsedCreatedResources] = parser.parseObject( - json, - subject, - ); + const resources = parser.parse(json, subject); - resource = parsedResource; - createdResources.push(...parsedCreatedResources); + if (resources.length === 0) { + throw new AtomicError( + `Could not parse JSON from fetching ${subject}. Is it an Atomic Data resource?`, + ); + } + + resource = resources.at(-1) as Resource; + createdResources.push(...resources); } } catch (e) { throw new AtomicError( @@ -272,7 +275,7 @@ export class Client { } const json = JSON.parse(body); - const [resources] = parser.parseArray(json); + const resources = parser.parse(json); return resources; } diff --git a/browser/lib/src/commit.ts b/browser/lib/src/commit.ts index bd0010719..9ffeadfca 100644 --- a/browser/lib/src/commit.ts +++ b/browser/lib/src/commit.ts @@ -5,11 +5,8 @@ import { decode as decodeB64, encode as encodeB64 } from 'base64-arraybuffer'; import { sha512 } from '@noble/hashes/sha512'; import { Client } from './client.js'; -import { isArray } from './datatypes.js'; -import { JSONADParser } from './parse.js'; import { Resource } from './resource.js'; import type { Store } from './store.js'; -import { urls, properties } from './urls.js'; import type { JSONValue, JSONArray } from './value.js'; import { commits } from './ontologies/commits.js'; import { core } from './ontologies/core.js'; @@ -381,16 +378,14 @@ export async function generateKeyPair(): Promise { export function parseCommitResource(resource: Resource): Commit { const commit: Commit = { id: resource.subject, - subject: resource.get(urls.properties.commit.subject) as string, - // eslint-disable-next-line @typescript-eslint/no-explicit-any - set: resource.get(urls.properties.commit.set) as Record, - // eslint-disable-next-line @typescript-eslint/no-explicit-any - push: resource.get(urls.properties.commit.push) as Record, - signer: resource.get(urls.properties.commit.signer) as string, - createdAt: resource.get(urls.properties.commit.createdAt) as number, - remove: resource.get(urls.properties.commit.remove) as string[], - destroy: resource.get(urls.properties.commit.destroy) as boolean, - signature: resource.get(urls.properties.commit.signature) as string, + subject: 
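// Sketch: the shape of the secret produced by buildSecret() above after this
// change: only privateKey and subject are serialized, so unrelated Agent
// fields no longer leak into the base64 string. decodeSecret is a
// hypothetical helper, not the library's own parser.
interface AgentSecret {
  privateKey: string;
  subject: string;
}

export const decodeSecret = (secret: string): AgentSecret =>
  JSON.parse(atob(secret)) as AgentSecret;

// agent.buildSecret() === btoa('{"privateKey":"...","subject":"..."}')
// decodeSecret(agent.buildSecret()).subject === agent.subject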
resource.get(commits.properties.subject), + set: resource.get(commits.properties.set), + push: resource.get(commits.properties.push), + signer: resource.get(commits.properties.signer), + createdAt: resource.get(commits.properties.createdAt), + remove: resource.get(commits.properties.remove), + destroy: resource.get(commits.properties.destroy), + signature: resource.get(commits.properties.signature), }; return commit; @@ -405,19 +400,17 @@ export function parseCommitJSON(str: string): Commit { throw new Error(`Commit is not an object`); } - const subject = jsonAdObj[urls.properties.commit.subject]; - const set = jsonAdObj[urls.properties.commit.set]; - const push = jsonAdObj[urls.properties.commit.push]; - const signer = jsonAdObj[urls.properties.commit.signer]; - const createdAt = jsonAdObj[urls.properties.commit.createdAt]; - const remove: string[] | undefined = - jsonAdObj[urls.properties.commit.remove]; - const destroy: boolean | undefined = - jsonAdObj[urls.properties.commit.destroy]; - const signature: string = jsonAdObj[urls.properties.commit.signature]; + const subject = jsonAdObj[commits.properties.subject]; + const set = jsonAdObj[commits.properties.set]; + const push = jsonAdObj[commits.properties.push]; + const signer = jsonAdObj[commits.properties.signer]; + const createdAt = jsonAdObj[commits.properties.createdAt]; + const remove: string[] | undefined = jsonAdObj[commits.properties.remove]; + const destroy: boolean | undefined = jsonAdObj[commits.properties.destroy]; + const signature: string = jsonAdObj[commits.properties.signature]; const id: undefined | string = jsonAdObj['@id']; const previousCommit: undefined | string = - jsonAdObj[urls.properties.commit.previousCommit]; + jsonAdObj[commits.properties.previousCommit]; if (!signature) { throw new Error(`Commit has no signature`); @@ -489,7 +482,7 @@ export function parseAndApplyCommit(jsonAdObjStr: string, store: Store) { if (id) { // This is something that the server does, too. - resource.setUnsafe(properties.commit.lastCommit, id); + resource.setUnsafe(commits.properties.lastCommit, id); } if (destroy) { @@ -508,28 +501,10 @@ function execSetCommit( resource: Resource, store?: Store, ) { - const parser = new JSONADParser(); const parsedResources: Resource[] = []; for (const [key, value] of Object.entries(set)) { - let newVal = value; - - if (value?.constructor === {}.constructor) { - const [result, foundResources] = parser.parseValue(value, key); - newVal = result; - parsedResources.push(...foundResources); - } - - if (isArray(value)) { - newVal = value.map(resourceOrURL => { - const [result, foundResources] = parser.parseValue(resourceOrURL, key); - parsedResources.push(...foundResources); - - return result; - }); - } - - resource.setUnsafe(key, newVal); + resource.setUnsafe(key, value); } store && store.addResources(parsedResources); @@ -541,30 +516,13 @@ function execRemoveCommit(remove: string[], resource: Resource) { } } -function execPushCommit( - push: Record, - resource: Resource, - store?: Store, -) { - const parser = new JSONADParser(); - const parsedResources: Resource[] = []; - +function execPushCommit(push: Record, resource: Resource) { for (const [key, value] of Object.entries(push)) { const current = (resource.get(key) as JSONArray) || []; const newArr = value as JSONArray; - // The `push` arrays may contain full resources. - // We parse these here and add them to a list of resources to add to the store. 
- const stringArr = newArr.map(val => { - const [result, foundResources] = parser.parseValue(val, key); - parsedResources.push(...foundResources); - - return result; - }); // Merge both the old and new items - const new_arr = [...current, ...stringArr]; + const new_arr = [...current, ...newArr]; // Save it! resource.setUnsafe(key, new_arr); } - - store && store.addResources(parsedResources); } diff --git a/browser/lib/src/datatypes.ts b/browser/lib/src/datatypes.ts index 0d64dc9ff..85d91f6ee 100644 --- a/browser/lib/src/datatypes.ts +++ b/browser/lib/src/datatypes.ts @@ -23,6 +23,10 @@ export enum Datatype { STRING = 'https://atomicdata.dev/datatypes/string', /** Milliseconds since unix epoch */ TIMESTAMP = 'https://atomicdata.dev/datatypes/timestamp', + /** JSON object */ + JSON = 'https://atomicdata.dev/datatypes/json', + /** URI */ + URI = 'https://atomicdata.dev/datatypes/uri', UNKNOWN = 'unknown-datatype', } @@ -142,6 +146,26 @@ export const validateDatatype = ( break; } + + case Datatype.JSON: { + try { + JSON.stringify(value); + } catch (e) { + err = 'Not valid JSON'; + } + + break; + } + + case Datatype.URI: { + try { + new URL(value as string); + } catch (e) { + err = 'Not a valid URI'; + } + + break; + } } if (err !== null) { @@ -165,6 +189,8 @@ export const reverseDatatypeMapping = { [Datatype.STRING]: 'String', [Datatype.SLUG]: 'Slug', [Datatype.MARKDOWN]: 'Markdown', + [Datatype.URI]: 'URI', + [Datatype.JSON]: 'JSON', [Datatype.INTEGER]: 'Integer', [Datatype.FLOAT]: 'Float', [Datatype.BOOLEAN]: 'Boolean', diff --git a/browser/lib/src/parse.test.ts b/browser/lib/src/parse.test.ts index 2359939de..b5aa1db06 100644 --- a/browser/lib/src/parse.test.ts +++ b/browser/lib/src/parse.test.ts @@ -4,12 +4,12 @@ import { JSONADParser } from './parse.js'; const EXAMPLE_SUBJECT = 'http://example.com/1'; const EXAMPLE_SUBJECT2 = 'http://example.com/2'; const EXAMPLE_SUBJECT3 = 'http://example.com/3'; -const EXAMPLE_SUBJECT4 = 'http://example.com/4'; -const STRING_PROPERTY = 'http://some-string-property'; -const NUMBER_PROPERTY = 'http://some-number-property'; -const BOOLEAN_PROPERTY = 'http://some-boolean-property'; -const NESTED_RESOURCE_PROPERTY = 'http://some-nested-resource-property'; +const STRING_PROPERTY = 'http://example.com/some-string-property'; +const NUMBER_PROPERTY = 'http://example.com/some-number-property'; +const BOOLEAN_PROPERTY = 'http://example.com/some-boolean-property'; +const NESTED_RESOURCE_PROPERTY = + 'http://example.com/some-nested-resource-property'; describe('parse.ts', () => { it('parses a JSON-AD object and returns it as a resource', ({ expect }) => { const jsonObject = { @@ -20,72 +20,13 @@ describe('parse.ts', () => { }; const parser = new JSONADParser(); - const [resource] = parser.parseObject(jsonObject); + const [resource] = parser.parse(jsonObject); expect(resource.get(STRING_PROPERTY)).toBe('Hoi'); expect(resource.get(NUMBER_PROPERTY)).toBe(10); expect(resource.get(BOOLEAN_PROPERTY)).toBe(true); }); - it('parses a JSON-AD object with a nested resource', ({ expect }) => { - const jsonObjectWithID = { - '@id': EXAMPLE_SUBJECT, - [NESTED_RESOURCE_PROPERTY]: { - '@id': EXAMPLE_SUBJECT2, - [STRING_PROPERTY]: 'Hoi', - }, - }; - - const jsonObjectWithoutID = { - '@id': EXAMPLE_SUBJECT, - [NESTED_RESOURCE_PROPERTY]: { - [STRING_PROPERTY]: 'Hoi', - }, - }; - - const jsonWithArrayOfResources = { - '@id': EXAMPLE_SUBJECT, - [NESTED_RESOURCE_PROPERTY]: [ - { - '@id': EXAMPLE_SUBJECT2, - [STRING_PROPERTY]: 'Hoi', - }, - EXAMPLE_SUBJECT3, - { - [STRING_PROPERTY]: 
'Hoi', - }, - ], - }; - - const parser = new JSONADParser(); - const [resource1, parsedResources1] = parser.parseObject(jsonObjectWithID); - - const [resource2, parsedResources2] = - parser.parseObject(jsonObjectWithoutID); - - const [resource3, parsedResources3] = parser.parseObject( - jsonWithArrayOfResources, - ); - - expect(resource1.get(NESTED_RESOURCE_PROPERTY)).toBe(EXAMPLE_SUBJECT2); - expect(parsedResources1).toHaveLength(2); - expect(parsedResources1[1].get(STRING_PROPERTY)).toBe('Hoi'); - - expect(resource2.get(NESTED_RESOURCE_PROPERTY)).toEqual({ - [STRING_PROPERTY]: 'Hoi', - }); - - expect(parsedResources2).toHaveLength(1); - - expect(resource3.get(NESTED_RESOURCE_PROPERTY)).toEqual([ - EXAMPLE_SUBJECT2, - EXAMPLE_SUBJECT3, - { [STRING_PROPERTY]: 'Hoi' }, - ]); - - expect(parsedResources3).toHaveLength(2); - }); - it('parses an array of jsonObjects', ({ expect }) => { const array = [ { @@ -100,17 +41,15 @@ describe('parse.ts', () => { '@id': EXAMPLE_SUBJECT3, [STRING_PROPERTY]: 'Third Resource', [NESTED_RESOURCE_PROPERTY]: { - '@id': EXAMPLE_SUBJECT4, - [STRING_PROPERTY]: 'Fourth Resource', + [STRING_PROPERTY]: 'Nested Resource', }, }, ]; const parser = new JSONADParser(); - const [resources, parsedResources] = parser.parseArray(array); + const resources = parser.parse(array); expect(resources).toHaveLength(3); - expect(parsedResources).toHaveLength(4); }); it('Handles resources without an ID', ({ expect }) => { @@ -119,7 +58,7 @@ describe('parse.ts', () => { }; const parser = new JSONADParser(); - const [resource] = parser.parseObject(jsonObject, 'my-new-id'); + const [resource] = parser.parse(jsonObject, 'my-new-id'); expect(resource.get(STRING_PROPERTY)).toBe('Hoi'); expect(resource.subject).toBe('my-new-id'); diff --git a/browser/lib/src/parse.ts b/browser/lib/src/parse.ts index 823e134a6..e70601063 100644 --- a/browser/lib/src/parse.ts +++ b/browser/lib/src/parse.ts @@ -1,53 +1,59 @@ import { AtomicError } from './error.js'; -import { isArray } from './index.js'; +import { Client, isArray } from './index.js'; import { server } from './ontologies/server.js'; import { Resource, unknownSubject } from './resource.js'; import type { JSONObject, JSONValue } from './value.js'; -/** Resources in JSON-AD can be referenced by their URL (string), - * be entire (nested) resources, in which case they are JSONObjects */ -type StringOrNestedResource = string | JSONObject; - +/** + * Parses a JSON-AD object or array into resources. Create a new instance each time you need to parse a json-ad string. + */ export class JSONADParser { - private parsedResources: Resource[] = []; + public parse(json: unknown, subject: string = unknownSubject): Resource[] { + if (Array.isArray(json)) { + return this.parseArray(json); + } + + if (isJSONObject(json as JSONValue)) { + return [this.parseObject(json as JSONObject, subject)]; + } + + throw new Error(`Expected object or array, got ${typeof json}`); + } /** * Parses an JSON-AD object containing a resource. Returns the resource and a list of all the sub-resources it found. */ - public parseObject( + private parseObject( jsonObject: JSONObject, resourceSubject?: string, - ): [parsedRootResource: Resource, allParsedResources: Resource[]] { - this.parsedResources = []; + ): Resource { const parsedResource = this.parseJsonADResource( jsonObject, resourceSubject, ); - return [parsedResource, [...this.parsedResources]]; + return parsedResource; } /** * Parses an array of JSON-AD objects containing resources. 
* Returns a list of the resources in the array and a list of all the resources that were found including sub-resources. */ - public parseArray( - jsonArray: unknown[], - ): [resourcesInArray: Resource[], allParsedResources: Resource[]] { - this.parsedResources = []; - const resources = this.parseJsonADArray(jsonArray); + private parseArray(jsonArray: unknown[]): Resource[] { + const resources: Resource[] = []; - return [resources, [...this.parsedResources]]; - } + for (const item of jsonArray as JSONValue[]) { + if (!isJSONObject(item)) { + throw new Error( + `Error parsing JSON-AD Array, expected object, got ${typeof item}`, + ); + } - public parseValue( - value: JSONValue, - key: string, - ): [value: JSONValue, allParsedResources: Resource[]] { - this.parsedResources = []; - const result = this.parseJsonAdResourceValue(value, key); + const resource = this.parseJsonADResource(item); + resources.push(resource); + } - return [result, [...this.parsedResources]]; + return resources; } private parseJsonADResource( @@ -55,13 +61,12 @@ export class JSONADParser { resourceSubject: string = unknownSubject, ): Resource { const resource = new Resource(resourceSubject); - this.parsedResources.push(resource); try { for (const [key, value] of Object.entries(object)) { if (key === '@id') { - if (typeof value !== 'string') { - throw new Error("'@id' field must be a string"); + if (!Client.isValidSubject(value)) { + throw new Error(`@id value ${value} is not a valid subject`); } if ( @@ -74,32 +79,11 @@ export class JSONADParser { ); } - resource.setSubject(value); + resource.setSubject(value as string); continue; } - try { - // Resource values can be either strings (URLs) or full Resources, which in turn can be either Anonymous (no @id) or Named (with an @id) - if (isArray(value)) { - const newarr = value.map(val => - this.parseJsonAdResourceValue(val, key), - ); - resource.setUnsafe(key, newarr); - } else if (typeof value === 'string') { - resource.setUnsafe(key, value); - } else if (typeof value === 'number') { - resource.setUnsafe(key, value); - } else if (typeof value === 'boolean') { - resource.setUnsafe(key, value); - } else { - const subject = this.parseJsonAdResourceValue(value, key); - resource.setUnsafe(key, subject); - } - } catch (e) { - const baseMsg = `Failed creating value ${value} for key ${key} in resource ${resource.subject}`; - const errorMsg = `${baseMsg}. 
${e.message}`; - throw new Error(errorMsg); - } + resource.setUnsafe(key, value); } resource.loading = false; @@ -117,49 +101,6 @@ export class JSONADParser { return resource; } - - private parseJsonAdResourceValue( - value: JSONValue, - key: string, - ): StringOrNestedResource { - if (typeof value === 'string') { - return value; - } - - if (isJSONObject(value)) { - if ('@id' in value) { - // It's a named resource that should be parsed too - const nestedSubject = value['@id'] as string; - this.parseJsonADResource(value); - - return nestedSubject; - } else { - // It's an anonymous nested Resource - return value; - } - } - - throw new Error( - `Value ${value} in ${key} not a string or a nested Resource`, - ); - } - - /** Parses a JSON-AD array, returns array of Resources */ - private parseJsonADArray(jsonArray: unknown[]): Resource[] { - const resources: Resource[] = []; - - try { - for (const jsonObject of jsonArray) { - const resource = this.parseJsonADResource(jsonObject as JSONObject); - resources.push(resource); - } - } catch (e) { - e.message = 'Failed parsing JSON ' + e.message; - throw e; - } - - return resources; - } } const isJSONObject = (value: JSONValue): value is JSONObject => diff --git a/browser/lib/src/resource.ts b/browser/lib/src/resource.ts index 274933c37..fb876987a 100644 --- a/browser/lib/src/resource.ts +++ b/browser/lib/src/resource.ts @@ -11,6 +11,8 @@ import { } from './commit.js'; import { validateDatatype } from './datatypes.js'; import { isUnauthorized } from './error.js'; +import { collections } from './ontologies/collections.js'; +import { commits } from './ontologies/commits.js'; import { core } from './ontologies/core.js'; import { server } from './ontologies/server.js'; @@ -22,7 +24,7 @@ import { type QuickAccessPropType, } from './ontology.js'; import type { Store } from './store.js'; -import { properties, instances, urls } from './urls.js'; +import { properties, instances } from './urls.js'; import { valToArray, type JSONValue, @@ -89,6 +91,13 @@ export class Resource { public constructor(subject: string, newResource?: boolean) { if (typeof subject !== 'string') { + // Check if the subject is an object with an @id property + if (subject && typeof subject === 'object' && '@id' in subject) { + throw new Error( + 'Found named nested resource instead of subjects, this probably means your server is outdated.', + ); + } + throw new Error( 'Invalid subject given to resource, must be a string, found ' + typeof subject, @@ -186,7 +195,7 @@ export class Resource { private get store(): Store { if (!this._store) { - console.error(`Resource ${this.title} has no store`); + console.error(`Resource ${this.subject} has no store`); throw new Error('Resource has no store'); } @@ -405,9 +414,9 @@ export class Resource { public getCommitsCollectionSubject(): string { const url = new URL(this.subject); url.pathname = '/commits'; - url.searchParams.append('property', urls.properties.commit.subject); + url.searchParams.append('property', commits.properties.subject); url.searchParams.append('value', this.subject); - url.searchParams.append('sort_by', urls.properties.commit.createdAt); + url.searchParams.append('sort_by', commits.properties.createdAt); url.searchParams.append('include_nested', 'true'); url.searchParams.append('page_size', '9999'); @@ -432,30 +441,34 @@ export class Resource { const commitsCollection = await this.store.fetchResourceFromServer( this.getCommitsCollectionSubject(), ); - const commits = commitsCollection.get( - properties.collection.members, + const 
commitList = commitsCollection.get( + collections.properties.members, ) as string[]; const builtVersions: Version[] = []; let previousResource = new Resource(this.subject); - for (let i = 0; i < commits.length; i++) { - const commitResource = await this.store.getResource(commits[i]); + for (let i = 0; i < commitList.length; i++) { + const commitResource = await this.store.getResource(commitList[i]); const parsedCommit = parseCommitResource(commitResource); const builtResource = applyCommitToResource( previousResource.clone(), parsedCommit, ); + + builtResource.setStore(this.store); + builtVersions.push({ commit: parsedCommit, resource: builtResource, }); + previousResource = builtResource; // Every 30 cycles we report the progress if (progressCallback && i % 30 === 0) { - progressCallback(Math.round((i / commits.length) * 100)); + progressCallback(Math.round((i / commitList.length) * 100)); await WaitForImmediate(); } } @@ -463,6 +476,10 @@ export class Resource { return builtVersions; } + /** + * Sets the resource to the specified version and saves it. + * @param version The version to set the resource to, you can get this using `resource.getHistory()` + */ public async setVersion(version: Version): Promise { const versionPropvals = version.resource.getPropVals(); @@ -477,6 +494,7 @@ export class Resource { await this.set(key, value); } + // TODO: We should let the user save, this is what we usually do. await this.save(); } diff --git a/browser/lib/src/store.ts b/browser/lib/src/store.ts index 26753fe1f..9db5252cb 100644 --- a/browser/lib/src/store.ts +++ b/browser/lib/src/store.ts @@ -389,18 +389,24 @@ export class Store { /** Opens a Websocket for some subject URL, or returns the existing one. */ public getWebSocketForSubject(subject: string): WebSocket | undefined { - const url = new URL(subject); - const found = this.webSockets.get(url.origin); - - if (found) { - return found; - } else { - if (typeof window !== 'undefined') { - this.webSockets.set(url.origin, startWebsocket(url.origin, this)); + try { + const url = new URL(subject); + const found = this.webSockets.get(url.origin); + + if (found) { + return found; + } else { + if (typeof window !== 'undefined') { + this.webSockets.set(url.origin, startWebsocket(url.origin, this)); + } } - } - return; + return; + } catch (e) { + throw new Error( + `Could not open websocket for subject ${subject}: ${e.message}`, + ); + } } /** Returns the base URL of the companion server */ @@ -626,7 +632,7 @@ export class Store { Uint8Array.from(atob(content), c => c.charCodeAt(0)), ); const json = JSON.parse(jsonString); - const [_, resources] = parser.parseObject(json); + const resources = parser.parse(json); this.addResources(resources); }); } diff --git a/browser/lib/src/value.ts b/browser/lib/src/value.ts index 48b2674da..c37a69a9b 100644 --- a/browser/lib/src/value.ts +++ b/browser/lib/src/value.ts @@ -3,7 +3,7 @@ import type { Resource } from './resource.js'; export type JSONPrimitive = string | number | boolean; export type JSONValue = JSONPrimitive | JSONObject | JSONArray | undefined; -export type JSONObject = { [member: string]: JSONValue }; +export type JSONObject = { [key: string]: JSONValue }; export type JSONArray = Array; /** @@ -15,8 +15,7 @@ export function valToArray(val?: JSONValue): JSONArray { throw new Error(`Not an array: ${val}, is ${typeof val}`); } - if (val.constructor === Array) { - // TODO: check this better + if (Array.isArray(val)) { return val; } @@ -77,13 +76,13 @@ export function valToResource(val: JSONValue): string | 
Resource { throw new Error(`Not a resource: ${val}, is a Date`); } - if (val?.constructor === Array) { + if (Array.isArray(val)) { throw new Error(`Not a resource: ${val}, is an Array`); } if (typeof val === 'object') { const parser = new JSONADParser(); - const [resource] = parser.parseObject(val as JSONObject, 'nested-resource'); + const [resource] = parser.parse(val as JSONObject, 'nested-resource'); return resource; } diff --git a/browser/lib/src/websockets.ts b/browser/lib/src/websockets.ts index 812f37ef3..14f3ef77a 100644 --- a/browser/lib/src/websockets.ts +++ b/browser/lib/src/websockets.ts @@ -58,7 +58,7 @@ function parseResourceMessage(ev: MessageEvent): Resource[] { const resourceJSON: string = ev.data.slice(9); const parsed = JSON.parse(resourceJSON); const parser = new JSONADParser(); - const [_, resources] = parser.parseObject(parsed); + const resources = parser.parse(parsed); return resources; } diff --git a/browser/package.json b/browser/package.json index d1bf8afae..1420c3862 100644 --- a/browser/package.json +++ b/browser/package.json @@ -55,5 +55,5 @@ "create-template" ] }, - "packageManager": "pnpm@9.3.0" + "packageManager": "pnpm@10.14.0" } diff --git a/browser/pnpm-lock.yaml b/browser/pnpm-lock.yaml index e6b2882c4..c5cab0dff 100644 --- a/browser/pnpm-lock.yaml +++ b/browser/pnpm-lock.yaml @@ -77,13 +77,9 @@ importers: chalk: specifier: ^5.3.0 version: 5.3.0 - get-tsconfig: - specifier: ^4.8.1 - version: 4.8.1 prettier: specifier: 3.0.3 version: 3.0.3 - devDependencies: typescript: specifier: ^5.6.3 version: 5.6.3 @@ -118,6 +114,12 @@ importers: '@bugsnag/plugin-react': specifier: ^7.25.0 version: 7.25.0(@bugsnag/core@7.25.0) + '@codemirror/lang-json': + specifier: ^6.0.2 + version: 6.0.2 + '@codemirror/lint': + specifier: ^6.8.5 + version: 6.8.5 '@dagrejs/dagre': specifier: ^1.1.4 version: 1.1.4 @@ -175,6 +177,15 @@ importers: '@tomic/react': specifier: workspace:* version: link:../react + '@uiw/codemirror-theme-github': + specifier: ^4.24.1 + version: 4.24.1(@codemirror/language@6.11.2)(@codemirror/state@6.5.2)(@codemirror/view@6.38.1) + '@uiw/react-codemirror': + specifier: ^4.24.1 + version: 4.24.1(@babel/runtime@7.27.6)(@codemirror/autocomplete@6.18.6)(@codemirror/language@6.11.2)(@codemirror/lint@6.8.5)(@codemirror/search@6.5.11)(@codemirror/state@6.5.2)(@codemirror/theme-one-dark@6.1.3)(@codemirror/view@6.38.1)(codemirror@6.0.2)(react-dom@19.0.0(react@19.0.0))(react@19.0.0) + clsx: + specifier: ^2.1.1 + version: 2.1.1 emoji-mart: specifier: ^5.6.0 version: 5.6.0 @@ -318,73 +329,6 @@ importers: specifier: ^2.0.1 version: 2.0.1 - e2e/template-tests/sveltekit-site: - dependencies: - '@tomic/lib': - specifier: ^0.40.0 - version: link:../../../lib - '@tomic/svelte': - specifier: ^0.40.0 - version: link:../../../svelte - svelte-markdown: - specifier: ^0.4.1 - version: 0.4.1(svelte@5.1.4) - devDependencies: - '@sveltejs/adapter-auto': - specifier: ^3.3.1 - version: 3.3.1(@sveltejs/kit@2.22.2(@sveltejs/vite-plugin-svelte@4.0.0(svelte@5.1.4)(vite@5.4.10(@types/node@20.17.0)(terser@5.43.1)))(svelte@5.1.4)(vite@5.4.10(@types/node@20.17.0)(terser@5.43.1))) - '@sveltejs/adapter-node': - specifier: ^5.2.9 - version: 5.2.12(@sveltejs/kit@2.22.2(@sveltejs/vite-plugin-svelte@4.0.0(svelte@5.1.4)(vite@5.4.10(@types/node@20.17.0)(terser@5.43.1)))(svelte@5.1.4)(vite@5.4.10(@types/node@20.17.0)(terser@5.43.1))) - '@sveltejs/kit': - specifier: ^2.7.3 - version: 
2.22.2(@sveltejs/vite-plugin-svelte@4.0.0(svelte@5.1.4)(vite@5.4.10(@types/node@20.17.0)(terser@5.43.1)))(svelte@5.1.4)(vite@5.4.10(@types/node@20.17.0)(terser@5.43.1)) - '@sveltejs/vite-plugin-svelte': - specifier: ^4.0.0-next.6 - version: 4.0.0(svelte@5.1.4)(vite@5.4.10(@types/node@20.17.0)(terser@5.43.1)) - '@tomic/cli': - specifier: ^0.39.0 - version: link:../../../cli - '@types/eslint': - specifier: ^9.6.1 - version: 9.6.1 - eslint: - specifier: ^9.13.0 - version: 9.13.0(jiti@2.3.3) - eslint-config-prettier: - specifier: ^9.1.0 - version: 9.1.0(eslint@9.13.0(jiti@2.3.3)) - eslint-plugin-svelte: - specifier: ^2.46.0 - version: 2.46.0(eslint@9.13.0(jiti@2.3.3))(svelte@5.1.4)(ts-node@10.9.2(@swc/core@1.7.39)(@types/node@20.17.0)(typescript@5.6.3)) - globals: - specifier: ^15.11.0 - version: 15.11.0 - prettier: - specifier: ^3.3.3 - version: 3.3.3 - prettier-plugin-svelte: - specifier: ^3.2.7 - version: 3.2.7(prettier@3.3.3)(svelte@5.1.4) - svelte: - specifier: ^5.1.4 - version: 5.1.4 - svelte-check: - specifier: ^4.0.5 - version: 4.2.2(picomatch@4.0.2)(svelte@5.1.4)(typescript@5.6.3) - typescript: - specifier: ^5.6.3 - version: 5.6.3 - typescript-eslint: - specifier: ^8.11.0 - version: 8.11.0(eslint@9.13.0(jiti@2.3.3))(typescript@5.6.3) - vite: - specifier: ^5.4.10 - version: 5.4.10(@types/node@20.17.0)(terser@5.43.1) - vitest: - specifier: ^2.1.3 - version: 2.1.3(@types/node@20.17.0)(terser@5.43.1) - lib: dependencies: '@noble/ed25519': @@ -758,7 +702,6 @@ packages: '@babel/plugin-proposal-private-methods@7.18.6': resolution: {integrity: sha512-nutsvktDItsNn4rpGItSNV2sz1XwS+nfU0Rg8aCx3W3NOKVzdMjJRu0O5OkgDp3ZGICSTbgRpxZoWsxoKRvbeA==} engines: {node: '>=6.9.0'} - deprecated: This proposal has been merged to the ECMAScript standard and thus this plugin is no longer maintained. Please use @babel/plugin-transform-private-methods instead. 
peerDependencies: '@babel/core': ^7.0.0-0 @@ -1181,6 +1124,33 @@ packages: '@bugsnag/safe-json-stringify@6.0.0': resolution: {integrity: sha512-htzFO1Zc57S8kgdRK9mLcPVTW1BY2ijfH7Dk2CeZmspTWKdKqSo1iwmqrq2WtRjFlo8aRZYgLX0wFrDXF/9DLA==} + '@codemirror/autocomplete@6.18.6': + resolution: {integrity: sha512-PHHBXFomUs5DF+9tCOM/UoW6XQ4R44lLNNhRaW9PKPTU0D7lIjRg3ElxaJnTwsl/oHiR93WSXDBrekhoUGCPtg==} + + '@codemirror/commands@6.8.1': + resolution: {integrity: sha512-KlGVYufHMQzxbdQONiLyGQDUW0itrLZwq3CcY7xpv9ZLRHqzkBSoteocBHtMCoY7/Ci4xhzSrToIeLg7FxHuaw==} + + '@codemirror/lang-json@6.0.2': + resolution: {integrity: sha512-x2OtO+AvwEHrEwR0FyyPtfDUiloG3rnVTSZV1W8UteaLL8/MajQd8DpvUb2YVzC+/T18aSDv0H9mu+xw0EStoQ==} + + '@codemirror/language@6.11.2': + resolution: {integrity: sha512-p44TsNArL4IVXDTbapUmEkAlvWs2CFQbcfc0ymDsis1kH2wh0gcY96AS29c/vp2d0y2Tquk1EDSaawpzilUiAw==} + + '@codemirror/lint@6.8.5': + resolution: {integrity: sha512-s3n3KisH7dx3vsoeGMxsbRAgKe4O1vbrnKBClm99PU0fWxmxsx5rR2PfqQgIt+2MMJBHbiJ5rfIdLYfB9NNvsA==} + + '@codemirror/search@6.5.11': + resolution: {integrity: sha512-KmWepDE6jUdL6n8cAAqIpRmLPBZ5ZKnicE8oGU/s3QrAVID+0VhLFrzUucVKHG5035/BSykhExDL/Xm7dHthiA==} + + '@codemirror/state@6.5.2': + resolution: {integrity: sha512-FVqsPqtPWKVVL3dPSxy8wEF/ymIEuVzF1PK3VbUgrxXpJUSHQWWZz4JMToquRxnkw+36LTamCZG2iua2Ptq0fA==} + + '@codemirror/theme-one-dark@6.1.3': + resolution: {integrity: sha512-NzBdIvEJmx6fjeremiGp3t/okrLPYT0d9orIc7AFun8oZcRk58aejkqhv6spnz4MLAevrKNPMQYXEWMg4s+sKA==} + + '@codemirror/view@6.38.1': + resolution: {integrity: sha512-RmTOkE7hRU3OVREqFVITWHz6ocgBjv08GoePscAakgVQfciA3SGCEk7mb9IzwW61cKKmlTpHXG6DUE5Ubx+MGQ==} + '@colors/colors@1.5.0': resolution: {integrity: sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==} engines: {node: '>=0.1.90'} @@ -1898,7 +1868,6 @@ packages: '@humanwhocodes/config-array@0.13.0': resolution: {integrity: sha512-DZLEEqFWQFiyK6h5YIeynKx7JlvCYWL0cImfSRXZ9l4Sg2efkFGTuFf6vzXjK1cq6IYkU+Eg/JizXw+TD2vRNw==} engines: {node: '>=10.10.0'} - deprecated: Use @eslint/config-array instead '@humanwhocodes/module-importer@1.0.1': resolution: {integrity: sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==} @@ -1910,7 +1879,6 @@ packages: '@humanwhocodes/object-schema@2.0.3': resolution: {integrity: sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA==} - deprecated: Use @eslint/object-schema instead '@humanwhocodes/retry@0.3.1': resolution: {integrity: sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA==} @@ -1963,6 +1931,18 @@ packages: '@jridgewell/trace-mapping@0.3.9': resolution: {integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==} + '@lezer/common@1.2.3': + resolution: {integrity: sha512-w7ojc8ejBqr2REPsWxJjrMFsA/ysDCFICn8zEOR9mrqzOu2amhITYuLD8ag6XZf0CFXDrhKqw7+tW8cX66NaDA==} + + '@lezer/highlight@1.2.1': + resolution: {integrity: sha512-Z5duk4RN/3zuVO7Jq0pGLJ3qynpxUVsh7IbUbGj88+uV2ApSAn6kWg2au3iJb+0Zi7kKtqffIESgNcRXWZWmSA==} + + '@lezer/json@1.0.3': + resolution: {integrity: sha512-BP9KzdF9Y35PDpv04r0VeSTKDeox5vVr3efE7eBbx3r4s3oNLfunchejZhjArmeieBH+nVOpgIiBJpEAv8ilqQ==} + + '@lezer/lr@1.4.2': + resolution: {integrity: sha512-pu0K1jCIdnQ12aWNaAVU5bzi7Bd1w54J3ECgANPmYLtQKP0HBj2cE/5coBD66MT10xbtIuUr7tg0Shbsvk0mDA==} + '@lukeed/ms@2.0.2': resolution: {integrity: 
sha512-9I2Zn6+NJLfaGoz9jN3lpwDgAYvfGeNYdbAIjJOqzs4Tpc+VU3Jqq4IofSUBKajiDS8k9fZIg18/z13mpk1bsA==} engines: {node: '>=8'} @@ -1971,6 +1951,9 @@ packages: resolution: {integrity: sha512-Yhlar6v9WQgUp/He7BdgzOz8lqMQ8sU+jkCq7Wx8Myc5YFJLbEe7lgui/V7G1qB1DJykHSGwreceSaD60Y0PUQ==} hasBin: true + '@marijn/find-cluster-break@1.0.2': + resolution: {integrity: sha512-l0h88YhZFyKdXIFNfSWpyjStDjGHwZ/U7iobcK1cQQD8sejsONdQtTVU+1wVN1PBw40PiiHB1vA5S7VTfQiP9g==} + '@microsoft/api-extractor-model@7.30.0': resolution: {integrity: sha512-26/LJZBrsWDKAkOWRiQbdVgcfd1F3nyJnAiJzsAgpouPk7LtOIj7PK9aJtBaw/pUXrkotEg27RrT+Jm/q0bbug==} @@ -2155,7 +2138,6 @@ packages: '@noble/hashes@0.5.9': resolution: {integrity: sha512-7lN1Qh6d8DUGmfN36XRsbN/WcGIPNtTGhkw26vWId/DlCIGsYJJootTtPGghTLcn/AaXPx2Q0b3cacrwXa7OVw==} - deprecated: Upgrade to v1.0.0 or higher for audited package '@nodelib/fs.scandir@2.1.5': resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} @@ -2670,24 +2652,6 @@ packages: '@types/babel__core': optional: true - '@rollup/plugin-commonjs@28.0.6': - resolution: {integrity: sha512-XSQB1K7FUU5QP+3lOQmVCE3I0FcbbNvmNT4VJSj93iUjayaARrTQeoRdiYQoftAJBLrR9t2agwAd3ekaTgHNlw==} - engines: {node: '>=16.0.0 || 14 >= 14.17'} - peerDependencies: - rollup: ^2.68.0||^3.0.0||^4.0.0 - peerDependenciesMeta: - rollup: - optional: true - - '@rollup/plugin-json@6.1.0': - resolution: {integrity: sha512-EGI2te5ENk1coGeADSIwZ7G2Q8CJS2sF120T7jLw4xFw9n7wIOXHo+kIYRAoVpJAN+kmqZSoO3Fp4JtoNF4ReA==} - engines: {node: '>=14.0.0'} - peerDependencies: - rollup: ^1.20.0||^2.0.0||^3.0.0||^4.0.0 - peerDependenciesMeta: - rollup: - optional: true - '@rollup/plugin-node-resolve@15.3.1': resolution: {integrity: sha512-tgg6b91pAybXHJQMAAwW9VuWBO6Thi+q7BCNARLwSqlmsHz0XYURtGvh/AuwSADXSI4h/2uHbs7s4FzlZDGSGA==} engines: {node: '>=14.0.0'} @@ -2697,15 +2661,6 @@ packages: rollup: optional: true - '@rollup/plugin-node-resolve@16.0.1': - resolution: {integrity: sha512-tk5YCxJWIG81umIvNkSod2qK5KyQW19qcBF/B78n1bjtOON6gzKoVeSzAE8yHCZEDmqkHKkxplExA8KzdJLJpA==} - engines: {node: '>=14.0.0'} - peerDependencies: - rollup: ^2.78.0||^3.0.0||^4.0.0 - peerDependenciesMeta: - rollup: - optional: true - '@rollup/plugin-replace@2.4.2': resolution: {integrity: sha512-IGcu+cydlUMZ5En85jxHH4qj2hta/11BHq95iHEyb2sbgiN0eCdzvUcHw5gt9pBL5lTi4JDYJ1acCoMGpTvEZg==} peerDependencies: @@ -2863,35 +2818,11 @@ packages: '@surma/rollup-plugin-off-main-thread@2.2.3': resolution: {integrity: sha512-lR8q/9W7hZpMWweNiAKU7NQerBnzQQLvi8qnTDU/fxItPhtZVMbPV3lbCwjhIlNBe9Bbr5V+KHshvWmVSG9cxQ==} - '@sveltejs/acorn-typescript@1.0.5': - resolution: {integrity: sha512-IwQk4yfwLdibDlrXVE04jTZYlLnwsTT2PIOQQGNLWfjavGifnk1JD1LcZjZaBTRcxZu2FfPfNLOE04DSu9lqtQ==} - peerDependencies: - acorn: ^8.9.0 - '@sveltejs/adapter-auto@3.3.0': resolution: {integrity: sha512-EJZqY7eMM+bdbR898Xt9ufawUHLPJu7w3wPr4Cc+T1iIDf3fufVLWg4C71OluIqsdJqv85E4biKuHo3XXIY0PQ==} peerDependencies: '@sveltejs/kit': ^2.0.0 - '@sveltejs/adapter-auto@3.3.1': - resolution: {integrity: sha512-5Sc7WAxYdL6q9j/+D0jJKjGREGlfIevDyHSQ2eNETHcB1TKlQWHcAo8AS8H1QdjNvSXpvOwNjykDUHPEAyGgdQ==} - peerDependencies: - '@sveltejs/kit': ^2.0.0 - - '@sveltejs/adapter-node@5.2.12': - resolution: {integrity: sha512-0bp4Yb3jKIEcZWVcJC/L1xXp9zzJS4hDwfb4VITAkfT4OVdkspSHsx7YhqJDbb2hgLl6R9Vs7VQR+fqIVOxPUQ==} - peerDependencies: - '@sveltejs/kit': ^2.4.0 - - '@sveltejs/kit@2.22.2': - resolution: {integrity: sha512-2MvEpSYabUrsJAoq5qCOBGAlkICjfjunrnLcx3YAk2XV7TvAIhomlKsAgR4H/4uns5rAfYmj7Wet5KRtc8dPIg==} - 
engines: {node: '>=18.13'} - hasBin: true - peerDependencies: - '@sveltejs/vite-plugin-svelte': ^3.0.0 || ^4.0.0-next.1 || ^5.0.0 || ^6.0.0-next.0 - svelte: ^4.0.0 || ^5.0.0-next.0 - vite: ^5.0.3 || ^6.0.0 || ^7.0.0-beta.0 - '@sveltejs/kit@2.7.2': resolution: {integrity: sha512-bFwrl+0bNr0/DHQZM0INwwSPNYqDjfsKRhUoa6rj9d8tDZzszBrJ3La6/HVFxWGONEigtG+SzHXa1BEa1BLdwA==} engines: {node: '>=18.13'} @@ -3339,7 +3270,6 @@ packages: '@types/fast-json-stable-stringify@2.1.2': resolution: {integrity: sha512-vsxcbfLDdjytnCnHXtinE40Xl46Wr7l/VGRGt7ewJwCPMKEHOdEsTxXX8xwgoR7cbc+6dE8SB4jlMrOV2zAg7g==} - deprecated: This is a stub types definition. fast-json-stable-stringify provides its own type definitions, so you do not need this installed. '@types/geojson@7946.0.14': resolution: {integrity: sha512-WCfD5Ht3ZesJUsONdhvm84dmzWOiOzOAqOncN0++w0lBw1o8OuDNJF2McvvCef/yBqb/HYRahp1BYtODFQ8bRg==} @@ -3386,9 +3316,6 @@ packages: '@types/markdown-it@14.1.2': resolution: {integrity: sha512-promo4eFwuiW+TfGxhi+0x3czqTYJkG8qB17ZUJiVF10Xm7NLVRSLUsfRTU/6h1e24VvRnXCx+hG7li58lkzog==} - '@types/marked@5.0.2': - resolution: {integrity: sha512-OucS4KMHhFzhz27KxmWg7J+kIYqyqoW5kdIEI319hqARQQUTqhao3M/F+uFnDXD0Rg72iDDZxZNxq5gvctmLlg==} - '@types/mdast@4.0.4': resolution: {integrity: sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==} @@ -3421,7 +3348,6 @@ packages: '@types/react-pdf@7.0.0': resolution: {integrity: sha512-G0a+5UiKk3AvEauBP/Js7r9kGZNW3iBbS6kXkH0foGSaKWR6K3ElTe7Y4tlolc2VKbM9udmMxpkbxh/dtR2wXA==} - deprecated: This is a stub types definition. react-pdf provides its own type definitions, so you do not need this installed. '@types/react-router-dom@5.3.3': resolution: {integrity: sha512-kpqnYK4wcdm5UaWI3fLcELopqLrHgLqNsdpHauzlQktfkHL3npOSwtj1Uz9oKBAzs7lFtVkV8j83voAz2D8fhw==} @@ -3600,6 +3526,38 @@ packages: resolution: {integrity: sha512-EaewX6lxSjRJnc+99+dqzTeoDZUfyrA52d2/HRrkI830kgovWsmIiTfmr0NZorzqic7ga+1bS60lRBUgR3n/Bw==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + '@uiw/codemirror-extensions-basic-setup@4.24.1': + resolution: {integrity: sha512-o1m1a8eUS3fWERMbDFvN8t8sZUFPgDKNemmlQ5Ot2vKm+Ax84lKP1dhEFgkiOaZ1bDHk4T5h6SjHuTghrJHKww==} + peerDependencies: + '@codemirror/autocomplete': '>=6.0.0' + '@codemirror/commands': '>=6.0.0' + '@codemirror/language': '>=6.0.0' + '@codemirror/lint': '>=6.0.0' + '@codemirror/search': '>=6.0.0' + '@codemirror/state': '>=6.0.0' + '@codemirror/view': '>=6.0.0' + + '@uiw/codemirror-theme-github@4.24.1': + resolution: {integrity: sha512-dl4qFEXINE4TFus7ALMfjFUCl7sWLkqTdaSaln0Vv3s+HVzSMAh5lkEdnH3yPcOOCl5ehYG4zIx8bqEnA2/FYQ==} + + '@uiw/codemirror-themes@4.24.1': + resolution: {integrity: sha512-hduBbFNiWNW6nYa2/giKQ9YpzhWNw87BGpCjC+cXYMZ7bCD6q5DC6Hw+7z7ZwSzEaOQvV91lmirOjJ8hn9+pkg==} + peerDependencies: + '@codemirror/language': '>=6.0.0' + '@codemirror/state': '>=6.0.0' + '@codemirror/view': '>=6.0.0' + + '@uiw/react-codemirror@4.24.1': + resolution: {integrity: sha512-BivF4NLqbuBQK5gPVhSkOARi9nPXw8X5r25EnInPeY+I9l1dfEX8O9V6+0xHTlGHyUo0cNfGEF9t1KHEicUfJw==} + peerDependencies: + '@babel/runtime': '>=7.11.0' + '@codemirror/state': '>=6.0.0' + '@codemirror/theme-one-dark': '>=6.0.0' + '@codemirror/view': '>=6.0.0' + codemirror: '>=6.0.0' + react: '>=16.8.0' + react-dom: '>=16.8.0' + '@ungap/structured-clone@1.2.0': resolution: {integrity: sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==} @@ -3869,7 +3827,6 @@ packages: are-we-there-yet@2.0.0: resolution: {integrity: 
sha512-Ci/qENmwHnsYo9xKIcUJN5LeDKdJ6R1Z1j9V/J5wyq8nh/mYPEpIKJbBZXtZjG04HiK7zV/p6Vs9952MrMeUIw==} engines: {node: '>=10'} - deprecated: This package is no longer supported. arg@4.1.3: resolution: {integrity: sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==} @@ -4390,6 +4347,9 @@ packages: resolution: {integrity: sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==} engines: {node: '>=6'} + codemirror@6.0.2: + resolution: {integrity: sha512-VhydHotNW5w1UGK0Qj96BwSk/Zqbp9WbnyK2W/eVMv4QyF41INRGpjUhFJY7/uDNuudSc33a/PKr4iDqRduvHw==} + color-convert@1.9.3: resolution: {integrity: sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==} @@ -5250,7 +5210,6 @@ packages: eslint@8.57.1: resolution: {integrity: sha512-ypowyDxpVSYpkXr9WPv2PAZCtNip1Mv5KTW0SCurXv/9iOpcrH9PaqUElksqEB6pChqHGDRCFTyrZlGhnLNGiA==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - deprecated: This version is no longer supported. Please see https://eslint.org/version-support for other options. hasBin: true eslint@9.13.0: @@ -5266,9 +5225,6 @@ packages: esm-env@1.0.0: resolution: {integrity: sha512-Cf6VksWPsTuW01vU9Mk/3vRue91Zevka5SjyNf3nEpokFRuqt/KjUQoGAwq9qMmhpLTHmXzSIrFRw8zxWzmFBA==} - esm-env@1.2.2: - resolution: {integrity: sha512-Epxrv+Nr/CaL4ZcFGPJIYLWFom+YeV1DqMLHJoEd9SYRxNbaFruBwfEX/kkHUJf55j2+TUbmDcmuilbP1TmXHA==} - espree@10.2.0: resolution: {integrity: sha512-upbkBJbckcCNBDBDXEbuhjbP68n+scUd3k/U2EkyM9nw+I/jPiL4cLF/Al06CF96wRltFda16sxDFrxsI1v0/g==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} @@ -5689,7 +5645,6 @@ packages: gauge@3.0.2: resolution: {integrity: sha512-+5J6MS/5XksCuXq++uFRsnUd7Ovu1XenbeuIuNRJxYWjgQbPuFhT14lAvsWfqfAmnwluf1OwMjz39HjfLPci0Q==} engines: {node: '>=10'} - deprecated: This package is no longer supported. gensync@1.0.0-beta.2: resolution: {integrity: sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==} @@ -5764,9 +5719,6 @@ packages: get-them-args@1.3.2: resolution: {integrity: sha512-LRn8Jlk+DwZE4GTlDbT3Hikd1wSHgLMme/+7ddlqKd7ldwR6LjJgTVWzBnR01wnYGe4KgrXjg287RaI22UHmAw==} - get-tsconfig@4.8.1: - resolution: {integrity: sha512-k9PN+cFBmaLWtVz29SkUoqU5O0slLuHJXt/2P+tMVFT+phsSGXGkp9t3rQIqdz0e+06EHNGs3oM6ZX1s2zHxRg==} - gh-pages@5.0.0: resolution: {integrity: sha512-Nqp1SjkPIB94Xw/3yYNTUL+G2dxlhjvv1zeN/4kMC1jfViTEqhtVz/Ba1zSXHuvXCN9ADNS1dN4r5/J/nZWEQQ==} engines: {node: '>=10'} @@ -5800,12 +5752,10 @@ packages: glob@7.2.3: resolution: {integrity: sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==} - deprecated: Glob versions prior to v9 are no longer supported glob@8.1.0: resolution: {integrity: sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==} engines: {node: '>=12'} - deprecated: Glob versions prior to v9 are no longer supported global-cache-dir@4.4.0: resolution: {integrity: sha512-bk0gI6IbbphRjAaCJJn5H+T/CcEck5B3a5KBO2BXSDzjFSV+API17w8GA7YPJ6IXJiasW8M0VsEIig1PCHdfOQ==} @@ -6087,7 +6037,6 @@ packages: inflight@1.0.6: resolution: {integrity: sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==} - deprecated: This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful. 
inherits@2.0.4: resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} @@ -6348,9 +6297,6 @@ packages: resolution: {integrity: sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==} engines: {node: '>=12'} - is-reference@1.2.1: - resolution: {integrity: sha512-U82MsXXiFIrjCK4otLT+o2NA2Cd2g5MLoOVXUZjIOhLurrRxpEXzI8O0KZHr3IjLvlAH1kTPYSuqer5T9ZVBKQ==} - is-reference@3.0.2: resolution: {integrity: sha512-v3rht/LgVcsdZa3O2Nqs+NMowLOxeOm7Ay9+/ARQ2F+qEoANRcqrjAZKGN0v8ymUetZGgkp26LTnGT7H0Qo9Pg==} @@ -6868,11 +6814,6 @@ packages: engines: {node: '>= 12'} hasBin: true - marked@5.1.2: - resolution: {integrity: sha512-ahRPGXJpjMjwSOlBoTMZAK7ATXkli5qCPxZ21TG44rx1KEo44bii4ekgTDQPNRQ4Kh7JMb9Ub1PVk1NxRSsorg==} - engines: {node: '>= 16'} - hasBin: true - marked@9.1.6: resolution: {integrity: sha512-jcByLnIFkd5gSXZmjNvS1TlmRhCXZjIzHYlaGkPlLIekG55JDR2Z4va9tZwCiP+/RDERiNhMOFu01xd6O5ct1Q==} engines: {node: '>= 16'} @@ -7370,7 +7311,6 @@ packages: npmlog@5.0.1: resolution: {integrity: sha512-AqZtDUWOMKs1G/8lwylVjrdYgqA4d9nu8hc+0gzRxlDb1I10+FHBGMXs6aiQHFdCUUlqH99MUMuLfzWDNDtfxw==} - deprecated: This package is no longer supported. nth-check@2.1.1: resolution: {integrity: sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==} @@ -8336,9 +8276,6 @@ packages: resolution: {integrity: sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==} engines: {node: '>=8'} - resolve-pkg-maps@1.0.0: - resolution: {integrity: sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==} - resolve@1.22.10: resolution: {integrity: sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==} engines: {node: '>= 0.4'} @@ -8385,12 +8322,10 @@ packages: rimraf@2.7.1: resolution: {integrity: sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==} - deprecated: Rimraf versions prior to v4 are no longer supported hasBin: true rimraf@3.0.2: resolution: {integrity: sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==} - deprecated: Rimraf versions prior to v4 are no longer supported hasBin: true rollup@2.79.2: @@ -8869,6 +8804,9 @@ packages: stubborn-fs@1.2.5: resolution: {integrity: sha512-H2N9c26eXjzL/S/K+i/RHHcFanE74dptvvjM8iwzwbVcWY/zjBbgRqF3K0DY4+OD+uTTASTBvDoxPDaPN02D7g==} + style-mod@4.1.2: + resolution: {integrity: sha512-wnD1HyVqpJUI2+eKZ+eo1UwghftP6yuFheBqqe+bWCotBjC2K1YnteJILRMs3SM4V/0dLEW1SC27MWP5y+mwmw==} + style-to-object@1.0.8: resolution: {integrity: sha512-xT47I/Eo0rwJmaXC4oilDGDWLohVhR6o/xAQcPQN8q6QBuZVL8qMYL85kLmST5cPjAorwvqIA4qXTRQoYHaL6g==} @@ -8924,14 +8862,6 @@ packages: peerDependencies: svelte: ^3.55.0 || ^4.0.0-next.0 || ^4.0.0 || ^5.0.0-next.0 - svelte-check@4.2.2: - resolution: {integrity: sha512-1+31EOYZ7NKN0YDMKusav2hhEoA51GD9Ws6o//0SphMT0ve9mBTsTUEX7OmDMadUP3KjNHsSKtJrqdSaD8CrGQ==} - engines: {node: '>= 18.0.0'} - hasBin: true - peerDependencies: - svelte: ^4.0.0 || ^5.0.0-next.0 - typescript: '>=5.0.0' - svelte-eslint-parser@0.43.0: resolution: {integrity: sha512-GpU52uPKKcVnh8tKN5P4UZpJ/fUDndmq7wfsvoVXsyP+aY0anol7Yqo01fyrlaWGMFfm4av5DyrjlaXdLRJvGA==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} @@ -8941,11 +8871,6 @@ packages: svelte: optional: true - svelte-markdown@0.4.1: - resolution: {integrity: 
sha512-pOlLY6EruKJaWI9my/2bKX8PdTeP5CM0s4VMmwmC2prlOkjAf+AOmTM4wW/l19Y6WZ87YmP8+ZCJCCwBChWjYw==} - peerDependencies: - svelte: ^4.0.0 - svelte-preprocess@5.1.4: resolution: {integrity: sha512-IvnbQ6D6Ao3Gg6ftiM5tdbR6aAETwjhHV+UKGf5bHGYR69RQvF1ho0JKPcbUON4vy4R7zom13jPjgdOWCQ5hDA==} engines: {node: '>= 16.0.0'} @@ -9693,14 +9618,6 @@ packages: vite: optional: true - vitefu@1.1.1: - resolution: {integrity: sha512-B/Fegf3i8zh0yFbpzZ21amWzHmuNlLlmJT6n7bu5e+pCHUKQIfXSYokrqOBGEMMe9UG2sostKQF9mml/vYaWJQ==} - peerDependencies: - vite: ^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0-beta.0 - peerDependenciesMeta: - vite: - optional: true - vitest@2.1.3: resolution: {integrity: sha512-Zrxbg/WiIvUP2uEzelDNTXmEMJXuzJ1kCpbDvaKByFA9MNeO95V+7r/3ti0qzJzrxdyuUw5VduN7k+D3VmVOSA==} engines: {node: ^18.0.0 || >=20.0.0} @@ -10023,7 +9940,7 @@ snapshots: '@ampproject/remapping@2.3.0': dependencies: '@jridgewell/gen-mapping': 0.3.5 - '@jridgewell/trace-mapping': 0.3.25 + '@jridgewell/trace-mapping': 0.3.27 '@andrewbranch/untar.js@1.0.3': {} @@ -10088,7 +10005,7 @@ snapshots: '@babel/traverse': 7.25.9 '@babel/types': 7.26.3 convert-source-map: 2.0.0 - debug: 4.4.0(supports-color@9.4.0) + debug: 4.4.0 gensync: 1.0.0-beta.2 json5: 2.2.3 semver: 6.3.1 @@ -10100,7 +10017,7 @@ snapshots: '@babel/parser': 7.26.3 '@babel/types': 7.26.3 '@jridgewell/gen-mapping': 0.3.5 - '@jridgewell/trace-mapping': 0.3.25 + '@jridgewell/trace-mapping': 0.3.27 jsesc: 3.0.2 '@babel/generator@7.27.5': @@ -10173,7 +10090,7 @@ snapshots: '@babel/core': 7.26.0 '@babel/helper-compilation-targets': 7.27.2 '@babel/helper-plugin-utils': 7.27.1 - debug: 4.4.1 + debug: 4.4.1(supports-color@9.4.0) lodash.debounce: 4.0.8 resolve: 1.22.10 transitivePeerDependencies: @@ -10821,7 +10738,7 @@ snapshots: '@babel/parser': 7.26.3 '@babel/template': 7.25.9 '@babel/types': 7.26.3 - debug: 4.4.0(supports-color@9.4.0) + debug: 4.4.1(supports-color@9.4.0) globals: 11.12.0 transitivePeerDependencies: - supports-color @@ -10833,7 +10750,7 @@ snapshots: '@babel/parser': 7.27.7 '@babel/template': 7.27.2 '@babel/types': 7.27.7 - debug: 4.4.1 + debug: 4.4.1(supports-color@9.4.0) globals: 11.12.0 transitivePeerDependencies: - supports-color @@ -10893,6 +10810,64 @@ snapshots: '@bugsnag/safe-json-stringify@6.0.0': {} + '@codemirror/autocomplete@6.18.6': + dependencies: + '@codemirror/language': 6.11.2 + '@codemirror/state': 6.5.2 + '@codemirror/view': 6.38.1 + '@lezer/common': 1.2.3 + + '@codemirror/commands@6.8.1': + dependencies: + '@codemirror/language': 6.11.2 + '@codemirror/state': 6.5.2 + '@codemirror/view': 6.38.1 + '@lezer/common': 1.2.3 + + '@codemirror/lang-json@6.0.2': + dependencies: + '@codemirror/language': 6.11.2 + '@lezer/json': 1.0.3 + + '@codemirror/language@6.11.2': + dependencies: + '@codemirror/state': 6.5.2 + '@codemirror/view': 6.38.1 + '@lezer/common': 1.2.3 + '@lezer/highlight': 1.2.1 + '@lezer/lr': 1.4.2 + style-mod: 4.1.2 + + '@codemirror/lint@6.8.5': + dependencies: + '@codemirror/state': 6.5.2 + '@codemirror/view': 6.38.1 + crelt: 1.0.6 + + '@codemirror/search@6.5.11': + dependencies: + '@codemirror/state': 6.5.2 + '@codemirror/view': 6.38.1 + crelt: 1.0.6 + + '@codemirror/state@6.5.2': + dependencies: + '@marijn/find-cluster-break': 1.0.2 + + '@codemirror/theme-one-dark@6.1.3': + dependencies: + '@codemirror/language': 6.11.2 + '@codemirror/state': 6.5.2 + '@codemirror/view': 6.38.1 + '@lezer/highlight': 1.2.1 + + '@codemirror/view@6.38.1': + dependencies: + '@codemirror/state': 6.5.2 + crelt: 1.0.6 + style-mod: 4.1.2 + w3c-keyname: 2.2.8 + 
'@colors/colors@1.5.0': optional: true @@ -11257,7 +11232,7 @@ snapshots: '@eslint/config-array@0.18.0': dependencies: '@eslint/object-schema': 2.1.4 - debug: 4.4.0(supports-color@9.4.0) + debug: 4.4.1(supports-color@9.4.0) minimatch: 3.1.2 transitivePeerDependencies: - supports-color @@ -11267,7 +11242,7 @@ snapshots: '@eslint/eslintrc@2.1.4': dependencies: ajv: 6.12.6 - debug: 4.4.0(supports-color@9.4.0) + debug: 4.4.0 espree: 9.6.1 globals: 13.24.0 ignore: 5.3.2 @@ -11281,7 +11256,7 @@ snapshots: '@eslint/eslintrc@3.1.0': dependencies: ajv: 6.12.6 - debug: 4.4.0(supports-color@9.4.0) + debug: 4.4.1(supports-color@9.4.0) espree: 10.2.0 globals: 14.0.0 ignore: 5.3.2 @@ -11364,7 +11339,7 @@ snapshots: '@humanwhocodes/config-array@0.13.0': dependencies: '@humanwhocodes/object-schema': 2.0.3 - debug: 4.4.0(supports-color@9.4.0) + debug: 4.4.0 minimatch: 3.1.2 transitivePeerDependencies: - supports-color @@ -11406,8 +11381,8 @@ snapshots: '@jridgewell/gen-mapping@0.3.5': dependencies: '@jridgewell/set-array': 1.2.1 - '@jridgewell/sourcemap-codec': 1.5.0 - '@jridgewell/trace-mapping': 0.3.25 + '@jridgewell/sourcemap-codec': 1.5.2 + '@jridgewell/trace-mapping': 0.3.27 '@jridgewell/resolve-uri@3.1.2': {} @@ -11425,7 +11400,7 @@ snapshots: '@jridgewell/trace-mapping@0.3.25': dependencies: '@jridgewell/resolve-uri': 3.1.2 - '@jridgewell/sourcemap-codec': 1.5.0 + '@jridgewell/sourcemap-codec': 1.5.2 '@jridgewell/trace-mapping@0.3.27': dependencies: @@ -11435,7 +11410,23 @@ snapshots: '@jridgewell/trace-mapping@0.3.9': dependencies: '@jridgewell/resolve-uri': 3.1.2 - '@jridgewell/sourcemap-codec': 1.5.0 + '@jridgewell/sourcemap-codec': 1.5.2 + + '@lezer/common@1.2.3': {} + + '@lezer/highlight@1.2.1': + dependencies: + '@lezer/common': 1.2.3 + + '@lezer/json@1.0.3': + dependencies: + '@lezer/common': 1.2.3 + '@lezer/highlight': 1.2.1 + '@lezer/lr': 1.4.2 + + '@lezer/lr@1.4.2': + dependencies: + '@lezer/common': 1.2.3 '@lukeed/ms@2.0.2': {} @@ -11454,6 +11445,8 @@ snapshots: - encoding - supports-color + '@marijn/find-cluster-break@1.0.2': {} + '@microsoft/api-extractor-model@7.30.0(@types/node@20.17.0)': dependencies: '@microsoft/tsdoc': 0.15.1 @@ -12337,24 +12330,6 @@ snapshots: transitivePeerDependencies: - supports-color - '@rollup/plugin-commonjs@28.0.6(rollup@4.24.0)': - dependencies: - '@rollup/pluginutils': 5.2.0(rollup@4.24.0) - commondir: 1.0.1 - estree-walker: 2.0.2 - fdir: 6.4.4(picomatch@4.0.2) - is-reference: 1.2.1 - magic-string: 0.30.12 - picomatch: 4.0.2 - optionalDependencies: - rollup: 4.24.0 - - '@rollup/plugin-json@6.1.0(rollup@4.24.0)': - dependencies: - '@rollup/pluginutils': 5.2.0(rollup@4.24.0) - optionalDependencies: - rollup: 4.24.0 - '@rollup/plugin-node-resolve@15.3.1(rollup@2.79.2)': dependencies: '@rollup/pluginutils': 5.2.0(rollup@2.79.2) @@ -12365,16 +12340,6 @@ snapshots: optionalDependencies: rollup: 2.79.2 - '@rollup/plugin-node-resolve@16.0.1(rollup@4.24.0)': - dependencies: - '@rollup/pluginutils': 5.2.0(rollup@4.24.0) - '@types/resolve': 1.20.2 - deepmerge: 4.3.1 - is-module: 1.0.0 - resolve: 1.22.10 - optionalDependencies: - rollup: 4.24.0 - '@rollup/plugin-replace@2.4.2(rollup@2.79.2)': dependencies: '@rollup/pluginutils': 3.1.0(rollup@2.79.2) @@ -12409,14 +12374,6 @@ snapshots: optionalDependencies: rollup: 2.79.2 - '@rollup/pluginutils@5.2.0(rollup@4.24.0)': - dependencies: - '@types/estree': 1.0.8 - estree-walker: 2.0.2 - picomatch: 4.0.2 - optionalDependencies: - rollup: 4.24.0 - '@rollup/rollup-android-arm-eabi@4.24.0': optional: true @@ -12521,47 
+12478,11 @@ snapshots: magic-string: 0.25.9 string.prototype.matchall: 4.0.12 - '@sveltejs/acorn-typescript@1.0.5(acorn@8.15.0)': - dependencies: - acorn: 8.15.0 - '@sveltejs/adapter-auto@3.3.0(@sveltejs/kit@2.7.2(@sveltejs/vite-plugin-svelte@4.0.0(svelte@5.1.4)(vite@5.4.10(@types/node@20.17.0)(terser@5.43.1)))(svelte@5.1.4)(vite@5.4.10(@types/node@20.17.0)(terser@5.43.1)))': dependencies: '@sveltejs/kit': 2.7.2(@sveltejs/vite-plugin-svelte@4.0.0(svelte@5.1.4)(vite@5.4.10(@types/node@20.17.0)(terser@5.43.1)))(svelte@5.1.4)(vite@5.4.10(@types/node@20.17.0)(terser@5.43.1)) import-meta-resolve: 4.1.0 - '@sveltejs/adapter-auto@3.3.1(@sveltejs/kit@2.22.2(@sveltejs/vite-plugin-svelte@4.0.0(svelte@5.1.4)(vite@5.4.10(@types/node@20.17.0)(terser@5.43.1)))(svelte@5.1.4)(vite@5.4.10(@types/node@20.17.0)(terser@5.43.1)))': - dependencies: - '@sveltejs/kit': 2.22.2(@sveltejs/vite-plugin-svelte@4.0.0(svelte@5.1.4)(vite@5.4.10(@types/node@20.17.0)(terser@5.43.1)))(svelte@5.1.4)(vite@5.4.10(@types/node@20.17.0)(terser@5.43.1)) - import-meta-resolve: 4.1.0 - - '@sveltejs/adapter-node@5.2.12(@sveltejs/kit@2.22.2(@sveltejs/vite-plugin-svelte@4.0.0(svelte@5.1.4)(vite@5.4.10(@types/node@20.17.0)(terser@5.43.1)))(svelte@5.1.4)(vite@5.4.10(@types/node@20.17.0)(terser@5.43.1)))': - dependencies: - '@rollup/plugin-commonjs': 28.0.6(rollup@4.24.0) - '@rollup/plugin-json': 6.1.0(rollup@4.24.0) - '@rollup/plugin-node-resolve': 16.0.1(rollup@4.24.0) - '@sveltejs/kit': 2.22.2(@sveltejs/vite-plugin-svelte@4.0.0(svelte@5.1.4)(vite@5.4.10(@types/node@20.17.0)(terser@5.43.1)))(svelte@5.1.4)(vite@5.4.10(@types/node@20.17.0)(terser@5.43.1)) - rollup: 4.24.0 - - '@sveltejs/kit@2.22.2(@sveltejs/vite-plugin-svelte@4.0.0(svelte@5.1.4)(vite@5.4.10(@types/node@20.17.0)(terser@5.43.1)))(svelte@5.1.4)(vite@5.4.10(@types/node@20.17.0)(terser@5.43.1))': - dependencies: - '@sveltejs/acorn-typescript': 1.0.5(acorn@8.15.0) - '@sveltejs/vite-plugin-svelte': 4.0.0(svelte@5.1.4)(vite@5.4.10(@types/node@20.17.0)(terser@5.43.1)) - '@types/cookie': 0.6.0 - acorn: 8.15.0 - cookie: 0.6.0 - devalue: 5.1.1 - esm-env: 1.2.2 - kleur: 4.1.5 - magic-string: 0.30.12 - mrmime: 2.0.0 - sade: 1.8.1 - set-cookie-parser: 2.7.1 - sirv: 3.0.0 - svelte: 5.1.4 - vite: 5.4.10(@types/node@20.17.0)(terser@5.43.1) - vitefu: 1.1.1(vite@5.4.10(@types/node@20.17.0)(terser@5.43.1)) - '@sveltejs/kit@2.7.2(@sveltejs/vite-plugin-svelte@4.0.0(svelte@5.1.4)(vite@5.4.10(@types/node@20.17.0)(terser@5.43.1)))(svelte@5.1.4)(vite@5.4.10(@types/node@20.17.0)(terser@5.43.1))': dependencies: '@sveltejs/vite-plugin-svelte': 4.0.0(svelte@5.1.4)(vite@5.4.10(@types/node@20.17.0)(terser@5.43.1)) @@ -12594,7 +12515,7 @@ snapshots: '@sveltejs/vite-plugin-svelte-inspector@3.0.1(@sveltejs/vite-plugin-svelte@4.0.0(svelte@5.1.4)(vite@5.4.10(@types/node@20.17.0)(terser@5.43.1)))(svelte@5.1.4)(vite@5.4.10(@types/node@20.17.0)(terser@5.43.1))': dependencies: '@sveltejs/vite-plugin-svelte': 4.0.0(svelte@5.1.4)(vite@5.4.10(@types/node@20.17.0)(terser@5.43.1)) - debug: 4.4.0(supports-color@9.4.0) + debug: 4.4.1(supports-color@9.4.0) svelte: 5.1.4 vite: 5.4.10(@types/node@20.17.0)(terser@5.43.1) transitivePeerDependencies: @@ -12603,7 +12524,7 @@ snapshots: '@sveltejs/vite-plugin-svelte@4.0.0(svelte@5.1.4)(vite@5.4.10(@types/node@20.17.0)(terser@5.43.1))': dependencies: '@sveltejs/vite-plugin-svelte-inspector': 3.0.1(@sveltejs/vite-plugin-svelte@4.0.0(svelte@5.1.4)(vite@5.4.10(@types/node@20.17.0)(terser@5.43.1)))(svelte@5.1.4)(vite@5.4.10(@types/node@20.17.0)(terser@5.43.1)) - debug: 
4.4.0(supports-color@9.4.0) + debug: 4.4.1(supports-color@9.4.0) deepmerge: 4.3.1 kleur: 4.1.5 magic-string: 0.30.12 @@ -13041,12 +12962,12 @@ snapshots: '@types/eslint@9.6.1': dependencies: - '@types/estree': 1.0.6 + '@types/estree': 1.0.8 '@types/json-schema': 7.0.15 '@types/estree-jsx@1.0.5': dependencies: - '@types/estree': 1.0.6 + '@types/estree': 1.0.8 '@types/estree@0.0.39': {} @@ -13105,8 +13026,6 @@ snapshots: '@types/linkify-it': 5.0.0 '@types/mdurl': 2.0.0 - '@types/marked@5.0.2': {} - '@types/mdast@4.0.4': dependencies: '@types/unist': 3.0.3 @@ -13244,7 +13163,7 @@ snapshots: '@typescript-eslint/types': 8.11.0 '@typescript-eslint/typescript-estree': 8.11.0(typescript@5.6.3) '@typescript-eslint/visitor-keys': 8.11.0 - debug: 4.4.0(supports-color@9.4.0) + debug: 4.4.1(supports-color@9.4.0) eslint: 9.13.0(jiti@2.3.3) optionalDependencies: typescript: 5.6.3 @@ -13265,7 +13184,7 @@ snapshots: dependencies: '@typescript-eslint/typescript-estree': 7.18.0(typescript@5.6.3) '@typescript-eslint/utils': 7.18.0(eslint@8.57.1)(typescript@5.6.3) - debug: 4.4.0(supports-color@9.4.0) + debug: 4.4.0 eslint: 8.57.1 ts-api-utils: 1.3.0(typescript@5.6.3) optionalDependencies: @@ -13277,7 +13196,7 @@ snapshots: dependencies: '@typescript-eslint/typescript-estree': 8.11.0(typescript@5.6.3) '@typescript-eslint/utils': 8.11.0(eslint@9.13.0(jiti@2.3.3))(typescript@5.6.3) - debug: 4.4.0(supports-color@9.4.0) + debug: 4.4.1(supports-color@9.4.0) ts-api-utils: 1.3.0(typescript@5.6.3) optionalDependencies: typescript: 5.6.3 @@ -13295,7 +13214,7 @@ snapshots: dependencies: '@typescript-eslint/types': 5.62.0 '@typescript-eslint/visitor-keys': 5.62.0 - debug: 4.4.0(supports-color@9.4.0) + debug: 4.4.1(supports-color@9.4.0) globby: 11.1.0 is-glob: 4.0.3 semver: 7.7.2 @@ -13309,7 +13228,7 @@ snapshots: dependencies: '@typescript-eslint/types': 7.18.0 '@typescript-eslint/visitor-keys': 7.18.0 - debug: 4.4.0(supports-color@9.4.0) + debug: 4.4.0 globby: 11.1.0 is-glob: 4.0.3 minimatch: 9.0.5 @@ -13324,7 +13243,7 @@ snapshots: dependencies: '@typescript-eslint/types': 8.11.0 '@typescript-eslint/visitor-keys': 8.11.0 - debug: 4.4.0(supports-color@9.4.0) + debug: 4.4.1(supports-color@9.4.0) fast-glob: 3.3.2 is-glob: 4.0.3 minimatch: 9.0.5 @@ -13372,14 +13291,55 @@ snapshots: '@typescript-eslint/types': 8.11.0 eslint-visitor-keys: 3.4.3 + '@uiw/codemirror-extensions-basic-setup@4.24.1(@codemirror/autocomplete@6.18.6)(@codemirror/commands@6.8.1)(@codemirror/language@6.11.2)(@codemirror/lint@6.8.5)(@codemirror/search@6.5.11)(@codemirror/state@6.5.2)(@codemirror/view@6.38.1)': + dependencies: + '@codemirror/autocomplete': 6.18.6 + '@codemirror/commands': 6.8.1 + '@codemirror/language': 6.11.2 + '@codemirror/lint': 6.8.5 + '@codemirror/search': 6.5.11 + '@codemirror/state': 6.5.2 + '@codemirror/view': 6.38.1 + + '@uiw/codemirror-theme-github@4.24.1(@codemirror/language@6.11.2)(@codemirror/state@6.5.2)(@codemirror/view@6.38.1)': + dependencies: + '@uiw/codemirror-themes': 4.24.1(@codemirror/language@6.11.2)(@codemirror/state@6.5.2)(@codemirror/view@6.38.1) + transitivePeerDependencies: + - '@codemirror/language' + - '@codemirror/state' + - '@codemirror/view' + + '@uiw/codemirror-themes@4.24.1(@codemirror/language@6.11.2)(@codemirror/state@6.5.2)(@codemirror/view@6.38.1)': + dependencies: + '@codemirror/language': 6.11.2 + '@codemirror/state': 6.5.2 + '@codemirror/view': 6.38.1 + + 
'@uiw/react-codemirror@4.24.1(@babel/runtime@7.27.6)(@codemirror/autocomplete@6.18.6)(@codemirror/language@6.11.2)(@codemirror/lint@6.8.5)(@codemirror/search@6.5.11)(@codemirror/state@6.5.2)(@codemirror/theme-one-dark@6.1.3)(@codemirror/view@6.38.1)(codemirror@6.0.2)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)': + dependencies: + '@babel/runtime': 7.27.6 + '@codemirror/commands': 6.8.1 + '@codemirror/state': 6.5.2 + '@codemirror/theme-one-dark': 6.1.3 + '@codemirror/view': 6.38.1 + '@uiw/codemirror-extensions-basic-setup': 4.24.1(@codemirror/autocomplete@6.18.6)(@codemirror/commands@6.8.1)(@codemirror/language@6.11.2)(@codemirror/lint@6.8.5)(@codemirror/search@6.5.11)(@codemirror/state@6.5.2)(@codemirror/view@6.38.1) + codemirror: 6.0.2 + react: 19.0.0 + react-dom: 19.0.0(react@19.0.0) + transitivePeerDependencies: + - '@codemirror/autocomplete' + - '@codemirror/language' + - '@codemirror/lint' + - '@codemirror/search' + '@ungap/structured-clone@1.2.0': {} '@vercel/nft@0.27.5(supports-color@9.4.0)': dependencies: '@mapbox/node-pre-gyp': 1.0.11(supports-color@9.4.0) '@rollup/pluginutils': 4.2.1 - acorn: 8.14.1 - acorn-import-attributes: 1.9.5(acorn@8.14.1) + acorn: 8.15.0 + acorn-import-attributes: 1.9.5(acorn@8.15.0) async-sema: 3.1.1 bindings: 1.5.0 estree-walker: 2.0.2 @@ -13509,21 +13469,25 @@ snapshots: mime-types: 2.1.35 negotiator: 0.6.3 - acorn-import-attributes@1.9.5(acorn@8.14.1): + acorn-import-attributes@1.9.5(acorn@8.15.0): dependencies: - acorn: 8.14.1 + acorn: 8.15.0 acorn-jsx@5.3.2(acorn@8.14.1): dependencies: acorn: 8.14.1 + acorn-jsx@5.3.2(acorn@8.15.0): + dependencies: + acorn: 8.15.0 + acorn-typescript@1.4.13(acorn@8.13.0): dependencies: acorn: 8.13.0 acorn-walk@8.3.4: dependencies: - acorn: 8.14.1 + acorn: 8.15.0 acorn@8.13.0: {} @@ -13533,13 +13497,13 @@ snapshots: agent-base@6.0.2(supports-color@9.4.0): dependencies: - debug: 4.4.0(supports-color@9.4.0) + debug: 4.4.1(supports-color@9.4.0) transitivePeerDependencies: - supports-color agent-base@7.1.1: dependencies: - debug: 4.4.0(supports-color@9.4.0) + debug: 4.4.1(supports-color@9.4.0) transitivePeerDependencies: - supports-color @@ -14282,6 +14246,16 @@ snapshots: clsx@2.1.1: {} + codemirror@6.0.2: + dependencies: + '@codemirror/autocomplete': 6.18.6 + '@codemirror/commands': 6.8.1 + '@codemirror/language': 6.11.2 + '@codemirror/lint': 6.8.5 + '@codemirror/search': 6.5.11 + '@codemirror/state': 6.5.2 + '@codemirror/view': 6.38.1 + color-convert@1.9.3: dependencies: color-name: 1.1.3 @@ -14632,15 +14606,15 @@ snapshots: dependencies: ms: 2.1.3 - debug@4.4.0(supports-color@9.4.0): + debug@4.4.0: dependencies: ms: 2.1.3 - optionalDependencies: - supports-color: 9.4.0 - debug@4.4.1: + debug@4.4.1(supports-color@9.4.0): dependencies: ms: 2.1.3 + optionalDependencies: + supports-color: 9.4.0 decache@4.6.2: dependencies: @@ -15307,7 +15281,7 @@ snapshots: eslint-plugin-svelte@2.46.0(eslint@9.13.0(jiti@2.3.3))(svelte@5.1.4)(ts-node@10.9.2(@swc/core@1.7.39)(@types/node@20.17.0)(typescript@5.6.3)): dependencies: '@eslint-community/eslint-utils': 4.4.0(eslint@9.13.0(jiti@2.3.3)) - '@jridgewell/sourcemap-codec': 1.5.0 + '@jridgewell/sourcemap-codec': 1.5.2 eslint: 9.13.0(jiti@2.3.3) eslint-compat-utils: 0.5.1(eslint@9.13.0(jiti@2.3.3)) esutils: 2.0.3 @@ -15392,12 +15366,12 @@ snapshots: '@humanfs/node': 0.16.5 '@humanwhocodes/module-importer': 1.0.1 '@humanwhocodes/retry': 0.3.1 - '@types/estree': 1.0.6 + '@types/estree': 1.0.8 '@types/json-schema': 7.0.15 ajv: 6.12.6 chalk: 4.1.2 cross-spawn: 7.0.3 - debug: 
4.4.0(supports-color@9.4.0) + debug: 4.4.1(supports-color@9.4.0) escape-string-regexp: 4.0.0 eslint-scope: 8.1.0 eslint-visitor-keys: 4.1.0 @@ -15424,12 +15398,10 @@ snapshots: esm-env@1.0.0: {} - esm-env@1.2.2: {} - espree@10.2.0: dependencies: - acorn: 8.14.1 - acorn-jsx: 5.3.2(acorn@8.14.1) + acorn: 8.15.0 + acorn-jsx: 5.3.2(acorn@8.15.0) eslint-visitor-keys: 4.1.0 espree@9.6.1: @@ -15446,8 +15418,8 @@ snapshots: esrap@1.2.2: dependencies: - '@jridgewell/sourcemap-codec': 1.5.0 - '@types/estree': 1.0.6 + '@jridgewell/sourcemap-codec': 1.5.2 + '@types/estree': 1.0.8 esrecurse@4.3.0: dependencies: @@ -15463,7 +15435,7 @@ snapshots: estree-walker@3.0.3: dependencies: - '@types/estree': 1.0.6 + '@types/estree': 1.0.8 esutils@2.0.3: {} @@ -15586,7 +15558,7 @@ snapshots: extract-zip@2.0.1: dependencies: - debug: 4.4.0(supports-color@9.4.0) + debug: 4.4.1(supports-color@9.4.0) get-stream: 5.2.0 yauzl: 2.10.0 optionalDependencies: @@ -16016,10 +15988,6 @@ snapshots: get-them-args@1.3.2: {} - get-tsconfig@4.8.1: - dependencies: - resolve-pkg-maps: 1.0.0 - gh-pages@5.0.0: dependencies: async: 3.2.6 @@ -16223,7 +16191,7 @@ snapshots: hast-util-to-jsx-runtime@2.3.2: dependencies: - '@types/estree': 1.0.6 + '@types/estree': 1.0.8 '@types/hast': 3.0.4 '@types/unist': 3.0.3 comma-separated-tokens: 2.0.3 @@ -16315,14 +16283,14 @@ snapshots: https-proxy-agent@5.0.1(supports-color@9.4.0): dependencies: agent-base: 6.0.2(supports-color@9.4.0) - debug: 4.4.0(supports-color@9.4.0) + debug: 4.4.1(supports-color@9.4.0) transitivePeerDependencies: - supports-color https-proxy-agent@7.0.5: dependencies: agent-base: 7.1.1 - debug: 4.4.0(supports-color@9.4.0) + debug: 4.4.1(supports-color@9.4.0) transitivePeerDependencies: - supports-color @@ -16644,13 +16612,9 @@ snapshots: is-plain-obj@4.1.0: {} - is-reference@1.2.1: - dependencies: - '@types/estree': 1.0.8 - is-reference@3.0.2: dependencies: - '@types/estree': 1.0.6 + '@types/estree': 1.0.8 is-regex@1.1.4: dependencies: @@ -17136,7 +17100,7 @@ snapshots: magic-string@0.30.12: dependencies: - '@jridgewell/sourcemap-codec': 1.5.0 + '@jridgewell/sourcemap-codec': 1.5.2 make-cancellable-promise@1.3.2: {} @@ -17180,8 +17144,6 @@ snapshots: marked@4.3.0: {} - marked@5.1.2: {} - marked@9.1.6: {} math-intrinsics@1.1.0: {} @@ -17556,7 +17518,7 @@ snapshots: micromark@4.0.0: dependencies: '@types/debug': 4.1.12 - debug: 4.4.0(supports-color@9.4.0) + debug: 4.4.1(supports-color@9.4.0) decode-named-character-reference: 1.0.2 devlop: 1.1.0 micromark-core-commonmark: 2.0.1 @@ -17650,7 +17612,7 @@ snapshots: mlly@1.7.2: dependencies: - acorn: 8.14.1 + acorn: 8.15.0 pathe: 1.1.2 pkg-types: 1.2.1 ufo: 1.5.4 @@ -19046,8 +19008,6 @@ snapshots: resolve-from@5.0.0: {} - resolve-pkg-maps@1.0.0: {} - resolve@1.22.10: dependencies: is-core-module: 2.16.1 @@ -19413,7 +19373,7 @@ snapshots: sorcery@0.11.1: dependencies: - '@jridgewell/sourcemap-codec': 1.5.0 + '@jridgewell/sourcemap-codec': 1.5.2 buffer-crc32: 1.0.0 minimist: 1.2.8 sander: 0.5.1 @@ -19676,6 +19636,8 @@ snapshots: stubborn-fs@1.2.5: {} + style-mod@4.1.2: {} + style-to-object@1.0.8: dependencies: inline-style-parser: 0.2.4 @@ -19754,18 +19716,6 @@ snapshots: - stylus - sugarss - svelte-check@4.2.2(picomatch@4.0.2)(svelte@5.1.4)(typescript@5.6.3): - dependencies: - '@jridgewell/trace-mapping': 0.3.27 - chokidar: 4.0.1 - fdir: 6.4.4(picomatch@4.0.2) - picocolors: 1.1.1 - sade: 1.8.1 - svelte: 5.1.4 - typescript: 5.6.3 - transitivePeerDependencies: - - picomatch - svelte-eslint-parser@0.43.0(svelte@5.1.4): dependencies: 
eslint-scope: 7.2.2 @@ -19776,12 +19726,6 @@ snapshots: optionalDependencies: svelte: 5.1.4 - svelte-markdown@0.4.1(svelte@5.1.4): - dependencies: - '@types/marked': 5.0.2 - marked: 5.1.2 - svelte: 5.1.4 - svelte-preprocess@5.1.4(@babel/core@7.26.0)(postcss-load-config@3.1.4(postcss@8.4.47)(ts-node@10.9.2(@swc/core@1.7.39)(@types/node@20.17.0)(typescript@5.6.3)))(postcss@8.4.47)(svelte@5.1.4)(typescript@5.6.3): dependencies: '@types/pug': 2.0.10 @@ -19838,7 +19782,7 @@ snapshots: tabtab@3.0.2: dependencies: - debug: 4.4.0(supports-color@9.4.0) + debug: 4.4.1(supports-color@9.4.0) es6-promisify: 6.1.1 inquirer: 6.5.2 minimist: 1.2.8 @@ -20057,7 +20001,7 @@ snapshots: '@tsconfig/node14': 1.0.3 '@tsconfig/node16': 1.0.4 '@types/node': 20.17.0 - acorn: 8.14.1 + acorn: 8.15.0 acorn-walk: 8.3.4 arg: 4.1.3 create-require: 1.1.1 @@ -20477,7 +20421,7 @@ snapshots: vite-node@2.1.3(@types/node@20.17.0)(terser@5.43.1): dependencies: cac: 6.7.14 - debug: 4.4.0(supports-color@9.4.0) + debug: 4.4.1(supports-color@9.4.0) pathe: 1.1.2 vite: 5.4.10(@types/node@20.17.0)(terser@5.43.1) transitivePeerDependencies: @@ -20534,10 +20478,6 @@ snapshots: optionalDependencies: vite: 5.4.10(@types/node@20.17.0)(terser@5.43.1) - vitefu@1.1.1(vite@5.4.10(@types/node@20.17.0)(terser@5.43.1)): - optionalDependencies: - vite: 5.4.10(@types/node@20.17.0)(terser@5.43.1) - vitest@2.1.3(@types/node@20.17.0)(terser@5.43.1): dependencies: '@vitest/expect': 2.1.3 @@ -20582,7 +20522,7 @@ snapshots: dependencies: chalk: 4.1.2 commander: 9.5.0 - debug: 4.4.0(supports-color@9.4.0) + debug: 4.4.1(supports-color@9.4.0) transitivePeerDependencies: - supports-color diff --git a/browser/pnpm-workspace.yaml b/browser/pnpm-workspace.yaml index 160940d17..6e394c167 100644 --- a/browser/pnpm-workspace.yaml +++ b/browser/pnpm-workspace.yaml @@ -3,3 +3,4 @@ packages: - '*/**' # exclude packages that are inside test directories - '!create-template/templates/**' + - '!e2e/template-tests/**' diff --git a/browser/react/src/hooks.ts b/browser/react/src/hooks.ts index 6bb8de142..bbe15151d 100644 --- a/browser/react/src/hooks.ts +++ b/browser/react/src/hooks.ts @@ -362,6 +362,10 @@ const titleHookOpts: useValueOptions = { commit: true, }; +const setTitleError = () => { + throw new Error('Cannot set title of resource with error'); +}; + /** * Returns the most fitting title / name for a Resource. This is either the * Name, Shortname, Filename or truncated Subject URL of that resource. 
@@ -383,6 +387,10 @@ export function useTitle( opts, ); + if (resource.error) { + return [truncateUrl(resource.subject, truncateLength), setTitleError]; + } + if (resource.loading) { return ['...', setName]; } diff --git a/cli/src/main.rs b/cli/src/main.rs index 0257294cc..7d6a885ef 100644 --- a/cli/src/main.rs +++ b/cli/src/main.rs @@ -1,6 +1,8 @@ +use atomic_lib::agents::Agent; +use atomic_lib::config::Config; +use atomic_lib::config::{ClientConfig, SharedConfig}; +use atomic_lib::mapping::Mapping; use atomic_lib::serialize::Format; -use atomic_lib::{agents::generate_public_key, mapping::Mapping}; -use atomic_lib::{agents::Agent, config::Config}; use atomic_lib::{errors::AtomicResult, Storelike}; use clap::{crate_version, Parser, Subcommand, ValueEnum}; use colored::*; @@ -159,14 +161,11 @@ impl Context { let write_ctx = set_agent_config().expect("Issue while generating write context / agent configuration"); self.write.borrow_mut().replace(write_ctx.clone()); - self.store.set_default_agent(Agent { - subject: write_ctx.agent.clone(), - private_key: Some(write_ctx.private_key.clone()), - created_at: atomic_lib::utils::now(), - name: None, - public_key: generate_public_key(&write_ctx.private_key).public, - }); - self.store.set_server_url(&write_ctx.server); + let agent = Agent::from_secret(&write_ctx.shared.agent_secret).unwrap(); + self.store.set_default_agent(agent); + self.store + .set_server_url(&write_ctx.client.clone().unwrap().server_url); + write_ctx } } @@ -175,27 +174,45 @@ impl Context { fn set_agent_config() -> CLIResult { let agent_config_path = atomic_lib::config::default_config_file_path()?; match atomic_lib::config::read_config(Some(&agent_config_path)) { - Ok(found) => Ok(found), + Ok(found) => { + prompt_for_missing_config_values(&found)?; + Ok(found) + } Err(_e) => { println!( "No config found at {:?}. 
Let's create one!", &agent_config_path ); let server = promptly::prompt("What's the base url of your Atomic Server?")?; - let agent = promptly::prompt("What's the URL of your Agent?")?; - let private_key = promptly::prompt("What's the private key of this Agent?")?; + let agent_secret = promptly::prompt("Enter your agent secret")?; let config = atomic_lib::config::Config { - server, - agent, - private_key, + shared: SharedConfig { agent_secret }, + client: Some(ClientConfig { server_url: server }), }; - atomic_lib::config::write_config(&agent_config_path, config.clone())?; + config.save(&agent_config_path)?; println!("New config file created at {:?}", agent_config_path); Ok(config) } } } +fn prompt_for_missing_config_values(config: &Config) -> AtomicResult { + if config.client.is_none() { + println!("No server url found in config."); + let server = promptly::prompt("What's the base url of your Atomic Server?") + .map_err(|e| format!("Invalid input: {}", e))?; + let config = Config { + client: Some(ClientConfig { server_url: server }), + ..config.clone() + }; + config.save(&atomic_lib::config::default_config_file_path()?)?; + + return Ok(config); + } + + Ok(config.clone()) +} + fn main() -> AtomicResult<()> { let cli = Cli::parse(); @@ -285,8 +302,9 @@ fn exec_command(context: &mut Context) -> AtomicResult<()> { validate(context); } Commands::Agent => { - let agent = context.read_config(); - println!("{}", agent.agent); + let config = context.read_config(); + let agent = Agent::from_secret(&config.shared.agent_secret).unwrap(); + println!("{}", agent.subject); } }; Ok(()) diff --git a/cli/src/new.rs b/cli/src/new.rs index 70f8bf7d5..22694fac6 100644 --- a/cli/src/new.rs +++ b/cli/src/new.rs @@ -1,6 +1,7 @@ //! Creating a new resource. Provides prompting logic use crate::{CLIResult, Context}; use atomic_lib::mapping; +use atomic_lib::utils::{check_valid_json, check_valid_uri}; use atomic_lib::{ datatype::DataType, errors::AtomicResult, @@ -45,11 +46,15 @@ fn prompt_instance( // I think URL generation could be better, though. 
Perhaps use a let path = SystemTime::now().duration_since(UNIX_EPOCH)?.subsec_nanos(); - let write_ctx = context.read_config(); + let config = context.read_config(); - let mut subject = format!("{}/{}", write_ctx.server, path); + let Some(client_config) = config.client else { + return Err("No client config found".into()); + }; + + let mut subject = format!("{}/{}", client_config.server_url, path); if let Some(sn) = &preferred_shortname { - subject = format!("{}/{}-{}", write_ctx.server, path, sn); + subject = format!("{}/{}-{}", client_config.server_url, path, sn); } let mut new_resource: Resource = Resource::new(subject.clone()); @@ -118,7 +123,6 @@ fn prompt_field( optional: bool, context: &Context, ) -> CLIResult> { - let mut input: Option = None; let msg_appendix: &str = if optional { " (optional)" } else { @@ -127,12 +131,11 @@ fn prompt_field( match &property.data_type { DataType::String | DataType::Markdown => { let msg = format!("string{}", msg_appendix); - input = prompt_opt(msg)?; - return Ok(input); + return Ok(prompt_opt(msg)?); } DataType::Slug => { let msg = format!("slug{}", msg_appendix); - input = prompt_opt(msg)?; + let input: Option = prompt_opt(msg)?; let re = Regex::new(atomic_lib::values::SLUG_REGEX)?; match input { Some(slug) => { @@ -145,12 +148,32 @@ fn prompt_field( None => return Ok(None), } } + DataType::Uri => { + let msg = format!("URI{}", msg_appendix); + + let input: Option = prompt_opt(msg)?; + let Some(uri) = input else { + return Ok(None); + }; + + check_valid_uri(&uri).unwrap(); + return Ok(Some(uri)); + } + DataType::JSON => { + let msg = format!("JSON{}", msg_appendix); + let Some(json) = prompt_opt::(msg)? else { + return Ok(None); + }; + + check_valid_json(&json).unwrap(); + return Ok(Some(json)); + } DataType::Integer => { let msg = format!("integer{}", msg_appendix); let number: Option = prompt_opt(msg)?; match number { Some(nr) => { - input = Some(nr.to_string()); + return Ok(Some(nr.to_string())); } None => return Ok(None), } @@ -160,7 +183,7 @@ fn prompt_field( let number: Option = prompt_opt(msg)?; match number { Some(nr) => { - input = Some(nr.to_string()); + return Ok(Some(nr.to_string())); } None => return Ok(None), } @@ -172,8 +195,7 @@ fn prompt_field( match date { Some(date_val) => { if re.is_match(&date_val) { - input = Some(date_val); - return Ok(input); + return Ok(Some(date_val)); } println!("Not a valid date."); return Ok(None); @@ -197,8 +219,7 @@ fn prompt_field( // If a classtype is present, the given URL must be an instance of that Class if let Some(u) = url { // TODO: Check if string or if map - input = context.mapping.lock().unwrap().try_mapping_or_url(&u); - match input { + match context.mapping.lock().unwrap().try_mapping_or_url(&u) { Some(url) => return Ok(Some(url)), None => { println!("Shortname not found, try again."); @@ -209,7 +230,7 @@ fn prompt_field( }, DataType::ResourceArray => loop { let msg = format!( - "resource array - Add the URLs or Shortnames, separated by spacebars{}", + "resource array - Add the URLs or Shortnames, separated by spaces{}", msg_appendix ); let option_string: Option = prompt_opt(msg).unwrap(); @@ -244,11 +265,12 @@ fn prompt_field( } } if length == urls.len() { - input = Some(atomic_lib::serialize::serialize_json_array(&urls).unwrap()); - break; + return Ok(Some( + atomic_lib::serialize::serialize_json_array(&urls).unwrap(), + )); } } - None => break, + None => return Ok(None), } }, DataType::Timestamp => { @@ -256,7 +278,7 @@ fn prompt_field( let number: Option = prompt_opt(msg)?; match 
number { Some(nr) => { - input = Some(nr.to_string()); + return Ok(Some(nr.to_string())); } None => return Ok(None), } @@ -269,7 +291,7 @@ fn prompt_field( let string: Option = prompt_opt(msg)?; match string { Some(nr) => { - input = Some(nr); + return Ok(Some(nr.to_string())); } None => return Ok(None), } @@ -288,7 +310,6 @@ fn prompt_field( } } }; - Ok(input) } // Asks for and saves the bookmark. Returns the shortname. diff --git a/cli/tests/tests.rs b/cli/tests/tests.rs index e823c3ebb..8a58754fa 100644 --- a/cli/tests/tests.rs +++ b/cli/tests/tests.rs @@ -5,28 +5,14 @@ mod test { const TEST_URL: &str = "https://atomicdata.dev/agents/QmfpRIBn2JYEatT0MjSkMNoBJzstz19orwnT5oT2rcQ="; - #[test] - fn get_fail() { - let mut cmd = Command::cargo_bin(assert_cmd::crate_name!()).unwrap(); - cmd.args(["get", "random-non-existent-shortname"]) - .assert() - .failure(); - } - + #[ignore] #[test] fn get_url() { let mut cmd = Command::cargo_bin(assert_cmd::crate_name!()).unwrap(); cmd.args(["get", TEST_URL]).assert().success(); } - #[test] - fn get_path_array_non_existent() { - let mut cmd = Command::cargo_bin(assert_cmd::crate_name!()).unwrap(); - cmd.args(["get", &format!("{TEST_URL} is-a 1")]) - .assert() - .failure(); - } - + #[ignore] #[test] fn search() { let parent = "https://atomicdata.dev/ontology/core"; diff --git a/docs/.gitignore b/docs/.gitignore index 41d4f2ac9..8b4b3704b 100644 --- a/docs/.gitignore +++ b/docs/.gitignore @@ -1,2 +1,3 @@ /book .DS_Store +/build diff --git a/docs/src/core/json-ad.md b/docs/src/core/json-ad.md index c69bf13bb..11913ec20 100644 --- a/docs/src/core/json-ad.md +++ b/docs/src/core/json-ad.md @@ -6,19 +6,31 @@ It is what the current [Rust](https://github.com/atomicdata-dev/atomic-data-brow It is designed to feel familiar to developers and to be easy and performant to parse and serialize. It is inspired by [JSON-LD](https://json-ld.org/). -It uses [JSON](https://www.ecma-international.org/publications-and-standards/standards/ecma-404/), but has some additional constraints: +It is [JSON](https://www.ecma-international.org/publications-and-standards/standards/ecma-404/) with the additional constraint that the root data structure must either be a Named Resource (with an `@id`), or an Array containing Named Resources. -- Every single Object is a `Resource`. -- Every Key is a [`Property`](https://atomicdata.dev/classes/Property) URL. Other keys are invalid. Each Property URL must resolve to an online Atomic Data Property. -- The `@id` field is special: it defines the `Subject` of the `Resource`. If you send an HTTP GET request there with an `content-type: application/ad+json` header, you should get the full JSON-AD resource. -- JSON arrays are mapped to [Resource Arrays](https://atomicdata.dev/datatypes/resourceArray) -- Numbers can be [Integers](https://atomicdata.dev/datatypes/integer), [Timestamps](https://atomicdata.dev/datatypes/timestamp) or [Floats](https://atomicdata.dev/datatypes/float). -- JSON booleans map to [Booleans](https://atomicdata.dev/datatypes/boolean). -- JSON strings can be many datatypes, including [String](https://atomicdata.dev/datatypes/string), [Markdown](https://atomicdata.dev/datatypes/markdown), [Date](https://atomicdata.dev/datatypes/date) or other. -- Nested JSON Objects are Nested Resources. A Nested Resource can either be _Anonymous_ (without an `@id` subject) or a Named Nested Resource (with an `@id` subject). Everywhere a Subject URL can be used as a value (i.e. 
all properties with the datatype [atomicURL](https://atomicdata.dev/datatypes/atomicURL)), a Nested Resource can be used instead. This also means that an item in an `ResourceArray` can be a Nested Resource. -- The root data structure must either be a Named Resource (with an `@id`), or an Array containing Named Resources. When you want to describe multiple Resources in one JSON-AD document, use an array as the root item. +The mime type (for HTTP content negotiation) is `application/ad+json` ([registration ongoing](https://github.com/ontola/atomic-data-docs/issues/60)). + +## Named Resources + +A named resource is a JSON Object that represents an Atomic Data resource. +Each key represents a property, therefore each key must be a valid [Property](https://atomicdata.dev/classes/Property) URL with the exception of the mandatory `@id` field. +The `@id` field is special: it defines the `Subject` of the `Resource`. If you send an HTTP GET request there with an `content-type: application/ad+json` header, you should get the full JSON-AD resource. + +The types of values allowed are determined by the [datatype](../schema/datatypes.md) of the property. + +- **string**, **slug**, **markdown**, **uri** and **date** datatype fields must be a `string`. +- **integer**, **float** and **timestamp** datatype fields must be a `number`. +- **boolean** datatype fields must be a `boolean`. +- **atomic-url** datatype fields must be either a `string` (url) or an `object` (nested resource). +- **resource-array** datatype fields must be an `array` of strings (must be a url) or objects (must be an nested resource). +- **json** datatype fields can be any valid JSON value. + +Named Resources are only allowed in the following places: + +- The root of the JSON-AD document. +- As an item in an array that is directly under the root of the JSON-AD document. -Let's look at an example JSON-AD Resource: +Example of a named resource in JSON-AD format: ```json { @@ -32,19 +44,18 @@ Let's look at an example JSON-AD Resource: } ``` -The mime type (for HTTP content negotiation) is `application/ad+json` ([registration ongoing](https://github.com/ontola/atomic-data-docs/issues/60)). - -## Nested, Anonymous and Named resources +## Nested Resources -In JSON-AD, a Resource can be respresented in multiple ways: +Nested resources are resources that do not have an `@id` field. +It _does_ have its own unique [path](./paths.md), which can be used as its identifier. -- **Subject**: A URL string, such as `https://atomicdata.dev/classes/Class`. -- **Named Resource**: A JSON Object with an `@id` field containing the Subject. -- **Anonymous Nested Resource** A JSON Object without an `@id` field. This is only possible if it is a Nested Resource, which means that it has a parent Resource. +Nested resources are only allowed in the following places: -Note that this is also valid for `ResourceArrays`, which usually only contain Subjects, but are allowed to contain Nested Resources. +- The value of a property with an **atomic-url** datatype. +- As an item in a **resource-array** property's array value. -In the following JSON-AD example, the `address` is a nested resource: +In the example below is a named resource with the subject: `https://example.com/arnold`. +The `address` property has an nested resource as its value, therefore the path of the nested resource is: `https://example.com/arnold https://example.com/properties/address`. 
```json { @@ -57,13 +68,16 @@ In the following JSON-AD example, the `address` is a nested resource: } ``` -Nested Resources can be _named_ or _anonymous_. An _Anonymous Nested Resource_ does not have it's own `@id` field. -It _does_ have its own unique [path](./paths.md), which can be used as its identifier. -The `path` of the anonymous resource in the example above is `https://example.com/arnold https://example.com/properties/address`. +## Regular JSON + +Properties with a **json** datatype can contain any valid JSON value. +If any JSON-AD data is present in these values it will not be treated as JSON-AD, but as regular JSON. + +Because these JSON values do not benefit from any of Atomic Data's features you should avoid using them unless your value is truly JSON data, for example when you need to store a config of some application. ## JSON-AD Parsers, serializers and other libraries -- **Typescript / Javacript**: [@tomic/lib](https://www.npmjs.com/package/@tomic/lib) JSON-AD parser + in-memory store. Works with [@tomic/react](https://www.npmjs.com/package/@tomic/lib) for rendering Atomic Data in React. +- **Typescript / Javacript**: [@tomic/lib](https://www.npmjs.com/package/@tomic/lib) JSON-AD parser + in-memory store. - **Rust**: [atomic_lib](https://crates.io/crates/atomic_lib) has a JSON-AD parser / serializer (and does a lot more). ## Canonicalized JSON-AD @@ -75,7 +89,7 @@ When you need deterministic serialization of Atomic Data (e.g. when calculating 1. All keys are sorted alphabetically (lexicographically) - both in the root object, as in any nested objects. 1. The JSON-AD is minified: no newlines, no spaces. -The last two steps of this process is more formally defined by the JSON Canonicalization Scheme (JCS, [rfc8785](https://tools.ietf.org/html/rfc8785)). +The last two steps of this process are more formally defined by the JSON Canonicalization Scheme (JCS, [rfc8785](https://tools.ietf.org/html/rfc8785)). ## Interoperability with JSON and JSON-LD diff --git a/docs/src/core/paths.md b/docs/src/core/paths.md index 60f78a9a8..ce5d93ade 100644 --- a/docs/src/core/paths.md +++ b/docs/src/core/paths.md @@ -71,7 +71,7 @@ Now the `employer` is simply a nested Object. Note that it no longer has its own `@id`. However, we can still identify this Nested Resource using its Path. -The Subject of the nested resource is its path: `https://example.com/john https://example.com/employer`, including the spacebar. +The Subject of the nested resource is its path: `https://example.com/john https://example.com/employer`, including the space. Note that the path from before still resolves: diff --git a/docs/src/core/querying.md b/docs/src/core/querying.md index ac75d5193..76545e4ff 100644 --- a/docs/src/core/querying.md +++ b/docs/src/core/querying.md @@ -31,8 +31,8 @@ Connection: Closed } ``` -The server MAY also include other resources, if they are deemed relevant. -For example, a search result might include nested children to speed up rendering. +The server MAY respond with an array containing the requested resource along with other resources that are deemed relevant. +For example, a search result might include the results as full resources to speed up rendering. Also note that AtomicServer supports other `Content-Type`s, such as `application/json`, `application/ld+json`, `text/turtle`. 
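The updated querying docs above state that the server MAY answer with an array containing the requested resource together with other resources it deems relevant. As a rough illustration of what that implies for a client (not part of this diff; `Resource` here is a hypothetical stand-in type), the sketch below separates the requested resource from the extra ones, similar in spirit to the `ResourceResponse::ResourceWithReferenced` handling introduced elsewhere in this changeset.

```rust
// Illustrative sketch only: split an array response into the requested
// resource and any additional resources the server chose to include.
struct Resource {
    subject: String,
}

fn split_response(
    requested_subject: &str,
    resources: Vec<Resource>,
) -> Result<(Resource, Vec<Resource>), String> {
    let mut main: Option<Resource> = None;
    let mut referenced: Vec<Resource> = Vec::new();

    for resource in resources {
        if resource.subject == requested_subject {
            main = Some(resource);
        } else {
            referenced.push(resource);
        }
    }

    main.map(|resource| (resource, referenced)).ok_or_else(|| {
        format!(
            "Requested subject {} not found in response",
            requested_subject
        )
    })
}
```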
diff --git a/docs/src/interoperability/json.md b/docs/src/interoperability/json.md index 7e5a60056..09d694e6b 100644 --- a/docs/src/interoperability/json.md +++ b/docs/src/interoperability/json.md @@ -117,6 +117,6 @@ JSON-AD and JSON-LD are very similar by design, but there are some important dif - Make sure the URLs used in the `@context` resolve to Atomic Properties. - Convert JSON-LD arrays into ResourceArrays -- Creating nested JSON objects is possible (by resolving the identifiers from `@id` relations), but it is up to the serializer to decide how deep this object nesting should happen. +- Nested JSON objects are only allowed if the data does not have an `@id` field. If the data contains multiple named resources they MUST all be part of an array at the root level. Note that as of now, there are no JSON-LD parsers for Atomic Data. diff --git a/docs/src/schema/datatypes.md b/docs/src/schema/datatypes.md index a4509284a..c16147c6d 100644 --- a/docs/src/schema/datatypes.md +++ b/docs/src/schema/datatypes.md @@ -24,9 +24,17 @@ A URL that should resolve to an [Atomic Resource](../core/concepts.md#Resource). _URL: `https://atomicdata.dev/datatypes/URI`_ -A Uniform Resource Identifier, preferably a URL (i.e. an URI that can be fetched). +A Uniform Resource Identifier. Could be HTTP, HTTPS, or any other type of schema. +Examples: + +``` +https://example.com/1 +file://home/user/file.txt +mailto:user@example.com +``` + ## String _URL: `https://atomicdata.dev/datatypes/string`_ @@ -64,10 +72,10 @@ e.g. `-420` _URL: `https://atomicdata.dev/datatypes/float`_ -Number with a comma. -Max value: [`9223372036854775807`](https://en.wikipedia.org/wiki/9,223,372,036,854,775,807) +A 64 bit decimal number. +Max value: `1.7976931348623157e+308` -e.g. `-420` +e.g. `-4.20` ## Boolean @@ -109,8 +117,24 @@ _URL: `https://atomicdata.dev/datatypes/resourceArray`_ Sequential, ordered list of Atomic URIs. Serialized as a JSON array with strings. -Note that other types of arrays are not included in this spec, but can be perfectly valid. - -([Discussion](https://github.com/atomicdata-dev/atomic-data-docs/issues/127)) - e.g. `["https://example.com/1", "https://example.com/1"]` + +## JSON + +_URL: `https://atomicdata.dev/datatypes/json`_ + +Any valid JSON value. +Can be used to store arbitrary json data. + +example: + +```json +[ + "thing", + { + "name": "thing", + }, + 9883 +] +``` diff --git a/docs/src/schema/faq.md b/docs/src/schema/faq.md index 4f619f931..9a226f447 100644 --- a/docs/src/schema/faq.md +++ b/docs/src/schema/faq.md @@ -1,16 +1,10 @@ {{#title Atomic Schema FAQ}} # Atomic Schema FAQ -## How do I create a Property that supports multiple Datatypes? - -A property only has one single Datatype. -However, feel free to create a new kind of Datatype that, in turn, refers to other Datatypes. -Perhaps Generics, or Option like types should be part of the Atomic Base Datatypes. - ## Do you have an `enum` datatype? -In Atomic Data, `enum` is not a datatype, but it's a constraint that can be added to properties that have. -You can set [`allows-only`](https://atomicdata.dev/properties/allowsOnly) on a Property, and use that to limit which values are allowed. +There is no dedicated `enum` datatype but you can use the `allows-only` property to achieve the same effect. +By setting `allows-only` on a Property you limit which specific values are allowed. They work on both `atomic-url` and `resource-array` properties. ## How should a client deal with Shortname collisions? 
@@ -53,7 +47,7 @@ If that server is offline, or the URL has changed, the existing links will break This is a fundamental problem to HTTP, and not unique to Atomic Data. Like with websites, hosts should make sure that their server stays available, and that URLs remain static. -One possible solution to this problem, is using Content Addressing, such as the [IPFS](../interoperability/ipfs.md) protocol enables, which is why we're planning for using that in the near future. +One possible solution to this problem, is using Content Addressing, such as the [IPFS](../interoperability/ipfs.md) protocol enables, which is why we're planning on using something like that in the near future. Another approach, is using [foreign keys (see issue)](https://github.com/ontola/atomic-data-docs/issues/43). @@ -69,7 +63,7 @@ For more information, see [RDF interoperability](../interoperability/rdf.md). Every time you use an external URL in your data, you kind of create a dependency. This is fundamental to linked data. -In Atomic Data, not having access to the Property in some JSON-AD resource will lead to now knowing how to interpret the data itself. +In Atomic Data, not having access to the Property in some JSON-AD resource will lead to not knowing how to interpret the data itself. You will no longer know what the Datatype was (other than the native JSON datatype, of course), or what the semantic meaning was of the relationship. There are multiple ways we can deal with this: @@ -83,4 +77,3 @@ There are multiple ways we can deal with this: Atomic Data does not have a concept of inheritance. However, you can use the `isA` property to link to _multiple Classes_ from a single resource. -This effectively diff --git a/docs/src/schema/intro.md b/docs/src/schema/intro.md index 537bdbaed..e8048f95a 100644 --- a/docs/src/schema/intro.md +++ b/docs/src/schema/intro.md @@ -34,7 +34,7 @@ This Property does three things: **Classes** are a special kind of Resource that describe an abstract class of things (such as "Person" or "Blog"). Classes can _recommend_ or _require_ a set of Properties. -They behave as Models, similar to `struts` in C or `interfaces` in Typescript. +They behave as Models, similar to `structs` in C or `interfaces` in Typescript. A Resource _could_ have one or more classes, which _could_ provide information about which Properties are expected or required. **example:** diff --git a/docs/src/websockets.md b/docs/src/websockets.md index 624869741..d1af67a1d 100644 --- a/docs/src/websockets.md +++ b/docs/src/websockets.md @@ -21,7 +21,7 @@ The `WebSocket-Protocol` is `AtomicData`. ## Server to client messages - `COMMIT ${CommitBody}` an entire [Commit](../src/commits/concepts.md) for a resource that you're subscribed to. -- `RESOURCE ${Resource}` a JSON-AD Resource as a response to a `GET` message. If there is something wrong with this request (e.g. 404), return a `Error` Resource with the requested subject, similar to how the HTTP protocol server does this.` +- `RESOURCE ${Resource}` a JSON-AD Resource or array of JSON-AD Resources as a response to a `GET` message. If there is something wrong with this request (e.g. 404), return a `Error` Resource with the requested subject, similar to how the HTTP protocol server does this.` - `ERROR ${ErrorBody}` an Error resource is sent whenever something goes wrong. The `ErrorBody` is a plaintext, typically English description of what went wrong. 
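As a rough illustration of the server-to-client messages listed above, this minimal Rust sketch (not part of this diff) only dispatches on the `COMMIT` / `RESOURCE` / `ERROR` prefixes; parsing the JSON-AD payloads themselves is intentionally left out.

```rust
// Illustrative sketch only: classify a raw server-to-client websocket
// message by the prefixes described in the protocol documentation above.
enum ServerMessage<'a> {
    Commit(&'a str),
    Resource(&'a str),
    Error(&'a str),
}

fn parse_server_message(raw: &str) -> Option<ServerMessage<'_>> {
    if let Some(body) = raw.strip_prefix("COMMIT ") {
        Some(ServerMessage::Commit(body))
    } else if let Some(body) = raw.strip_prefix("RESOURCE ") {
        Some(ServerMessage::Resource(body))
    } else if let Some(body) = raw.strip_prefix("ERROR ") {
        Some(ServerMessage::Error(body))
    } else {
        None
    }
}
```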
## Considerations diff --git a/lib/Cargo.toml b/lib/Cargo.toml index ac777b038..45e26b482 100644 --- a/lib/Cargo.toml +++ b/lib/Cargo.toml @@ -17,7 +17,9 @@ name = "benchmarks" [dependencies] base64 = "0.21" -bincode = { version = "1", optional = true } +rmp-serde = { version = "1.3.0", optional = true } +# Needed for migration to bincode v2 +bincode1 = { package = "bincode", version = "1", optional = true } directories = { version = ">= 2, < 5", optional = true } html2md = { version = "0.2.14", optional = true } kuchikiki = { version = "0.8.2", optional = true } @@ -46,6 +48,6 @@ ntest = "0.9" [features] config = ["directories", "toml"] -db = ["sled", "bincode"] +db = ["sled", "rmp-serde", "bincode1"] html = ["kuchikiki", "lol_html", "html2md"] rdf = ["rio_api", "rio_turtle"] diff --git a/lib/defaults/default_store.json b/lib/defaults/default_store.json index 102927f29..28bc191b1 100644 --- a/lib/defaults/default_store.json +++ b/lib/defaults/default_store.json @@ -1139,6 +1139,15 @@ "https://atomicdata.dev/properties/parent": "https://atomicdata.dev/datatypes", "https://atomicdata.dev/properties/shortname": "uri" }, + { + "@id": "https://atomicdata.dev/datatypes/json", + "https://atomicdata.dev/properties/description": "An untyped JSON object.", + "https://atomicdata.dev/properties/isA": [ + "https://atomicdata.dev/classes/Datatype" + ], + "https://atomicdata.dev/properties/parent": "https://atomicdata.dev/datatypes", + "https://atomicdata.dev/properties/shortname": "json" + }, { "@id": "https://atomicdata.dev/classes/Folder", "https://atomicdata.dev/properties/description": "Acts as a parent for resources, useful for ordering data.", diff --git a/lib/src/agents.rs b/lib/src/agents.rs index e9b1d0f20..032209ad6 100644 --- a/lib/src/agents.rs +++ b/lib/src/agents.rs @@ -3,10 +3,18 @@ //! https://docs.atomicdata.dev/commits/concepts.html use base64::{engine::general_purpose, Engine}; +use serde::{Deserialize, Serialize}; use serde_json::from_slice; use crate::{errors::AtomicResult, urls, Resource, Storelike, Value}; +#[derive(Serialize, Deserialize)] +struct DecodedSecret { + #[serde(rename = "privateKey")] + private_key: String, + subject: String, +} + /// None represents no right checks will be performed, effectively SUDO mode. 
#[derive(Clone, Debug, PartialEq)] pub enum ForAgent { @@ -146,6 +154,17 @@ impl Agent { created_at: crate::utils::now(), }) } + + pub fn build_secret(&self) -> AtomicResult { + let decoded_secret = DecodedSecret { + private_key: self.private_key.clone().ok_or("No private key on agent")?, + subject: self.subject.clone(), + }; + + let vec = serde_json::to_vec(&decoded_secret)?; + let encoded_secret = encode_base64(&vec); + Ok(encoded_secret) + } } /// keypair, serialized using base64 @@ -267,4 +286,14 @@ mod test { "http://localhost:9883/agents/RqPwpgHv+PK7Pnz/dVab8hmHjYnvTL1YrlVa6L9G9Zg=" ); } + + #[test] + fn can_build_secret() { + let og_secret = "eyJwcml2YXRlS2V5IjoiU015eFJnRjdRaGlDN0M1MDZxWFNVS2ZFK1NLQXRDZE5GdTVYZVRqemFkQT0iLCJzdWJqZWN0IjoiaHR0cDovL2xvY2FsaG9zdDo5ODgzL2FnZW50cy9ScVB3cGdIditQSzdQbnovZFZhYjhobUhqWW52VEwxWXJsVmE2TDlHOVpnPSJ9"; + let agent = Agent::from_secret(og_secret).unwrap(); + let secret = agent.build_secret().unwrap(); + + let agent2 = Agent::from_secret(&secret); + assert_eq!(agent2.unwrap().subject, agent.subject); + } } diff --git a/lib/src/class_extender.rs b/lib/src/class_extender.rs new file mode 100644 index 000000000..8b896f405 --- /dev/null +++ b/lib/src/class_extender.rs @@ -0,0 +1,34 @@ +use crate::{ + agents::ForAgent, errors::AtomicResult, storelike::ResourceResponse, urls, Commit, Db, Resource, +}; + +pub struct GetExtenderContext<'a> { + pub store: &'a Db, + pub url: &'a url::Url, + pub db_resource: &'a mut Resource, + pub for_agent: &'a ForAgent, +} + +pub struct CommitExtenderContext<'a> { + pub store: &'a Db, + pub commit: &'a Commit, + pub resource: &'a Resource, +} + +#[derive(Clone)] +pub struct ClassExtender { + pub class: String, + pub on_resource_get: Option AtomicResult>, + pub before_commit: Option AtomicResult<()>>, + pub after_commit: Option AtomicResult<()>>, +} + +impl ClassExtender { + pub fn resource_has_extender(&self, resource: &Resource) -> AtomicResult { + let Ok(is_a) = resource.get(urls::IS_A) else { + return Ok(false); + }; + + Ok(is_a.to_subjects(None)?.iter().any(|c| c == &self.class)) + } +} diff --git a/lib/src/client/helpers.rs b/lib/src/client/helpers.rs index c1d221863..b32a4d6ca 100644 --- a/lib/src/client/helpers.rs +++ b/lib/src/client/helpers.rs @@ -3,7 +3,8 @@ use crate::{ agents::Agent, commit::sign_message, errors::AtomicResult, - parse::{parse_json_ad_resource, ParseOpts}, + parse::{parse_json_ad_string, ParseOpts}, + storelike::ResourceResponse, Resource, Storelike, }; @@ -16,11 +17,38 @@ pub fn fetch_resource( subject: &str, store: &impl Storelike, client_agent: Option<&Agent>, -) -> AtomicResult { +) -> AtomicResult { let body = fetch_body(subject, crate::parse::JSON_AD_MIME, client_agent)?; - let resource = parse_json_ad_resource(&body, store, &ParseOpts::default()) + let resources = parse_json_ad_string(&body, store, &ParseOpts::default()) .map_err(|e| format!("Error parsing body of {}. 
{}", subject, e))?; - Ok(resource) + + if resources.len() == 1 { + Ok(ResourceResponse::Resource(resources[0].clone())) + } else { + let mut main_resource: Option = None; + let mut referenced: Vec = Vec::new(); + + for r in resources { + if r.get_subject() == subject { + main_resource = Some(r); + } else { + referenced.push(r); + } + } + + let Some(main_resource) = main_resource else { + return Err(format!( + "Requested subject not found in returned resources: {}", + subject + ) + .into()); + }; + + Ok(ResourceResponse::ResourceWithReferenced( + main_resource, + referenced, + )) + } } /// Returns the various x-atomic authentication headers, includign agent signature @@ -143,7 +171,10 @@ mod test { #[ignore] fn fetch_resource_basic() { let store = crate::Store::init().unwrap(); - let resource = fetch_resource(crate::urls::SHORTNAME, &store, None).unwrap(); + let resource = fetch_resource(crate::urls::SHORTNAME, &store, None) + .unwrap() + .to_single(); + let shortname = resource.get(crate::urls::SHORTNAME).unwrap(); assert!(shortname.to_string() == "shortname"); } diff --git a/lib/src/collections.rs b/lib/src/collections.rs index d906a237c..019100e8e 100644 --- a/lib/src/collections.rs +++ b/lib/src/collections.rs @@ -3,7 +3,7 @@ use crate::{ agents::ForAgent, errors::AtomicResult, - storelike::{Query, ResourceCollection}, + storelike::{Query, ResourceCollection, ResourceResponse}, urls, Resource, Storelike, Value, }; @@ -125,7 +125,7 @@ pub struct Collection { /// The actual items that you're interested in. List the member subjects of the current page. pub members: Vec, /// The members as full resources, instead of a list of subjects. Is only populated if `nested` is true. - pub members_nested: Option>, + pub referenced_resources: Option>, /// URL of the value to sort by pub sort_by: Option, // Sorts ascending by default @@ -212,7 +212,11 @@ impl Collection { let query_result = store.query(&q)?; let members = query_result.subjects; - let members_nested = Some(query_result.resources); + let referenced_resources = if collection_builder.include_nested { + Some(query_result.resources) + } else { + None + }; let total_items = query_result.count; let pages_fraction = total_items as f64 / collection_builder.page_size as f64; let total_pages = pages_fraction.ceil() as usize; @@ -227,7 +231,7 @@ impl Collection { let collection = Collection { total_pages, members, - members_nested, + referenced_resources, total_items, subject: collection_builder.subject, property: collection_builder.property, @@ -243,10 +247,9 @@ impl Collection { Ok(collection) } - pub fn to_resource(&self, store: &impl Storelike) -> AtomicResult { + pub fn to_resource(&self, store: &impl Storelike) -> AtomicResult { let mut resource = crate::Resource::new(self.subject.clone()); - self.add_to_resource(&mut resource, store)?; - Ok(resource) + self.add_to_resource(&mut resource, store) } /// Adds the Collection props to an existing Resource. 
@@ -254,14 +257,10 @@ impl Collection { &self, resource: &mut Resource, store: &impl Storelike, - ) -> AtomicResult { + ) -> AtomicResult { resource.set( crate::urls::COLLECTION_MEMBERS.into(), - if let Some(nested_members) = &self.members_nested { - nested_members.clone().into() - } else { - self.members.clone().into() - }, + self.members.clone().into(), store, )?; if let Some(prop) = &self.property { @@ -306,7 +305,15 @@ impl Collection { store, )?; - Ok(resource.to_owned()) + match &self.referenced_resources { + Some(referenced_resources) => { + return Ok(ResourceResponse::ResourceWithReferenced( + resource.clone(), + referenced_resources.clone(), + )); + } + None => Ok(ResourceResponse::Resource(resource.clone())), + } } } @@ -319,7 +326,7 @@ pub fn construct_collection_from_params( query_params: url::form_urlencoded::Parse, resource: &mut Resource, for_agent: &ForAgent, -) -> AtomicResult { +) -> AtomicResult { let mut sort_by = None; let mut sort_desc = false; let mut current_page = 0; @@ -476,7 +483,7 @@ mod test { Collection::collect_members(&store, collection_builder, &ForAgent::Sudo).unwrap(); assert!(collection.members.contains(&urls::PROPERTY.into())); - let resource_collection = &collection.to_resource(&store).unwrap(); + let resource_collection = &collection.to_resource(&store).unwrap().to_single(); resource_collection .get(urls::COLLECTION_INCLUDE_NESTED) .unwrap_err(); @@ -501,10 +508,10 @@ mod test { }; let collection = Collection::collect_members(&store, collection_builder, &ForAgent::Sudo).unwrap(); - let first_resource = &collection.members_nested.clone().unwrap()[0]; + let first_resource = &collection.referenced_resources.clone().unwrap()[0]; assert!(first_resource.get_subject().contains("Agent")); - let resource_collection = &collection.to_resource(&store).unwrap(); + let resource_collection = &collection.to_resource(&store).unwrap().to_single(); let val = resource_collection .get(urls::COLLECTION_INCLUDE_NESTED) .unwrap() @@ -528,7 +535,8 @@ mod test { false, &ForAgent::Public, ) - .unwrap(); + .unwrap() + .to_single(); assert!( collections_collection .get(urls::COLLECTION_PROPERTY) @@ -559,7 +567,8 @@ mod test { false, &ForAgent::Public, ) - .unwrap(); + .unwrap() + .to_single(); assert!( collection_page_size .get(urls::COLLECTION_PAGE_SIZE) @@ -573,7 +582,8 @@ mod test { false, &ForAgent::Public, ) - .unwrap(); + .unwrap() + .to_single(); assert!( collection_page_nr .get(urls::COLLECTION_PAGE_SIZE) diff --git a/lib/src/config.rs b/lib/src/config.rs index 4cf06a5c0..4a6a4732e 100644 --- a/lib/src/config.rs +++ b/lib/src/config.rs @@ -1,19 +1,27 @@ //! Configuration logic which can be used in both CLI and Server contexts //! For serializaing, storing, and parsing the `~/.config/atomic/config.toml` file -use crate::errors::AtomicResult; +use crate::{agents::Agent, errors::AtomicResult}; use serde::{Deserialize, Serialize}; use std::path::{Path, PathBuf}; /// A set of options that are shared between CLI and Server contexts #[derive(Debug, Serialize, Deserialize, Clone)] pub struct Config { + pub shared: SharedConfig, + pub client: Option, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct SharedConfig { + /// Sudo agent on the server, also used as agent in the CLI. Usually lives on the server, but not necessarily so. + pub agent_secret: String, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct ClientConfig { /// URL of Companion Atomic Server, where data is written to by default. - pub server: String, - /// The current Agent (user) URL. 
Usually lives on the server, but not necessarily so. - pub agent: String, - /// Private key for the Agent, which is used to sign commits. - pub private_key: String, + pub server_url: String, } /// Returns the default path for the config file: `~/.config/atomic` @@ -39,7 +47,7 @@ pub fn read_config(path: Option<&Path>) -> AtomicResult { let path = path.unwrap_or(&default); let config_string = std::fs::read_to_string(path) .map_err(|e| format!("Error reading config from {:?}. {}", path, e))?; - let config: Config = toml::from_str(&config_string) + let config = parse_and_migrate_if_needed(&config_string) .map_err(|e| format!("Could not parse toml in config file {:?}. {}", path, e))?; Ok(config) } @@ -47,7 +55,7 @@ pub fn read_config(path: Option<&Path>) -> AtomicResult { /// Writes config file from a specified path. /// Overwrites any existing config. /// Creates the config directory if it does not exist. -pub fn write_config(path: &Path, config: Config) -> AtomicResult { +fn write_config(path: &Path, config: Config) -> AtomicResult { let out = toml::to_string_pretty(&config).map_err(|e| format!("Error serializing config. {}", e))?; @@ -61,3 +69,58 @@ pub fn write_config(path: &Path, config: Config) -> AtomicResult { .map_err(|e| format!("Error writing config file to {:?}. {}", path, e))?; Ok(out) } + +impl Config { + pub fn save(&self, path: &Path) -> AtomicResult<()> { + write_config(&path, self.clone())?; + Ok(()) + } + + pub fn to_string(&self) -> AtomicResult { + let out = + toml::to_string_pretty(self).map_err(|e| format!("Error serializing config. {}", e))?; + Ok(out) + } +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +struct ConfigV0 { + agent: String, + private_key: String, + server: String, +} + +fn parse_and_migrate_if_needed(config_str: &str) -> AtomicResult { + // Try latest version first + if let Ok(config) = toml::from_str::(config_str) { + return Ok(config); + } + + // Try v0 version + if let Ok(config) = toml::from_str::(config_str) { + return config_v0_to_v1(&config); + } + + return Err("Could not parse config".into()); +} + +fn config_v0_to_v1(config_v0: &ConfigV0) -> AtomicResult { + let ConfigV0 { + agent, + private_key, + server, + } = config_v0; + + let new_agent = Agent::from_private_key_and_subject(private_key, agent)?; + + let config = Config { + shared: SharedConfig { + agent_secret: new_agent.build_secret()?, + }, + client: Some(ClientConfig { + server_url: server.clone(), + }), + }; + + Ok(config) +} diff --git a/lib/src/datatype.rs b/lib/src/datatype.rs index ca8450428..9be33e24b 100644 --- a/lib/src/datatype.rs +++ b/lib/src/datatype.rs @@ -17,6 +17,8 @@ pub enum DataType { Slug, String, Timestamp, + Uri, + JSON, Unsupported(String), } @@ -32,6 +34,8 @@ pub fn match_datatype(string: &str) -> DataType { urls::SLUG => DataType::Slug, urls::STRING => DataType::String, urls::TIMESTAMP => DataType::Timestamp, + urls::URI => DataType::Uri, + urls::JSON => DataType::JSON, unsupported_datatype => DataType::Unsupported(unsupported_datatype.into()), } } @@ -51,6 +55,8 @@ impl std::str::FromStr for DataType { urls::SLUG => DataType::Slug, urls::STRING => DataType::String, urls::TIMESTAMP => DataType::Timestamp, + urls::URI => DataType::Uri, + urls::JSON => DataType::JSON, unsupported_datatype => DataType::Unsupported(unsupported_datatype.into()), }) } @@ -69,6 +75,8 @@ impl fmt::Display for DataType { DataType::Slug => write!(f, "{}", urls::SLUG), DataType::String => write!(f, "{}", urls::STRING), DataType::Timestamp => write!(f, "{}", urls::TIMESTAMP), + 
DataType::Uri => write!(f, "{}", urls::URI), + DataType::JSON => write!(f, "{}", urls::JSON), DataType::Unsupported(url) => write!(f, "{}", url), } } diff --git a/lib/src/db.rs b/lib/src/db.rs index d3523cc05..68d6ab5c9 100644 --- a/lib/src/db.rs +++ b/lib/src/db.rs @@ -1,12 +1,14 @@ //! Persistent, ACID compliant, threadsafe to-disk store. //! Powered by Sled - an embedded database. +mod encoding; mod migrations; mod prop_val_sub_index; mod query_index; #[cfg(test)] pub mod test; mod trees; +mod v1_types; mod val_prop_sub_index; use std::{ @@ -16,25 +18,26 @@ use std::{ vec, }; -use tracing::{info, instrument}; -use trees::{Method, Operation, Transaction, Tree}; - use crate::{ agents::ForAgent, atoms::IndexAtom, + class_extender::{ClassExtender, CommitExtenderContext, GetExtenderContext}, commit::{CommitOpts, CommitResponse}, db::{ + encoding::{decode_propvals, encode_propvals}, query_index::{requires_query_index, NO_VALUE}, val_prop_sub_index::find_in_val_prop_sub_index, }, - endpoints::{default_endpoints, Endpoint, HandleGetContext}, + endpoints::{Endpoint, HandleGetContext}, errors::{AtomicError, AtomicResult}, + plugins::plugins, resources::PropVals, - storelike::{Query, QueryResult, Storelike}, - urls, + storelike::{Query, QueryResult, ResourceResponse, Storelike}, values::SortableValue, Atom, Commit, Resource, }; +use tracing::{info, instrument}; +use trees::{Method, Operation, Transaction, Tree}; use self::{ migrations::migrate_maybe, @@ -46,6 +49,8 @@ use self::{ val_prop_sub_index::add_atom_to_valpropsub_index, }; +use sled::{transaction::TransactionError, Transactional}; + // A function called by the Store when a Commit is accepted type HandleCommit = Box; @@ -82,6 +87,8 @@ pub struct Db { server_url: String, /// Endpoints are checked whenever a resource is requested. They calculate (some properties of) the resource and return it. endpoints: Vec, + /// List of class extenders. + class_extenders: Vec, /// Function called whenever a Commit is applied. on_commit: Option>, /// Where the DB is stored on disk. @@ -111,7 +118,8 @@ impl Db { prop_val_sub_index, server_url, watched_queries, - endpoints: default_endpoints(), + endpoints: plugins::default_endpoints(), + class_extenders: plugins::default_class_extenders(), on_commit: None, }; migrate_maybe(&store).map(|e| format!("Error during migration of database: {:?}", e))?; @@ -166,7 +174,9 @@ impl Db { ) -> AtomicResult<()> { let subject = resource.get_subject(); let propvals = resource.get_propvals(); - let resource_bin = bincode::serialize(propvals)?; + + let resource_bin = encode_propvals(&propvals)?; + transaction.push(Operation { tree: Tree::Resources, method: Method::Insert, @@ -195,15 +205,29 @@ impl Db { /// Constructs the value index from all resources in the store. Could take a while. pub fn build_index(&self, include_external: bool) -> AtomicResult<()> { tracing::info!("Building index (this could take a few minutes for larger databases)"); + let mut count = 0; + for r in self.all_resources(include_external) { let mut transaction = Transaction::new(); - for atom in r.to_atoms() { + for atom in r.to_atoms_iter() { self.add_atom_to_index(&atom, &r, &mut transaction) .map_err(|e| format!("Failed to add atom to index {}. {}", atom, e))?; } self.apply_transaction(&mut transaction) .map_err(|e| format!("Failed to commit transaction. 
{}", e))?; + + if count % 1000 == 0 { + tracing::info!("Building index, applied transaction: {}", count); + } + + if count % 10000 == 0 { + tracing::info!("Building index, flushing to disk"); + self.db.flush()?; + } + + count += 1; } + tracing::info!("Building index finished!"); Ok(()) } @@ -211,7 +235,8 @@ impl Db { /// Internal method for fetching Resource data. #[instrument(skip(self))] fn set_propvals(&self, subject: &str, propvals: &PropVals) -> AtomicResult<()> { - let resource_bin = bincode::serialize(propvals)?; + let resource_bin = encode_propvals(&propvals)?; + self.resources.insert(subject.as_bytes(), resource_bin)?; Ok(()) } @@ -232,13 +257,7 @@ impl Db { .map_err(|e| format!("Can't open {} from store: {}", subject, e))?; match propval_maybe.as_ref() { Some(binpropval) => { - let propval: PropVals = bincode::deserialize(binpropval).map_err(|e| { - format!( - "Deserialize propval error: {} {}", - corrupt_db_message(subject), - e - ) - })?; + let propval: PropVals = decode_propvals(binpropval)?; Ok(propval) } None => Err(AtomicError::not_found(format!( @@ -279,7 +298,7 @@ impl Db { return None; } - let propvals: PropVals = bincode::deserialize(&resource_bin) + let propvals: PropVals = decode_propvals(&resource_bin) .unwrap_or_else(|e| panic!("{}. {}", corrupt_db_message(&subject), e)); Some(Resource::from_propvals(propvals, subject)) @@ -376,11 +395,30 @@ impl Db { } } - self.resources.apply_batch(batch_resources)?; - self.prop_val_sub_index.apply_batch(batch_propvalsub)?; - self.reference_index.apply_batch(batch_valpropsub)?; - self.watched_queries.apply_batch(batch_watched_queries)?; - self.query_index.apply_batch(batch_query_members)?; + ( + &self.resources, + &self.prop_val_sub_index, + &self.reference_index, + &self.watched_queries, + &self.query_index, + ) + .transaction( + |( + tx_resources, + tx_prop_val_sub_index, + tx_reference_index, + tx_watched_queries, + tx_query_index, + )| { + tx_resources.apply_batch(&batch_resources)?; + tx_prop_val_sub_index.apply_batch(&batch_propvalsub)?; + tx_reference_index.apply_batch(&batch_valpropsub)?; + tx_watched_queries.apply_batch(&batch_watched_queries)?; + tx_query_index.apply_batch(&batch_query_members)?; + Ok::<(), sled::transaction::ConflictableTransactionError>(()) + }, + ) + .map_err(|e: TransactionError<_>| format!("Failed to apply transaction: {}", e))?; Ok(()) } @@ -417,7 +455,7 @@ impl Db { if let Ok(resource) = self.get_resource_extended(&atom.subject, true, &q.for_agent) { subjects.push(atom.subject.clone()); - resources.push(resource); + resources.push(resource.to_single()); } } } @@ -502,6 +540,51 @@ impl Db { } Ok(()) } + + fn is_endpoint(&self, url: &url::Url) -> bool { + self.endpoints.iter().any(|e| e.path == url.path()) + } + + fn call_endpoint(&self, subject: &str, for_agent: &ForAgent) -> AtomicResult { + let url = url::Url::parse(subject)?; + + // Check if the subject matches one of the endpoints + for endpoint in self.endpoints.iter() { + if url.path() == endpoint.path { + // Not all Endpoints have a handle function. + // If there is none, return the endpoint plainly. + let response = if let Some(handle) = endpoint.handle { + // Call the handle function for the endpoint, if it exists. + let context: HandleGetContext = HandleGetContext { + subject: url, + store: self, + for_agent, + }; + (handle)(context).map_err(|e| { + format!("Error handling {} Endpoint: {}", endpoint.shortname, e) + })? + } else { + endpoint.to_resource_response(self)? 
+ }; + + // Extended resources must always return the requested subject as their own subject + match response { + ResourceResponse::Resource(mut resource) => { + resource.set_subject(subject.into()); + return Ok(resource.into()); + } + ResourceResponse::ResourceWithReferenced(mut resource, references) => { + resource.set_subject(subject.into()); + return Ok(ResourceResponse::ResourceWithReferenced( + resource, references, + )); + } + } + } + } + + Err(format!("No endpoint found for {}", subject).into()) + } } impl Drop for Db { @@ -599,22 +682,19 @@ impl Storelike for Db { let mut transaction = Transaction::new(); // BEFORE APPLY COMMIT HANDLERS - // TODO: Move to something dynamic if let Some(resource_new) = &commit_response.resource_new { - let _resource_new_classes = resource_new.get_classes(store)?; - #[cfg(feature = "db")] - for class in &_resource_new_classes { - match class.subject.as_str() { - urls::COMMIT => { - return Err("Commits can not be edited or created directly.".into()) - } - urls::INVITE => crate::plugins::invite::before_apply_commit( + for extender in self.class_extenders.iter() { + if extender.resource_has_extender(resource_new)? { + let Some(handler) = extender.before_commit else { + continue; + }; + + (handler)(CommitExtenderContext { store, - &commit_response.commit, - resource_new, - )?, - _other => {} - }; + commit: &commit_response.commit, + resource: resource_new, + })?; + } } } @@ -665,19 +745,21 @@ impl Storelike for Db { // AFTER APPLY COMMIT HANDLERS // Commit has been checked and saved. // Here you can add side-effects, such as creating new Commits. - #[cfg(feature = "db")] if let Some(resource_new) = &commit_response.resource_new { - let _resource_new_classes = resource_new.get_classes(store)?; - #[cfg(feature = "db")] - for class in &_resource_new_classes { - match class.subject.as_str() { - urls::MESSAGE => crate::plugins::chatroom::after_apply_commit_message( + for extender in self.class_extenders.iter() { + if extender.resource_has_extender(resource_new)? { + use crate::class_extender::CommitExtenderContext; + + let Some(handler) = extender.after_commit else { + continue; + }; + + (handler)(CommitExtenderContext { store, - &commit_response.commit, - resource_new, - )?, - _other => {} - }; + commit: &commit_response.commit, + resource: resource_new, + })?; + } } } Ok(commit_response) @@ -702,14 +784,15 @@ impl Storelike for Db { #[instrument(skip(self))] fn get_resource(&self, subject: &str) -> AtomicResult { - let propvals = self.get_propvals(subject); - - match propvals { + match self.get_propvals(subject) { Ok(propvals) => { let resource = crate::resources::Resource::from_propvals(propvals, subject.into()); Ok(resource) } - Err(e) => self.handle_not_found(subject, e, None), + Err(e) => { + tracing::error!("Error getting resource: {:?}", e); + self.handle_not_found(subject, e, None) + } } } @@ -719,7 +802,7 @@ impl Storelike for Db { subject: &str, skip_dynamic: bool, for_agent: &ForAgent, - ) -> AtomicResult { + ) -> AtomicResult { let url_span = tracing::span!(tracing::Level::TRACE, "URL parse").entered(); // This might add a trailing slash let url = url::Url::parse(subject)?; @@ -737,98 +820,68 @@ impl Storelike for Db { url_span.exit(); let endpoint_span = tracing::span!(tracing::Level::TRACE, "Endpoint").entered(); - // Check if the subject matches one of the endpoints - for endpoint in self.endpoints.iter() { - if url.path() == endpoint.path { - // Not all Endpoints have a handle function. - // If there is none, return the endpoint plainly. 
- let mut resource = if let Some(handle) = endpoint.handle { - // Call the handle function for the endpoint, if it exists. - let context: HandleGetContext = HandleGetContext { - subject: url, - store: self, - for_agent, - }; - (handle)(context).map_err(|e| { - format!("Error handling {} Endpoint: {}", endpoint.shortname, e) - })? - } else { - endpoint.to_resource(self)? - }; - // Extended resources must always return the requested subject as their own subject - resource.set_subject(subject.into()); - return Ok(resource.to_owned()); - } + + // Check if the subject matches one of the endpoints, if so, call the endpoint. + if self.is_endpoint(&url) { + return self.call_endpoint(subject, for_agent); } + endpoint_span.exit(); let dynamic_span = tracing::span!(tracing::Level::TRACE, "get_resource_extended (dynamic)").entered(); + let mut resource = self.get_resource(&removed_query_params)?; let _explanation = crate::hierarchy::check_read(self, &resource, for_agent)?; - // Whether the resource has dynamic properties - let mut has_dynamic = false; // If a certain class needs to be extended, add it to this match statement - for class in resource.get_classes(self)? { - match class.subject.as_ref() { - crate::urls::COLLECTION => { - has_dynamic = true; - if !skip_dynamic { - resource = crate::collections::construct_collection_from_params( - self, - url.query_pairs(), - &mut resource, - for_agent, - )?; - } - } - crate::urls::INVITE => { - has_dynamic = true; - if !skip_dynamic { - resource = crate::plugins::invite::construct_invite_redirect( - self, - url.query_pairs(), - &mut resource, - for_agent, - )?; - } + for extender in self.class_extenders.iter() { + if extender.resource_has_extender(&resource)? { + if skip_dynamic { + // This lets clients know that the resource may have dynamic properties that are currently not included + resource.set( + crate::urls::INCOMPLETE.into(), + crate::Value::Boolean(true), + self, + )?; + + dynamic_span.exit(); + return Ok(resource.into()); } - crate::urls::DRIVE => { - has_dynamic = true; - if !skip_dynamic { - resource = crate::hierarchy::add_children(self, &mut resource)?; - } - } - crate::urls::CHATROOM => { - has_dynamic = true; - if !skip_dynamic { - resource = crate::plugins::chatroom::construct_chatroom( - self, - url.clone(), - &mut resource, - for_agent, - )?; + + if let Some(handler) = extender.on_resource_get { + let resource_response = (handler)(GetExtenderContext { + store: self, + url: &url, + db_resource: &mut resource, + for_agent, + })?; + + dynamic_span.exit(); + + // TODO: Check if we actually need this + // make sure the actual subject matches the one requested - It should not be changed in the logic above + match resource_response { + ResourceResponse::Resource(mut resource) => { + resource.set_subject(subject.into()); + return Ok(resource.into()); + } + ResourceResponse::ResourceWithReferenced(mut resource, referenced) => { + resource.set_subject(subject.into()); + + return Ok(ResourceResponse::ResourceWithReferenced( + resource, referenced, + )); + } } } - _ => {} } } - dynamic_span.exit(); - // make sure the actual subject matches the one requested - It should not be changed in the logic above resource.set_subject(subject.into()); - // This lets clients know that the resource may have dynamic properties that are currently not included - if has_dynamic && skip_dynamic { - resource.set( - crate::urls::INCOMPLETE.into(), - crate::Value::Boolean(true), - self, - )?; - } - Ok(resource) + Ok(resource.into()) } fn handle_commit(&self, 
commit_response: &CommitResponse) { @@ -882,8 +935,9 @@ impl Storelike for Db { for_agent, subject: subj_url, }; - let mut resource = fun(handle_post_context)?; + let mut resource = fun(handle_post_context)?.to_single(); resource.set_subject(subject.into()); + return Ok(resource); } } diff --git a/lib/src/db/encoding.rs b/lib/src/db/encoding.rs new file mode 100644 index 000000000..0abcae30a --- /dev/null +++ b/lib/src/db/encoding.rs @@ -0,0 +1,38 @@ +use rmp_serde::Serializer; +use serde::Serialize; + +use crate::{db::query_index::QueryFilter, errors::AtomicResult, resources::PropVals}; + +/// Encode PropVals to a message pack binary format +pub fn encode_propvals(propvals: &PropVals) -> AtomicResult> { + let bin = + rmp_serde::to_vec(&propvals).map_err(|e| format!("Could not serialize PropVals: {}", e))?; + + Ok(bin) +} + +/// Decode PropVals from a message pack binary format +pub fn decode_propvals(bin: &[u8]) -> AtomicResult { + let propvals: PropVals = + rmp_serde::from_slice(bin).map_err(|e| format!("Could not deserialize PropVals: {}", e))?; + + Ok(propvals) +} + +// Make QueryFilter serializable to message pack +impl super::query_index::QueryFilter { + pub fn encode(&self) -> AtomicResult> { + let mut query_filter_bin = Vec::new(); + self.serialize(&mut Serializer::new(&mut query_filter_bin)) + .map_err(|e| format!("Error encoding QueryFilter: {}", e))?; + + Ok(query_filter_bin) + } + + pub fn from_bytes(bytes: &[u8]) -> AtomicResult { + let query_filter: QueryFilter = rmp_serde::from_slice(bytes) + .map_err(|e| format!("Error decoding QueryFilter: {}", e))?; + + Ok(query_filter) + } +} diff --git a/lib/src/db/migrations.rs b/lib/src/db/migrations.rs index 610b0e6c4..e81790cae 100644 --- a/lib/src/db/migrations.rs +++ b/lib/src/db/migrations.rs @@ -12,7 +12,7 @@ Therefore, we need migrations to convert the old schema to the new one. - Update the Tree key used in [crate::db::trees] */ -use crate::{errors::AtomicResult, Db}; +use crate::{db::v1_types::propvals_v1_to_v2, errors::AtomicResult, Db}; /// Checks the current version(s) of the internal Store, and performs migrations if needed. 
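// A minimal usage sketch for the MessagePack helpers introduced in db/encoding.rs
// above. The function name `roundtrip_filter` is hypothetical; `encode` and
// `from_bytes` are the methods added in this patch, and the imports of
// `QueryFilter` and `AtomicResult` are assumed.
fn roundtrip_filter(filter: &QueryFilter) -> AtomicResult<QueryFilter> {
    // Serialize with rmp_serde, as the watched_queries tree now expects.
    let bytes = filter.encode()?;
    // Decode the same bytes back into a QueryFilter.
    QueryFilter::from_bytes(&bytes)
}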
pub fn migrate_maybe(store: &Db) -> AtomicResult<()> { @@ -21,12 +21,58 @@ pub fn migrate_maybe(store: &Db) -> AtomicResult<()> { // Add migrations for outdated Trees to this list "resources" => v0_to_v1(store)?, "reference_index" => ref_v0_to_v1(store)?, + "resources_v1" => resources_v1_to_v2(store)?, _other => {} } } Ok(()) } +fn resources_v1_to_v2(store: &Db) -> AtomicResult<()> { + tracing::warn!("Migrating resources from v1 to v2, this may take a while..."); + let old_key = "resources_v1"; + let old = store.db.open_tree(old_key)?; + + let new_key = "resources_v2"; + let new = store.db.open_tree(new_key)?; + + new.clear()?; + let mut count = 0; + + for item in old.into_iter() { + let (subject, propvals_bin) = item.expect("Unable to convert into interable"); + + let subject: String = + String::from_utf8(subject.to_vec()).expect("Unable to deserialize subject"); + let propvals: crate::db::v1_types::PropValsV1 = bincode1::deserialize(&propvals_bin) + .map_err(|e| format!("Migration Error: Failed to deserialize propvals: {}", e))?; + + let new_propvals = propvals_v1_to_v2(propvals); + + new.insert( + subject.as_bytes(), + rmp_serde::to_vec(&new_propvals) + .map_err(|e| format!("Migration Error: Failed to encode propvals: {}", e))?, + )?; + + count += 1; + } + + store.db.drop_tree(old_key).map_err(|e| { + tracing::error!("Migration Error: Failed to drop old tree: {}", e); + e + })?; + + tracing::info!("Finished migrating {} resources", count); + + tracing::info!("clearing index..."); + store.clear_index()?; + + store.build_index(true)?; + + Ok(()) +} + /// Change the subjects from `bincode` to `.as_bytes()` fn v0_to_v1(store: &Db) -> AtomicResult<()> { tracing::warn!("Migrating resources schema from v0 to v1..."); @@ -38,7 +84,7 @@ fn v0_to_v1(store: &Db) -> AtomicResult<()> { for item in old.into_iter() { let (subject, resource_bin) = item.expect("Unable to convert into iterable"); let subject: String = - bincode::deserialize(&subject).expect("Unable to deserialize subject"); + bincode1::deserialize(&subject).expect("Unable to deserialize subject"); new.insert(subject.as_bytes(), resource_bin)?; count += 1; } diff --git a/lib/src/db/query_index.rs b/lib/src/db/query_index.rs index 325b0da0c..574a79c55 100644 --- a/lib/src/db/query_index.rs +++ b/lib/src/db/query_index.rs @@ -35,17 +35,20 @@ impl QueryFilter { if self.property.is_none() && self.value.is_none() { return Err("Cannot watch a query without a property or value. These types of queries are not implemented. 
See https://github.com/atomicdata-dev/atomic-server/issues/548 ".into()); }; - store - .watched_queries - .insert(bincode::serialize(self)?, b"")?; + + let query_filter_bin = self.encode()?; + + store.watched_queries.insert(query_filter_bin, b"")?; Ok(()) } /// Check if this [QueryFilter] is being indexed pub fn is_watched(&self, store: &Db) -> bool { + let query_filter_bin = self.encode().expect("Failed to encode QueryFilter"); + store .watched_queries - .contains_key(bincode::serialize(self).unwrap()) + .contains_key(&query_filter_bin) .unwrap_or(false) } } @@ -123,7 +126,7 @@ pub fn query_sorted_indexed( if should_include_resource(q) { if let Ok(resource) = store.get_resource_extended(subject, true, &q.for_agent) { - resources.push(resource); + resources.push(resource.to_single()); subjects.push(subject.into()); } } else { @@ -265,8 +268,7 @@ pub fn check_if_atom_matches_watched_query_filters( for query in store.watched_queries.iter() { // The keys store all the data if let Ok((k, _v)) = query { - let q_filter = bincode::deserialize::(&k) - .map_err(|e| format!("Could not deserialize QueryFilter: {}", e))?; + let q_filter: QueryFilter = QueryFilter::from_bytes(&k)?; if let Some(prop) = should_update_property(&q_filter, index_atom, resource) { let update_val = match resource.get(prop) { @@ -276,7 +278,8 @@ pub fn check_if_atom_matches_watched_query_filters( update_indexed_member(&q_filter, &atom.subject, &update_val, delete, transaction)?; } } else { - return Err(format!("Can't deserialize collection index: {:?}", query).into()); + tracing::error!("Can't query collection index: {:?}", query); + break; } } Ok(()) @@ -327,7 +330,8 @@ pub fn create_query_index_key( value: Option<&SortableValue>, subject: Option<&str>, ) -> AtomicResult> { - let mut q_filter_bytes: Vec = bincode::serialize(query_filter)?; + let mut q_filter_bytes = query_filter.encode()?; + q_filter_bytes.push(SEPARATION_BIT); let mut value_bytes: Vec = if let Some(val) = value { @@ -342,6 +346,7 @@ pub fn create_query_index_key( } else { vec![0] }; + value_bytes.push(SEPARATION_BIT); let subject_bytes = if let Some(sub) = subject { @@ -363,19 +368,22 @@ pub fn parse_collection_members_key(bytes: &[u8]) -> AtomicResult<(QueryFilter, let value_bytes = iter.next().ok_or("No value_bytes")?; let subject_bytes = iter.next().ok_or("No value_bytes")?; - let q_filter: QueryFilter = bincode::deserialize(q_filter_bytes)?; + let q_filter: QueryFilter = QueryFilter::from_bytes(q_filter_bytes)?; + let value = if !value_bytes.is_empty() { std::str::from_utf8(value_bytes) .map_err(|e| format!("Can't parse value in members_key: {}", e))? } else { return Err("Can't parse value in members_key".into()); }; + let subject = if !subject_bytes.is_empty() { std::str::from_utf8(subject_bytes) .map_err(|e| format!("Can't parse subject in members_key: {}", e))? 
} else { return Err("Can't parse subject in members_key".into()); }; + Ok((q_filter, value, subject)) } diff --git a/lib/src/db/test.rs b/lib/src/db/test.rs index 7803c6cbf..37e918de6 100644 --- a/lib/src/db/test.rs +++ b/lib/src/db/test.rs @@ -70,12 +70,14 @@ fn populate_collections() { .get_resource_extended(&collections_collection_url, false, &ForAgent::Public) .unwrap(); let member_count = collections_resource + .to_single() .get(crate::urls::COLLECTION_MEMBER_COUNT) .unwrap() .to_int() .unwrap(); assert!(member_count > 11); let nested = collections_resource + .to_single() .get(crate::urls::COLLECTION_INCLUDE_NESTED) .unwrap() .to_bool() @@ -100,6 +102,7 @@ fn destroy_resource_and_check_collection_and_commits() { agents_collection_1.to_json_ad().unwrap() ); let agents_collection_count_1 = agents_collection_1 + .to_single() .get(crate::urls::COLLECTION_MEMBER_COUNT) .unwrap() .to_int() @@ -115,6 +118,7 @@ fn destroy_resource_and_check_collection_and_commits() { .get_resource_extended(&commits_url, false, for_agent) .unwrap(); let commits_collection_count_1 = commits_collection_1 + .to_single() .get(crate::urls::COLLECTION_MEMBER_COUNT) .unwrap() .to_int() @@ -131,6 +135,7 @@ fn destroy_resource_and_check_collection_and_commits() { .get_resource_extended(&agents_url, false, for_agent) .unwrap(); let agents_collection_count_2 = agents_collection_2 + .to_single() .get(crate::urls::COLLECTION_MEMBER_COUNT) .unwrap() .to_int() @@ -144,6 +149,7 @@ fn destroy_resource_and_check_collection_and_commits() { .get_resource_extended(&commits_url, false, for_agent) .unwrap(); let commits_collection_count_2 = commits_collection_2 + .to_single() .get(crate::urls::COLLECTION_MEMBER_COUNT) .unwrap() .to_int() @@ -168,6 +174,7 @@ fn destroy_resource_and_check_collection_and_commits() { .get_resource_extended(&agents_url, false, for_agent) .unwrap(); let agents_collection_count_3 = agents_collection_3 + .to_single() .get(crate::urls::COLLECTION_MEMBER_COUNT) .unwrap() .to_int() @@ -181,6 +188,7 @@ fn destroy_resource_and_check_collection_and_commits() { .get_resource_extended(&commits_url, false, for_agent) .unwrap(); let commits_collection_count_3 = commits_collection_3 + .to_single() .get(crate::urls::COLLECTION_MEMBER_COUNT) .unwrap() .to_int() @@ -211,7 +219,8 @@ fn get_extended_resource_pagination() { let subject_with_page_size = format!("{}&page_size=1", subject); let resource = store .get_resource_extended(&subject_with_page_size, false, &ForAgent::Public) - .unwrap(); + .unwrap() + .to_single(); let cur_page = resource .get(urls::COLLECTION_CURRENT_PAGE) .unwrap() diff --git a/lib/src/db/trees.rs b/lib/src/db/trees.rs index 376ab57a8..cf68ae547 100644 --- a/lib/src/db/trees.rs +++ b/lib/src/db/trees.rs @@ -2,9 +2,9 @@ use crate::atoms::IndexAtom; use super::{prop_val_sub_index::propvalsub_key, val_prop_sub_index::valpropsub_key}; -#[derive(Debug)] +#[derive(Debug, Clone)] pub enum Tree { - /// Full resources, Key: Subject, Value: [Resource](crate::Resource) + /// Full resources, Key: Subject, Value: [Propvals](crate::resources::PropVals) Resources, /// Stores the members of Collections, easily sortable. QueryMembers, @@ -13,12 +13,12 @@ pub enum Tree { /// Index sorted by {Property}-{Value}-{Subject}. /// Used for queries where the property is known. PropValSub, - /// Reference index, used for queries where the value (or one of the values, in case of an array) is but the subject is not. 
+ /// Reference index, used for queries where the value (or one of the values, in case of an array) is known but the subject is not. /// Index sorted by {Value}-{Property}-{Subject}. ValPropSub, } -const RESOURCES: &str = "resources_v1"; +const RESOURCES: &str = "resources_v2"; const VALPROPSUB: &str = "reference_index_v1"; const QUERY_MEMBERS: &str = "members_index"; const PROPVALSUB: &str = "prop_val_sub_index"; @@ -49,14 +49,14 @@ impl AsRef<[u8]> for Tree { } } -#[derive(Debug)] +#[derive(Debug, Clone)] pub enum Method { Insert, Delete, } /// A single operation to be executed on the database. -#[derive(Debug)] +#[derive(Debug, Clone)] pub struct Operation { pub tree: Tree, pub method: Method, diff --git a/lib/src/db/v1_types.rs b/lib/src/db/v1_types.rs new file mode 100644 index 000000000..df2375e54 --- /dev/null +++ b/lib/src/db/v1_types.rs @@ -0,0 +1,127 @@ +//! Copy of the old types needed for the migration to resources_v2. +//! These should never be used outside of the migration. + +use std::collections::{HashMap, HashSet}; + +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Serialize, Deserialize)] +pub enum ValueV1 { + AtomicUrl(String), + Date(String), + Integer(i64), + Float(f64), + Markdown(String), + ResourceArray(Vec), + Slug(String), + String(String), + Timestamp(i64), + NestedResource(SubResourceV1), + Resource(Box), + Boolean(bool), + Unsupported(crate::values::UnsupportedValue), +} + +#[derive(Debug, Serialize, Deserialize)] +pub enum SubResourceV1 { + Resource(Box), + Nested(PropValsV1), + Subject(String), +} + +pub type PropValsV1 = HashMap; + +#[derive(Debug, Serialize, Deserialize)] +pub struct QueryFilterV1 { + pub property: Option, + pub value: Option, + pub sort_by: Option, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct ResourceV1 { + propvals: PropValsV1, + subject: String, + commit: CommitBuilderV1, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct CommitBuilderV1 { + subject: String, + set: std::collections::HashMap, + push: std::collections::HashMap, + remove: HashSet, + destroy: bool, + previous_commit: Option, +} + +use std::fmt; + +impl fmt::Display for ValueV1 { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + ValueV1::AtomicUrl(s) => write!(f, "{}", s), + ValueV1::Date(s) => write!(f, "{}", s), + ValueV1::Integer(i) => write!(f, "{}", i), + ValueV1::Float(float) => write!(f, "{}", float), + ValueV1::Markdown(i) => write!(f, "{}", i), + ValueV1::ResourceArray(_) => write!(f, "not implemented"), + ValueV1::Slug(s) => write!(f, "{}", s), + ValueV1::String(s) => write!(f, "{}", s), + ValueV1::Timestamp(i) => write!(f, "{}", i), + ValueV1::Resource(_) => write!(f, "not implemented"), + ValueV1::NestedResource(n) => write!(f, "{:?}", n), + ValueV1::Boolean(b) => write!(f, "{}", b), + ValueV1::Unsupported(u) => write!(f, "{}", u.value), + } + } +} + +pub fn propvals_v1_to_v2(propvals: PropValsV1) -> crate::resources::PropVals { + propvals.into_iter().map(|(k, v)| (k, v.into())).collect() +} + +impl From for crate::values::SubResource { + fn from(sub_resource: SubResourceV1) -> Self { + match sub_resource { + SubResourceV1::Resource(_resource) => panic!("ResourceV1 is not supported"), + SubResourceV1::Nested(propvals) => Self::Nested(propvals_v1_to_v2(propvals)), + SubResourceV1::Subject(subject) => Self::Subject(subject), + } + } +} + +impl From for crate::resources::Resource { + fn from(resource: ResourceV1) -> Self { + Self::from_propvals(propvals_v1_to_v2(resource.propvals), resource.subject) + } +} + 
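// A minimal sketch of how the converters in this file combine with the new
// MessagePack encoding during the resources_v1 -> resources_v2 migration.
// `convert_record` and the `super::encoding` path are assumptions for
// illustration; `bincode1`, `PropValsV1` and `propvals_v1_to_v2` come from
// the surrounding patch.
fn convert_record(v1_bytes: &[u8]) -> crate::errors::AtomicResult<Vec<u8>> {
    // v1 records were written with bincode.
    let old: PropValsV1 = bincode1::deserialize(v1_bytes)
        .map_err(|e| format!("Failed to deserialize v1 propvals: {}", e))?;
    // Convert each ValueV1 into the current Value representation.
    let new = propvals_v1_to_v2(old);
    // Re-encode with MessagePack for the resources_v2 tree.
    super::encoding::encode_propvals(&new)
}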
+impl From for crate::values::Value { + fn from(value: ValueV1) -> Self { + match value { + crate::db::v1_types::ValueV1::AtomicUrl(v) => Self::AtomicUrl(v.clone()), + crate::db::v1_types::ValueV1::Date(v) => Self::Date(v.clone()), + crate::db::v1_types::ValueV1::Integer(v) => Self::Integer(v.clone()), + crate::db::v1_types::ValueV1::Float(v) => Self::Float(v.clone()), + crate::db::v1_types::ValueV1::Markdown(v) => Self::Markdown(v.clone()), + crate::db::v1_types::ValueV1::ResourceArray(sub_resource_v1s) => { + let sub_resources = sub_resource_v1s.into_iter().map(|v| v.into()).collect(); + Self::ResourceArray(sub_resources) + } + crate::db::v1_types::ValueV1::Slug(v) => Self::Slug(v.clone()), + crate::db::v1_types::ValueV1::String(v) => Self::String(v.clone()), + crate::db::v1_types::ValueV1::Timestamp(v) => Self::Timestamp(v.clone()), + crate::db::v1_types::ValueV1::NestedResource(sub_resource_v1) => { + Self::NestedResource(sub_resource_v1.into()) + } + crate::db::v1_types::ValueV1::Resource(_resource_v1) => { + panic!("ResourceV1 is not supported") + } + crate::db::v1_types::ValueV1::Boolean(v) => Self::Boolean(v), + crate::db::v1_types::ValueV1::Unsupported(unsupported_value) => { + Self::Unsupported(unsupported_value) + } + } + } +} diff --git a/lib/src/endpoints.rs b/lib/src/endpoints.rs index 859ad737d..9a0ef1efd 100644 --- a/lib/src/endpoints.rs +++ b/lib/src/endpoints.rs @@ -4,14 +4,15 @@ //! See https://docs.atomicdata.dev/endpoints.html or https://atomicdata.dev/classes/Endpoint use crate::{ - agents::ForAgent, errors::AtomicResult, plugins, urls, Db, Resource, Storelike, Value, + agents::ForAgent, errors::AtomicResult, storelike::ResourceResponse, urls, Db, Resource, + Storelike, Value, }; -/// The function that is called when a POST request matches the path -type HandleGet = fn(context: HandleGetContext) -> AtomicResult; - /// The function that is called when a GET request matches the path -type HandlePost = fn(context: HandlePostContext) -> AtomicResult; +type HandleGet = fn(context: HandleGetContext) -> AtomicResult; + +/// The function that is called when a POST request matches the path +type HandlePost = fn(context: HandlePostContext) -> AtomicResult; /// Passed to an Endpoint GET request handler. 
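// A sketch of a GET handler under the new signature: endpoint handlers now
// return a ResourceResponse instead of a bare Resource. The endpoint shown
// here is a placeholder, not part of the patch; imports are assumed.
fn handle_example_get(context: HandleGetContext) -> AtomicResult<ResourceResponse> {
    let HandleGetContext { subject, store, .. } = context;
    // Build a plain resource for the requested subject and wrap it in the
    // new response type.
    let mut resource = Resource::new(subject.to_string());
    resource.set_string(urls::NAME.into(), "example endpoint response", store)?;
    Ok(ResourceResponse::Resource(resource))
}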
#[derive(Debug)] @@ -72,22 +73,9 @@ impl Endpoint { )?; Ok(resource) } -} -pub fn default_endpoints() -> Vec { - vec![ - plugins::versioning::version_endpoint(), - plugins::versioning::all_versions_endpoint(), - plugins::path::path_endpoint(), - plugins::search::search_endpoint(), - plugins::files::upload_endpoint(), - plugins::files::download_endpoint(), - plugins::export::export_endpoint(), - #[cfg(feature = "html")] - plugins::bookmark::bookmark_endpoint(), - plugins::importer::import_endpoint(), - plugins::query::query_endpoint(), - #[cfg(debug_assertions)] - plugins::prunetests::prune_tests_endpoint(), - ] + pub fn to_resource_response(&self, store: &impl Storelike) -> AtomicResult { + let resource = self.to_resource(store)?; + Ok(resource.into()) + } } diff --git a/lib/src/errors.rs b/lib/src/errors.rs index abe324fa3..f7523a3bf 100644 --- a/lib/src/errors.rs +++ b/lib/src/errors.rs @@ -263,14 +263,3 @@ impl From for AtomicError { } } } - -#[cfg(feature = "db")] -impl From> for AtomicError { - fn from(error: Box) -> Self { - AtomicError { - message: error.to_string(), - subject: None, - error_type: AtomicErrorType::OtherError, - } - } -} diff --git a/lib/src/hierarchy.rs b/lib/src/hierarchy.rs index 0eb1f92d6..4c9d87ed5 100644 --- a/lib/src/hierarchy.rs +++ b/lib/src/hierarchy.rs @@ -4,7 +4,7 @@ use core::fmt; -use crate::{agents::ForAgent, errors::AtomicResult, storelike::Query, urls, Resource, Storelike}; +use crate::{agents::ForAgent, errors::AtomicResult, urls, Resource, Storelike}; #[derive(Debug)] pub enum Right { @@ -30,15 +30,6 @@ impl fmt::Display for Right { } } -/// Looks for children relations, adds to the resource. Performs a Query, might be expensive. -pub fn add_children(store: &impl Storelike, resource: &mut Resource) -> AtomicResult { - let results = store.query(&Query::new_prop_val(urls::PARENT, resource.get_subject()))?; - let mut children = results.subjects; - children.sort(); - resource.set(urls::CHILDREN.into(), children.into(), store)?; - Ok(resource.to_owned()) -} - /// Throws if not allowed. /// Returns string with explanation if allowed. pub fn check_write( diff --git a/lib/src/lib.rs b/lib/src/lib.rs index 4238fcb8e..e47af78e4 100644 --- a/lib/src/lib.rs +++ b/lib/src/lib.rs @@ -61,6 +61,8 @@ assert!(fetched_new_resource.get_shortname("description", &store).unwrap().to_st pub mod agents; pub mod atoms; pub mod authentication; +#[cfg(feature = "db")] +pub mod class_extender; pub mod client; pub mod collections; pub mod commit; diff --git a/lib/src/parse.rs b/lib/src/parse.rs index 08af5d4c9..fa84bbe0e 100644 --- a/lib/src/parse.rs +++ b/lib/src/parse.rs @@ -76,21 +76,7 @@ pub fn parse_json_ad_resource( parse_opts: &ParseOpts, ) -> AtomicResult { let json: Map = serde_json::from_str(string)?; - json_ad_object_to_resource(json, store, parse_opts) -} - -/// Parses a JSON-AD object, converts it to an Atomic Resource -#[tracing::instrument(skip(store))] -fn json_ad_object_to_resource( - json: Map, - store: &impl crate::Storelike, - parse_opts: &ParseOpts, -) -> AtomicResult { - match parse_json_ad_map_to_resource(json, store, parse_opts)? 
{ - SubResource::Resource(r) => Ok(*r), - SubResource::Nested(_) => Err("It's a nested Resource, no @id found".into()), - SubResource::Subject(_) => Err("It's a string, not a nested resource".into()), - } + parse_json_ad_map_to_resource(json, store, None, parse_opts) } fn object_is_property(object: &serde_json::Value) -> bool { @@ -186,7 +172,7 @@ pub fn parse_json_ad_string( for item in arr { match item { serde_json::Value::Object(obj) => { - let resource = json_ad_object_to_resource(obj, store, parse_opts) + let resource = parse_json_ad_map_to_resource(obj, store, None, parse_opts) .map_err(|e| format!("Unable to process resource in array. {}", e))?; vec.push(resource); } @@ -199,7 +185,7 @@ pub fn parse_json_ad_string( } } serde_json::Value::Object(obj) => vec.push( - json_ad_object_to_resource(obj, store, parse_opts) + parse_json_ad_map_to_resource(obj, store, None, parse_opts) .map_err(|e| format!("Unable to parse object. {}", e))?, ), _other => return Err("Root JSON element must be an object or array.".into()), @@ -221,50 +207,252 @@ pub fn parse_json_ad_commit_resource( .get(urls::SUBJECT) .ok_or("No subject field in Commit.")? .to_string(); + + // Incoming commits do not have an @id field, we generate that from the signature. let subject = format!("{}/commits/{}", store.get_server_url()?, signature); - let mut resource = Resource::new(subject); - let propvals = match parse_json_ad_map_to_resource(json, store, &ParseOpts::default())? { - SubResource::Resource(r) => r.into_propvals(), - SubResource::Nested(pv) => pv, - SubResource::Subject(_) => { - return Err("Commit resource is a string, should be a resource.".into()) + + let resource = + parse_json_ad_map_to_resource(json, store, Some(subject), &ParseOpts::default())?; + + Ok(resource) +} + +/// Converts a string to a URL (subject), check for localid +fn try_to_subject(subject: &str, prop: &str, parse_opts: &ParseOpts) -> AtomicResult { + if check_valid_url(subject).is_ok() { + Ok(subject.into()) + } else if let Some(importer) = &parse_opts.importer { + Ok(generate_id_from_local_id(importer, subject)) + } else { + Err(AtomicError::parse_error( + &format!("Unable to parse string as URL: {}", subject), + None, + Some(prop), + )) + } +} + +fn parse_anonymous_resource( + map: &Map, + subject: Option<&str>, + store: &impl crate::Storelike, + parse_opts: &ParseOpts, +) -> AtomicResult { + let mut propvals = PropVals::new(); + + for (prop, val) in map { + if prop == "@id" || prop == urls::LOCAL_ID { + return Err(AtomicError::parse_error( + "`@id` and `localId` are not allowed in anonymous resources", + subject.as_deref(), + Some(prop), + )); } - }; - for (prop, val) in propvals { - resource.set(prop, val, store)?; + + let (updated_key, atomic_val) = parse_propval(prop, val, subject, store, parse_opts)?; + propvals.insert(updated_key.to_string(), atomic_val); } - Ok(resource) + + Ok(propvals) +} + +fn parse_propval( + key: &str, + val: &serde_json::Value, + subject: Option<&str>, + store: &impl crate::Storelike, + parse_opts: &ParseOpts, +) -> AtomicResult<(String, Value)> { + let prop = try_to_subject(&key, &key, parse_opts)?; + let property = store.get_property(&prop)?; + + let atomic_val: Value = match property.data_type { + DataType::AtomicUrl => { + match val { + serde_json::Value::String(str) => { + // If the value is not a valid URL, and we have an importer, we can generate_id_from_local_id + let url = try_to_subject(&str, &prop, parse_opts)?; + Value::new(&url, &property.data_type)? 
+ } + serde_json::Value::Object(map) => { + let propvals = parse_anonymous_resource(&map, subject, store, parse_opts)?; + Value::NestedResource(SubResource::Nested(propvals)) + } + _ => { + return Err(AtomicError::parse_error( + "Invalid value for AtomicUrl, not a string or object", + subject.as_deref(), + Some(&prop), + )); + } + } + } + DataType::ResourceArray => { + let serde_json::Value::Array(array) = val else { + return Err(AtomicError::parse_error( + "Invalid value for ResourceArray, not an array", + subject.as_deref(), + Some(&prop), + )); + }; + + let mut newvec: Vec = Vec::new(); + for item in array { + match item { + serde_json::Value::String(str) => { + let url = try_to_subject(&str, &prop, parse_opts)?; + newvec.push(SubResource::Subject(url)) + } + // If it's an Object, it can be either an anonymous or a full resource. + serde_json::Value::Object(map) => { + let propvals = parse_anonymous_resource(&map, subject, store, parse_opts)?; + newvec.push(SubResource::Nested(propvals)) + } + err => { + return Err(AtomicError::parse_error( + &format!("Found non-string item in resource array: {err}."), + subject.as_deref(), + Some(&prop), + )) + } + } + } + Value::ResourceArray(newvec) + } + DataType::String => { + let serde_json::Value::String(str) = val else { + return Err(AtomicError::parse_error( + "Invalid value for String, not a string", + subject.as_deref(), + Some(&prop), + )); + }; + + Value::String(str.clone()) + } + DataType::Slug => { + let serde_json::Value::String(str) = val else { + return Err(AtomicError::parse_error( + "Invalid value for Slug, not a string", + subject.as_deref(), + Some(&prop), + )); + }; + + Value::new(&str, &DataType::Slug)? + } + DataType::Markdown => { + let serde_json::Value::String(str) = val else { + return Err(AtomicError::parse_error( + "Invalid value for Markdown, not a string", + subject.as_deref(), + Some(&prop), + )); + }; + + Value::new(&str, &DataType::Markdown)? + } + DataType::Uri => { + let serde_json::Value::String(str) = val else { + return Err(AtomicError::parse_error( + "Invalid value for URI, not a string", + subject.as_deref(), + Some(&prop), + )); + }; + + Value::new(&str, &DataType::Uri)? + } + DataType::Date => { + let serde_json::Value::String(str) = val else { + return Err(AtomicError::parse_error( + "Invalid value for Date, not a string", + subject.as_deref(), + Some(&prop), + )); + }; + + Value::new(&str, &DataType::Date)? + } + DataType::Boolean => { + let serde_json::Value::Bool(bool) = val else { + return Err(AtomicError::parse_error( + "Invalid value for Boolean, not a boolean", + subject.as_deref(), + Some(&prop), + )); + }; + + Value::new(&bool.to_string(), &DataType::Boolean)? + } + DataType::Integer => { + let serde_json::Value::Number(num) = val else { + return Err(AtomicError::parse_error( + "Invalid value for Integer, not a number", + subject.as_deref(), + Some(&prop), + )); + }; + + Value::new(&num.to_string(), &DataType::Integer)? + } + DataType::Float => { + let serde_json::Value::Number(num) = val else { + return Err(AtomicError::parse_error( + "Invalid value for Float, not a number", + subject.as_deref(), + Some(&prop), + )); + }; + + Value::new(&num.to_string(), &DataType::Float)? + } + DataType::Timestamp => { + let serde_json::Value::Number(num) = val else { + return Err(AtomicError::parse_error( + "Invalid value for Timestamp, not a string", + subject.as_deref(), + Some(&prop), + )); + }; + + Value::new(&num.to_string(), &DataType::Timestamp)? 
+ } + DataType::JSON => Value::JSON(val.clone()), + DataType::Unsupported(s) => { + return Err(AtomicError::parse_error( + &format!("Unsupported datatype: {s}"), + subject.as_deref(), + Some(&prop), + )); + } + }; + + Ok((prop, atomic_val)) } /// Parse a single Json AD string, convert to Atoms -/// Does not match all props to datatypes, so it could result in invalid data. /// Adds to the store if `add` is true. #[tracing::instrument(skip(store))] fn parse_json_ad_map_to_resource( json: Map, store: &impl crate::Storelike, + overwrite_subject: Option, parse_opts: &ParseOpts, -) -> AtomicResult { +) -> AtomicResult { let mut propvals = PropVals::new(); - let mut subject: Option = None; - - // Converts a string to a URL (subject), check for localid - let try_to_subject = |s: &str, prop: &str| -> AtomicResult { - if check_valid_url(s).is_ok() { - Ok(s.into()) - } else if let Some(importer) = &parse_opts.importer { - Ok(generate_id_from_local_id(importer, s)) - } else { - Err(AtomicError::parse_error( - &format!("Unable to parse string as URL: {}", s), - None, - Some(prop), - )) - } - }; + let mut subject = overwrite_subject.clone(); - for (mut prop, val) in json { + for (prop, val) in json { if prop == "@id" { + if overwrite_subject.is_some() { + return Err(AtomicError::parse_error( + "`@id` is not allowed in a resource with server generated subject.", + subject.as_deref(), + Some(&prop), + )); + } + subject = if let serde_json::Value::String(s) = val { check_valid_url(&s).map_err(|e| { AtomicError::parse_error( @@ -282,91 +470,42 @@ fn parse_json_ad_map_to_resource( )); }; continue; - } - - prop = try_to_subject(&prop, &prop)?; - - let atomic_val = match val { - serde_json::Value::Null => { + } else if prop == urls::LOCAL_ID && parse_opts.importer.is_some() { + if overwrite_subject.is_some() { return Err(AtomicError::parse_error( - "Null not allowed in JSON-AD", + "`@id` is not allowed in a resource with server generated subject.", subject.as_deref(), Some(&prop), )); } - serde_json::Value::Bool(bool) => Value::Boolean(bool), - serde_json::Value::Number(num) => { - let property = store.get_property(&prop)?; - // Also converts numbers to strings, not sure what to think about this. - // Does not result in invalid atomic data, but does allow for weird inputs - Value::new(&num.to_string(), &property.data_type)? - } - serde_json::Value::String(str) => { - // LocalIDs are mapped to @ids by appending the `localId` to the `importer`'s `parent`. - if prop == urls::LOCAL_ID && parse_opts.importer.is_some() { - let parent = parse_opts.importer.as_ref() - .ok_or_else(|| AtomicError::parse_error( - "Encountered `localId`, which means we need a `parent` in the parsing options.", - subject.as_deref(), - Some(&prop), - ))?; - subject = Some(generate_id_from_local_id(parent, &str)); - } - let property = store.get_property(&prop).map_err(|e| { - AtomicError::parse_error( - &format!("Unable to find property {prop}: {e}"), - subject.as_deref(), - Some(&prop), - ) - })?; - match property.data_type { - DataType::AtomicUrl => { - // If the value is not a valid URL, and we have an importer, we can generate_id_from_local_id - let url = try_to_subject(&str, &prop)?; - Value::new(&url, &property.data_type)? - } - other => Value::new(&str.to_string(), &other).map_err(|e| { - AtomicError::parse_error( - &format!("Unable to parse value for prop {prop}: {e}. Value: {str}"), - subject.as_deref(), - Some(&prop), - ) - })?, - } - } - // In Atomic Data, all arrays are Resource Arrays which are serialized JSON things. 
- // Maybe this step could be simplified? Just serialize to string? - serde_json::Value::Array(arr) => { - let mut newvec: Vec = Vec::new(); - for v in arr { - match v { - serde_json::Value::String(str) => { - let url = try_to_subject(&str, &prop)?; - newvec.push(SubResource::Subject(url)) - } - // If it's an Object, it can be either an anonymous or a full resource. - serde_json::Value::Object(map) => { - let propvals = parse_json_ad_map_to_resource(map, store, parse_opts)?; - newvec.push(propvals) - } - err => { - return Err(AtomicError::parse_error( - &format!("Found non-string item in resource array: {err}."), - subject.as_deref(), - Some(&prop), - )) - } - } - } - Value::ResourceArray(newvec) - } - serde_json::Value::Object(map) => { - Value::NestedResource(parse_json_ad_map_to_resource(map, store, parse_opts)?) - } - }; + // If the property is a localId we need to set to generate a subject and update the subject value. + let serde_json::Value::String(local_id) = val else { + return Err(AtomicError::parse_error( + "`localId` must be a string", + Some(&val.to_string()), + Some(&prop), + )); + }; + + let parent = parse_opts.importer.as_ref().ok_or_else(|| { + AtomicError::parse_error( + "Encountered `localId`, which means we need a `parent` in the parsing options.", + subject.as_deref(), + Some(&prop), + ) + })?; + + subject = Some(generate_id_from_local_id(parent, &local_id)); + + continue; + } + + let (new_key, atomic_val) = + parse_propval(&prop, &val, subject.as_deref(), store, parse_opts)?; + // Some of these values are _not correctly matched_ to the datatype. - propvals.insert(prop, atomic_val); + propvals.insert(new_key, atomic_val); } // if there is no parent set, we set it to the Importer if let Some(importer) = &parse_opts.importer { @@ -374,65 +513,72 @@ fn parse_json_ad_map_to_resource( propvals.insert(urls::PARENT.into(), Value::AtomicUrl(importer.into())); } } - if let Some(subj) = { subject } { - let r = match &parse_opts.save { - SaveOpts::DontSave => { - let mut r = Resource::new(subj); - r.set_propvals_unsafe(propvals); - r - } - SaveOpts::Save => { - let mut r = Resource::new(subj); - r.set_propvals_unsafe(propvals); - store.add_resource(&r)?; - r - } - SaveOpts::Commit => { - let mut r = if let Ok(orig) = store.get_resource(&subj) { - // If the resource already exists, and overwrites outside are not permitted, and it does not have the importer as parent... - // Then we throw! - // Because this would enable malicious users to overwrite resources that they shouldn't. - if !parse_opts.overwrite_outside { - let importer = parse_opts.importer.as_deref().unwrap(); - if !orig.has_parent(store, importer) { - Err( + + // If there is no subject, we return the propvals as a nested resource + let Some(subj) = subject else { + return Err(AtomicError::parse_error( + "No @id or localId found in resource", + None, + None, + )); + }; + + let r = match &parse_opts.save { + SaveOpts::DontSave => { + let mut r = Resource::new(subj); + r.set_propvals_unsafe(propvals); + r + } + SaveOpts::Save => { + let mut r = Resource::new(subj); + r.set_propvals_unsafe(propvals); + store.add_resource(&r)?; + r + } + SaveOpts::Commit => { + let mut r = if let Ok(orig) = store.get_resource(&subj) { + // If the resource already exists, and overwrites outside are not permitted, and it does not have the importer as parent... + // Then we throw! + // Because this would enable malicious users to overwrite resources that they shouldn't. 
+ if !parse_opts.overwrite_outside { + let importer = parse_opts.importer.as_deref().unwrap(); + if !orig.has_parent(store, importer) { + Err( format!("Cannot overwrite {subj} outside of importer! Enable `overwrite_outside`"), )? - } - }; - orig - } else { - Resource::new(subj) - }; - for (prop, val) in propvals { - r.set(prop, val, store)?; - } - let signer = parse_opts - .signer - .clone() - .ok_or("No agent to sign Commit with. Either pass a `for_agent` or ")?; - let commit = r.get_commit_builder().clone().sign(&signer, store, &r)?; - let opts = CommitOpts { - validate_schema: true, - validate_signature: true, - validate_timestamp: false, - validate_rights: parse_opts.for_agent != ForAgent::Sudo, - validate_previous_commit: false, - validate_for_agent: Some(parse_opts.for_agent.to_string()), - update_index: true, + } }; - - store - .apply_commit(commit, &opts) - .map_err(|e| format!("Failed to save {}: {}", r.get_subject(), e))? - .resource_new - .unwrap() + orig + } else { + Resource::new(subj) + }; + for (prop, val) in propvals { + r.set(prop, val, store)?; } - }; - Ok(r.into()) - } else { - Ok(SubResource::Nested(propvals)) - } + let signer = parse_opts + .signer + .clone() + .ok_or("No agent to sign Commit with. Either pass a `for_agent` or ")?; + let commit = r.get_commit_builder().clone().sign(&signer, store, &r)?; + + let opts = CommitOpts { + validate_schema: true, + validate_signature: true, + validate_timestamp: false, + validate_rights: parse_opts.for_agent != ForAgent::Sudo, + validate_previous_commit: false, + validate_for_agent: Some(parse_opts.for_agent.to_string()), + update_index: true, + }; + + store + .apply_commit(commit, &opts) + .map_err(|e| format!("Failed to save {}: {}", r.get_subject(), e))? + .resource_new + .unwrap() + } + }; + Ok(r.into()) } fn generate_id_from_local_id(importer_subject: &str, local_id: &str) -> String { @@ -484,7 +630,7 @@ mod test { #[test] // This test should actually fail, I think, because the datatype should match the property. 
- // #[should_panic(expected = "Datatype")] + #[should_panic(expected = "Invalid value for Markdown")] fn parse_and_serialize_json_ad_wrong_datatype_int_to_str() { let store = crate::Store::init().unwrap(); store.populate().unwrap(); @@ -536,29 +682,7 @@ mod test { } #[test] - fn parse_nested_resource_map_roundtrip() { - let store = crate::Store::init().unwrap(); - store.populate().unwrap(); - - let json = r#"{ - "@id": "https://atomicdata.dev/thingWithNestedMaps", - "https://atomicdata.dev/properties/classtype": "https://atomicdata.dev/linkedThing", - "https://atomicdata.dev/properties/datatype": { - "https://atomicdata.dev/properties/name": "Anonymous nested resource" - }, - "https://atomicdata.dev/properties/parent": { - "@id": "https://atomicdata.dev/nestedThing", - "https://atomicdata.dev/properties/name": "Named Nested Resource" - } - }"#; - let parsed = parse_json_ad_resource(json, &store, &ParseOpts::default()).unwrap(); - let serialized = parsed.to_json_ad().unwrap(); - println!("{}", serialized); - assert_eq!(json.replace(' ', ""), serialized.replace(' ', "")); - } - - #[test] - fn parse_nested_resource_array() { + fn parser_should_error_when_encountering_nested_resource() { let store = crate::Store::init().unwrap(); store.populate().unwrap(); @@ -575,18 +699,8 @@ mod test { "https://atomicdata.dev/classes/ThirdThing" ] }"#; - let parsed = parse_json_ad_resource(json, &store, &ParseOpts::default()).unwrap(); - let members = parsed - .get(urls::COLLECTION_MEMBERS) - .unwrap() - .to_subjects(Some("https://atomicdata.dev/classes https://atomicdata.dev/properties/collection/members".into())) - .unwrap(); - let should_be = vec![ - "https://atomicdata.dev/classes/FirstThing", - "https://atomicdata.dev/classes https://atomicdata.dev/properties/collection/members 1", - "https://atomicdata.dev/classes/ThirdThing", - ]; - assert_eq!(members, should_be); + let parsed = parse_json_ad_resource(json, &store, &ParseOpts::default()); + assert!(parsed.is_err(), "Subresource with @id should have errored"); } fn create_store_and_importer() -> (crate::Store, String) { @@ -626,7 +740,52 @@ mod test { let found = store.get_resource(&imported_subject).unwrap(); println!("{:?}", found); assert_eq!(found.get(urls::NAME).unwrap().to_string(), "My resource"); - assert_eq!(found.get(urls::LOCAL_ID).unwrap().to_string(), local_id); + + // LocalId should be removed from the imported resource + assert_eq!(found.get(urls::LOCAL_ID).is_err(), true); + } + #[test] + fn import_resource_with_json() { + let (store, importer) = create_store_and_importer(); + + let local_id = "my-local-id"; + + let json = r#" + [ + { + "@id": "http://localhost:9883/01k06n9cz8r8vsdehh4btz8tdk", + "https://atomicdata.dev/properties/datatype": "https://atomicdata.dev/datatypes/json", + "https://atomicdata.dev/properties/description": "Een prop met een json value", + "https://atomicdata.dev/properties/isA": [ + "https://atomicdata.dev/classes/Property" + ], + "https://atomicdata.dev/properties/shortname": "nieuwe-json-prop" + }, { + "https://atomicdata.dev/properties/localId": "my-local-id", + "https://atomicdata.dev/properties/name": "My resource", + "http://localhost:9883/01k06n9cz8r8vsdehh4btz8tdk": { + "wat": "patat" + } + } + ]"#; + + let parse_opts = ParseOpts { + save: SaveOpts::Commit, + signer: Some(store.get_default_agent().unwrap()), + for_agent: ForAgent::Sudo, + overwrite_outside: false, + importer: Some(importer.clone()), + }; + + store.import(json, &parse_opts).unwrap(); + + let imported_subject = 
generate_id_from_local_id(&importer, local_id); + + let found = store.get_resource(&imported_subject).unwrap(); + assert_eq!(found.get(urls::NAME).unwrap().to_string(), "My resource"); + + // LocalId should be removed from the imported resource + assert_eq!(found.get(urls::LOCAL_ID).is_err(), true); } #[test] @@ -739,7 +898,7 @@ mod test { let json = r#"[ { "https://atomicdata.dev/properties/localId": "test1", - "newprop": "val" + "https://atomicdata.dev/properties/name": "val" }, { "https://atomicdata.dev/properties/localId": "test2" diff --git a/lib/src/plugins/bookmark.rs b/lib/src/plugins/bookmark.rs index ffe6861cf..543516193 100644 --- a/lib/src/plugins/bookmark.rs +++ b/lib/src/plugins/bookmark.rs @@ -14,6 +14,7 @@ use crate::{ client::fetch_body, endpoints::{Endpoint, HandleGetContext}, errors::AtomicResult, + storelike::ResourceResponse, urls, values::Value, AtomicError, Resource, @@ -32,7 +33,7 @@ pub fn bookmark_endpoint() -> Endpoint { } } -fn handle_bookmark_request(context: HandleGetContext) -> AtomicResult { +fn handle_bookmark_request(context: HandleGetContext) -> AtomicResult { let HandleGetContext { subject, store, @@ -54,7 +55,7 @@ fn handle_bookmark_request(context: HandleGetContext) -> AtomicResult let (name, path) = match (name, path) { (Some(name), Some(path)) => (name, path), - _ => return bookmark_endpoint().to_resource(store), + _ => return bookmark_endpoint().to_resource_response(store), }; let mut resource = Resource::new(subject.to_string()); @@ -91,7 +92,7 @@ fn handle_bookmark_request(context: HandleGetContext) -> AtomicResult resource.set(urls::PREVIEW.into(), Value::Markdown(md.into()), store)?; - Ok(resource) + Ok(ResourceResponse::Resource(resource)) } fn fetch_data(url: &str) -> AtomicResult { diff --git a/lib/src/plugins/chatroom.rs b/lib/src/plugins/chatroom.rs index 45352abb7..a79985027 100644 --- a/lib/src/plugins/chatroom.rs +++ b/lib/src/plugins/chatroom.rs @@ -5,22 +5,26 @@ They list a bunch of Messages. */ use crate::{ - agents::ForAgent, + class_extender::{ClassExtender, CommitExtenderContext, GetExtenderContext}, commit::{CommitBuilder, CommitOpts}, errors::AtomicResult, - storelike::Query, + storelike::{Query, QueryResult, ResourceResponse}, urls::{self, PARENT}, - utils, Resource, Storelike, Value, + utils, + values::SubResource, + Storelike, Value, }; // Find the messages for the ChatRoom -#[tracing::instrument(skip(store))] -pub fn construct_chatroom( - store: &impl Storelike, - url: url::Url, - resource: &mut Resource, - for_agent: &ForAgent, -) -> AtomicResult { +#[tracing::instrument(skip(context))] +pub fn construct_chatroom(context: GetExtenderContext) -> AtomicResult { + let GetExtenderContext { + store, + url, + db_resource: resource, + for_agent, + } = context; + // TODO: From range let mut start_val = utils::now(); for (k, v) in url.query_pairs() { @@ -47,11 +51,15 @@ pub fn construct_chatroom( for_agent: for_agent.clone(), }; - let mut messages_unfiltered = store.query(&query_children)?.resources; + let QueryResult { + mut subjects, + resources, + count, + } = store.query(&query_children)?; // An attempt at creating a `next_page` URL on the server. But to be honest, it's probably better to do this in the front-end. - if messages_unfiltered.len() > page_limit { - let last_subject = messages_unfiltered + if count > page_limit { + let last_subject = resources .last() .ok_or("There are more messages than the page limit")? 
.get_subject(); @@ -69,23 +77,29 @@ pub fn construct_chatroom( } // Clients expect messages to appear from old to new - messages_unfiltered.reverse(); + subjects.reverse(); + + resource.set(urls::MESSAGES.into(), subjects.into(), store)?; - resource.set(urls::MESSAGES.into(), messages_unfiltered.into(), store)?; - Ok(resource.to_owned()) + Ok(ResourceResponse::ResourceWithReferenced( + resource.to_owned(), + resources, + )) } /// Update the ChatRoom with the new message, make sure this is sent to all Subscribers -#[tracing::instrument(skip(store))] -pub fn after_apply_commit_message( - store: &impl Storelike, - _commit: &crate::Commit, - resource_new: &Resource, -) -> AtomicResult<()> { +#[tracing::instrument(skip(context))] +pub fn after_apply_commit_message(context: CommitExtenderContext) -> AtomicResult<()> { + let CommitExtenderContext { + store, + commit: applied_commit, + resource, + } = context; + // only update the ChatRoom for _new_ messages, not for edits - if _commit.previous_commit.is_none() { + if applied_commit.previous_commit.is_none() { // Get the related ChatRoom - let parent_subject = resource_new + let parent_subject = resource .get(urls::PARENT) .map_err(|_e| "Message must have a Parent!")? .to_string(); @@ -98,9 +112,14 @@ pub fn after_apply_commit_message( let chat_room = store.get_resource(&parent_subject)?; let mut commit_builder = CommitBuilder::new(parent_subject); - let new_message = crate::values::SubResource::Resource(Box::new(resource_new.to_owned())); - commit_builder.push_propval(urls::MESSAGES, new_message)?; + + commit_builder.push_propval( + urls::MESSAGES, + SubResource::Subject(resource.get_subject().to_string()), + )?; + let commit = commit_builder.sign(&store.get_default_agent()?, store, &chat_room)?; + let resp = commit.validate_and_build_response(&CommitOpts::no_validations_no_index(), store)?; @@ -108,3 +127,21 @@ pub fn after_apply_commit_message( } Ok(()) } + +pub fn build_chatroom_extender() -> ClassExtender { + ClassExtender { + class: urls::CHATROOM.to_string(), + on_resource_get: Some(construct_chatroom), + before_commit: None, + after_commit: None, + } +} + +pub fn build_message_extender() -> ClassExtender { + ClassExtender { + class: urls::MESSAGE.to_string(), + on_resource_get: None, + before_commit: None, + after_commit: Some(after_apply_commit_message), + } +} diff --git a/lib/src/plugins/collections.rs b/lib/src/plugins/collections.rs new file mode 100644 index 000000000..0fa1d9e06 --- /dev/null +++ b/lib/src/plugins/collections.rs @@ -0,0 +1,24 @@ +use crate::{ + class_extender::{ClassExtender, GetExtenderContext}, + collections::construct_collection_from_params, + errors::AtomicResult, + storelike::ResourceResponse, + urls, +}; + +pub fn build_collection_extender() -> ClassExtender { + ClassExtender { + class: urls::COLLECTION.to_string(), + on_resource_get: Some(|context| -> AtomicResult { + let GetExtenderContext { + store, + url, + db_resource: resource, + for_agent, + } = context; + construct_collection_from_params(store, url.query_pairs(), resource, for_agent) + }), + before_commit: None, + after_commit: None, + } +} diff --git a/lib/src/plugins/importer.rs b/lib/src/plugins/importer.rs index e333e9295..625912080 100644 --- a/lib/src/plugins/importer.rs +++ b/lib/src/plugins/importer.rs @@ -4,9 +4,10 @@ Importers allow users to (periodically) import JSON-AD files from a remote sourc use crate::{ agents::ForAgent, - endpoints::{Endpoint, HandlePostContext}, + endpoints::{Endpoint, HandleGetContext, HandlePostContext}, 
errors::AtomicResult, - urls, Resource, Storelike, + storelike::ResourceResponse, + urls, Storelike, }; pub fn import_endpoint() -> Endpoint { @@ -25,9 +26,13 @@ pub fn import_endpoint() -> Endpoint { } } +pub fn handle_get(context: HandleGetContext) -> AtomicResult { + import_endpoint().to_resource_response(context.store) +} + /// When an importer is shown, we list a bunch of Parameters and a list of previously imported items. #[tracing::instrument] -pub fn handle_post(context: HandlePostContext) -> AtomicResult { +pub fn handle_post(context: HandlePostContext) -> AtomicResult { let HandlePostContext { store, body, @@ -89,5 +94,5 @@ pub fn handle_post(context: HandlePostContext) -> AtomicResult { ); } - import_endpoint().to_resource(context.store) + import_endpoint().to_resource_response(context.store) } diff --git a/lib/src/plugins/invite.rs b/lib/src/plugins/invite.rs index a7b95c1b3..89a7df3a1 100644 --- a/lib/src/plugins/invite.rs +++ b/lib/src/plugins/invite.rs @@ -1,21 +1,26 @@ use crate::{ - agents::{Agent, ForAgent}, + agents::Agent, + class_extender::{ClassExtender, CommitExtenderContext, GetExtenderContext}, errors::AtomicResult, + storelike::ResourceResponse, urls, utils::check_valid_url, Resource, Storelike, Value, }; /// If there is a valid Agent in the correct query param, and the invite is valid, update the rights and respond with a redirect to the target resource -#[tracing::instrument(skip(store, query_params))] -pub fn construct_invite_redirect( - store: &impl Storelike, - query_params: url::form_urlencoded::Parse, - invite_resource: &mut Resource, - // Not used for invite redirects, invites are always public - for_agent: &ForAgent, -) -> AtomicResult { - let requested_subject = invite_resource.get_subject().to_string(); +#[tracing::instrument(skip(context))] +pub fn construct_invite_redirect(context: GetExtenderContext) -> AtomicResult { + let GetExtenderContext { + store, + url, + db_resource, + for_agent: _, + } = context; + + let query_params = url.query_pairs(); + + let requested_subject = db_resource.get_subject().to_string(); let mut pub_key = None; let mut invite_agent = None; for (k, v) in query_params { @@ -28,7 +33,7 @@ pub fn construct_invite_redirect( // Check if there is either a publicKey or an Agent present in the request. Either one is needed to continue accepting the invite. let agent = match (pub_key, invite_agent) { - (None, None) => return Ok(invite_resource.to_owned()), + (None, None) => return Ok(db_resource.to_owned().into()), (None, Some(agent_url)) => agent_url, (Some(public_key), None) => { let new_agent = Agent::new_from_public_key(store, &public_key)?; @@ -51,18 +56,18 @@ pub fn construct_invite_redirect( }; // If there are write or read rights - let write = if let Ok(bool) = invite_resource.get(urls::WRITE_BOOL) { + let write = if let Ok(bool) = db_resource.get(urls::WRITE_BOOL) { bool.to_bool()? } else { false }; - let target = &invite_resource + let target = &db_resource .get(urls::TARGET) .map_err(|e| { format!( "Invite {} does not have a target. {}", - invite_resource.get_subject(), + db_resource.get_subject(), e ) })? @@ -73,7 +78,7 @@ pub fn construct_invite_redirect( .map_err(|_| format!("Target for invite does not exist: {}", target))?; // If any usages left value is present, make sure it's a positive number and decrement it by 1. 
- if let Ok(usages_left) = invite_resource.get(urls::USAGES_LEFT) { + if let Ok(usages_left) = db_resource.get(urls::USAGES_LEFT) { let num = usages_left.to_int()?; if num == 0 { return Err("No usages left for this invite".into()); @@ -81,14 +86,15 @@ pub fn construct_invite_redirect( // Since the requested subject might have query params, we don't want to overwrite that one - we want to overwrite the clean resource. let mut url = url::Url::parse(&requested_subject)?; url.set_query(None); - invite_resource.set_subject(url.to_string()); - invite_resource.set(urls::USAGES_LEFT.into(), Value::Integer(num - 1), store)?; - invite_resource + + db_resource.set_subject(url.to_string()); + db_resource.set(urls::USAGES_LEFT.into(), Value::Integer(num - 1), store)?; + db_resource .save_locally(store) .map_err(|e| format!("Unable to save updated Invite. {}", e))?; } - if let Ok(expires) = invite_resource.get(urls::EXPIRES_AT) { + if let Ok(expires) = db_resource.get(urls::EXPIRES_AT) { if expires.to_int()? > crate::utils::now() { return Err("Invite is no longer valid".into()); } @@ -110,7 +116,7 @@ pub fn construct_invite_redirect( let mut redirect = Resource::new_instance(urls::REDIRECT, store)?; redirect.set( urls::DESTINATION.into(), - invite_resource.get(urls::TARGET)?.to_owned(), + db_resource.get(urls::TARGET)?.to_owned(), store, )?; redirect.set( @@ -120,7 +126,7 @@ pub fn construct_invite_redirect( )?; // The front-end requires the @id to be the same as requested redirect.set_subject(requested_subject); - Ok(redirect) + Ok(redirect.into()) } /// Adds the requested rights to the target resource. @@ -148,15 +154,28 @@ pub fn add_rights( } /// Check if the creator has rights to invite people (= write) to the target resource -pub fn before_apply_commit( - store: &impl Storelike, - commit: &crate::Commit, - resource_new: &Resource, -) -> AtomicResult<()> { - let target = resource_new +pub fn before_apply_commit(context: CommitExtenderContext) -> AtomicResult<()> { + let CommitExtenderContext { + store, + commit, + resource, + } = context; + + let target = resource .get(urls::TARGET) .map_err(|_e| "Invite does not have required Target attribute")?; + let target_resource = store.get_resource(&target.to_string())?; + crate::hierarchy::check_write(store, &target_resource, &commit.signer.clone().into())?; Ok(()) } + +pub fn build_invite_extender() -> ClassExtender { + ClassExtender { + class: urls::INVITE.to_string(), + on_resource_get: Some(construct_invite_redirect), + before_commit: Some(before_apply_commit), + after_commit: None, + } +} diff --git a/lib/src/plugins/mod.rs b/lib/src/plugins/mod.rs index 9f383bf51..626a103e8 100644 --- a/lib/src/plugins/mod.rs +++ b/lib/src/plugins/mod.rs @@ -41,9 +41,11 @@ pub mod invite; // Endpoints #[cfg(feature = "html")] pub mod bookmark; +pub mod collections; pub mod export; pub mod files; pub mod path; +pub mod plugins; pub mod prunetests; pub mod query; pub mod search; diff --git a/lib/src/plugins/path.rs b/lib/src/plugins/path.rs index 8f06f0922..252f290bc 100644 --- a/lib/src/plugins/path.rs +++ b/lib/src/plugins/path.rs @@ -1,6 +1,7 @@ use crate::{ endpoints::{Endpoint, HandleGetContext}, errors::AtomicResult, + storelike::ResourceResponse, urls, Resource, Storelike, }; @@ -16,7 +17,7 @@ pub fn path_endpoint() -> Endpoint { } #[tracing::instrument] -fn handle_path_request(context: HandleGetContext) -> AtomicResult { +fn handle_path_request(context: HandleGetContext) -> AtomicResult { let HandleGetContext { store, for_agent, @@ -30,7 +31,7 @@ fn 
handle_path_request(context: HandleGetContext) -> AtomicResult { }; } if path.is_none() { - return path_endpoint().to_resource(store); + return path_endpoint().to_resource_response(store); } let result = store.get_path(&path.unwrap(), None, for_agent)?; match result { @@ -42,7 +43,8 @@ fn handle_path_request(context: HandleGetContext) -> AtomicResult { resource.set_string(urls::ATOM_SUBJECT.into(), &atom.subject, store)?; resource.set_string(urls::ATOM_PROPERTY.into(), &atom.property, store)?; resource.set_string(urls::ATOM_VALUE.into(), &atom.value.to_string(), store)?; - Ok(resource) + + Ok(ResourceResponse::Resource(resource)) } } } diff --git a/lib/src/plugins/plugins.rs b/lib/src/plugins/plugins.rs new file mode 100644 index 000000000..e93519805 --- /dev/null +++ b/lib/src/plugins/plugins.rs @@ -0,0 +1,28 @@ +use crate::{class_extender::ClassExtender, endpoints::Endpoint}; + +pub fn default_class_extenders() -> Vec { + vec![ + crate::plugins::collections::build_collection_extender(), + crate::plugins::invite::build_invite_extender(), + crate::plugins::chatroom::build_chatroom_extender(), + crate::plugins::chatroom::build_message_extender(), + ] +} + +pub fn default_endpoints() -> Vec { + vec![ + crate::plugins::versioning::version_endpoint(), + crate::plugins::versioning::all_versions_endpoint(), + crate::plugins::path::path_endpoint(), + crate::plugins::search::search_endpoint(), + crate::plugins::files::upload_endpoint(), + crate::plugins::files::download_endpoint(), + crate::plugins::export::export_endpoint(), + #[cfg(feature = "html")] + crate::plugins::bookmark::bookmark_endpoint(), + crate::plugins::importer::import_endpoint(), + crate::plugins::query::query_endpoint(), + #[cfg(debug_assertions)] + crate::plugins::prunetests::prune_tests_endpoint(), + ] +} diff --git a/lib/src/plugins/prunetests.rs b/lib/src/plugins/prunetests.rs index bc93c153f..2cb42102a 100644 --- a/lib/src/plugins/prunetests.rs +++ b/lib/src/plugins/prunetests.rs @@ -3,7 +3,7 @@ use tracing::info; use crate::{ endpoints::{Endpoint, HandleGetContext, HandlePostContext}, errors::AtomicResult, - storelike::Query, + storelike::{Query, ResourceResponse}, urls, Resource, Storelike, Value, }; @@ -18,12 +18,12 @@ pub fn prune_tests_endpoint() -> Endpoint { } } -pub fn handle_get(context: HandleGetContext) -> AtomicResult { - prune_tests_endpoint().to_resource(context.store) +pub fn handle_get(context: HandleGetContext) -> AtomicResult { + prune_tests_endpoint().to_resource_response(context.store) } // Delete all drives with 'testdrive-' in their name. (These drive are generated with each e2e test run) -fn handle_prune_tests_request(context: HandlePostContext) -> AtomicResult { +fn handle_prune_tests_request(context: HandlePostContext) -> AtomicResult { let HandlePostContext { store, .. 
} = context; let mut query = Query::new_class(urls::DRIVE); @@ -59,7 +59,8 @@ fn handle_prune_tests_request(context: HandlePostContext) -> AtomicResult AtomicResult { diff --git a/lib/src/plugins/query.rs b/lib/src/plugins/query.rs index 0481a3ca7..116ccceb4 100644 --- a/lib/src/plugins/query.rs +++ b/lib/src/plugins/query.rs @@ -1,6 +1,7 @@ use crate::{ endpoints::{Endpoint, HandleGetContext}, errors::AtomicResult, + storelike::ResourceResponse, urls, Resource, }; @@ -26,7 +27,7 @@ pub fn query_endpoint() -> Endpoint { } } -fn handle_query_request(context: HandleGetContext) -> AtomicResult { +fn handle_query_request(context: HandleGetContext) -> AtomicResult { let HandleGetContext { subject, store, @@ -34,13 +35,16 @@ fn handle_query_request(context: HandleGetContext) -> AtomicResult { } = context; if subject.query_pairs().into_iter().next().is_none() { - return query_endpoint().to_resource(store); + return query_endpoint().to_resource_response(store); } + let mut resource = Resource::new(subject.to_string()); - crate::collections::construct_collection_from_params( + let collection_resource_response = crate::collections::construct_collection_from_params( store, subject.query_pairs(), &mut resource, for_agent, - ) + )?; + + Ok(collection_resource_response) } diff --git a/lib/src/plugins/search.rs b/lib/src/plugins/search.rs index ce3cf8a8a..8bb2e4b61 100644 --- a/lib/src/plugins/search.rs +++ b/lib/src/plugins/search.rs @@ -1,7 +1,8 @@ use crate::{ endpoints::{Endpoint, HandleGetContext}, errors::AtomicResult, - urls, Resource, + storelike::ResourceResponse, + urls, }; // Note that the actual logic of this endpoint resides in `atomic-server`, as it depends on the Actix runtime. @@ -20,7 +21,7 @@ pub fn search_endpoint() -> Endpoint { } } -fn handle_search(context: HandleGetContext) -> AtomicResult { +fn handle_search(context: HandleGetContext) -> AtomicResult { let HandleGetContext { subject, store, @@ -28,7 +29,7 @@ fn handle_search(context: HandleGetContext) -> AtomicResult { } = context; let params = subject.query_pairs(); if params.into_iter().next().is_none() { - return search_endpoint().to_resource(store); + return search_endpoint().to_resource_response(store); } return Err( "Search endpoint is only available through HTTP requests, not through webhooks".into(), diff --git a/lib/src/plugins/versioning.rs b/lib/src/plugins/versioning.rs index 20692a403..52d27243d 100644 --- a/lib/src/plugins/versioning.rs +++ b/lib/src/plugins/versioning.rs @@ -5,7 +5,7 @@ use crate::{ collections::CollectionBuilder, endpoints::{Endpoint, HandleGetContext}, errors::AtomicResult, - storelike::Query, + storelike::{Query, ResourceResponse}, urls, AtomicError, Commit, Resource, Storelike, }; @@ -33,7 +33,7 @@ pub fn all_versions_endpoint() -> Endpoint { } #[tracing::instrument] -fn handle_version_request(context: HandleGetContext) -> AtomicResult { +fn handle_version_request(context: HandleGetContext) -> AtomicResult { let params = context.subject.query_pairs(); let mut commit_url = None; for (k, v) in params { @@ -42,15 +42,15 @@ fn handle_version_request(context: HandleGetContext) -> AtomicResult { }; } if commit_url.is_none() { - return version_endpoint().to_resource(context.store); + return version_endpoint().to_resource_response(context.store); } let mut resource = construct_version(&commit_url.unwrap(), context.store, context.for_agent)?; resource.set_subject(context.subject.to_string()); - Ok(resource) + Ok(ResourceResponse::Resource(resource)) } #[tracing::instrument] -fn 
handle_all_versions_request(context: HandleGetContext) -> AtomicResult<Resource> {
+fn handle_all_versions_request(context: HandleGetContext) -> AtomicResult<ResourceResponse> {
     let HandleGetContext {
         store,
         for_agent,
@@ -64,7 +64,7 @@ fn handle_all_versions_request(context: HandleGetContext) -> AtomicResult AtomicResult>>()?;
     collection.members = new_members;
-    collection.to_resource(store)
+
+    let resource_response = collection.to_resource(store)?;
+    Ok(resource_response)
 }
 /// Searches the local store for all commits with this subject, returns sorted from old to new.
diff --git a/lib/src/populate.rs b/lib/src/populate.rs
index a33f54e0d..da752e25e 100644
--- a/lib/src/populate.rs
+++ b/lib/src/populate.rs
@@ -251,19 +251,19 @@ pub fn populate_default_store(store: &impl Storelike) -> AtomicResult<()> {
         .map_err(|e| format!("Failed to import default_store.json: {e}"))?;
     store
         .import(
-            include_str!("../defaults/chatroom.json",),
+            include_str!("../defaults/chatroom.json"),
             &ParseOpts::default(),
         )
         .map_err(|e| format!("Failed to import chatroom.json: {e}"))?;
     store
         .import(
-            include_str!("../defaults/table.json",),
+            include_str!("../defaults/table.json"),
             &ParseOpts::default(),
         )
         .map_err(|e| format!("Failed to import table.json: {e}"))?;
     store
         .import(
-            include_str!("../defaults/ontologies.json",),
+            include_str!("../defaults/ontologies.json"),
             &ParseOpts::default(),
         )
         .map_err(|e| format!("Failed to import ontologies.json: {e}"))?;
@@ -290,7 +290,7 @@ pub fn populate_collections(store: &impl Storelike) -> AtomicResult<()> {
 /// Adds default Endpoints (versioning) to the Db.
 /// Makes sure they are fetchable
 pub fn populate_endpoints(store: &crate::Db) -> AtomicResult<()> {
-    let endpoints = crate::endpoints::default_endpoints();
+    let endpoints = crate::plugins::plugins::default_endpoints();
     let endpoints_collection = format!("{}/endpoints", store.get_server_url()?);
     for endpoint in endpoints {
         let mut resource = endpoint.to_resource(store)?;
diff --git a/lib/src/resources.rs b/lib/src/resources.rs
index 7a19157d5..6c2c9a24e 100644
--- a/lib/src/resources.rs
+++ b/lib/src/resources.rs
@@ -545,14 +545,15 @@ impl Resource {
         serde_json::to_string_pretty(&obj).map_err(|_| "Could not serialize to JSON-LD".into())
     }
+    pub fn to_atoms_iter(&self) -> impl Iterator<Item = Atom> + '_ {
+        self.propvals.iter().map(|(property, value)| {
+            Atom::new(self.subject.to_string(), property.clone(), value.clone())
+        })
+    }
+
     #[instrument(skip_all)]
     pub fn to_atoms(&self) -> Vec<Atom> {
-        let mut atoms: Vec<Atom> = Vec::new();
-        for (property, value) in self.propvals.iter() {
-            let atom = Atom::new(self.subject.to_string(), property.clone(), value.clone());
-            atoms.push(atom);
-        }
-        atoms
+        self.to_atoms_iter().collect()
     }
     #[instrument(skip_all)]
@@ -561,12 +562,73 @@ impl Resource {
     pub fn to_n_triples(&self, store: &impl Storelike) -> AtomicResult<String> {
         crate::serialize::atoms_to_ntriples(self.to_atoms(), store)
     }
+
+    pub fn vec_to_json_ad(resources: &Vec<Resource>) -> AtomicResult<String> {
+        let str = resources
+            .iter()
+            .map(|r| r.to_json_ad())
+            .collect::<AtomicResult<Vec<String>>>()?
+            .join(",");
+
+        Ok(format!("[{}]", str))
+    }
+
+    pub fn vec_to_json(resources: &Vec<Resource>, store: &impl Storelike) -> AtomicResult<String> {
+        let str = resources
+            .iter()
+            .map(|r| r.to_json(store))
+            .collect::<AtomicResult<Vec<String>>>()?
+            .join(",");
+
+        Ok(format!("[{}]", str))
+    }
+
+    pub fn vec_to_json_ld(
+        resources: &Vec<Resource>,
+        store: &impl Storelike,
+    ) -> AtomicResult<String> {
+        let str = resources
+            .iter()
+            .map(|r| r.to_json_ld(store))
+            .collect::<AtomicResult<Vec<String>>>()?
+            .join(",");
+
+        Ok(format!("[{}]", str))
+    }
+
+    pub fn vec_to_atoms(resources: &Vec<Resource>) -> Vec<Atom> {
+        let mut atoms = Vec::new();
+
+        for resource in resources {
+            atoms.extend(resource.to_atoms_iter());
+        }
+
+        atoms
+    }
+
+    pub fn vec_to_n_triples(
+        resources: &Vec<Resource>,
+        store: &impl Storelike,
+    ) -> AtomicResult<String> {
+        let atoms = Self::vec_to_atoms(resources);
+        crate::serialize::atoms_to_ntriples(atoms, store)
+    }
+}
+
+impl From<Resource> for crate::storelike::ResourceResponse {
+    fn from(resource: Resource) -> Self {
+        crate::storelike::ResourceResponse::Resource(resource)
+    }
+}
+
+impl From<&Resource> for crate::storelike::ResourceResponse {
+    fn from(resource: &Resource) -> Self {
+        crate::storelike::ResourceResponse::Resource(resource.clone())
+    }
 }
 #[cfg(test)]
 mod test {
-    use ntest::assert_panics;
-
     use super::*;
     use crate::{test_utils::init_store, urls};
diff --git a/lib/src/serialize.rs b/lib/src/serialize.rs
index a171482f0..62f363d63 100644
--- a/lib/src/serialize.rs
+++ b/lib/src/serialize.rs
@@ -32,16 +32,12 @@ fn val_to_serde(value: Value) -> AtomicResult<SerdeValue> {
         Value::Integer(val) => serde_json::from_str(&val.to_string()).unwrap_or_default(),
         Value::Float(val) => serde_json::from_str(&val.to_string()).unwrap_or_default(),
         Value::Markdown(val) => SerdeValue::String(val),
+        Value::Uri(val) => SerdeValue::String(val),
+        Value::JSON(val) => val,
         Value::ResourceArray(val) => {
             let mut vec: Vec<SerdeValue> = Vec::new();
             for resource in val {
                 match resource {
-                    crate::values::SubResource::Resource(r) => {
-                        vec.push(crate::serialize::propvals_to_json_ad_map(
-                            r.get_propvals(),
-                            Some(r.get_subject().clone()),
-                        )?);
-                    }
                     crate::values::SubResource::Nested(pv) => {
                         vec.push(crate::serialize::propvals_to_json_ad_map(&pv, None)?);
                     }
@@ -59,16 +55,11 @@ fn val_to_serde(value: Value) -> AtomicResult<SerdeValue> {
         Value::Boolean(val) => SerdeValue::Bool(val),
         // TODO: fix this for nested resources in json and json-ld serialization, because this will cause them to fall back to json-ad
         Value::NestedResource(res) => match res {
-            crate::values::SubResource::Resource(r) => crate::serialize::propvals_to_json_ad_map(
-                r.get_propvals(),
-                Some(r.get_subject().clone()),
-            )?,
            crate::values::SubResource::Nested(propvals) => {
                propvals_to_json_ad_map(&propvals, None)?
            }
            crate::values::SubResource::Subject(s) => SerdeValue::String(s),
        },
-        Value::Resource(_) => todo!(),
     };
     Ok(json_val)
 }
diff --git a/lib/src/store.rs b/lib/src/store.rs
index 2fb308360..206a7dd1c 100644
--- a/lib/src/store.rs
+++ b/lib/src/store.rs
@@ -189,6 +189,11 @@ impl Storelike for Store {
         if let Some(resource) = self.hashmap.lock().unwrap().get(subject) {
             return Ok(resource.clone());
         }
+
+        if let Ok(resource) = self.fetch_resource(subject, self.get_default_agent().ok().as_ref()) {
+            return Ok(resource);
+        };
+
         self.handle_not_found(
             subject,
             "Not found in HashMap.".into(),
@@ -243,7 +248,7 @@ impl Storelike for Store {
             // These nested resources are not fully calculated - they will be presented as -is
             match self.get_resource_extended(subject, true, &q.for_agent) {
                 Ok(resource) => {
-                    resources.push(resource);
+                    resources.push(resource.to_single());
                 }
                 Err(e) => match &e.error_type {
                     crate::AtomicErrorType::NotFoundError => {}
diff --git a/lib/src/storelike.rs b/lib/src/storelike.rs
index 533a82c7e..12160a509 100644
--- a/lib/src/storelike.rs
+++ b/lib/src/storelike.rs
@@ -7,6 +7,7 @@ use crate::{
     hierarchy,
     schema::{Class, Property},
     urls,
+    values::SubResource,
 };
 use crate::{errors::AtomicResult, parse::parse_json_ad_string};
 use crate::{mapping::Mapping, values::Value, Atom, Resource};
@@ -17,6 +18,109 @@ pub enum PathReturn {
     Atom(Box<Atom>),
 }
+pub enum ResourceResponse {
+    Resource(Resource),
+    ResourceWithReferenced(Resource, Vec<Resource>),
+}
+
+impl ResourceResponse {
+    /// Only take the main resource, discard any referenced resources.
+    pub fn to_single(&self) -> Resource {
+        match self {
+            ResourceResponse::Resource(resource) => resource.clone(),
+            ResourceResponse::ResourceWithReferenced(resource, _) => resource.clone(),
+        }
+    }
+
+    pub fn to_json_ad(&self) -> AtomicResult<String> {
+        match self {
+            ResourceResponse::Resource(resource) => Ok(resource.to_json_ad()?),
+            ResourceResponse::ResourceWithReferenced(resource, references) => {
+                let mut list = references.clone();
+                list.push(resource.clone());
+                Ok(Resource::vec_to_json_ad(&list)?)
+            }
+        }
+    }
+
+    pub fn to_json(&self, store: &impl Storelike) -> AtomicResult<String> {
+        match self {
+            ResourceResponse::Resource(resource) => Ok(resource.to_json(store)?),
+            ResourceResponse::ResourceWithReferenced(resource, references) => {
+                let mut list = references.clone();
+                list.push(resource.clone());
+                Ok(Resource::vec_to_json(&list, store)?)
+            }
+        }
+    }
+
+    pub fn to_json_ld(&self, store: &impl Storelike) -> AtomicResult<String> {
+        match self {
+            ResourceResponse::Resource(resource) => Ok(resource.to_json_ld(store)?),
+            ResourceResponse::ResourceWithReferenced(resource, references) => {
+                let mut list = references.clone();
+                list.push(resource.clone());
+                Ok(Resource::vec_to_json_ld(&list, store)?)
+            }
+        }
+    }
+
+    pub fn to_atoms(&self) -> Vec<Atom> {
+        match self {
+            ResourceResponse::Resource(resource) => resource.to_atoms(),
+            ResourceResponse::ResourceWithReferenced(resource, references) => {
+                let mut list = references.clone();
+                list.push(resource.clone());
+                Resource::vec_to_atoms(&list)
+            }
+        }
+    }
+
+    pub fn to_n_triples(&self, store: &impl Storelike) -> AtomicResult<String> {
+        match self {
+            ResourceResponse::Resource(resource) => Ok(resource.to_n_triples(store)?),
+            ResourceResponse::ResourceWithReferenced(resource, references) => {
+                let mut list = references.clone();
+                list.push(resource.clone());
+                Ok(Resource::vec_to_n_triples(&list, store)?)
+            }
+        }
+    }
+
+    /// Takes a vector of resources and returns a ResourceResponse::ResourceWithReferenced
+    /// If the main subject is not found it will Error
+    pub fn from_vec(main_subject: &str, vec: Vec<Resource>) -> AtomicResult<ResourceResponse> {
+        if vec.len() == 0 {
+            return Err("No resources found".into());
+        }
+        if vec.len() == 1 {
+            return Ok(ResourceResponse::Resource(vec[0].clone()));
+        }
+
+        let mut resource: Option<Resource> = None;
+        let mut referenced = Vec::new();
+
+        for r in vec {
+            if r.get_subject() == main_subject {
+                resource = Some(r);
+            } else {
+                referenced.push(r);
+            }
+        }
+
+        let Some(resource) = resource else {
+            return Err(AtomicError::not_found(format!(
+                "Resource with subject {} not found",
+                main_subject
+            )));
+        };
+
+        Ok(ResourceResponse::ResourceWithReferenced(
+            resource, referenced,
+        ))
+    }
+}
+
 pub type ResourceCollection = Vec<Resource>;
 /// Storelike provides many useful methods for interacting with an Atomic Store.
@@ -152,9 +256,23 @@ pub trait Storelike: Sized {
         subject: &str,
         client_agent: Option<&Agent>,
     ) -> AtomicResult<Resource> {
-        let resource: Resource = crate::client::fetch_resource(subject, self, client_agent)?;
-        self.add_resource_opts(&resource, true, true, true)?;
-        Ok(resource)
+        let response = crate::client::fetch_resource(subject, self, client_agent)?;
+
+        match response {
+            ResourceResponse::Resource(resource) => {
+                self.add_resource_opts(&resource, true, true, true)?;
+
+                Ok(resource)
+            }
+            ResourceResponse::ResourceWithReferenced(resource, referenced) => {
+                self.add_resource_opts(&resource, true, true, true)?;
+                for r in referenced {
+                    self.add_resource_opts(&r, true, true, true)?;
+                }
+
+                Ok(resource)
+            }
+        }
     }
 /// Performs a full-text search on the Server's /search endpoint.
@@ -168,9 +286,18 @@ pub trait Storelike: Sized {
         let subject = crate::client::search::build_search_subject(&server_url, query, opts);
         let resource = self.fetch_resource(&subject, self.get_default_agent().ok().as_ref())?;
         let results: Vec<Resource> = match resource.get(urls::ENDPOINT_RESULTS) {
-            Ok(Value::ResourceArray(vec)) => {
-                vec.iter().cloned().map(|r| r.try_into().unwrap()).collect()
-            }
+            Ok(Value::ResourceArray(vec)) => vec
+                .iter()
+                .filter_map(|s| match s {
+                    SubResource::Subject(result_subject) => {
+                        match self.get_resource(result_subject) {
+                            Ok(r) => Some(r),
+                            Err(err) => Some(err.into_resource(subject.clone())),
+                        }
+                    }
+                    SubResource::Nested(_) => None,
+                })
+                .collect(),
             _ => return Err("No 'ENDPOINT_RESULTS' in response from server.".into()),
         };
         Ok(results)
@@ -223,11 +350,11 @@ pub trait Storelike: Sized {
         subject: &str,
         skip_dynamic: bool,
         for_agent: &ForAgent,
-    ) -> AtomicResult<Resource> {
+    ) -> AtomicResult<ResourceResponse> {
         let _ignore = skip_dynamic;
         let resource = self.get_resource(subject)?;
         hierarchy::check_read(self, &resource, for_agent)?;
-        Ok(resource)
+        Ok(resource.into())
     }
 /// This function is called whenever a Commit is applied.
@@ -290,7 +417,9 @@ pub trait Storelike: Sized {
         // The URL of the next resource
         let mut subject = id_url;
         // Set the currently selectred resource parent, which starts as the root of the search
-        let mut resource = self.get_resource_extended(&subject, false, for_agent)?;
+        let mut resource = self
+            .get_resource_extended(&subject, false, for_agent)?
+            .to_single();
         // During each of the iterations of the loop, the scope changes.
         // Try using pathreturn...
         let mut current: PathReturn = PathReturn::Subject(subject.clone());
@@ -324,7 +453,9 @@ pub trait Storelike: Sized {
            ))?
.to_string(); subject = url; - resource = self.get_resource_extended(&subject, false, for_agent)?; + resource = self + .get_resource_extended(&subject, false, for_agent)? + .to_single(); current = PathReturn::Subject(subject.clone()); continue; } diff --git a/lib/src/urls.rs b/lib/src/urls.rs index 7ba27457f..cc821d733 100644 --- a/lib/src/urls.rs +++ b/lib/src/urls.rs @@ -142,6 +142,8 @@ pub const RESOURCE_ARRAY: &str = "https://atomicdata.dev/datatypes/resourceArray pub const BOOLEAN: &str = "https://atomicdata.dev/datatypes/boolean"; pub const DATE: &str = "https://atomicdata.dev/datatypes/date"; pub const TIMESTAMP: &str = "https://atomicdata.dev/datatypes/timestamp"; +pub const URI: &str = "https://atomicdata.dev/datatypes/uri"; +pub const JSON: &str = "https://atomicdata.dev/datatypes/json"; // Methods pub const INSERT: &str = "https://atomicdata.dev/methods/insert"; diff --git a/lib/src/utils.rs b/lib/src/utils.rs index d40fb1096..1c5d99e05 100644 --- a/lib/src/utils.rs +++ b/lib/src/utils.rs @@ -27,6 +27,18 @@ pub fn check_valid_url(url: &str) -> AtomicResult<()> { Ok(()) } +pub fn check_valid_uri(uri: &str) -> AtomicResult<()> { + url::Url::parse(uri).map_err(|e| format!("Invalid URI: {}. {}", uri, e))?; + Ok(()) +} + +pub fn check_valid_json(json: &str) -> AtomicResult<()> { + let _: serde_json::Value = + serde_json::from_str(json).map_err(|e| format!("Invalid JSON: {}. {}", json, e))?; + + Ok(()) +} + /// Returns the current timestamp in milliseconds since UNIX epoch pub fn now() -> i64 { std::time::SystemTime::now() diff --git a/lib/src/values.rs b/lib/src/values.rs index e177701e2..8ce897cc0 100644 --- a/lib/src/values.rs +++ b/lib/src/values.rs @@ -1,8 +1,11 @@ //! A value is the part of an Atom that contains the actual information. use crate::{ - datatype::match_datatype, datatype::DataType, errors::AtomicResult, resources::PropVals, - utils::check_valid_url, Resource, + datatype::{match_datatype, DataType}, + errors::AtomicResult, + resources::PropVals, + utils::{check_valid_uri, check_valid_url}, + Resource, }; use regex::Regex; use serde::{Deserialize, Serialize}; @@ -23,15 +26,15 @@ pub enum Value { /// Unix Epoch datetime in milliseconds Timestamp(i64), NestedResource(SubResource), - Resource(Box), Boolean(bool), + Uri(String), + JSON(serde_json::Value), Unsupported(UnsupportedValue), } /// A resource in a JSON-AD body can be any of these #[derive(Clone, Debug, Serialize, Deserialize)] pub enum SubResource { - Resource(Box), // I was considering using Resources for these, but that would involve // storing the paths in both the NestedResource as well as its parent // context, which could produce inconsistencies. 
@@ -39,18 +42,6 @@ pub enum SubResource { Subject(String), } -// try convert subresource into resource -impl TryInto for SubResource { - type Error = String; - - fn try_into(self) -> Result { - match self { - SubResource::Resource(r) => Ok(*r.clone()), - _ => Err("SubResource is not a Resource".into()), - } - } -} - /// When the Datatype of a Value is not handled by this library #[derive(Clone, Debug, Serialize, Deserialize)] pub struct UnsupportedValue { @@ -91,8 +82,9 @@ impl Value { Value::Timestamp(_) => DataType::Timestamp, // TODO: these datatypes are not the same Value::NestedResource(_) => DataType::AtomicUrl, - Value::Resource(_) => DataType::AtomicUrl, Value::Boolean(_) => DataType::Boolean, + Value::Uri(_) => DataType::Uri, + Value::JSON(_) => DataType::JSON, Value::Unsupported(s) => DataType::Unsupported(s.datatype.clone()), } } @@ -126,6 +118,14 @@ impl Value { check_valid_url(value)?; Ok(Value::AtomicUrl(value.into())) } + DataType::Uri => { + check_valid_uri(value)?; + Ok(Value::Uri(value.into())) + } + DataType::JSON => { + let json: serde_json::Value = serde_json::from_str(value)?; + Ok(Value::JSON(json)) + } DataType::ResourceArray => { let vector: Vec = crate::parse::parse_json_array(value).map_err(|e| { format!("Could not deserialize ResourceArray: {}. Should be a JSON array of strings. {}", &value, e) @@ -185,7 +185,6 @@ impl Value { arr.iter() .enumerate() .for_each(|(i, r)| match r.to_owned() { - SubResource::Resource(e) => vec.push(e.get_subject().into()), SubResource::Nested(_e) => { let path_base = if let Some(p) = &parent_path { p.to_string() @@ -206,10 +205,6 @@ impl Value { // TODO: change the data model of nested resources to store the subject of the parent, so we can construct a path Err("Can't convert nested resources to subjects.".into()) } - Value::Resource(r) => { - vec.push(r.get_subject().into()); - Ok(vec) - } other => Err(format!("Value {} is not a Resource Array, but {}", self, other).into()), } } @@ -255,7 +250,6 @@ impl Value { Value::ResourceArray(_v) => self.to_subjects(None).unwrap_or_else(|_| vec![]), Value::AtomicUrl(v) => vec![v.into()], // TODO We don't index nested resources for now - Value::Resource(_r) => return None, Value::NestedResource(_r) => return None, // This might result in unnecessarily long strings, sometimes. We may want to shorten them later. 
val => vec![val.to_string()], @@ -318,7 +312,6 @@ impl From> for Value { impl From for Value { fn from(val: SubResource) -> Self { match val { - SubResource::Resource(r) => r.into(), SubResource::Nested(n) => n.into(), SubResource::Subject(s) => s.into(), } @@ -343,28 +336,6 @@ impl From for Value { } } -impl From for Value { - fn from(val: Resource) -> Self { - Value::Resource(Box::new(val)) - } -} - -impl From> for Value { - fn from(val: Box) -> Self { - Value::Resource((*val).into()) - } -} - -impl From> for Value { - fn from(val: Vec) -> Self { - let mut vec = Vec::new(); - for i in val { - vec.push(SubResource::Resource(Box::new(i))); - } - Value::ResourceArray(vec) - } -} - use std::fmt; impl fmt::Display for Value { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { @@ -385,14 +356,10 @@ impl fmt::Display for Value { Value::Slug(s) => write!(f, "{}", s), Value::String(s) => write!(f, "{}", s), Value::Timestamp(i) => write!(f, "{}", i), - Value::Resource(r) => write!( - f, - "{}", - r.to_json_ad() - .unwrap_or_else(|_e| format!("Could not serialize resource: {:?}", r)) - ), Value::NestedResource(n) => write!(f, "{:?}", n), Value::Boolean(b) => write!(f, "{}", b), + Value::Uri(s) => write!(f, "{}", s), + Value::JSON(s) => write!(f, "{}", s), Value::Unsupported(u) => write!(f, "{}", u.value), } } @@ -403,12 +370,6 @@ impl fmt::Display for SubResource { let mut s: String = String::new(); match self { - SubResource::Resource(r) => { - s.push_str( - &r.to_json_ad() - .unwrap_or_else(|_e| format!("Could not serialize resource: {:?}", r)), - ); - } SubResource::Nested(pv) => { let serialized = crate::serialize::propvals_to_json_ad_map(pv, None) .unwrap_or_else(|_e| { @@ -442,7 +403,7 @@ impl From for SubResource { impl From for SubResource { fn from(val: Resource) -> Self { - SubResource::Resource(Box::new(val)) + SubResource::Subject(val.get_subject().into()) } } @@ -460,6 +421,16 @@ mod test { assert!(date.to_string() == "1200-02-02"); let float = Value::new("1.123123", &DataType::Float).unwrap(); assert!(float.to_string() == "1.123123"); + let uri = Value::new("ldap://[2001:db8::7]/c=GB?objectClass?one", &DataType::Uri).unwrap(); + assert!(uri.to_string() == "ldap://[2001:db8::7]/c=GB?objectClass?one"); + + let json = Value::new("{\"foo\": \"bar\", \"baz\": 123}", &DataType::JSON).unwrap(); + // Note: JSON serialization switches the order of the keys. 
+ assert!( + json.to_string() == "{\"baz\":123,\"foo\":\"bar\"}" + || json.to_string() == "{\"foo\":\"bar\",\"baz\":123}" + ); + let converted = Value::from(8); assert!(converted.to_string() == "8"); } @@ -472,6 +443,12 @@ mod test { Value::new("120-02-02", &DataType::Date).unwrap_err(); Value::new("12000-02-02", &DataType::Date).unwrap_err(); Value::new("a", &DataType::Float).unwrap_err(); + Value::new("blabliebla", &DataType::Uri).unwrap_err(); + Value::new( + "{\"foo\": \"bar\", \"trailing comma\": 123,}", + &DataType::JSON, + ) + .unwrap_err(); } #[test] diff --git a/server/build.rs b/server/build.rs index a3d3234b8..9ba065a51 100644 --- a/server/build.rs +++ b/server/build.rs @@ -20,6 +20,15 @@ struct Dirs { fn main() -> std::io::Result<()> { // Uncomment this line if you want faster builds during development // return Ok(()); + + // If the ATOMICSERVER_SKIP_JS_BUILD environment variable is set, skip the JS build + if let Ok(env_skip) = std::env::var("ATOMICSERVER_SKIP_JS_BUILD") { + if env_skip == "true" { + p!("ATOMICSERVER_SKIP_JS_BUILD is set, skipping JS build."); + return Ok(()); + } + } + const BROWSER_ROOT: &str = "../browser/"; let dirs: Dirs = { Dirs { diff --git a/server/src/appstate.rs b/server/src/appstate.rs index 6105eac4f..25795e7b7 100644 --- a/server/src/appstate.rs +++ b/server/src/appstate.rs @@ -3,8 +3,9 @@ use crate::{ commit_monitor::CommitMonitor, config::Config, errors::AtomicServerResult, search::SearchState, }; use atomic_lib::{ - agents::{generate_public_key, Agent}, + agents::Agent, commit::CommitResponse, + config::{ClientConfig, SharedConfig}, Storelike, }; @@ -121,25 +122,26 @@ impl Drop for AppState { fn set_default_agent(config: &Config, store: &impl Storelike) -> AtomicServerResult<()> { tracing::info!("Setting default agent"); - let ag_cfg: atomic_lib::config::Config = match atomic_lib::config::read_config(Some( - &config.config_file_path, - )) { + let agent = match atomic_lib::config::read_config(Some(&config.config_file_path)) { Ok(agent_config) => { - match store.get_resource(&agent_config.agent) { - Ok(_) => agent_config, + let agent = Agent::from_secret(&agent_config.shared.agent_secret)?; + match store.get_resource(&agent.subject) { + Ok(_) => agent, Err(e) => { - if agent_config.agent.contains(&config.server_url) { + if agent.subject.contains(&config.server_url) { // If there is an agent in the config, but not in the store, // That probably means that the DB has been erased and only the config file exists. // This means that the Agent from the Config file should be recreated, using its private key. tracing::info!("Agent not retrievable, but config was found. Recreating Agent in new store."); + let recreated_agent = Agent::new_from_private_key( "server".into(), store, - &agent_config.private_key, + &agent.private_key.ok_or("No private key found")?, )?; store.add_resource(&recreated_agent.to_resource()?)?; - agent_config + + recreated_agent } else { return Err(format!( "An agent is present in {:?}, but this agent cannot be retrieved. Either make sure the agent is retrievable, or remove it from your config. {}", @@ -152,26 +154,23 @@ fn set_default_agent(config: &Config, store: &impl Storelike) -> AtomicServerRes Err(_no_config) => { let agent = store.create_agent(Some("server"))?; let cfg = atomic_lib::config::Config { - agent: agent.subject.clone(), - server: config.server_url.clone(), - private_key: agent - .private_key - .expect("No private key for agent. 
Check the config file."), + shared: SharedConfig { + agent_secret: agent.build_secret()?, + }, + client: Some(ClientConfig { + server_url: config.server_url.clone(), + }), }; - let config_string = - atomic_lib::config::write_config(&config.config_file_path, cfg.clone())?; + + cfg.save(&config.config_file_path)?; + + let config_string = cfg.to_string()?; tracing::warn!("No existing config found, created a new Config at {:?}. Copy this to your client machine (running atomic-cli or atomic-data-browser) to log in with these credentials. \n{}", &config.config_file_path, config_string); - cfg + + agent } }; - let agent = Agent { - subject: ag_cfg.agent.clone(), - private_key: Some(ag_cfg.private_key.clone()), - public_key: generate_public_key(&ag_cfg.private_key).public, - created_at: 0, - name: None, - }; tracing::info!("Default Agent is set: {}", &agent.subject); store.set_default_agent(agent); Ok(()) diff --git a/server/src/handlers/download.rs b/server/src/handlers/download.rs index 24cf50bdb..7f9fd49f1 100644 --- a/server/src/handlers/download.rs +++ b/server/src/handlers/download.rs @@ -38,7 +38,11 @@ pub async fn handle_download( let for_agent = get_client_agent(headers, &appstate, subject.clone())?; tracing::info!("handle_download: {}", subject); - let resource = store.get_resource_extended(&subject, false, &for_agent)?; + + let resource = store + .get_resource_extended(&subject, false, &for_agent)? + .to_single(); + download_file_handler_partial(&resource, &req, ¶ms, &appstate) } diff --git a/server/src/handlers/export.rs b/server/src/handlers/export.rs index dc9449f79..5662fa86c 100644 --- a/server/src/handlers/export.rs +++ b/server/src/handlers/export.rs @@ -77,7 +77,8 @@ impl<'a> CSVExporter<'a> { println!("Exporting resource to CSV: {}", subject); let resource = self .store - .get_resource_extended(subject, false, self.agent)?; + .get_resource_extended(subject, false, self.agent)? + .to_single(); let binding = resource.get_classes(self.store)?; @@ -109,14 +110,14 @@ impl<'a> CSVExporter<'a> { Value::AtomicUrl(subject) => self .store .get_resource_extended(subject, false, self.agent)? + .to_single() .get_propvals() .clone(), - Value::Resource(resource) => resource.get_propvals().clone(), Value::NestedResource(nested) => match nested { - SubResource::Resource(resource) => resource.get_propvals().clone(), SubResource::Subject(subject) => self .store .get_resource_extended(subject, false, self.agent)? + .to_single() .get_propvals() .clone(), SubResource::Nested(props) => props.clone(), @@ -139,9 +140,6 @@ impl<'a> CSVExporter<'a> { let mut order = vec![]; for value in requires.iter().chain(recommends.iter()) { match value { - SubResource::Resource(resource) => { - order.push(resource.get_subject().clone()); - } SubResource::Subject(subject) => { order.push(subject.clone()); } @@ -210,12 +208,16 @@ impl<'a> CSVExporter<'a> { fn create_csv_header_from_props(&self, props: &[String]) -> AtomicResult { let mut header = "subject".to_string(); for prop in props.iter() { - let name: String = - if let Ok(resource) = self.store.get_resource_extended(prop, true, self.agent) { - resource.get(urls::SHORTNAME)?.to_string() - } else { - prop.to_string() - }; + let name: String = if let Ok(resource_response) = + self.store.get_resource_extended(prop, true, self.agent) + { + resource_response + .to_single() + .get(urls::SHORTNAME)? 
+ .to_string() + } else { + prop.to_string() + }; header.push_str(&format!(",{}", name)); } @@ -243,10 +245,6 @@ impl<'a> CSVExporter<'a> { .iter() .map(|v| match v { SubResource::Subject(subject) => self.get_name_from_subject(subject), - SubResource::Resource(resource) => self.get_name_from_propvals( - resource.get_propvals(), - resource.get_subject().clone(), - ), SubResource::Nested(nested) => { self.get_name_from_propvals(nested, "".to_string()) } @@ -255,19 +253,19 @@ impl<'a> CSVExporter<'a> { names.join(", ") } - Value::Resource(resource) => { - self.get_name_from_propvals(resource.get_propvals(), resource.get_subject().clone()) - } Value::AtomicUrl(subject) => self.get_name_from_subject(subject), _ => value.to_string(), } } fn get_name_from_subject(&self, subject: &str) -> String { - let Ok(resource) = self.store.get_resource_extended(subject, true, self.agent) else { + let Ok(resource_response) = self.store.get_resource_extended(subject, true, self.agent) + else { return subject.to_string(); }; + let resource = resource_response.to_single(); + self.get_name_from_propvals(resource.get_propvals(), resource.get_subject().clone()) } diff --git a/server/src/handlers/search.rs b/server/src/handlers/search.rs index b314f7182..d4681faaa 100644 --- a/server/src/handlers/search.rs +++ b/server/src/handlers/search.rs @@ -96,25 +96,29 @@ pub async fn search_query( // Get all resources returned by the search, this also performs authorization checks! let resources = get_resources(req, &appstate, &subject, subjects.clone(), limit)?; - if params.include.unwrap_or(false) { - results_resource.set(urls::ENDPOINT_RESULTS.into(), resources.into(), store)?; + // Convert the list of resources back into subjects. + let filtered_subjects: Vec = + resources.iter().map(|r| r.get_subject().clone()).collect(); + + results_resource.set( + urls::ENDPOINT_RESULTS.into(), + filtered_subjects.into(), + store, + )?; + + let mut result_vec: Vec = if params.include.unwrap_or(false) { + resources } else { - // Convert the list of resources back into subjects. 
-        let filtered_subjects: Vec<String> =
-            resources.iter().map(|r| r.get_subject().clone()).collect();
-
-        results_resource.set(
-            urls::ENDPOINT_RESULTS.into(),
-            filtered_subjects.into(),
-            store,
-        )?;
-    }
+        vec![]
+    };
+
+    result_vec.push(results_resource);
     let mut builder = HttpResponse::Ok();
     builder.append_header(("Server-Timing", timer.header_value()));
     // TODO: support other serialization options
-    Ok(builder.body(results_resource.to_json_ad()?))
+    Ok(builder.body(Resource::vec_to_json_ad(&result_vec)?))
 }
 #[derive(Debug, std::hash::Hash, Eq, PartialEq)]
@@ -144,7 +148,7 @@ fn get_resources(
             match appstate.store.get_resource_extended(&s, true, &for_agent) {
                 Ok(r) => {
                     if resources.len() < limit {
-                        resources.push(r);
+                        resources.push(r.to_single());
                     } else {
                         break;
                     }
diff --git a/server/src/handlers/single_page_app.rs b/server/src/handlers/single_page_app.rs
index 6b8cbba0f..0e7fe8329 100644
--- a/server/src/handlers/single_page_app.rs
+++ b/server/src/handlers/single_page_app.rs
@@ -12,12 +12,12 @@ pub async fn single_page(
 ) -> AtomicServerResult<HttpResponse> {
     let template = include_str!("../../assets_tmp/index.html");
     let subject = format!("{}/{}", appstate.store.get_server_url()?, path);
-    let meta_tags: MetaTags = if let Ok(resource) =
+    let meta_tags: MetaTags = if let Ok(resource_response) =
         appstate
             .store
             .get_resource_extended(&subject, true, &ForAgent::Public)
     {
-        resource.into()
+        resource_response.into()
     } else {
         MetaTags::default()
     };
@@ -41,6 +41,7 @@ pub async fn single_page(
 }
 use atomic_lib::agents::ForAgent;
+use atomic_lib::storelike::ResourceResponse;
 use atomic_lib::urls;
 use atomic_lib::Resource;
 use atomic_lib::Storelike;
@@ -53,6 +54,28 @@ struct MetaTags {
     json: Option<String>,
 }
+impl From<ResourceResponse> for MetaTags {
+    fn from(rr: ResourceResponse) -> Self {
+        match rr {
+            ResourceResponse::Resource(r) => r.into(),
+            ResourceResponse::ResourceWithReferenced(ref resource, _) => {
+                let mut tags: MetaTags = resource.clone().into();
+
+                let json = if let Ok(serialized) = rr.to_json_ad() {
+                    // TODO: also fetch the parents for extra fast first renders.
+                    Some(serialized)
+                } else {
+                    None
+                };
+
+                tags.json = json;
+
+                tags
+            }
+        }
+    }
+}
+
 impl From<Resource> for MetaTags {
     fn from(r: Resource) -> Self {
         let description = if let Ok(d) = r.get(urls::DESCRIPTION) {