| import 'react/jsx-runtime'; |
| import { i as getEntryDataFilepath, j as getValueAtPropPath, k as getCollectionFormat, c as getCollectionPath, o as object, l as getSlugGlobForCollection, n as getDataFileExtension, e as getSingletonFormat, h as getSingletonPath, p as getCollectionItemPath } from './index-49bb61a8.react-server.js'; |
| import { assertNever } from 'emery'; |
| import '@braintree/sanitize-url'; |
| import { F as FieldDataError } from './index-e8fd44a1.react-server.js'; |
| import { load } from 'js-yaml'; |
| import * as React from 'react'; |
|
|
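// Builds the default value for a field schema: form fields use their own
// `defaultValue()`, block child fields start as a single empty paragraph,
// conditional fields recurse into the branch selected by the default
// discriminant, objects recurse into each field, and arrays start empty.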
function getInitialPropsValue(schema) {
| switch (schema.kind) { |
| case 'form': |
| return schema.defaultValue(); |
| case 'child': |
| return schema.options.kind === 'block' ? [{ |
| type: 'paragraph', |
| children: [{ |
| text: '' |
| }] |
| }] : null; |
| case 'conditional': |
| { |
| const defaultValue = schema.discriminant.defaultValue(); |
| return { |
| discriminant: defaultValue, |
| value: getInitialPropsValue(schema.values[defaultValue.toString()]) |
| }; |
| } |
| case 'object': |
| { |
| const obj = {}; |
| for (const key of Object.keys(schema.fields)) { |
| obj[key] = getInitialPropsValue(schema.fields[key]); |
| } |
| return obj; |
| } |
| case 'array': |
| { |
| return []; |
| } |
| } |
| assertNever(schema); |
| } |
|
|
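// Checks an array value against the optional `validation.length.min`/`.max`
// constraints on the schema and returns (rather than throws) a
// PropValidationError when a bound is violated.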
| function validateArrayLength(schema, val, path) { |
if (schema.validation?.length?.min !== undefined && val.length < schema.validation.length.min) {
return new PropValidationError(new FieldDataError(`Must have at least ${schema.validation.length.min} element${schema.validation.length.min === 1 ? '' : 's'}`), path, schema);
}
if (schema.validation?.length?.max !== undefined && val.length > schema.validation.length.max) {
return new PropValidationError(new FieldDataError(`Must have at most ${schema.validation.length.max} element${schema.validation.length.max === 1 ? '' : 's'}`), path, schema);
| } |
| } |
|
|
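// Wraps a field-level error (usually a FieldDataError) with the path to the
// offending field and the schema it failed against.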
| class PropValidationError extends Error { |
| constructor(cause, path, schema) { |
| super(`field error at ${path.join('.')}`, { |
| cause |
| }); |
| this.path = path; |
| this.schema = schema; |
| this.cause = cause; |
| } |
| } |
| function toFormFieldStoredValue(val) { |
| if (val === null) { |
| return undefined; |
| } |
| return val; |
| } |
| const isArray = Array.isArray; |
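// Recursively validates a stored value against a field schema. Form fields are
// delegated to `parseFormField`, child fields always parse to `null`, and
// conditional/object/array fields are walked structurally. `pathWithArrayFieldSlugs`
// mirrors `path` but substitutes the configured slug field's serialized slug for the
// array index (when a slugField is set), so content file paths can be derived from it.
// Errors from sibling fields are collected and rethrown as an AggregateError.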
| function parseProps(schema, _value, path, pathWithArrayFieldSlugs, parseFormField, /** This should be true for the reader and false elsewhere */ |
| validateArrayFieldLength) { |
| let value = toFormFieldStoredValue(_value); |
| if (schema.kind === 'form') { |
| try { |
| return parseFormField(schema, value, path, pathWithArrayFieldSlugs); |
| } catch (err) { |
| throw new PropValidationError(err, path, schema); |
| } |
| } |
| if (schema.kind === 'child') { |
| return null; |
| } |
| if (schema.kind === 'conditional') { |
| if (value === undefined) { |
| return getInitialPropsValue(schema); |
| } |
| try { |
| if (typeof value !== 'object' || value === null || isArray(value) || value instanceof Date) { |
| throw new FieldDataError('Must be an object'); |
| } |
| for (const key of Object.keys(value)) { |
| if (key !== 'discriminant' && key !== 'value') { |
| throw new FieldDataError(`Must only contain keys "discriminant" and "value", not "${key}"`); |
| } |
| } |
| } catch (err) { |
| throw new PropValidationError(err, path, schema); |
| } |
| const parsedDiscriminant = parseProps(schema.discriminant, value.discriminant, path.concat('discriminant'), pathWithArrayFieldSlugs.concat('discriminant'), parseFormField, validateArrayFieldLength); |
| return { |
| discriminant: parsedDiscriminant, |
| value: parseProps(schema.values[parsedDiscriminant], value.value, path.concat('value'), pathWithArrayFieldSlugs.concat('value'), parseFormField, validateArrayFieldLength) |
| }; |
| } |
| if (schema.kind === 'object') { |
| if (value === undefined) { |
| value = {}; |
| } |
| try { |
| if (typeof value !== 'object' || value === null || isArray(value) || value instanceof Date) { |
| throw new FieldDataError('Must be an object'); |
| } |
| const allowedKeysSet = new Set(Object.keys(schema.fields)); |
| for (const key of Object.keys(value)) { |
| if (!allowedKeysSet.has(key)) { |
| throw new FieldDataError(`Key on object value "${key}" is not allowed`); |
| } |
| } |
| } catch (err) { |
| throw new PropValidationError(err, path, schema); |
| } |
| const val = {}; |
| const errors = []; |
| for (const key of Object.keys(schema.fields)) { |
| let individualVal = value[key]; |
| try { |
| const propVal = parseProps(schema.fields[key], individualVal, path.concat(key), pathWithArrayFieldSlugs.concat(key), parseFormField, validateArrayFieldLength); |
| val[key] = propVal; |
| } catch (err) { |
| errors.push(err); |
| } |
| } |
| if (errors.length) { |
| throw new AggregateError(errors); |
| } |
| return val; |
| } |
| if (schema.kind === 'array') { |
| if (value === undefined) { |
| return []; |
| } |
| try { |
| if (!isArray(value)) { |
| throw new FieldDataError('Must be an array'); |
| } |
| } catch (err) { |
| throw new PropValidationError(err, path, schema); |
| } |
| const errors = []; |
| try { |
| if (validateArrayFieldLength) { |
| const error = validateArrayLength(schema, value, path); |
| if (error !== undefined) { |
| errors.push(error); |
| } |
| } |
| return value.map((innerVal, i) => { |
| try { |
| let slug = i.toString(); |
| if (schema.slugField && typeof innerVal === 'object' && innerVal !== null && !isArray(innerVal) && !(innerVal instanceof Date)) { |
| if (schema.element.kind !== 'object') { |
throw new Error('slugField on array fields requires an object field element');
| } |
| const slugField = schema.element.fields[schema.slugField]; |
| if (!slugField) { |
| throw new Error(`slugField "${schema.slugField}" does not exist on object field`); |
| } |
| if (slugField.kind !== 'form') { |
| throw new Error(`slugField "${schema.slugField}" is not a form field`); |
| } |
| if (slugField.formKind !== 'slug') { |
| throw new Error(`slugField "${schema.slugField}" is not a slug field`); |
| } |
| let parsedSlugFieldValue; |
| try { |
| parsedSlugFieldValue = slugField.parse(toFormFieldStoredValue(innerVal[schema.slugField]), undefined); |
| } catch (err) { |
| throw new AggregateError([err]); |
| } |
| slug = slugField.serializeWithSlug(parsedSlugFieldValue).slug; |
| } |
| return parseProps(schema.element, innerVal, path.concat(i), pathWithArrayFieldSlugs.concat(slug), parseFormField, validateArrayFieldLength); |
| } catch (err) { |
| errors.push(err); |
| } |
| }); |
| } finally { |
| if (errors.length) { |
| throw new AggregateError(errors); |
| } |
| } |
| } |
| assertNever(schema); |
| } |
|
|
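// Splits a raw data file into its frontmatter (the text between the leading
// `---` fences) and the remaining content bytes, returning null when the input
// has no frontmatter block. A small sketch of the shape it returns:
//   splitFrontmatter(textEncoder.encode('---\ntitle: Hi\n---\nbody'))
//   // => { frontmatter: 'title: Hi', content: Uint8Array of 'body' }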
| const textDecoder = new TextDecoder(); |
| const textEncoder = new TextEncoder(); |
| function splitFrontmatter(data) { |
| const str = textDecoder.decode(data); |
| const match = str.match(/^---(?:\r?\n([^]*?))?\r?\n---\r?\n?/); |
| if (match) { |
const encoded = textEncoder.encode(match[0]);
return {
frontmatter: match[1] ?? '',
| content: data.slice(encoded.byteLength) |
| }; |
| } |
| return null; |
| } |
| function loadDataFile(data, formatInfo, requireFrontmatter = false) { |
| const parse = formatInfo.data === 'json' ? JSON.parse : load; |
| if (!formatInfo.contentField) { |
| const dataFile = textDecoder.decode(data); |
| return { |
| loaded: parse(dataFile) |
| }; |
| } |
| const res = splitFrontmatter(data); |
| if (requireFrontmatter && !res) { |
| throw new Error('Frontmatter not found'); |
| } |
| return { |
| loaded: res === null ? {} : parse(res.frontmatter), |
| extraFakeFile: { |
| path: `${formatInfo.contentField.path.join('/')}${formatInfo.contentField.contentExtension}`, |
| contents: res === null ? data : res.content |
| } |
| }; |
| } |
|
|
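// Identity fallback used when `react.cache` is unavailable.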
| function cache$1(func) { |
| return func; |
| } |
|
|
// we use `react.cache` conditionally since it's not in stable React releases yet
// (though the fallback should be unnecessary since this file is only imported in react-server environments anyway)
// this is a function because some tools try to be smart with accessing things on namespace imports
// and error at build time if you try to read an export that doesn't exist on a namespace object
function getCache(react) {
return react.cache ?? cache$1;
}
| const cache = getCache(React); |
|
|
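// Flattens (possibly nested) AggregateErrors into a flat list of underlying errors.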
| function flattenErrors(error) { |
| if (error instanceof AggregateError) { |
| return error.errors.flatMap(flattenErrors); |
| } |
| return [error]; |
| } |
| function formatFormDataError(error) { |
| const flatErrors = flattenErrors(error); |
| return flatErrors.map(error => { |
| if (error instanceof PropValidationError) { |
| const path = error.path.join('.'); |
| return `${path}: ${error.cause instanceof FieldDataError ? error.cause.message : `Unexpected error: ${error.cause}`}`; |
| } |
| return `Unexpected error: ${error}`; |
| }).join('\n'); |
| } |
|
|
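// Recursively walks `parent` (which is passed with a trailing slash), returning
// each dirent along with its full name (the `parent` prefix plus the dirent name).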
| async function getAllEntries(parent, fsReader) { |
| return (await Promise.all((await fsReader.readdir(parent)).map(async dirent => { |
| const name = `${parent}${dirent.name}`; |
| const entry = { |
| entry: dirent, |
| name |
| }; |
| if (dirent.kind === 'directory') { |
| return [entry, ...(await getAllEntries(`${name}/`, fsReader))]; |
| } |
| return entry; |
| }))).flat(); |
| } |
| const listCollection = cache(async function listCollection(collectionPath, glob, formatInfo, extension, fsReader) { |
| const entries = glob === '*' ? (await fsReader.readdir(collectionPath)).map(entry => ({ |
| entry, |
| name: entry.name |
| })) : (await getAllEntries(`${collectionPath}/`, fsReader)).map(x => ({ |
| entry: x.entry, |
| name: x.name.slice(collectionPath.length + 1) |
| })); |
| return (await Promise.all(entries.map(async x => { |
| if (formatInfo.dataLocation === 'index') { |
| if (x.entry.kind !== 'directory') return []; |
| if (!(await fsReader.fileExists(getEntryDataFilepath(`${collectionPath}/${x.name}`, formatInfo)))) { |
| return []; |
| } |
| return [x.name]; |
| } else { |
| if (x.entry.kind !== 'file' || !x.name.endsWith(extension)) { |
| return []; |
| } |
| return [x.name.slice(0, -extension.length)]; |
| } |
| }))).flat(); |
| }); |
| function collectionReader(collection, config, fsReader) { |
| const formatInfo = getCollectionFormat(config, collection); |
| const collectionPath = getCollectionPath(config, collection); |
| const collectionConfig = config.collections[collection]; |
| const schema = object(collectionConfig.schema); |
| const glob = getSlugGlobForCollection(config, collection); |
| const extension = getDataFileExtension(formatInfo); |
| const read = (slug, ...args) => { |
return readItem(schema, formatInfo, getCollectionItemPath(config, collection, slug), args[0]?.resolveLinkedFiles, `"${slug}" in collection "${collection}"`, fsReader, slug, collectionConfig.slugField, glob);
| }; |
| const list = () => listCollection(collectionPath, glob, formatInfo, extension, fsReader); |
| return { |
| read, |
| readOrThrow: async (...args) => { |
| const entry = await read(...args); |
| if (entry === null) { |
| throw new Error(`Entry "${args[0]}" not found in collection "${collection}"`); |
| } |
| return entry; |
| }, |
| // TODO: this could drop the fs.stat call that list does for each item |
| // since we just immediately read it |
| all: async (...args) => { |
| const slugs = await list(); |
| return (await Promise.all(slugs.map(async slug => { |
| const entry = await read(slug, args[0]); |
| if (entry === null) return []; |
| return [{ |
| slug, |
| entry |
| }]; |
| }))).flat(); |
| }, |
| list |
| }; |
| } |
| const readItem = cache(async function readItem(rootSchema, formatInfo, itemDir, resolveLinkedFiles, debugReference, fsReader, ...slugInfo) { |
| if (typeof slugInfo[0] === 'string') { |
| if (slugInfo[0].includes('\\')) return null; |
| const split = slugInfo[0].split('/'); |
| if (slugInfo[2] === '*' && split.length !== 1) return null; |
| if (split.includes('..') || split.includes('.')) return null; |
| } |
| const dataFile = await fsReader.readFile(getEntryDataFilepath(itemDir, formatInfo)); |
| if (dataFile === null) return null; |
| const { |
| loaded, |
| extraFakeFile |
| } = loadDataFile(dataFile, formatInfo); |
| const contentFieldPathsToEagerlyResolve = resolveLinkedFiles ? [] : undefined; |
| let validated; |
| try { |
| validated = parseProps(rootSchema, loaded, [], [], (schema, value, path, pathWithArrayFieldSlugs) => { |
| if (schema.formKind === 'asset' || schema.formKind === 'assets') { |
| return schema.reader.parse(value); |
| } |
| if (schema.formKind === 'content') { |
contentFieldPathsToEagerlyResolve?.push(path);
| return async () => { |
| let content; |
| const filename = pathWithArrayFieldSlugs.join('/') + schema.contentExtension; |
if (filename === extraFakeFile?.path) {
| content = extraFakeFile.contents; |
| } else { |
content = (await fsReader.readFile(`${itemDir}/${filename}`)) ?? undefined;
| } |
| return schema.reader.parse(value, { |
| content |
| }); |
| }; |
| } |
| if (path.length === 1 && slugInfo[0] !== undefined) { |
| const [slug, slugField, glob] = slugInfo; |
| if (path[0] === slugField) { |
| if (schema.formKind !== 'slug') { |
| throw new Error(`Slug field ${slugInfo[1]} is not a slug field`); |
| } |
| return schema.reader.parseWithSlug(value, { |
| slug, |
| glob |
| }); |
| } |
| } |
| return schema.reader.parse(value); |
| }, true); |
if (contentFieldPathsToEagerlyResolve?.length) {
| await Promise.all(contentFieldPathsToEagerlyResolve.map(async path => { |
| const parentValue = getValueAtPropPath(validated, path.slice(0, -1)); |
| const keyOnParent = path[path.length - 1]; |
| const originalValue = parentValue[keyOnParent]; |
| parentValue[keyOnParent] = await originalValue(); |
| })); |
| } |
| } catch (err) { |
| const formatted = formatFormDataError(err); |
| throw new Error(`Invalid data for ${debugReference}:\n${formatted}`); |
| } |
| return validated; |
| }); |
| function singletonReader(singleton, config, fsReader) { |
| const formatInfo = getSingletonFormat(config, singleton); |
| const singletonPath = getSingletonPath(config, singleton); |
| const schema = object(config.singletons[singleton].schema); |
| const read = (...args) => { |
return readItem(schema, formatInfo, singletonPath, args[0]?.resolveLinkedFiles, `singleton "${singleton}"`, fsReader, undefined);
| }; |
| return { |
| read, |
| readOrThrow: async (...opts) => { |
| const entry = await read(...opts); |
| if (entry === null) { |
| throw new Error(`Singleton "${singleton}" not found`); |
| } |
| return entry; |
| } |
| }; |
| } |
|
|
| export { cache as a, collectionReader as c, singletonReader as s }; |