From a305c7ce3f66e62d7d4b5ea5c56bf0971632290c Mon Sep 17 00:00:00 2001 From: Thierry Bela Nanga Date: Tue, 22 Apr 2025 23:05:55 -0400 Subject: [PATCH 1/5] fix type checking #75 --- CHANGELOG.md | 2 +- README.md | 8 +- dist/index-umd-web.js | 462 ++++++++++++------ dist/index.cjs | 462 ++++++++++++------ dist/index.d.ts | 2 +- dist/lib/ast/features/prefix.js | 4 +- dist/lib/ast/features/transform.js | 10 +- dist/lib/ast/math/math.js | 1 + dist/lib/ast/transform/matrix.js | 18 +- dist/lib/ast/transform/minify.js | 10 +- dist/lib/ast/transform/utils.js | 2 +- dist/lib/parser/parse.js | 8 +- dist/lib/renderer/color/color.js | 108 +++- dist/lib/renderer/color/colormix.js | 3 + dist/lib/renderer/color/hex.js | 24 +- dist/lib/renderer/color/hsl.js | 9 +- dist/lib/renderer/color/lab.js | 3 + dist/lib/renderer/color/lch.js | 5 +- dist/lib/renderer/color/oklab.js | 5 +- dist/lib/renderer/color/oklch.js | 3 + dist/lib/renderer/color/relativecolor.js | 4 + dist/lib/renderer/color/rgb.js | 17 +- dist/lib/renderer/color/srgb.js | 31 +- dist/lib/renderer/color/utils/components.js | 15 +- dist/lib/renderer/render.js | 52 +- dist/lib/syntax/syntax.js | 120 ++--- dist/lib/validation/at-rules/container.js | 12 +- dist/lib/validation/at-rules/document.js | 2 +- dist/lib/validation/at-rules/media.js | 1 - src/@types/ast.d.ts | 4 +- src/@types/index.d.ts | 2 +- src/@types/shorthand.d.ts | 2 +- src/@types/token.d.ts | 2 +- src/@types/visitor.d.ts | 4 +- src/lib/ast/expand.ts | 24 +- src/lib/ast/features/calc.ts | 2 +- src/lib/ast/features/inlinecssvariables.ts | 2 +- src/lib/ast/features/prefix.ts | 27 +- src/lib/ast/features/transform.ts | 25 +- src/lib/ast/math/expression.ts | 2 +- src/lib/ast/math/math.ts | 5 +- src/lib/ast/minify.ts | 16 +- src/lib/ast/transform/compute.ts | 14 +- src/lib/ast/transform/convert.ts | 16 +- src/lib/ast/transform/matrix.ts | 26 +- src/lib/ast/transform/minify.ts | 18 +- src/lib/ast/transform/perspective.ts | 2 +- src/lib/ast/transform/translate.ts | 2 +- src/lib/ast/transform/utils.ts | 4 +- src/lib/parser/declaration/list.ts | 2 +- src/lib/parser/declaration/map.ts | 4 +- src/lib/parser/declaration/set.ts | 3 +- src/lib/parser/parse.ts | 78 ++- src/lib/parser/tokenize.ts | 4 +- src/lib/parser/utils/config.ts | 2 +- src/lib/parser/utils/type.ts | 4 +- src/lib/renderer/color/a98rgb.ts | 6 +- src/lib/renderer/color/color.ts | 213 ++++++-- src/lib/renderer/color/colormix.ts | 55 ++- src/lib/renderer/color/hex.ts | 47 +- src/lib/renderer/color/hsl.ts | 35 +- src/lib/renderer/color/hwb.ts | 14 +- src/lib/renderer/color/lab.ts | 25 +- src/lib/renderer/color/lch.ts | 19 +- src/lib/renderer/color/oklab.ts | 32 +- src/lib/renderer/color/oklch.ts | 19 +- src/lib/renderer/color/p3.ts | 6 +- src/lib/renderer/color/prophotorgb.ts | 4 +- src/lib/renderer/color/rec2020.ts | 6 +- src/lib/renderer/color/relativecolor.ts | 20 +- src/lib/renderer/color/rgb.ts | 41 +- src/lib/renderer/color/srgb.ts | 66 ++- src/lib/renderer/color/utils/components.ts | 23 +- src/lib/renderer/color/utils/constants.ts | 2 +- src/lib/renderer/color/xyz.ts | 6 +- src/lib/renderer/color/xyzd50.ts | 10 +- src/lib/renderer/render.ts | 272 ++++++----- src/lib/syntax/syntax.ts | 175 +++---- src/lib/validation/at-rules/container.ts | 49 +- src/lib/validation/at-rules/counter-style.ts | 4 +- src/lib/validation/at-rules/custom-media.ts | 10 +- src/lib/validation/at-rules/document.ts | 12 +- src/lib/validation/at-rules/else.ts | 2 +- .../at-rules/font-feature-values.ts | 6 +- src/lib/validation/at-rules/import.ts | 21 +- 
src/lib/validation/at-rules/index.ts | 28 +- src/lib/validation/at-rules/keyframes.ts | 6 +- src/lib/validation/at-rules/layer.ts | 6 +- src/lib/validation/at-rules/media.ts | 25 +- src/lib/validation/at-rules/namespace.ts | 8 +- .../validation/at-rules/page-margin-box.ts | 4 +- src/lib/validation/at-rules/page.ts | 6 +- src/lib/validation/at-rules/supports.ts | 46 +- src/lib/validation/at-rules/when.ts | 18 +- src/lib/validation/atrule.ts | 14 +- src/lib/validation/config.ts | 4 +- src/lib/validation/declaration.ts | 2 +- src/lib/validation/selector.ts | 2 +- src/lib/validation/syntax.ts | 49 +- src/lib/validation/syntaxes/bg-layer.ts | 6 +- .../validation/syntaxes/complex-selector.ts | 2 +- src/lib/validation/syntaxes/family-name.ts | 2 +- .../syntaxes/keyframe-block-list.ts | 6 +- .../validation/syntaxes/keyframe-selector.ts | 4 +- src/lib/validation/syntaxes/layer-name.ts | 4 +- .../syntaxes/relative-selector-list.ts | 2 +- src/lib/validation/syntaxes/url.ts | 4 +- src/lib/validation/utils/list.ts | 4 +- src/lib/validation/utils/whitespace.ts | 2 +- src/web/load.ts | 2 +- test/allFiles.js | 5 +- test/specs/code/transform.js | 18 +- 112 files changed, 1963 insertions(+), 1215 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index cb92ed5d..0a2ba14d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,7 +2,7 @@ # v1.0.0 -- [x] experimental minification : CSS transform module level 2 +- [x] minification : CSS transform module level 2 - [x] translate - [x] scale - [x] rotate diff --git a/README.md b/README.md index b93d2dae..3168a440 100644 --- a/README.md +++ b/README.md @@ -27,16 +27,14 @@ $ deno add @tbela99/css-parser - fault-tolerant parser, will try to fix invalid tokens according to the CSS syntax module 3 recommendations. - fast and efficient minification without unsafe transforms, see [benchmark](https://tbela99.github.io/css-parser/benchmark/index.html) -- minify colors. -- support css color level 4 & 5: color(), lab(), lch(), oklab(), oklch(), color-mix(), light-dark(), system colors and +- minify colors: color(), lab(), lch(), oklab(), oklch(), color-mix(), light-dark(), system colors and relative color - generate nested css rules - convert nested css rules to legacy syntax - generate sourcemap - compute css shorthands. see supported properties list below -- experimental minification : css transform module level 2 -- evaluate math functions: calc(), clamp(), min(), max(), round(), mod(), rem(), sin(), cos(), tan(), asin(), acos(), - atan(), atan2(), pow(), sqrt(), hypot(), log(), exp(), abs(), sign() +- css transform functions minification +- evaluate math functions: calc(), clamp(), min(), max(), etc. - inline css variables - remove duplicate properties - flatten @import rules diff --git a/dist/index-umd-web.js b/dist/index-umd-web.js index 2c831f10..f56bf798 100644 --- a/dist/index-umd-web.js +++ b/dist/index-umd-web.js @@ -431,25 +431,35 @@ return value; } function hsl2hex(token) { - return `${hsl2rgb(token).reduce(toHexString, '#')}`; + const t = hsl2rgb(token); + return t == null ? null : `${t.reduce(toHexString, '#')}`; } function hwb2hex(token) { - return `${hwb2rgb(token).reduce(toHexString, '#')}`; + const t = hwb2rgb(token); + return t == null ? null : `${t.reduce(toHexString, '#')}`; } function cmyk2hex(token) { - return `#${cmyk2rgb(token).reduce(toHexString, '')}`; + const t = cmyk2rgb(token); + return t == null ? 
null : `#${t.reduce(toHexString, '')}`; } function oklab2hex(token) { - return `${oklab2rgb(token).reduce(toHexString, '#')}`; + const t = oklab2rgb(token); + return t == null ? null : `${t.reduce(toHexString, '#')}`; } function oklch2hex(token) { - return `${oklch2rgb(token).reduce(toHexString, '#')}`; + const value = oklch2rgb(token); + if (value == null) { + return null; + } + return `${value.reduce(toHexString, '#')}`; } function lab2hex(token) { - return `${lab2rgb(token).reduce(toHexString, '#')}`; + const t = lab2rgb(token); + return t == null ? null : `${t.reduce(toHexString, '#')}`; } function lch2hex(token) { - return `${lch2rgb(token).reduce(toHexString, '#')}`; + const t = lch2rgb(token); + return t == null ? null : `${t.reduce(toHexString, '#')}`; } function srgb2hexvalues(r, g, b, alpha) { return [r, g, b].concat(alpha == null || alpha == 1 ? [] : [alpha]).reduce((acc, value) => acc + minmax(Math.round(255 * value), 0, 255).toString(16).padStart(2, '0'), '#'); @@ -463,8 +473,19 @@ return { typ: exports.EnumToken.Number, val: parseInt(t, 16).toString() }; }); } - return token.chi - .filter((t) => ![exports.EnumToken.LiteralTokenType, exports.EnumToken.CommentTokenType, exports.EnumToken.CommaTokenType, exports.EnumToken.WhitespaceTokenType].includes(t.typ)); + const result = []; + for (const child of (token.chi)) { + if ([ + exports.EnumToken.LiteralTokenType, exports.EnumToken.CommentTokenType, exports.EnumToken.CommaTokenType, exports.EnumToken.WhitespaceTokenType + ].includes(child.typ)) { + continue; + } + if (child.typ == exports.EnumToken.ColorTokenType && child.val == 'currentcolor') { + return null; + } + result.push(child); + } + return result; } function XYZ_to_lin_sRGB(x, y, z) { @@ -556,9 +577,12 @@ } function getLCHComponents(token) { const components = getComponents(token); + if (components == null) { + return null; + } for (let i = 0; i < components.length; i++) { if (![exports.EnumToken.NumberTokenType, exports.EnumToken.PercentageTokenType, exports.EnumToken.AngleTokenType, exports.EnumToken.IdenTokenType].includes(components[i].typ)) { - return []; + return null; } } // @ts-ignore @@ -614,6 +638,9 @@ } function getOKLCHComponents(token) { const components = getComponents(token); + if (components == null) { + return null; + } for (let i = 0; i < components.length; i++) { if (![exports.EnumToken.NumberTokenType, exports.EnumToken.PercentageTokenType, exports.EnumToken.AngleTokenType, exports.EnumToken.IdenTokenType].includes(components[i].typ)) { return []; @@ -678,9 +705,12 @@ } function getOKLABComponents(token) { const components = getComponents(token); + if (components == null) { + return null; + } for (let i = 0; i < components.length; i++) { if (![exports.EnumToken.NumberTokenType, exports.EnumToken.PercentageTokenType, exports.EnumToken.AngleTokenType, exports.EnumToken.IdenTokenType].includes(components[i].typ)) { - return []; + return null; } } // @ts-ignore @@ -863,6 +893,9 @@ } function getLABComponents(token) { const components = getComponents(token); + if (components == null) { + return null; + } for (let i = 0; i < components.length; i++) { if (![exports.EnumToken.NumberTokenType, exports.EnumToken.PercentageTokenType, exports.EnumToken.AngleTokenType, exports.EnumToken.IdenTokenType].includes(components[i].typ)) { return []; @@ -947,7 +980,7 @@ return null; } function rgb2srgb(token) { - return getComponents(token).map((t, index) => index == 3 ? ((t.typ == exports.EnumToken.IdenTokenType && t.val == 'none') ? 
1 : getNumber(t)) : (t.typ == exports.EnumToken.PercentageTokenType ? 255 : 1) * getNumber(t) / 255); + return getComponents(token)?.map?.((t, index) => index == 3 ? ((t.typ == exports.EnumToken.IdenTokenType && t.val == 'none') ? 1 : getNumber(t)) : (t.typ == exports.EnumToken.PercentageTokenType ? 255 : 1) * getNumber(t) / 255) ?? null; } function hex2srgb(token) { const value = expandHexValue(token.kin == 'lit' ? COLORS_NAMES[token.val.toLowerCase()] : token.val); @@ -962,7 +995,10 @@ return lsrgb2srgbvalues(...XYZ_to_lin_sRGB(x, y, z)); } function hwb2srgb(token) { - const { h: hue, s: white, l: black, a: alpha } = hslvalues(token); + const { h: hue, s: white, l: black, a: alpha } = hslvalues(token) ?? {}; + if (hue == null || white == null || black == null) { + return []; + } const rgb = hsl2srgbvalues(hue, 1, .5); for (let i = 0; i < 3; i++) { rgb[i] *= (1 - white - black); @@ -974,11 +1010,17 @@ return rgb; } function hsl2srgb(token) { - let { h, s, l, a } = hslvalues(token); + let { h, s, l, a } = hslvalues(token) ?? {}; + if (h == null || s == null || l == null) { + return null; + } return hsl2srgbvalues(h, s, l, a); } function cmyk2srgb(token) { const components = getComponents(token); + if (components == null) { + return null; + } // @ts-ignore let t = components[0]; // @ts-ignore @@ -1010,7 +1052,10 @@ return rgb; } function oklab2srgb(token) { - const [l, a, b, alpha] = getOKLABComponents(token); + const [l, a, b, alpha] = getOKLABComponents(token) ?? []; + if (l == null || a == null || b == null) { + return null; + } const rgb = OKLab_to_sRGB(l, a, b); if (alpha != null && alpha != 1) { rgb.push(alpha); @@ -1018,7 +1063,7 @@ return rgb; } function oklch2srgb(token) { - const [l, c, h, alpha] = getOKLCHComponents(token) ?? {}; + const [l, c, h, alpha] = getOKLCHComponents(token) ?? []; if (l == null || c == null || h == null) { return null; } @@ -1031,6 +1076,9 @@ } function hslvalues(token) { const components = getComponents(token); + if (components == null) { + return null; + } let t; // @ts-ignore let h = getAngle(components[0]); @@ -1106,6 +1154,9 @@ } function lab2srgb(token) { const [l, a, b, alpha] = getLABComponents(token); + if (l == null || a == null || b == null) { + return null; + } const rgb = Lab_to_sRGB(l, a, b); if (alpha != null && alpha != 1) { rgb.push(alpha); @@ -1115,6 +1166,9 @@ function lch2srgb(token) { // @ts-ignore const [l, a, b, alpha] = lch2labvalues(...getLCHComponents(token)); + if (l == null || a == null || b == null) { + return null; + } // https://www.w3.org/TR/css-color-4/#lab-to-lch const rgb = Lab_to_sRGB(l, a, b); if (alpha != 1) { @@ -1174,26 +1228,29 @@ return rgb; } function hwb2rgb(token) { - return hwb2srgb(token).map(srgb2rgb); + return hwb2srgb(token)?.map?.(srgb2rgb) ?? null; } function hsl2rgb(token) { - let { h, s, l, a } = hslvalues(token); + let { h, s, l, a } = hslvalues(token) ?? {}; + if (h == null || s == null || l == null) { + return null; + } return hsl2srgbvalues(h, s, l, a).map((t) => minmax(Math.round(t * 255), 0, 255)); } function cmyk2rgb(token) { - return cmyk2srgb(token).map(srgb2rgb); + return cmyk2srgb(token)?.map?.(srgb2rgb) ?? null; } function oklab2rgb(token) { - return oklab2srgb(token).map(srgb2rgb); + return oklab2srgb(token)?.map?.(srgb2rgb) ?? null; } function oklch2rgb(token) { - return oklch2srgb(token).map(srgb2rgb); + return oklch2srgb(token)?.map?.(srgb2rgb) ?? null; } function lab2rgb(token) { - return lab2srgb(token).map(srgb2rgb); + return lab2srgb(token)?.map?.(srgb2rgb) ?? 
null; } function lch2rgb(token) { - return lch2srgb(token).map(srgb2rgb); + return lch2srgb(token)?.map?.(srgb2rgb) ?? null; } function hwb2hsv(h, w, b, a) { @@ -1221,6 +1278,9 @@ } function rgb2hsl(token) { const chi = getComponents(token); + if (chi == null) { + return null; + } // @ts-ignore let t = chi[0]; // @ts-ignore @@ -1279,12 +1339,14 @@ return rgb2hslvalues(...lch2rgb(token)); } function oklab2hsl(token) { + const t = oklab2rgb(token); // @ts-ignore - return rgb2hslvalues(...oklab2rgb(token)); + return t == null ? null : rgb2hslvalues(...t); } function oklch2hsl(token) { + const t = oklch2rgb(token); // @ts-ignore - return rgb2hslvalues(...oklch2rgb(token)); + return t == null ? null : rgb2hslvalues(...t); } function rgb2hslvalues(r, g, b, a = null) { return srgb2hsl(r / 255, g / 255, b / 255, a); @@ -1634,10 +1696,15 @@ } let values = []; if (to == 'hsl') { + let t; switch (token.kin) { case 'rgb': case 'rgba': - values.push(...rgb2hsl(token)); + t = rgb2hsl(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'hex': case 'lit': @@ -1647,10 +1714,18 @@ values.push(...hwb2hsl(token)); break; case 'oklab': - values.push(...oklab2hsl(token)); + t = oklab2hsl(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'oklch': - values.push(...oklch2hsl(token)); + t = oklch2hsl(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'lab': values.push(...lab2hsl(token)); @@ -1699,28 +1774,53 @@ } } else if (to == 'rgb') { + let t; switch (token.kin) { case 'hex': case 'lit': values.push(...hex2rgb(token)); break; case 'hsl': - values.push(...hsl2rgb(token)); + t = hsl2rgb(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'hwb': - values.push(...hwb2rgb(token)); + t = hwb2rgb(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'oklab': - values.push(...oklab2rgb(token)); + t = oklab2rgb(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'oklch': - values.push(...oklch2rgb(token)); + t = oklch2rgb(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'lab': - values.push(...lab2rgb(token)); + t = lab2rgb(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'lch': - values.push(...lch2rgb(token)); + t = lch2rgb(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'color': // @ts-ignore @@ -1906,6 +2006,7 @@ } } else if (colorFuncColorSpace.includes(to)) { + let t; switch (token.kin) { case 'hex': case 'lit': @@ -1913,30 +2014,60 @@ break; case 'rgb': case 'rgba': - values.push(...rgb2srgb(token)); + t = rgb2srgb(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'hsl': case 'hsla': - values.push(...hsl2srgb(token)); + t = hsl2srgb(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'hwb': - values.push(...hwb2srgb(token)); + t = hwb2srgb(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'lab': - values.push(...lab2srgb(token)); + t = lab2srgb(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'oklab': - values.push(...oklab2srgb(token)); + t = oklab2srgb(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'lch': - values.push(...lch2srgb(token)); + t = lch2srgb(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'oklch': - // @ts-ignore - values.push(...srgb2oklch(...color2srgbvalues(token))); + t 
= color2srgbvalues(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'color': const val = color2srgbvalues(token); + if (val == null) { + return null; + } switch (to) { case 'srgb': values.push(...val); @@ -1990,6 +2121,9 @@ } function color2srgbvalues(token) { const components = getComponents(token); + if (components == null) { + return null; + } const colorSpace = components.shift(); let values = components.map((val) => getNumber(val)); switch (colorSpace.val) { @@ -2252,6 +2386,9 @@ } const components1 = getComponents(color1); const components2 = getComponents(color2); + if (components1 == null || components2 == null) { + return null; + } if ((components1[3] != null && components1[3].typ == exports.EnumToken.IdenTokenType && components1[3].val == 'none') && values2.length == 4) { values1[3] = values2[3]; } @@ -2563,6 +2700,7 @@ r2 = l1.r.val * r1.l.val; break; } + // @ts-ignore const a2 = simplify(l2, r2); if (a2[1] == 1) { return a2[0]; @@ -3184,19 +3322,23 @@ } else if ([exports.EnumToken.NumberTokenType, exports.EnumToken.PercentageTokenType, exports.EnumToken.AngleTokenType, exports.EnumToken.LengthTokenType].includes(exp.typ)) ; else if (exp.typ == exports.EnumToken.IdenTokenType && exp.val in values) { + // @ts-ignore if (typeof values[exp.val] == 'number') { expr[key] = { typ: exports.EnumToken.NumberTokenType, + // @ts-ignore val: reduceNumber(values[exp.val]) }; } else { + // @ts-ignore expr[key] = values[exp.val]; } } else if (exp.typ == exports.EnumToken.FunctionTokenType && mathFuncs.includes(exp.val)) { for (let { value, parent } of walkValues(exp.chi, exp)) { if (parent == null) { + // @ts-ignore parent = exp; } if (value.typ == exports.EnumToken.PercentageTokenType) { @@ -3595,20 +3737,25 @@ if (isColor(token)) { // @ts-ignore token.typ = exports.EnumToken.ColorTokenType; + // @ts-ignore if (token.chi[0].typ == exports.EnumToken.IdenTokenType && token.chi[0].val == 'from') { // @ts-ignore token.cal = 'rel'; } - else if (token.val == 'color-mix' && token.chi[0].typ == exports.EnumToken.IdenTokenType && token.chi[0].val == 'in') { - // @ts-ignore - token.cal = 'mix'; - } - else { - if (token.val == 'color') { + else { // @ts-ignore + if (token.val == 'color-mix' && token.chi[0].typ == exports.EnumToken.IdenTokenType && token.chi[0].val == 'in') { // @ts-ignore - token.cal = 'col'; + token.cal = 'mix'; + } + else { + // @ts-ignore + if (token.val == 'color') { + // @ts-ignore + token.cal = 'col'; + } + // @ts-ignore + token.chi = token.chi.filter((t) => ![exports.EnumToken.WhitespaceTokenType, exports.EnumToken.CommaTokenType, exports.EnumToken.CommentTokenType].includes(t.typ)); } - token.chi = token.chi.filter((t) => ![exports.EnumToken.WhitespaceTokenType, exports.EnumToken.CommaTokenType, exports.EnumToken.CommentTokenType].includes(t.typ)); } } } @@ -3688,12 +3835,14 @@ if (token.cal == 'rel' && ['rgb', 'hsl', 'hwb', 'lab', 'lch', 'oklab', 'oklch', 'color'].includes(token.val)) { const chi = getComponents(token); const offset = token.val == 'color' ? 2 : 1; - // @ts-ignore - const color = chi[1]; - const components = parseRelativeColor(token.val == 'color' ? chi[offset].val : token.val, color, chi[offset + 1], chi[offset + 2], chi[offset + 3], chi[offset + 4]); - if (components != null) { - token.chi = [...(token.val == 'color' ? [chi[offset]] : []), ...Object.values(components)]; - delete token.cal; + if (chi != null) { + // @ts-ignore + const color = chi[1]; + const components = parseRelativeColor(token.val == 'color' ? 
chi[offset].val : token.val, color, chi[offset + 1], chi[offset + 2], chi[offset + 3], chi[offset + 4]); + if (components != null) { + token.chi = [...(token.val == 'color' ? [chi[offset]] : []), ...Object.values(components)]; + delete token.cal; + } } } if (token.val == 'color') { @@ -3756,7 +3905,7 @@ else if (token.val == 'lch') { value = lch2hex(token); } - if (value !== '') { + if (value !== '' && value != null) { return reduceHexValue(value); } } @@ -3782,15 +3931,6 @@ token.chi[0].val?.typ != exports.EnumToken.FractionTokenType) { return token.chi.reduce((acc, curr) => acc + renderToken(curr, options, cache, reducer), ''); } - // if (token.typ == EnumToken.FunctionTokenType && transformFunctions.includes(token.val)) { - // - // const children = token.val.startsWith('matrix') ? null : stripCommaToken(token.chi.slice()) as Token[]; - // - // if (children != null) { - // - // return token.val + '(' + children.reduce((acc: string, curr: Token) => acc + (acc.length > 0 ? ' ' : '') + renderToken(curr, options, cache, reducer), '') + ')'; - // } - // } // @ts-ignore return ( /* options.minify && 'Pseudo-class-func' == token.typ && token.val.slice(0, 2) == '::' ? token.val.slice(1) :*/token.val ?? '') + '(' + token.chi.reduce(reducer, '') + ')'; case exports.EnumToken.MatchExpressionTokenType: @@ -3955,7 +4095,11 @@ if (!('original' in token)) { // do not modify original token token = { ...token }; - Object.defineProperty(token, 'original', { enumerable: false, writable: false, value: token.val }); + Object.defineProperty(token, 'original', { + enumerable: false, + writable: false, + value: token.val + }); } // @ts-ignore if (!(token.original in cache)) { @@ -4433,7 +4577,9 @@ } let isLegacySyntax = false; if (token.typ == exports.EnumToken.FunctionTokenType && token.chi.length > 0 && colorsFunc.includes(token.val)) { + // @ts-ignore if (token.val == 'light-dark') { + // @ts-ignore const children = token.chi.filter((t) => [exports.EnumToken.IdenTokenType, exports.EnumToken.NumberTokenType, exports.EnumToken.LiteralTokenType, exports.EnumToken.ColorTokenType, exports.EnumToken.FunctionTokenType, exports.EnumToken.PercentageTokenType].includes(t.typ)); if (children.length != 2) { return false; @@ -4442,7 +4588,9 @@ return true; } } + // @ts-ignore if (token.val == 'color') { + // @ts-ignore const children = token.chi.filter((t) => [exports.EnumToken.IdenTokenType, exports.EnumToken.NumberTokenType, exports.EnumToken.LiteralTokenType, exports.EnumToken.ColorTokenType, exports.EnumToken.FunctionTokenType, exports.EnumToken.PercentageTokenType].includes(t.typ)); const isRelative = children[0].typ == exports.EnumToken.IdenTokenType && children[0].val == 'from'; if (children.length < 4 || children.length > 8) { @@ -4491,73 +4639,79 @@ } return true; } - else if (token.val == 'color-mix') { - const children = token.chi.reduce((acc, t) => { - if (t.typ == exports.EnumToken.CommaTokenType) { - acc.push([]); - } - else { - if (![exports.EnumToken.WhitespaceTokenType, exports.EnumToken.CommentTokenType].includes(t.typ)) { - acc[acc.length - 1].push(t); + else { // @ts-ignore + if (token.val == 'color-mix') { + // @ts-ignore + const children = token.chi.reduce((acc, t) => { + if (t.typ == exports.EnumToken.CommaTokenType) { + acc.push([]); } - } - return acc; - }, [[]]); - if (children.length == 3) { - if (children[0].length > 3 || - children[0][0].typ != exports.EnumToken.IdenTokenType || - children[0][0].val != 'in' || - !isColorspace(children[0][1]) || - (children[0].length == 3 && 
!isHueInterpolationMethod(children[0][2])) || - children[1].length > 2 || - children[1][0].typ != exports.EnumToken.ColorTokenType || - children[2].length > 2 || - children[2][0].typ != exports.EnumToken.ColorTokenType) { - return false; - } - if (children[1].length == 2) { - if (!(children[1][1].typ == exports.EnumToken.PercentageTokenType || (children[1][1].typ == exports.EnumToken.NumberTokenType && children[1][1].val == '0'))) { - return false; + else { + if (![exports.EnumToken.WhitespaceTokenType, exports.EnumToken.CommentTokenType].includes(t.typ)) { + acc[acc.length - 1].push(t); + } } - } - if (children[2].length == 2) { - if (!(children[2][1].typ == exports.EnumToken.PercentageTokenType || (children[2][1].typ == exports.EnumToken.NumberTokenType && children[2][1].val == '0'))) { + return acc; + }, [[]]); + if (children.length == 3) { + if (children[0].length > 3 || + children[0][0].typ != exports.EnumToken.IdenTokenType || + children[0][0].val != 'in' || + !isColorspace(children[0][1]) || + (children[0].length == 3 && !isHueInterpolationMethod(children[0][2])) || + children[1].length > 2 || + children[1][0].typ != exports.EnumToken.ColorTokenType || + children[2].length > 2 || + children[2][0].typ != exports.EnumToken.ColorTokenType) { return false; } + if (children[1].length == 2) { + if (!(children[1][1].typ == exports.EnumToken.PercentageTokenType || (children[1][1].typ == exports.EnumToken.NumberTokenType && children[1][1].val == '0'))) { + return false; + } + } + if (children[2].length == 2) { + if (!(children[2][1].typ == exports.EnumToken.PercentageTokenType || (children[2][1].typ == exports.EnumToken.NumberTokenType && children[2][1].val == '0'))) { + return false; + } + } + return true; } - return true; - } - return false; - } - else { - const keywords = ['from', 'none']; - if (['rgb', 'hsl', 'hwb', 'lab', 'lch', 'oklab', 'oklch'].includes(token.val)) { - keywords.push('alpha', ...token.val.slice(-3).split('')); + return false; } - // @ts-ignore - for (const v of token.chi) { - if (v.typ == exports.EnumToken.CommaTokenType) { - isLegacySyntax = true; + else { + const keywords = ['from', 'none']; + // @ts-ignore + if (['rgb', 'hsl', 'hwb', 'lab', 'lch', 'oklab', 'oklch'].includes(token.val)) { + // @ts-ignore + keywords.push('alpha', ...token.val.slice(-3).split('')); } - if (v.typ == exports.EnumToken.IdenTokenType) { - if (!(keywords.includes(v.val) || v.val.toLowerCase() in COLORS_NAMES)) { - return false; + // @ts-ignore + for (const v of token.chi) { + if (v.typ == exports.EnumToken.CommaTokenType) { + isLegacySyntax = true; } - if (keywords.includes(v.val)) { - if (isLegacySyntax) { + if (v.typ == exports.EnumToken.IdenTokenType) { + if (!(keywords.includes(v.val) || v.val.toLowerCase() in COLORS_NAMES)) { return false; } - if (v.val == 'from' && ['rgba', 'hsla'].includes(token.val)) { - return false; + if (keywords.includes(v.val)) { + if (isLegacySyntax) { + return false; + } + // @ts-ignore + if (v.val == 'from' && ['rgba', 'hsla'].includes(token.val)) { + return false; + } } + continue; + } + if (v.typ == exports.EnumToken.FunctionTokenType && (mathFuncs.includes(v.val) || v.val == 'var' || colorsFunc.includes(v.val))) { + continue; + } + if (![exports.EnumToken.ColorTokenType, exports.EnumToken.IdenTokenType, exports.EnumToken.NumberTokenType, exports.EnumToken.AngleTokenType, exports.EnumToken.PercentageTokenType, exports.EnumToken.CommaTokenType, exports.EnumToken.WhitespaceTokenType, exports.EnumToken.LiteralTokenType].includes(v.typ)) { + return false; } - 
continue; - } - if (v.typ == exports.EnumToken.FunctionTokenType && (mathFuncs.includes(v.val) || v.val == 'var' || colorsFunc.includes(v.val))) { - continue; - } - if (![exports.EnumToken.ColorTokenType, exports.EnumToken.IdenTokenType, exports.EnumToken.NumberTokenType, exports.EnumToken.AngleTokenType, exports.EnumToken.PercentageTokenType, exports.EnumToken.CommaTokenType, exports.EnumToken.WhitespaceTokenType, exports.EnumToken.LiteralTokenType].includes(v.typ)) { - return false; } } } @@ -12732,7 +12886,6 @@ if (chi[0].typ == exports.EnumToken.MediaQueryConditionTokenType) { return chi[0].l.typ == exports.EnumToken.IdenTokenType; } - // console.error(chi[0].parent); return false; } function validateMediaFeature(token) { @@ -13579,7 +13732,7 @@ } if (t[0].typ == exports.EnumToken.UrlFunctionTokenType) { result = validateURL(t[0]); - if (result.valid == ValidationLevel.Drop) { + if (result?.valid == ValidationLevel.Drop) { return result; } continue; @@ -13925,10 +14078,10 @@ break; } token = queries[0]; - if (token.typ == exports.EnumToken.MediaFeatureNotTokenType) { + if (token?.typ == exports.EnumToken.MediaFeatureNotTokenType) { token = token.val; } - if (token.typ != exports.EnumToken.ParensTokenType && (token.typ != exports.EnumToken.FunctionTokenType || !['scroll-state', 'style'].includes(token.val))) { + if (token?.typ != exports.EnumToken.ParensTokenType && (token?.typ != exports.EnumToken.FunctionTokenType || !['scroll-state', 'style'].includes(token.val))) { return { valid: ValidationLevel.Drop, matches: [], @@ -13938,7 +14091,7 @@ tokens }; } - if (token.typ == exports.EnumToken.ParensTokenType) { + if (token?.typ == exports.EnumToken.ParensTokenType) { result = validateContainerSizeFeature(token.chi, atRule); } else if (token.val == 'scroll-state') { @@ -13956,7 +14109,7 @@ break; } token = queries[0]; - if (token.typ != exports.EnumToken.MediaFeatureAndTokenType && token.typ != exports.EnumToken.MediaFeatureOrTokenType) { + if (token?.typ != exports.EnumToken.MediaFeatureAndTokenType && token?.typ != exports.EnumToken.MediaFeatureOrTokenType) { return { valid: ValidationLevel.Drop, matches: [], @@ -13967,9 +14120,9 @@ }; } if (tokenType == null) { - tokenType = token.typ; + tokenType = token?.typ; } - if (tokenType != token.typ) { + if (tokenType == null || tokenType != token?.typ) { return { valid: ValidationLevel.Drop, matches: [], @@ -14419,7 +14572,7 @@ removeCharset: true, removeEmpty: true, removeDuplicateDeclarations: true, - computeTransform: false, + computeTransform: true, computeShorthand: true, computeCalcExpression: true, inlineCssVariables: false, @@ -14541,6 +14694,7 @@ await parseNode(tokens, context, stats, options, errors, src, map, rawTokens); rawTokens.length = 0; if (context != null && context.typ == exports.EnumToken.InvalidRuleTokenType) { + // @ts-ignore const index = context.chi.findIndex((node) => node == context); if (index > -1) { context.chi.splice(index, 1); @@ -14918,7 +15072,6 @@ if (valid.valid != ValidationLevel.Valid) { const node = { typ: exports.EnumToken.InvalidRuleTokenType, - // @ts-ignore sel: tokens.reduce((acc, curr) => acc + renderToken(curr, { minify: false }), ''), chi: [] }; @@ -15165,8 +15318,6 @@ } } if (value.typ == exports.EnumToken.ParensTokenType || (value.typ == exports.EnumToken.FunctionTokenType && ['media', 'supports', 'style', 'scroll-state'].includes(value.val))) { - // @todo parse range and declarations - // parseDeclaration(parent.chi); let i; let nameIndex = -1; let valueIndex = -1; @@ -15730,7 +15881,7 @@ 
upper++; } if (upper < t.chi.length && - t.chi[upper].typ == exports.EnumToken.Iden && + t.chi[upper].typ == exports.EnumToken.IdenTokenType && ['i', 's'].includes(t.chi[upper].val.toLowerCase())) { t.chi[m].attr = t.chi[upper].val; t.chi.splice(upper, 1); @@ -16384,9 +16535,9 @@ for (const { value } of walkValues(node.val)) { if (value.typ == exports.EnumToken.IdenTokenType && value.val.charAt(0) == '-' && value.val.charAt(1) != '-') { // @ts-ignore - const values = config$1.declarations[node.nam].ast.slice(); + const values = config$1.declarations[node.nam].ast?.slice?.(); const match = value.val.match(/^-(.*?)-(.*)$/); - if (match != null) { + if (values != null && match != null) { const val = matchToken({ ...value, val: match[2] }, values); if (val != null) { // @ts-ignore @@ -17797,7 +17948,7 @@ } function toZero(v) { for (let i = 0; i < v.length; i++) { - if (Math.abs(v[i]) <= 1e-5) { + if (Math.abs(v[i]) <= epsilon) { v[i] = 0; } else { @@ -17950,12 +18101,13 @@ if (mat.typ == exports.EnumToken.IdenTokenType) { return mat.val == 'none' ? identity() : null; } - const children = mat.chi.filter(t => t.typ == exports.EnumToken.NumberTokenType || t.typ == exports.EnumToken.IdenTokenType); + const children = mat.chi.filter((t) => t.typ == exports.EnumToken.NumberTokenType || t.typ == exports.EnumToken.IdenTokenType); const values = []; for (const child of children) { if (child.typ != exports.EnumToken.NumberTokenType) { return null; } + // @ts-ignore values.push(getNumber(child)); } // @ts-ignore @@ -17965,7 +18117,6 @@ function matrix(values) { const matrix = identity(); if (values.length === 6) { - // matrix(scaleX(), skewY(), skewX(), scaleY(), translateX(), translateY()) matrix[0][0] = values[0]; matrix[0][1] = values[1]; matrix[1][0] = values[2]; @@ -17992,11 +18143,13 @@ matrix[3][3] = values[15]; } else { - throw new RangeError('expecting 6 or 16 values'); + return null; } return matrix; } function serialize(matrix) { + matrix = matrix.map(t => toZero(t.slice())); + // @ts-ignore if (eq(matrix, identity())) { return { typ: exports.EnumToken.IdenTokenType, @@ -18008,20 +18161,20 @@ return { typ: exports.EnumToken.FunctionTokenType, val: 'matrix', - chi: toZero([ + chi: [ matrix[0][0], matrix[0][1], matrix[1][0], matrix[1][1], matrix[3][0], matrix[3][1] - ]).reduce((acc, t) => { + ].reduce((acc, t) => { if (acc.length > 0) { acc.push({ typ: exports.EnumToken.CommaTokenType }); } acc.push({ typ: exports.EnumToken.NumberTokenType, - val: reduceNumber(t.toPrecision(6)) + val: reduceNumber(t) }); return acc; }, []) @@ -18036,7 +18189,7 @@ } acc.push({ typ: exports.EnumToken.NumberTokenType, - val: reduceNumber(round(curr)) + val: reduceNumber(curr) }); return acc; }, []) @@ -18045,7 +18198,7 @@ // translate → rotate → skew → scale function minify$1(matrix) { - const decomposed = /* is2DMatrix(matrix) ? decompose2(matrix) : */ decompose(matrix); + const decomposed = decompose(matrix); if (decomposed == null) { return null; } @@ -18301,7 +18454,7 @@ }); } // identity - return result.length == 0 || eq(result, identity()) ? [ + return result.length == 0 || (result.length == 1 && eqMatrix(identity(), result)) ? [ { typ: exports.EnumToken.IdenTokenType, val: 'none' @@ -18312,17 +18465,14 @@ let mat = identity(); let tmp = identity(); // @ts-ignore - const data = parseMatrix(a); - // console.error({data}); + const data = Array.isArray(a) ? 
a : parseMatrix(a); for (const transform of b) { tmp = computeMatrix([transform], identity()); - // console.error({transform: renderToken(transform), tmp}); if (tmp == null) { return false; } mat = multiply(mat, tmp); } - // console.error({mat}); if (mat == null) { return false; } @@ -18699,8 +18849,7 @@ for (; i < ast.chi.length; i++) { // @ts-ignore node = ast.chi[i]; - if (node.typ != exports.EnumToken.DeclarationNodeType || - (!node.nam.startsWith('--') && !node.nam.match(/^(-[a-z]+-)?transform$/))) { + if (node.typ != exports.EnumToken.DeclarationNodeType || !node.nam.match(/^(-[a-z]+-)?transform$/)) { continue; } const children = node.val.slice(); @@ -18716,13 +18865,6 @@ if (eqMatrix(matrix, minified)) { r.push(minified); } - // console.error(JSON.stringify({ - // matrix: renderToken(matrix), - // cumulative: cumulative.reduce((acc, curr) => acc + renderToken(curr), ''), - // minified: minified.reduce((acc, curr) => acc + renderToken(curr), ''), - // r: r[0].reduce((acc, curr) => acc + renderToken(curr), ''), - // all: r.map(r => r.reduce((acc, curr) => acc + renderToken(curr), '')) - // }, null, 1)); const l = renderToken(matrix).length; node.val = r.reduce((acc, curr) => { if (curr.reduce((acc, t) => acc + renderToken(t), '').length < l) { diff --git a/dist/index.cjs b/dist/index.cjs index d7b2f500..e7f65475 100644 --- a/dist/index.cjs +++ b/dist/index.cjs @@ -430,25 +430,35 @@ function rgb2hex(token) { return value; } function hsl2hex(token) { - return `${hsl2rgb(token).reduce(toHexString, '#')}`; + const t = hsl2rgb(token); + return t == null ? null : `${t.reduce(toHexString, '#')}`; } function hwb2hex(token) { - return `${hwb2rgb(token).reduce(toHexString, '#')}`; + const t = hwb2rgb(token); + return t == null ? null : `${t.reduce(toHexString, '#')}`; } function cmyk2hex(token) { - return `#${cmyk2rgb(token).reduce(toHexString, '')}`; + const t = cmyk2rgb(token); + return t == null ? null : `#${t.reduce(toHexString, '')}`; } function oklab2hex(token) { - return `${oklab2rgb(token).reduce(toHexString, '#')}`; + const t = oklab2rgb(token); + return t == null ? null : `${t.reduce(toHexString, '#')}`; } function oklch2hex(token) { - return `${oklch2rgb(token).reduce(toHexString, '#')}`; + const value = oklch2rgb(token); + if (value == null) { + return null; + } + return `${value.reduce(toHexString, '#')}`; } function lab2hex(token) { - return `${lab2rgb(token).reduce(toHexString, '#')}`; + const t = lab2rgb(token); + return t == null ? null : `${t.reduce(toHexString, '#')}`; } function lch2hex(token) { - return `${lch2rgb(token).reduce(toHexString, '#')}`; + const t = lch2rgb(token); + return t == null ? null : `${t.reduce(toHexString, '#')}`; } function srgb2hexvalues(r, g, b, alpha) { return [r, g, b].concat(alpha == null || alpha == 1 ? 
[] : [alpha]).reduce((acc, value) => acc + minmax(Math.round(255 * value), 0, 255).toString(16).padStart(2, '0'), '#'); @@ -462,8 +472,19 @@ function getComponents(token) { return { typ: exports.EnumToken.Number, val: parseInt(t, 16).toString() }; }); } - return token.chi - .filter((t) => ![exports.EnumToken.LiteralTokenType, exports.EnumToken.CommentTokenType, exports.EnumToken.CommaTokenType, exports.EnumToken.WhitespaceTokenType].includes(t.typ)); + const result = []; + for (const child of (token.chi)) { + if ([ + exports.EnumToken.LiteralTokenType, exports.EnumToken.CommentTokenType, exports.EnumToken.CommaTokenType, exports.EnumToken.WhitespaceTokenType + ].includes(child.typ)) { + continue; + } + if (child.typ == exports.EnumToken.ColorTokenType && child.val == 'currentcolor') { + return null; + } + result.push(child); + } + return result; } function XYZ_to_lin_sRGB(x, y, z) { @@ -555,9 +576,12 @@ function xyz2lchvalues(x, y, z, alpha) { } function getLCHComponents(token) { const components = getComponents(token); + if (components == null) { + return null; + } for (let i = 0; i < components.length; i++) { if (![exports.EnumToken.NumberTokenType, exports.EnumToken.PercentageTokenType, exports.EnumToken.AngleTokenType, exports.EnumToken.IdenTokenType].includes(components[i].typ)) { - return []; + return null; } } // @ts-ignore @@ -613,6 +637,9 @@ function srgb2oklch(r, g, blue, alpha) { } function getOKLCHComponents(token) { const components = getComponents(token); + if (components == null) { + return null; + } for (let i = 0; i < components.length; i++) { if (![exports.EnumToken.NumberTokenType, exports.EnumToken.PercentageTokenType, exports.EnumToken.AngleTokenType, exports.EnumToken.IdenTokenType].includes(components[i].typ)) { return []; @@ -677,9 +704,12 @@ function srgb2oklab(r, g, blue, alpha) { } function getOKLABComponents(token) { const components = getComponents(token); + if (components == null) { + return null; + } for (let i = 0; i < components.length; i++) { if (![exports.EnumToken.NumberTokenType, exports.EnumToken.PercentageTokenType, exports.EnumToken.AngleTokenType, exports.EnumToken.IdenTokenType].includes(components[i].typ)) { - return []; + return null; } } // @ts-ignore @@ -862,6 +892,9 @@ function lch2labvalues(l, c, h, a = null) { } function getLABComponents(token) { const components = getComponents(token); + if (components == null) { + return null; + } for (let i = 0; i < components.length; i++) { if (![exports.EnumToken.NumberTokenType, exports.EnumToken.PercentageTokenType, exports.EnumToken.AngleTokenType, exports.EnumToken.IdenTokenType].includes(components[i].typ)) { return []; @@ -946,7 +979,7 @@ function srgbvalues(token) { return null; } function rgb2srgb(token) { - return getComponents(token).map((t, index) => index == 3 ? ((t.typ == exports.EnumToken.IdenTokenType && t.val == 'none') ? 1 : getNumber(t)) : (t.typ == exports.EnumToken.PercentageTokenType ? 255 : 1) * getNumber(t) / 255); + return getComponents(token)?.map?.((t, index) => index == 3 ? ((t.typ == exports.EnumToken.IdenTokenType && t.val == 'none') ? 1 : getNumber(t)) : (t.typ == exports.EnumToken.PercentageTokenType ? 255 : 1) * getNumber(t) / 255) ?? null; } function hex2srgb(token) { const value = expandHexValue(token.kin == 'lit' ? 
COLORS_NAMES[token.val.toLowerCase()] : token.val); @@ -961,7 +994,10 @@ function xyz2srgb(x, y, z) { return lsrgb2srgbvalues(...XYZ_to_lin_sRGB(x, y, z)); } function hwb2srgb(token) { - const { h: hue, s: white, l: black, a: alpha } = hslvalues(token); + const { h: hue, s: white, l: black, a: alpha } = hslvalues(token) ?? {}; + if (hue == null || white == null || black == null) { + return []; + } const rgb = hsl2srgbvalues(hue, 1, .5); for (let i = 0; i < 3; i++) { rgb[i] *= (1 - white - black); @@ -973,11 +1009,17 @@ function hwb2srgb(token) { return rgb; } function hsl2srgb(token) { - let { h, s, l, a } = hslvalues(token); + let { h, s, l, a } = hslvalues(token) ?? {}; + if (h == null || s == null || l == null) { + return null; + } return hsl2srgbvalues(h, s, l, a); } function cmyk2srgb(token) { const components = getComponents(token); + if (components == null) { + return null; + } // @ts-ignore let t = components[0]; // @ts-ignore @@ -1009,7 +1051,10 @@ function cmyk2srgb(token) { return rgb; } function oklab2srgb(token) { - const [l, a, b, alpha] = getOKLABComponents(token); + const [l, a, b, alpha] = getOKLABComponents(token) ?? []; + if (l == null || a == null || b == null) { + return null; + } const rgb = OKLab_to_sRGB(l, a, b); if (alpha != null && alpha != 1) { rgb.push(alpha); @@ -1017,7 +1062,7 @@ function oklab2srgb(token) { return rgb; } function oklch2srgb(token) { - const [l, c, h, alpha] = getOKLCHComponents(token) ?? {}; + const [l, c, h, alpha] = getOKLCHComponents(token) ?? []; if (l == null || c == null || h == null) { return null; } @@ -1030,6 +1075,9 @@ function oklch2srgb(token) { } function hslvalues(token) { const components = getComponents(token); + if (components == null) { + return null; + } let t; // @ts-ignore let h = getAngle(components[0]); @@ -1105,6 +1153,9 @@ function hsl2srgbvalues(h, s, l, a = null) { } function lab2srgb(token) { const [l, a, b, alpha] = getLABComponents(token); + if (l == null || a == null || b == null) { + return null; + } const rgb = Lab_to_sRGB(l, a, b); if (alpha != null && alpha != 1) { rgb.push(alpha); @@ -1114,6 +1165,9 @@ function lab2srgb(token) { function lch2srgb(token) { // @ts-ignore const [l, a, b, alpha] = lch2labvalues(...getLCHComponents(token)); + if (l == null || a == null || b == null) { + return null; + } // https://www.w3.org/TR/css-color-4/#lab-to-lch const rgb = Lab_to_sRGB(l, a, b); if (alpha != 1) { @@ -1173,26 +1227,29 @@ function hex2rgb(token) { return rgb; } function hwb2rgb(token) { - return hwb2srgb(token).map(srgb2rgb); + return hwb2srgb(token)?.map?.(srgb2rgb) ?? null; } function hsl2rgb(token) { - let { h, s, l, a } = hslvalues(token); + let { h, s, l, a } = hslvalues(token) ?? {}; + if (h == null || s == null || l == null) { + return null; + } return hsl2srgbvalues(h, s, l, a).map((t) => minmax(Math.round(t * 255), 0, 255)); } function cmyk2rgb(token) { - return cmyk2srgb(token).map(srgb2rgb); + return cmyk2srgb(token)?.map?.(srgb2rgb) ?? null; } function oklab2rgb(token) { - return oklab2srgb(token).map(srgb2rgb); + return oklab2srgb(token)?.map?.(srgb2rgb) ?? null; } function oklch2rgb(token) { - return oklch2srgb(token).map(srgb2rgb); + return oklch2srgb(token)?.map?.(srgb2rgb) ?? null; } function lab2rgb(token) { - return lab2srgb(token).map(srgb2rgb); + return lab2srgb(token)?.map?.(srgb2rgb) ?? null; } function lch2rgb(token) { - return lch2srgb(token).map(srgb2rgb); + return lch2srgb(token)?.map?.(srgb2rgb) ?? 
null; } function hwb2hsv(h, w, b, a) { @@ -1220,6 +1277,9 @@ function hex2hsl(token) { } function rgb2hsl(token) { const chi = getComponents(token); + if (chi == null) { + return null; + } // @ts-ignore let t = chi[0]; // @ts-ignore @@ -1278,12 +1338,14 @@ function lch2hsl(token) { return rgb2hslvalues(...lch2rgb(token)); } function oklab2hsl(token) { + const t = oklab2rgb(token); // @ts-ignore - return rgb2hslvalues(...oklab2rgb(token)); + return t == null ? null : rgb2hslvalues(...t); } function oklch2hsl(token) { + const t = oklch2rgb(token); // @ts-ignore - return rgb2hslvalues(...oklch2rgb(token)); + return t == null ? null : rgb2hslvalues(...t); } function rgb2hslvalues(r, g, b, a = null) { return srgb2hsl(r / 255, g / 255, b / 255, a); @@ -1633,10 +1695,15 @@ function convert(token, to) { } let values = []; if (to == 'hsl') { + let t; switch (token.kin) { case 'rgb': case 'rgba': - values.push(...rgb2hsl(token)); + t = rgb2hsl(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'hex': case 'lit': @@ -1646,10 +1713,18 @@ function convert(token, to) { values.push(...hwb2hsl(token)); break; case 'oklab': - values.push(...oklab2hsl(token)); + t = oklab2hsl(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'oklch': - values.push(...oklch2hsl(token)); + t = oklch2hsl(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'lab': values.push(...lab2hsl(token)); @@ -1698,28 +1773,53 @@ function convert(token, to) { } } else if (to == 'rgb') { + let t; switch (token.kin) { case 'hex': case 'lit': values.push(...hex2rgb(token)); break; case 'hsl': - values.push(...hsl2rgb(token)); + t = hsl2rgb(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'hwb': - values.push(...hwb2rgb(token)); + t = hwb2rgb(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'oklab': - values.push(...oklab2rgb(token)); + t = oklab2rgb(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'oklch': - values.push(...oklch2rgb(token)); + t = oklch2rgb(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'lab': - values.push(...lab2rgb(token)); + t = lab2rgb(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'lch': - values.push(...lch2rgb(token)); + t = lch2rgb(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'color': // @ts-ignore @@ -1905,6 +2005,7 @@ function convert(token, to) { } } else if (colorFuncColorSpace.includes(to)) { + let t; switch (token.kin) { case 'hex': case 'lit': @@ -1912,30 +2013,60 @@ function convert(token, to) { break; case 'rgb': case 'rgba': - values.push(...rgb2srgb(token)); + t = rgb2srgb(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'hsl': case 'hsla': - values.push(...hsl2srgb(token)); + t = hsl2srgb(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'hwb': - values.push(...hwb2srgb(token)); + t = hwb2srgb(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'lab': - values.push(...lab2srgb(token)); + t = lab2srgb(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'oklab': - values.push(...oklab2srgb(token)); + t = oklab2srgb(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'lch': - values.push(...lch2srgb(token)); + t = lch2srgb(token); + if (t == null) { + return null; + } + values.push(...t); break; case 
'oklch': - // @ts-ignore - values.push(...srgb2oklch(...color2srgbvalues(token))); + t = color2srgbvalues(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'color': const val = color2srgbvalues(token); + if (val == null) { + return null; + } switch (to) { case 'srgb': values.push(...val); @@ -1989,6 +2120,9 @@ function minmax(value, min, max) { } function color2srgbvalues(token) { const components = getComponents(token); + if (components == null) { + return null; + } const colorSpace = components.shift(); let values = components.map((val) => getNumber(val)); switch (colorSpace.val) { @@ -2251,6 +2385,9 @@ function colorMix(colorSpace, hueInterpolationMethod, color1, percentage1, color } const components1 = getComponents(color1); const components2 = getComponents(color2); + if (components1 == null || components2 == null) { + return null; + } if ((components1[3] != null && components1[3].typ == exports.EnumToken.IdenTokenType && components1[3].val == 'none') && values2.length == 4) { values1[3] = values2[3]; } @@ -2562,6 +2699,7 @@ function compute$1(a, b, op) { r2 = l1.r.val * r1.l.val; break; } + // @ts-ignore const a2 = simplify(l2, r2); if (a2[1] == 1) { return a2[0]; @@ -3183,19 +3321,23 @@ function computeComponentValue(expr, converted, values) { } else if ([exports.EnumToken.NumberTokenType, exports.EnumToken.PercentageTokenType, exports.EnumToken.AngleTokenType, exports.EnumToken.LengthTokenType].includes(exp.typ)) ; else if (exp.typ == exports.EnumToken.IdenTokenType && exp.val in values) { + // @ts-ignore if (typeof values[exp.val] == 'number') { expr[key] = { typ: exports.EnumToken.NumberTokenType, + // @ts-ignore val: reduceNumber(values[exp.val]) }; } else { + // @ts-ignore expr[key] = values[exp.val]; } } else if (exp.typ == exports.EnumToken.FunctionTokenType && mathFuncs.includes(exp.val)) { for (let { value, parent } of walkValues(exp.chi, exp)) { if (parent == null) { + // @ts-ignore parent = exp; } if (value.typ == exports.EnumToken.PercentageTokenType) { @@ -3594,20 +3736,25 @@ function renderToken(token, options = {}, cache = Object.create(null), reducer, if (isColor(token)) { // @ts-ignore token.typ = exports.EnumToken.ColorTokenType; + // @ts-ignore if (token.chi[0].typ == exports.EnumToken.IdenTokenType && token.chi[0].val == 'from') { // @ts-ignore token.cal = 'rel'; } - else if (token.val == 'color-mix' && token.chi[0].typ == exports.EnumToken.IdenTokenType && token.chi[0].val == 'in') { - // @ts-ignore - token.cal = 'mix'; - } - else { - if (token.val == 'color') { + else { // @ts-ignore + if (token.val == 'color-mix' && token.chi[0].typ == exports.EnumToken.IdenTokenType && token.chi[0].val == 'in') { // @ts-ignore - token.cal = 'col'; + token.cal = 'mix'; + } + else { + // @ts-ignore + if (token.val == 'color') { + // @ts-ignore + token.cal = 'col'; + } + // @ts-ignore + token.chi = token.chi.filter((t) => ![exports.EnumToken.WhitespaceTokenType, exports.EnumToken.CommaTokenType, exports.EnumToken.CommentTokenType].includes(t.typ)); } - token.chi = token.chi.filter((t) => ![exports.EnumToken.WhitespaceTokenType, exports.EnumToken.CommaTokenType, exports.EnumToken.CommentTokenType].includes(t.typ)); } } } @@ -3687,12 +3834,14 @@ function renderToken(token, options = {}, cache = Object.create(null), reducer, if (token.cal == 'rel' && ['rgb', 'hsl', 'hwb', 'lab', 'lch', 'oklab', 'oklch', 'color'].includes(token.val)) { const chi = getComponents(token); const offset = token.val == 'color' ? 
2 : 1; - // @ts-ignore - const color = chi[1]; - const components = parseRelativeColor(token.val == 'color' ? chi[offset].val : token.val, color, chi[offset + 1], chi[offset + 2], chi[offset + 3], chi[offset + 4]); - if (components != null) { - token.chi = [...(token.val == 'color' ? [chi[offset]] : []), ...Object.values(components)]; - delete token.cal; + if (chi != null) { + // @ts-ignore + const color = chi[1]; + const components = parseRelativeColor(token.val == 'color' ? chi[offset].val : token.val, color, chi[offset + 1], chi[offset + 2], chi[offset + 3], chi[offset + 4]); + if (components != null) { + token.chi = [...(token.val == 'color' ? [chi[offset]] : []), ...Object.values(components)]; + delete token.cal; + } } } if (token.val == 'color') { @@ -3755,7 +3904,7 @@ function renderToken(token, options = {}, cache = Object.create(null), reducer, else if (token.val == 'lch') { value = lch2hex(token); } - if (value !== '') { + if (value !== '' && value != null) { return reduceHexValue(value); } } @@ -3781,15 +3930,6 @@ function renderToken(token, options = {}, cache = Object.create(null), reducer, token.chi[0].val?.typ != exports.EnumToken.FractionTokenType) { return token.chi.reduce((acc, curr) => acc + renderToken(curr, options, cache, reducer), ''); } - // if (token.typ == EnumToken.FunctionTokenType && transformFunctions.includes(token.val)) { - // - // const children = token.val.startsWith('matrix') ? null : stripCommaToken(token.chi.slice()) as Token[]; - // - // if (children != null) { - // - // return token.val + '(' + children.reduce((acc: string, curr: Token) => acc + (acc.length > 0 ? ' ' : '') + renderToken(curr, options, cache, reducer), '') + ')'; - // } - // } // @ts-ignore return ( /* options.minify && 'Pseudo-class-func' == token.typ && token.val.slice(0, 2) == '::' ? token.val.slice(1) :*/token.val ?? 
'') + '(' + token.chi.reduce(reducer, '') + ')'; case exports.EnumToken.MatchExpressionTokenType: @@ -3954,7 +4094,11 @@ function renderToken(token, options = {}, cache = Object.create(null), reducer, if (!('original' in token)) { // do not modify original token token = { ...token }; - Object.defineProperty(token, 'original', { enumerable: false, writable: false, value: token.val }); + Object.defineProperty(token, 'original', { + enumerable: false, + writable: false, + value: token.val + }); } // @ts-ignore if (!(token.original in cache)) { @@ -4432,7 +4576,9 @@ function isColor(token) { } let isLegacySyntax = false; if (token.typ == exports.EnumToken.FunctionTokenType && token.chi.length > 0 && colorsFunc.includes(token.val)) { + // @ts-ignore if (token.val == 'light-dark') { + // @ts-ignore const children = token.chi.filter((t) => [exports.EnumToken.IdenTokenType, exports.EnumToken.NumberTokenType, exports.EnumToken.LiteralTokenType, exports.EnumToken.ColorTokenType, exports.EnumToken.FunctionTokenType, exports.EnumToken.PercentageTokenType].includes(t.typ)); if (children.length != 2) { return false; @@ -4441,7 +4587,9 @@ function isColor(token) { return true; } } + // @ts-ignore if (token.val == 'color') { + // @ts-ignore const children = token.chi.filter((t) => [exports.EnumToken.IdenTokenType, exports.EnumToken.NumberTokenType, exports.EnumToken.LiteralTokenType, exports.EnumToken.ColorTokenType, exports.EnumToken.FunctionTokenType, exports.EnumToken.PercentageTokenType].includes(t.typ)); const isRelative = children[0].typ == exports.EnumToken.IdenTokenType && children[0].val == 'from'; if (children.length < 4 || children.length > 8) { @@ -4490,73 +4638,79 @@ function isColor(token) { } return true; } - else if (token.val == 'color-mix') { - const children = token.chi.reduce((acc, t) => { - if (t.typ == exports.EnumToken.CommaTokenType) { - acc.push([]); - } - else { - if (![exports.EnumToken.WhitespaceTokenType, exports.EnumToken.CommentTokenType].includes(t.typ)) { - acc[acc.length - 1].push(t); + else { // @ts-ignore + if (token.val == 'color-mix') { + // @ts-ignore + const children = token.chi.reduce((acc, t) => { + if (t.typ == exports.EnumToken.CommaTokenType) { + acc.push([]); } - } - return acc; - }, [[]]); - if (children.length == 3) { - if (children[0].length > 3 || - children[0][0].typ != exports.EnumToken.IdenTokenType || - children[0][0].val != 'in' || - !isColorspace(children[0][1]) || - (children[0].length == 3 && !isHueInterpolationMethod(children[0][2])) || - children[1].length > 2 || - children[1][0].typ != exports.EnumToken.ColorTokenType || - children[2].length > 2 || - children[2][0].typ != exports.EnumToken.ColorTokenType) { - return false; - } - if (children[1].length == 2) { - if (!(children[1][1].typ == exports.EnumToken.PercentageTokenType || (children[1][1].typ == exports.EnumToken.NumberTokenType && children[1][1].val == '0'))) { - return false; + else { + if (![exports.EnumToken.WhitespaceTokenType, exports.EnumToken.CommentTokenType].includes(t.typ)) { + acc[acc.length - 1].push(t); + } } - } - if (children[2].length == 2) { - if (!(children[2][1].typ == exports.EnumToken.PercentageTokenType || (children[2][1].typ == exports.EnumToken.NumberTokenType && children[2][1].val == '0'))) { + return acc; + }, [[]]); + if (children.length == 3) { + if (children[0].length > 3 || + children[0][0].typ != exports.EnumToken.IdenTokenType || + children[0][0].val != 'in' || + !isColorspace(children[0][1]) || + (children[0].length == 3 && 
!isHueInterpolationMethod(children[0][2])) || + children[1].length > 2 || + children[1][0].typ != exports.EnumToken.ColorTokenType || + children[2].length > 2 || + children[2][0].typ != exports.EnumToken.ColorTokenType) { return false; } + if (children[1].length == 2) { + if (!(children[1][1].typ == exports.EnumToken.PercentageTokenType || (children[1][1].typ == exports.EnumToken.NumberTokenType && children[1][1].val == '0'))) { + return false; + } + } + if (children[2].length == 2) { + if (!(children[2][1].typ == exports.EnumToken.PercentageTokenType || (children[2][1].typ == exports.EnumToken.NumberTokenType && children[2][1].val == '0'))) { + return false; + } + } + return true; } - return true; - } - return false; - } - else { - const keywords = ['from', 'none']; - if (['rgb', 'hsl', 'hwb', 'lab', 'lch', 'oklab', 'oklch'].includes(token.val)) { - keywords.push('alpha', ...token.val.slice(-3).split('')); + return false; } - // @ts-ignore - for (const v of token.chi) { - if (v.typ == exports.EnumToken.CommaTokenType) { - isLegacySyntax = true; + else { + const keywords = ['from', 'none']; + // @ts-ignore + if (['rgb', 'hsl', 'hwb', 'lab', 'lch', 'oklab', 'oklch'].includes(token.val)) { + // @ts-ignore + keywords.push('alpha', ...token.val.slice(-3).split('')); } - if (v.typ == exports.EnumToken.IdenTokenType) { - if (!(keywords.includes(v.val) || v.val.toLowerCase() in COLORS_NAMES)) { - return false; + // @ts-ignore + for (const v of token.chi) { + if (v.typ == exports.EnumToken.CommaTokenType) { + isLegacySyntax = true; } - if (keywords.includes(v.val)) { - if (isLegacySyntax) { + if (v.typ == exports.EnumToken.IdenTokenType) { + if (!(keywords.includes(v.val) || v.val.toLowerCase() in COLORS_NAMES)) { return false; } - if (v.val == 'from' && ['rgba', 'hsla'].includes(token.val)) { - return false; + if (keywords.includes(v.val)) { + if (isLegacySyntax) { + return false; + } + // @ts-ignore + if (v.val == 'from' && ['rgba', 'hsla'].includes(token.val)) { + return false; + } } + continue; + } + if (v.typ == exports.EnumToken.FunctionTokenType && (mathFuncs.includes(v.val) || v.val == 'var' || colorsFunc.includes(v.val))) { + continue; + } + if (![exports.EnumToken.ColorTokenType, exports.EnumToken.IdenTokenType, exports.EnumToken.NumberTokenType, exports.EnumToken.AngleTokenType, exports.EnumToken.PercentageTokenType, exports.EnumToken.CommaTokenType, exports.EnumToken.WhitespaceTokenType, exports.EnumToken.LiteralTokenType].includes(v.typ)) { + return false; } - continue; - } - if (v.typ == exports.EnumToken.FunctionTokenType && (mathFuncs.includes(v.val) || v.val == 'var' || colorsFunc.includes(v.val))) { - continue; - } - if (![exports.EnumToken.ColorTokenType, exports.EnumToken.IdenTokenType, exports.EnumToken.NumberTokenType, exports.EnumToken.AngleTokenType, exports.EnumToken.PercentageTokenType, exports.EnumToken.CommaTokenType, exports.EnumToken.WhitespaceTokenType, exports.EnumToken.LiteralTokenType].includes(v.typ)) { - return false; } } } @@ -12831,7 +12985,6 @@ function validateMediaCondition(token, atRule) { if (chi[0].typ == exports.EnumToken.MediaQueryConditionTokenType) { return chi[0].l.typ == exports.EnumToken.IdenTokenType; } - // console.error(chi[0].parent); return false; } function validateMediaFeature(token) { @@ -13678,7 +13831,7 @@ function validateAtRuleDocument(atRule, options, root) { } if (t[0].typ == exports.EnumToken.UrlFunctionTokenType) { result = validateURL(t[0]); - if (result.valid == ValidationLevel.Drop) { + if (result?.valid == ValidationLevel.Drop) 
{ return result; } continue; @@ -14024,10 +14177,10 @@ function validateAtRuleContainerQueryList(tokens, atRule) { break; } token = queries[0]; - if (token.typ == exports.EnumToken.MediaFeatureNotTokenType) { + if (token?.typ == exports.EnumToken.MediaFeatureNotTokenType) { token = token.val; } - if (token.typ != exports.EnumToken.ParensTokenType && (token.typ != exports.EnumToken.FunctionTokenType || !['scroll-state', 'style'].includes(token.val))) { + if (token?.typ != exports.EnumToken.ParensTokenType && (token?.typ != exports.EnumToken.FunctionTokenType || !['scroll-state', 'style'].includes(token.val))) { return { valid: ValidationLevel.Drop, matches: [], @@ -14037,7 +14190,7 @@ function validateAtRuleContainerQueryList(tokens, atRule) { tokens }; } - if (token.typ == exports.EnumToken.ParensTokenType) { + if (token?.typ == exports.EnumToken.ParensTokenType) { result = validateContainerSizeFeature(token.chi, atRule); } else if (token.val == 'scroll-state') { @@ -14055,7 +14208,7 @@ function validateAtRuleContainerQueryList(tokens, atRule) { break; } token = queries[0]; - if (token.typ != exports.EnumToken.MediaFeatureAndTokenType && token.typ != exports.EnumToken.MediaFeatureOrTokenType) { + if (token?.typ != exports.EnumToken.MediaFeatureAndTokenType && token?.typ != exports.EnumToken.MediaFeatureOrTokenType) { return { valid: ValidationLevel.Drop, matches: [], @@ -14066,9 +14219,9 @@ function validateAtRuleContainerQueryList(tokens, atRule) { }; } if (tokenType == null) { - tokenType = token.typ; + tokenType = token?.typ; } - if (tokenType != token.typ) { + if (tokenType == null || tokenType != token?.typ) { return { valid: ValidationLevel.Drop, matches: [], @@ -14518,7 +14671,7 @@ async function doParse(iterator, options = {}) { removeCharset: true, removeEmpty: true, removeDuplicateDeclarations: true, - computeTransform: false, + computeTransform: true, computeShorthand: true, computeCalcExpression: true, inlineCssVariables: false, @@ -14640,6 +14793,7 @@ async function doParse(iterator, options = {}) { await parseNode(tokens, context, stats, options, errors, src, map, rawTokens); rawTokens.length = 0; if (context != null && context.typ == exports.EnumToken.InvalidRuleTokenType) { + // @ts-ignore const index = context.chi.findIndex((node) => node == context); if (index > -1) { context.chi.splice(index, 1); @@ -15017,7 +15171,6 @@ async function parseNode(results, context, stats, options, errors, src, map, raw if (valid.valid != ValidationLevel.Valid) { const node = { typ: exports.EnumToken.InvalidRuleTokenType, - // @ts-ignore sel: tokens.reduce((acc, curr) => acc + renderToken(curr, { minify: false }), ''), chi: [] }; @@ -15264,8 +15417,6 @@ function parseAtRulePrelude(tokens, atRule) { } } if (value.typ == exports.EnumToken.ParensTokenType || (value.typ == exports.EnumToken.FunctionTokenType && ['media', 'supports', 'style', 'scroll-state'].includes(value.val))) { - // @todo parse range and declarations - // parseDeclaration(parent.chi); let i; let nameIndex = -1; let valueIndex = -1; @@ -15829,7 +15980,7 @@ function parseTokens(tokens, options = {}) { upper++; } if (upper < t.chi.length && - t.chi[upper].typ == exports.EnumToken.Iden && + t.chi[upper].typ == exports.EnumToken.IdenTokenType && ['i', 's'].includes(t.chi[upper].val.toLowerCase())) { t.chi[m].attr = t.chi[upper].val; t.chi.splice(upper, 1); @@ -16483,9 +16634,9 @@ class ComputePrefixFeature { for (const { value } of walkValues(node.val)) { if (value.typ == exports.EnumToken.IdenTokenType && value.val.charAt(0) == 
'-' && value.val.charAt(1) != '-') { // @ts-ignore - const values = config$1.declarations[node.nam].ast.slice(); + const values = config$1.declarations[node.nam].ast?.slice?.(); const match = value.val.match(/^-(.*?)-(.*)$/); - if (match != null) { + if (values != null && match != null) { const val = matchToken({ ...value, val: match[2] }, values); if (val != null) { // @ts-ignore @@ -17896,7 +18047,7 @@ function invertMatrix4(m) { } function toZero(v) { for (let i = 0; i < v.length; i++) { - if (Math.abs(v[i]) <= 1e-5) { + if (Math.abs(v[i]) <= epsilon) { v[i] = 0; } else { @@ -18049,12 +18200,13 @@ function parseMatrix(mat) { if (mat.typ == exports.EnumToken.IdenTokenType) { return mat.val == 'none' ? identity() : null; } - const children = mat.chi.filter(t => t.typ == exports.EnumToken.NumberTokenType || t.typ == exports.EnumToken.IdenTokenType); + const children = mat.chi.filter((t) => t.typ == exports.EnumToken.NumberTokenType || t.typ == exports.EnumToken.IdenTokenType); const values = []; for (const child of children) { if (child.typ != exports.EnumToken.NumberTokenType) { return null; } + // @ts-ignore values.push(getNumber(child)); } // @ts-ignore @@ -18064,7 +18216,6 @@ function parseMatrix(mat) { function matrix(values) { const matrix = identity(); if (values.length === 6) { - // matrix(scaleX(), skewY(), skewX(), scaleY(), translateX(), translateY()) matrix[0][0] = values[0]; matrix[0][1] = values[1]; matrix[1][0] = values[2]; @@ -18091,11 +18242,13 @@ function matrix(values) { matrix[3][3] = values[15]; } else { - throw new RangeError('expecting 6 or 16 values'); + return null; } return matrix; } function serialize(matrix) { + matrix = matrix.map(t => toZero(t.slice())); + // @ts-ignore if (eq(matrix, identity())) { return { typ: exports.EnumToken.IdenTokenType, @@ -18107,20 +18260,20 @@ function serialize(matrix) { return { typ: exports.EnumToken.FunctionTokenType, val: 'matrix', - chi: toZero([ + chi: [ matrix[0][0], matrix[0][1], matrix[1][0], matrix[1][1], matrix[3][0], matrix[3][1] - ]).reduce((acc, t) => { + ].reduce((acc, t) => { if (acc.length > 0) { acc.push({ typ: exports.EnumToken.CommaTokenType }); } acc.push({ typ: exports.EnumToken.NumberTokenType, - val: reduceNumber(t.toPrecision(6)) + val: reduceNumber(t) }); return acc; }, []) @@ -18135,7 +18288,7 @@ function serialize(matrix) { } acc.push({ typ: exports.EnumToken.NumberTokenType, - val: reduceNumber(round(curr)) + val: reduceNumber(curr) }); return acc; }, []) @@ -18144,7 +18297,7 @@ function serialize(matrix) { // translate → rotate → skew → scale function minify$1(matrix) { - const decomposed = /* is2DMatrix(matrix) ? decompose2(matrix) : */ decompose(matrix); + const decomposed = decompose(matrix); if (decomposed == null) { return null; } @@ -18400,7 +18553,7 @@ function minify$1(matrix) { }); } // identity - return result.length == 0 || eq(result, identity()) ? [ + return result.length == 0 || (result.length == 1 && eqMatrix(identity(), result)) ? [ { typ: exports.EnumToken.IdenTokenType, val: 'none' @@ -18411,17 +18564,14 @@ function eqMatrix(a, b) { let mat = identity(); let tmp = identity(); // @ts-ignore - const data = parseMatrix(a); - // console.error({data}); + const data = Array.isArray(a) ? 
a : parseMatrix(a); for (const transform of b) { tmp = computeMatrix([transform], identity()); - // console.error({transform: renderToken(transform), tmp}); if (tmp == null) { return false; } mat = multiply(mat, tmp); } - // console.error({mat}); if (mat == null) { return false; } @@ -18798,8 +18948,7 @@ class TransformCssFeature { for (; i < ast.chi.length; i++) { // @ts-ignore node = ast.chi[i]; - if (node.typ != exports.EnumToken.DeclarationNodeType || - (!node.nam.startsWith('--') && !node.nam.match(/^(-[a-z]+-)?transform$/))) { + if (node.typ != exports.EnumToken.DeclarationNodeType || !node.nam.match(/^(-[a-z]+-)?transform$/)) { continue; } const children = node.val.slice(); @@ -18815,13 +18964,6 @@ class TransformCssFeature { if (eqMatrix(matrix, minified)) { r.push(minified); } - // console.error(JSON.stringify({ - // matrix: renderToken(matrix), - // cumulative: cumulative.reduce((acc, curr) => acc + renderToken(curr), ''), - // minified: minified.reduce((acc, curr) => acc + renderToken(curr), ''), - // r: r[0].reduce((acc, curr) => acc + renderToken(curr), ''), - // all: r.map(r => r.reduce((acc, curr) => acc + renderToken(curr), '')) - // }, null, 1)); const l = renderToken(matrix).length; node.val = r.reduce((acc, curr) => { if (curr.reduce((acc, t) => acc + renderToken(t), '').length < l) { diff --git a/dist/index.d.ts b/dist/index.d.ts index d7b14abb..9448bfa3 100644 --- a/dist/index.d.ts +++ b/dist/index.d.ts @@ -887,7 +887,7 @@ export declare interface AstInvalidRule extends BaseToken { typ: EnumToken.InvalidRuleTokenType; sel: string; - chi: Array; + chi: Array; } export declare interface AstInvalidAtRule extends BaseToken { diff --git a/dist/lib/ast/features/prefix.js b/dist/lib/ast/features/prefix.js index 44b7a603..ee10db3f 100644 --- a/dist/lib/ast/features/prefix.js +++ b/dist/lib/ast/features/prefix.js @@ -43,9 +43,9 @@ class ComputePrefixFeature { for (const { value } of walkValues(node.val)) { if (value.typ == EnumToken.IdenTokenType && value.val.charAt(0) == '-' && value.val.charAt(1) != '-') { // @ts-ignore - const values = config.declarations[node.nam].ast.slice(); + const values = config.declarations[node.nam].ast?.slice?.(); const match = value.val.match(/^-(.*?)-(.*)$/); - if (match != null) { + if (values != null && match != null) { const val = matchToken({ ...value, val: match[2] }, values); if (val != null) { // @ts-ignore diff --git a/dist/lib/ast/features/transform.js b/dist/lib/ast/features/transform.js index b6a0d935..0e3630a1 100644 --- a/dist/lib/ast/features/transform.js +++ b/dist/lib/ast/features/transform.js @@ -30,8 +30,7 @@ class TransformCssFeature { for (; i < ast.chi.length; i++) { // @ts-ignore node = ast.chi[i]; - if (node.typ != EnumToken.DeclarationNodeType || - (!node.nam.startsWith('--') && !node.nam.match(/^(-[a-z]+-)?transform$/))) { + if (node.typ != EnumToken.DeclarationNodeType || !node.nam.match(/^(-[a-z]+-)?transform$/)) { continue; } const children = node.val.slice(); @@ -47,13 +46,6 @@ class TransformCssFeature { if (eqMatrix(matrix, minified)) { r.push(minified); } - // console.error(JSON.stringify({ - // matrix: renderToken(matrix), - // cumulative: cumulative.reduce((acc, curr) => acc + renderToken(curr), ''), - // minified: minified.reduce((acc, curr) => acc + renderToken(curr), ''), - // r: r[0].reduce((acc, curr) => acc + renderToken(curr), ''), - // all: r.map(r => r.reduce((acc, curr) => acc + renderToken(curr), '')) - // }, null, 1)); const l = renderToken(matrix).length; node.val = r.reduce((acc, curr) => { if 
(curr.reduce((acc, t) => acc + renderToken(t), '').length < l) { diff --git a/dist/lib/ast/math/math.js b/dist/lib/ast/math/math.js index 4d52a50c..464294a1 100644 --- a/dist/lib/ast/math/math.js +++ b/dist/lib/ast/math/math.js @@ -87,6 +87,7 @@ function compute(a, b, op) { r2 = l1.r.val * r1.l.val; break; } + // @ts-ignore const a2 = simplify(l2, r2); if (a2[1] == 1) { return a2[0]; diff --git a/dist/lib/ast/transform/matrix.js b/dist/lib/ast/transform/matrix.js index f275bb2a..02dc8e0c 100644 --- a/dist/lib/ast/transform/matrix.js +++ b/dist/lib/ast/transform/matrix.js @@ -1,4 +1,4 @@ -import { is2DMatrix, toZero, round, identity } from './utils.js'; +import { toZero, is2DMatrix, identity } from './utils.js'; import { EnumToken } from '../types.js'; import { reduceNumber } from '../../renderer/render.js'; import { eq } from '../../parser/utils/eq.js'; @@ -13,12 +13,13 @@ function parseMatrix(mat) { if (mat.typ == EnumToken.IdenTokenType) { return mat.val == 'none' ? identity() : null; } - const children = mat.chi.filter(t => t.typ == EnumToken.NumberTokenType || t.typ == EnumToken.IdenTokenType); + const children = mat.chi.filter((t) => t.typ == EnumToken.NumberTokenType || t.typ == EnumToken.IdenTokenType); const values = []; for (const child of children) { if (child.typ != EnumToken.NumberTokenType) { return null; } + // @ts-ignore values.push(getNumber(child)); } // @ts-ignore @@ -28,7 +29,6 @@ function parseMatrix(mat) { function matrix(values) { const matrix = identity(); if (values.length === 6) { - // matrix(scaleX(), skewY(), skewX(), scaleY(), translateX(), translateY()) matrix[0][0] = values[0]; matrix[0][1] = values[1]; matrix[1][0] = values[2]; @@ -55,11 +55,13 @@ function matrix(values) { matrix[3][3] = values[15]; } else { - throw new RangeError('expecting 6 or 16 values'); + return null; } return matrix; } function serialize(matrix) { + matrix = matrix.map(t => toZero(t.slice())); + // @ts-ignore if (eq(matrix, identity())) { return { typ: EnumToken.IdenTokenType, @@ -71,20 +73,20 @@ function serialize(matrix) { return { typ: EnumToken.FunctionTokenType, val: 'matrix', - chi: toZero([ + chi: [ matrix[0][0], matrix[0][1], matrix[1][0], matrix[1][1], matrix[3][0], matrix[3][1] - ]).reduce((acc, t) => { + ].reduce((acc, t) => { if (acc.length > 0) { acc.push({ typ: EnumToken.CommaTokenType }); } acc.push({ typ: EnumToken.NumberTokenType, - val: reduceNumber(t.toPrecision(6)) + val: reduceNumber(t) }); return acc; }, []) @@ -99,7 +101,7 @@ function serialize(matrix) { } acc.push({ typ: EnumToken.NumberTokenType, - val: reduceNumber(round(curr)) + val: reduceNumber(curr) }); return acc; }, []) diff --git a/dist/lib/ast/transform/minify.js b/dist/lib/ast/transform/minify.js index b84918f0..43693dfc 100644 --- a/dist/lib/ast/transform/minify.js +++ b/dist/lib/ast/transform/minify.js @@ -1,12 +1,11 @@ import { multiply, identity, epsilon, decompose, round, toZero } from './utils.js'; import { EnumToken } from '../types.js'; -import { eq } from '../../parser/utils/eq.js'; import { computeMatrix } from './compute.js'; import { parseMatrix } from './matrix.js'; // translate → rotate → skew → scale function minify(matrix) { - const decomposed = /* is2DMatrix(matrix) ? decompose2(matrix) : */ decompose(matrix); + const decomposed = decompose(matrix); if (decomposed == null) { return null; } @@ -262,7 +261,7 @@ function minify(matrix) { }); } // identity - return result.length == 0 || eq(result, identity()) ? 
[ + return result.length == 0 || (result.length == 1 && eqMatrix(identity(), result)) ? [ { typ: EnumToken.IdenTokenType, val: 'none' @@ -273,17 +272,14 @@ function eqMatrix(a, b) { let mat = identity(); let tmp = identity(); // @ts-ignore - const data = parseMatrix(a); - // console.error({data}); + const data = Array.isArray(a) ? a : parseMatrix(a); for (const transform of b) { tmp = computeMatrix([transform], identity()); - // console.error({transform: renderToken(transform), tmp}); if (tmp == null) { return false; } mat = multiply(mat, tmp); } - // console.error({mat}); if (mat == null) { return false; } diff --git a/dist/lib/ast/transform/utils.js b/dist/lib/ast/transform/utils.js index dae3be2f..3f53bfe7 100644 --- a/dist/lib/ast/transform/utils.js +++ b/dist/lib/ast/transform/utils.js @@ -171,7 +171,7 @@ function invertMatrix4(m) { } function toZero(v) { for (let i = 0; i < v.length; i++) { - if (Math.abs(v[i]) <= 1e-5) { + if (Math.abs(v[i]) <= epsilon) { v[i] = 0; } else { diff --git a/dist/lib/parser/parse.js b/dist/lib/parser/parse.js index 10fb612b..365a4010 100644 --- a/dist/lib/parser/parse.js +++ b/dist/lib/parser/parse.js @@ -56,7 +56,7 @@ async function doParse(iterator, options = {}) { removeCharset: true, removeEmpty: true, removeDuplicateDeclarations: true, - computeTransform: false, + computeTransform: true, computeShorthand: true, computeCalcExpression: true, inlineCssVariables: false, @@ -178,6 +178,7 @@ async function doParse(iterator, options = {}) { await parseNode(tokens, context, stats, options, errors, src, map, rawTokens); rawTokens.length = 0; if (context != null && context.typ == EnumToken.InvalidRuleTokenType) { + // @ts-ignore const index = context.chi.findIndex((node) => node == context); if (index > -1) { context.chi.splice(index, 1); @@ -555,7 +556,6 @@ async function parseNode(results, context, stats, options, errors, src, map, raw if (valid.valid != ValidationLevel.Valid) { const node = { typ: EnumToken.InvalidRuleTokenType, - // @ts-ignore sel: tokens.reduce((acc, curr) => acc + renderToken(curr, { minify: false }), ''), chi: [] }; @@ -802,8 +802,6 @@ function parseAtRulePrelude(tokens, atRule) { } } if (value.typ == EnumToken.ParensTokenType || (value.typ == EnumToken.FunctionTokenType && ['media', 'supports', 'style', 'scroll-state'].includes(value.val))) { - // @todo parse range and declarations - // parseDeclaration(parent.chi); let i; let nameIndex = -1; let valueIndex = -1; @@ -1367,7 +1365,7 @@ function parseTokens(tokens, options = {}) { upper++; } if (upper < t.chi.length && - t.chi[upper].typ == EnumToken.Iden && + t.chi[upper].typ == EnumToken.IdenTokenType && ['i', 's'].includes(t.chi[upper].val.toLowerCase())) { t.chi[m].attr = t.chi[upper].val; t.chi.splice(upper, 1); diff --git a/dist/lib/renderer/color/color.js b/dist/lib/renderer/color/color.js index 7f2ecadb..37781822 100644 --- a/dist/lib/renderer/color/color.js +++ b/dist/lib/renderer/color/color.js @@ -8,7 +8,7 @@ import { lch2hwb, lab2hwb, oklch2hwb, oklab2hwb, hsl2hwb, rgb2hwb } from './hwb. 
import { srgb2lab, oklch2lab, oklab2lab, lch2lab, hwb2lab, hsl2lab, rgb2lab, hex2lab } from './lab.js'; import { srgb2lch, oklch2lch, oklab2lch, lab2lch, hwb2lch, hsl2lch, rgb2lch, hex2lch } from './lch.js'; import { srgb2oklab, oklch2oklab, lch2oklab, lab2oklab, hwb2oklab, hsl2oklab, rgb2oklab, hex2oklab } from './oklab.js'; -import { lch2oklch, oklab2oklch, lab2oklch, hwb2oklch, hsl2oklch, rgb2oklch, hex2oklch, srgb2oklch } from './oklch.js'; +import { lch2oklch, oklab2oklch, lab2oklch, hwb2oklch, hsl2oklch, rgb2oklch, hex2oklch } from './oklch.js'; import { colorFuncColorSpace } from './utils/constants.js'; import { getComponents } from './utils/components.js'; import { xyz2srgb, lsrgb2srgbvalues, srgb2lsrgbvalues, lch2srgb, oklab2srgb, lab2srgb, hwb2srgb, hsl2srgb, rgb2srgb, hex2srgb } from './srgb.js'; @@ -33,10 +33,15 @@ function convert(token, to) { } let values = []; if (to == 'hsl') { + let t; switch (token.kin) { case 'rgb': case 'rgba': - values.push(...rgb2hsl(token)); + t = rgb2hsl(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'hex': case 'lit': @@ -46,10 +51,18 @@ function convert(token, to) { values.push(...hwb2hsl(token)); break; case 'oklab': - values.push(...oklab2hsl(token)); + t = oklab2hsl(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'oklch': - values.push(...oklch2hsl(token)); + t = oklch2hsl(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'lab': values.push(...lab2hsl(token)); @@ -98,28 +111,53 @@ function convert(token, to) { } } else if (to == 'rgb') { + let t; switch (token.kin) { case 'hex': case 'lit': values.push(...hex2rgb(token)); break; case 'hsl': - values.push(...hsl2rgb(token)); + t = hsl2rgb(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'hwb': - values.push(...hwb2rgb(token)); + t = hwb2rgb(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'oklab': - values.push(...oklab2rgb(token)); + t = oklab2rgb(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'oklch': - values.push(...oklch2rgb(token)); + t = oklch2rgb(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'lab': - values.push(...lab2rgb(token)); + t = lab2rgb(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'lch': - values.push(...lch2rgb(token)); + t = lch2rgb(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'color': // @ts-ignore @@ -305,6 +343,7 @@ function convert(token, to) { } } else if (colorFuncColorSpace.includes(to)) { + let t; switch (token.kin) { case 'hex': case 'lit': @@ -312,30 +351,60 @@ function convert(token, to) { break; case 'rgb': case 'rgba': - values.push(...rgb2srgb(token)); + t = rgb2srgb(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'hsl': case 'hsla': - values.push(...hsl2srgb(token)); + t = hsl2srgb(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'hwb': - values.push(...hwb2srgb(token)); + t = hwb2srgb(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'lab': - values.push(...lab2srgb(token)); + t = lab2srgb(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'oklab': - values.push(...oklab2srgb(token)); + t = oklab2srgb(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'lch': - values.push(...lch2srgb(token)); + t = lch2srgb(token); + if (t == null) { + 
return null; + } + values.push(...t); break; case 'oklch': - // @ts-ignore - values.push(...srgb2oklch(...color2srgbvalues(token))); + t = color2srgbvalues(token); + if (t == null) { + return null; + } + values.push(...t); break; case 'color': const val = color2srgbvalues(token); + if (val == null) { + return null; + } switch (to) { case 'srgb': values.push(...val); @@ -389,6 +458,9 @@ function minmax(value, min, max) { } function color2srgbvalues(token) { const components = getComponents(token); + if (components == null) { + return null; + } const colorSpace = components.shift(); let values = components.map((val) => getNumber(val)); switch (colorSpace.val) { diff --git a/dist/lib/renderer/color/colormix.js b/dist/lib/renderer/color/colormix.js index c9c66c64..2820cb4d 100644 --- a/dist/lib/renderer/color/colormix.js +++ b/dist/lib/renderer/color/colormix.js @@ -113,6 +113,9 @@ function colorMix(colorSpace, hueInterpolationMethod, color1, percentage1, color } const components1 = getComponents(color1); const components2 = getComponents(color2); + if (components1 == null || components2 == null) { + return null; + } if ((components1[3] != null && components1[3].typ == EnumToken.IdenTokenType && components1[3].val == 'none') && values2.length == 4) { values1[3] = values2[3]; } diff --git a/dist/lib/renderer/color/hex.js b/dist/lib/renderer/color/hex.js index c7c1f7ec..cd9fc4c2 100644 --- a/dist/lib/renderer/color/hex.js +++ b/dist/lib/renderer/color/hex.js @@ -67,25 +67,35 @@ function rgb2hex(token) { return value; } function hsl2hex(token) { - return `${hsl2rgb(token).reduce(toHexString, '#')}`; + const t = hsl2rgb(token); + return t == null ? null : `${t.reduce(toHexString, '#')}`; } function hwb2hex(token) { - return `${hwb2rgb(token).reduce(toHexString, '#')}`; + const t = hwb2rgb(token); + return t == null ? null : `${t.reduce(toHexString, '#')}`; } function cmyk2hex(token) { - return `#${cmyk2rgb(token).reduce(toHexString, '')}`; + const t = cmyk2rgb(token); + return t == null ? null : `#${t.reduce(toHexString, '')}`; } function oklab2hex(token) { - return `${oklab2rgb(token).reduce(toHexString, '#')}`; + const t = oklab2rgb(token); + return t == null ? null : `${t.reduce(toHexString, '#')}`; } function oklch2hex(token) { - return `${oklch2rgb(token).reduce(toHexString, '#')}`; + const value = oklch2rgb(token); + if (value == null) { + return null; + } + return `${value.reduce(toHexString, '#')}`; } function lab2hex(token) { - return `${lab2rgb(token).reduce(toHexString, '#')}`; + const t = lab2rgb(token); + return t == null ? null : `${t.reduce(toHexString, '#')}`; } function lch2hex(token) { - return `${lch2rgb(token).reduce(toHexString, '#')}`; + const t = lch2rgb(token); + return t == null ? null : `${t.reduce(toHexString, '#')}`; } function srgb2hexvalues(r, g, b, alpha) { return [r, g, b].concat(alpha == null || alpha == 1 ? 
[] : [alpha]).reduce((acc, value) => acc + minmax(Math.round(255 * value), 0, 255).toString(16).padStart(2, '0'), '#'); diff --git a/dist/lib/renderer/color/hsl.js b/dist/lib/renderer/color/hsl.js index ae62079b..9a51e332 100644 --- a/dist/lib/renderer/color/hsl.js +++ b/dist/lib/renderer/color/hsl.js @@ -17,6 +17,9 @@ function hex2hsl(token) { } function rgb2hsl(token) { const chi = getComponents(token); + if (chi == null) { + return null; + } // @ts-ignore let t = chi[0]; // @ts-ignore @@ -75,12 +78,14 @@ function lch2hsl(token) { return rgb2hslvalues(...lch2rgb(token)); } function oklab2hsl(token) { + const t = oklab2rgb(token); // @ts-ignore - return rgb2hslvalues(...oklab2rgb(token)); + return t == null ? null : rgb2hslvalues(...t); } function oklch2hsl(token) { + const t = oklch2rgb(token); // @ts-ignore - return rgb2hslvalues(...oklch2rgb(token)); + return t == null ? null : rgb2hslvalues(...t); } function rgb2hslvalues(r, g, b, a = null) { return srgb2hsl(r / 255, g / 255, b / 255, a); diff --git a/dist/lib/renderer/color/lab.js b/dist/lib/renderer/color/lab.js index f88f5f9e..afd081e6 100644 --- a/dist/lib/renderer/color/lab.js +++ b/dist/lib/renderer/color/lab.js @@ -87,6 +87,9 @@ function lch2labvalues(l, c, h, a = null) { } function getLABComponents(token) { const components = getComponents(token); + if (components == null) { + return null; + } for (let i = 0; i < components.length; i++) { if (![EnumToken.NumberTokenType, EnumToken.PercentageTokenType, EnumToken.AngleTokenType, EnumToken.IdenTokenType].includes(components[i].typ)) { return []; diff --git a/dist/lib/renderer/color/lch.js b/dist/lib/renderer/color/lch.js index 79d5b2cb..033c600a 100644 --- a/dist/lib/renderer/color/lch.js +++ b/dist/lib/renderer/color/lch.js @@ -59,9 +59,12 @@ function xyz2lchvalues(x, y, z, alpha) { } function getLCHComponents(token) { const components = getComponents(token); + if (components == null) { + return null; + } for (let i = 0; i < components.length; i++) { if (![EnumToken.NumberTokenType, EnumToken.PercentageTokenType, EnumToken.AngleTokenType, EnumToken.IdenTokenType].includes(components[i].typ)) { - return []; + return null; } } // @ts-ignore diff --git a/dist/lib/renderer/color/oklab.js b/dist/lib/renderer/color/oklab.js index 6732384d..a7d7d17e 100644 --- a/dist/lib/renderer/color/oklab.js +++ b/dist/lib/renderer/color/oklab.js @@ -52,9 +52,12 @@ function srgb2oklab(r, g, blue, alpha) { } function getOKLABComponents(token) { const components = getComponents(token); + if (components == null) { + return null; + } for (let i = 0; i < components.length; i++) { if (![EnumToken.NumberTokenType, EnumToken.PercentageTokenType, EnumToken.AngleTokenType, EnumToken.IdenTokenType].includes(components[i].typ)) { - return []; + return null; } } // @ts-ignore diff --git a/dist/lib/renderer/color/oklch.js b/dist/lib/renderer/color/oklch.js index 58a36e58..ad5d87f9 100644 --- a/dist/lib/renderer/color/oklch.js +++ b/dist/lib/renderer/color/oklch.js @@ -44,6 +44,9 @@ function srgb2oklch(r, g, blue, alpha) { } function getOKLCHComponents(token) { const components = getComponents(token); + if (components == null) { + return null; + } for (let i = 0; i < components.length; i++) { if (![EnumToken.NumberTokenType, EnumToken.PercentageTokenType, EnumToken.AngleTokenType, EnumToken.IdenTokenType].includes(components[i].typ)) { return []; diff --git a/dist/lib/renderer/color/relativecolor.js b/dist/lib/renderer/color/relativecolor.js index b35f2de9..dfe52c54 100644 --- 
a/dist/lib/renderer/color/relativecolor.js +++ b/dist/lib/renderer/color/relativecolor.js @@ -98,19 +98,23 @@ function computeComponentValue(expr, converted, values) { } else if ([EnumToken.NumberTokenType, EnumToken.PercentageTokenType, EnumToken.AngleTokenType, EnumToken.LengthTokenType].includes(exp.typ)) ; else if (exp.typ == EnumToken.IdenTokenType && exp.val in values) { + // @ts-ignore if (typeof values[exp.val] == 'number') { expr[key] = { typ: EnumToken.NumberTokenType, + // @ts-ignore val: reduceNumber(values[exp.val]) }; } else { + // @ts-ignore expr[key] = values[exp.val]; } } else if (exp.typ == EnumToken.FunctionTokenType && mathFuncs.includes(exp.val)) { for (let { value, parent } of walkValues(exp.chi, exp)) { if (parent == null) { + // @ts-ignore parent = exp; } if (value.typ == EnumToken.PercentageTokenType) { diff --git a/dist/lib/renderer/color/rgb.js b/dist/lib/renderer/color/rgb.js index 5ca886db..94a8db33 100644 --- a/dist/lib/renderer/color/rgb.js +++ b/dist/lib/renderer/color/rgb.js @@ -21,26 +21,29 @@ function hex2rgb(token) { return rgb; } function hwb2rgb(token) { - return hwb2srgb(token).map(srgb2rgb); + return hwb2srgb(token)?.map?.(srgb2rgb) ?? null; } function hsl2rgb(token) { - let { h, s, l, a } = hslvalues(token); + let { h, s, l, a } = hslvalues(token) ?? {}; + if (h == null || s == null || l == null) { + return null; + } return hsl2srgbvalues(h, s, l, a).map((t) => minmax(Math.round(t * 255), 0, 255)); } function cmyk2rgb(token) { - return cmyk2srgb(token).map(srgb2rgb); + return cmyk2srgb(token)?.map?.(srgb2rgb) ?? null; } function oklab2rgb(token) { - return oklab2srgb(token).map(srgb2rgb); + return oklab2srgb(token)?.map?.(srgb2rgb) ?? null; } function oklch2rgb(token) { - return oklch2srgb(token).map(srgb2rgb); + return oklch2srgb(token)?.map?.(srgb2rgb) ?? null; } function lab2rgb(token) { - return lab2srgb(token).map(srgb2rgb); + return lab2srgb(token)?.map?.(srgb2rgb) ?? null; } function lch2rgb(token) { - return lch2srgb(token).map(srgb2rgb); + return lch2srgb(token)?.map?.(srgb2rgb) ?? null; } export { cmyk2rgb, hex2rgb, hsl2rgb, hwb2rgb, lab2rgb, lch2rgb, oklab2rgb, oklch2rgb, srgb2rgb }; diff --git a/dist/lib/renderer/color/srgb.js b/dist/lib/renderer/color/srgb.js index b0c96e75..4f7dedde 100644 --- a/dist/lib/renderer/color/srgb.js +++ b/dist/lib/renderer/color/srgb.js @@ -44,7 +44,7 @@ function srgbvalues(token) { return null; } function rgb2srgb(token) { - return getComponents(token).map((t, index) => index == 3 ? ((t.typ == EnumToken.IdenTokenType && t.val == 'none') ? 1 : getNumber(t)) : (t.typ == EnumToken.PercentageTokenType ? 255 : 1) * getNumber(t) / 255); + return getComponents(token)?.map?.((t, index) => index == 3 ? ((t.typ == EnumToken.IdenTokenType && t.val == 'none') ? 1 : getNumber(t)) : (t.typ == EnumToken.PercentageTokenType ? 255 : 1) * getNumber(t) / 255) ?? null; } function hex2srgb(token) { const value = expandHexValue(token.kin == 'lit' ? COLORS_NAMES[token.val.toLowerCase()] : token.val); @@ -59,7 +59,10 @@ function xyz2srgb(x, y, z) { return lsrgb2srgbvalues(...XYZ_to_lin_sRGB(x, y, z)); } function hwb2srgb(token) { - const { h: hue, s: white, l: black, a: alpha } = hslvalues(token); + const { h: hue, s: white, l: black, a: alpha } = hslvalues(token) ?? 
{}; + if (hue == null || white == null || black == null) { + return []; + } const rgb = hsl2srgbvalues(hue, 1, .5); for (let i = 0; i < 3; i++) { rgb[i] *= (1 - white - black); @@ -71,11 +74,17 @@ function hwb2srgb(token) { return rgb; } function hsl2srgb(token) { - let { h, s, l, a } = hslvalues(token); + let { h, s, l, a } = hslvalues(token) ?? {}; + if (h == null || s == null || l == null) { + return null; + } return hsl2srgbvalues(h, s, l, a); } function cmyk2srgb(token) { const components = getComponents(token); + if (components == null) { + return null; + } // @ts-ignore let t = components[0]; // @ts-ignore @@ -107,7 +116,10 @@ function cmyk2srgb(token) { return rgb; } function oklab2srgb(token) { - const [l, a, b, alpha] = getOKLABComponents(token); + const [l, a, b, alpha] = getOKLABComponents(token) ?? []; + if (l == null || a == null || b == null) { + return null; + } const rgb = OKLab_to_sRGB(l, a, b); if (alpha != null && alpha != 1) { rgb.push(alpha); @@ -115,7 +127,7 @@ function oklab2srgb(token) { return rgb; } function oklch2srgb(token) { - const [l, c, h, alpha] = getOKLCHComponents(token) ?? {}; + const [l, c, h, alpha] = getOKLCHComponents(token) ?? []; if (l == null || c == null || h == null) { return null; } @@ -128,6 +140,9 @@ function oklch2srgb(token) { } function hslvalues(token) { const components = getComponents(token); + if (components == null) { + return null; + } let t; // @ts-ignore let h = getAngle(components[0]); @@ -203,6 +218,9 @@ function hsl2srgbvalues(h, s, l, a = null) { } function lab2srgb(token) { const [l, a, b, alpha] = getLABComponents(token); + if (l == null || a == null || b == null) { + return null; + } const rgb = Lab_to_sRGB(l, a, b); if (alpha != null && alpha != 1) { rgb.push(alpha); @@ -212,6 +230,9 @@ function lab2srgb(token) { function lch2srgb(token) { // @ts-ignore const [l, a, b, alpha] = lch2labvalues(...getLCHComponents(token)); + if (l == null || a == null || b == null) { + return null; + } // https://www.w3.org/TR/css-color-4/#lab-to-lch const rgb = Lab_to_sRGB(l, a, b); if (alpha != 1) { diff --git a/dist/lib/renderer/color/utils/components.js b/dist/lib/renderer/color/utils/components.js index ae522d1e..3c2b1ea9 100644 --- a/dist/lib/renderer/color/utils/components.js +++ b/dist/lib/renderer/color/utils/components.js @@ -15,8 +15,19 @@ function getComponents(token) { return { typ: EnumToken.Number, val: parseInt(t, 16).toString() }; }); } - return token.chi - .filter((t) => ![EnumToken.LiteralTokenType, EnumToken.CommentTokenType, EnumToken.CommaTokenType, EnumToken.WhitespaceTokenType].includes(t.typ)); + const result = []; + for (const child of (token.chi)) { + if ([ + EnumToken.LiteralTokenType, EnumToken.CommentTokenType, EnumToken.CommaTokenType, EnumToken.WhitespaceTokenType + ].includes(child.typ)) { + continue; + } + if (child.typ == EnumToken.ColorTokenType && child.val == 'currentcolor') { + return null; + } + result.push(child); + } + return result; } export { getComponents }; diff --git a/dist/lib/renderer/render.js b/dist/lib/renderer/render.js index e1a488be..32599e33 100644 --- a/dist/lib/renderer/render.js +++ b/dist/lib/renderer/render.js @@ -259,20 +259,25 @@ function renderToken(token, options = {}, cache = Object.create(null), reducer, if (isColor(token)) { // @ts-ignore token.typ = EnumToken.ColorTokenType; + // @ts-ignore if (token.chi[0].typ == EnumToken.IdenTokenType && token.chi[0].val == 'from') { // @ts-ignore token.cal = 'rel'; } - else if (token.val == 'color-mix' && token.chi[0].typ == 
EnumToken.IdenTokenType && token.chi[0].val == 'in') { - // @ts-ignore - token.cal = 'mix'; - } - else { - if (token.val == 'color') { + else { // @ts-ignore + if (token.val == 'color-mix' && token.chi[0].typ == EnumToken.IdenTokenType && token.chi[0].val == 'in') { + // @ts-ignore + token.cal = 'mix'; + } + else { // @ts-ignore - token.cal = 'col'; + if (token.val == 'color') { + // @ts-ignore + token.cal = 'col'; + } + // @ts-ignore + token.chi = token.chi.filter((t) => ![EnumToken.WhitespaceTokenType, EnumToken.CommaTokenType, EnumToken.CommentTokenType].includes(t.typ)); } - token.chi = token.chi.filter((t) => ![EnumToken.WhitespaceTokenType, EnumToken.CommaTokenType, EnumToken.CommentTokenType].includes(t.typ)); } } } @@ -352,12 +357,14 @@ function renderToken(token, options = {}, cache = Object.create(null), reducer, if (token.cal == 'rel' && ['rgb', 'hsl', 'hwb', 'lab', 'lch', 'oklab', 'oklch', 'color'].includes(token.val)) { const chi = getComponents(token); const offset = token.val == 'color' ? 2 : 1; - // @ts-ignore - const color = chi[1]; - const components = parseRelativeColor(token.val == 'color' ? chi[offset].val : token.val, color, chi[offset + 1], chi[offset + 2], chi[offset + 3], chi[offset + 4]); - if (components != null) { - token.chi = [...(token.val == 'color' ? [chi[offset]] : []), ...Object.values(components)]; - delete token.cal; + if (chi != null) { + // @ts-ignore + const color = chi[1]; + const components = parseRelativeColor(token.val == 'color' ? chi[offset].val : token.val, color, chi[offset + 1], chi[offset + 2], chi[offset + 3], chi[offset + 4]); + if (components != null) { + token.chi = [...(token.val == 'color' ? [chi[offset]] : []), ...Object.values(components)]; + delete token.cal; + } } } if (token.val == 'color') { @@ -420,7 +427,7 @@ function renderToken(token, options = {}, cache = Object.create(null), reducer, else if (token.val == 'lch') { value = lch2hex(token); } - if (value !== '') { + if (value !== '' && value != null) { return reduceHexValue(value); } } @@ -446,15 +453,6 @@ function renderToken(token, options = {}, cache = Object.create(null), reducer, token.chi[0].val?.typ != EnumToken.FractionTokenType) { return token.chi.reduce((acc, curr) => acc + renderToken(curr, options, cache, reducer), ''); } - // if (token.typ == EnumToken.FunctionTokenType && transformFunctions.includes(token.val)) { - // - // const children = token.val.startsWith('matrix') ? null : stripCommaToken(token.chi.slice()) as Token[]; - // - // if (children != null) { - // - // return token.val + '(' + children.reduce((acc: string, curr: Token) => acc + (acc.length > 0 ? ' ' : '') + renderToken(curr, options, cache, reducer), '') + ')'; - // } - // } // @ts-ignore return ( /* options.minify && 'Pseudo-class-func' == token.typ && token.val.slice(0, 2) == '::' ? token.val.slice(1) :*/token.val ?? 
'') + '(' + token.chi.reduce(reducer, '') + ')'; case EnumToken.MatchExpressionTokenType: @@ -619,7 +617,11 @@ function renderToken(token, options = {}, cache = Object.create(null), reducer, if (!('original' in token)) { // do not modify original token token = { ...token }; - Object.defineProperty(token, 'original', { enumerable: false, writable: false, value: token.val }); + Object.defineProperty(token, 'original', { + enumerable: false, + writable: false, + value: token.val + }); } // @ts-ignore if (!(token.original in cache)) { diff --git a/dist/lib/syntax/syntax.js b/dist/lib/syntax/syntax.js index a5116c01..9cae0b31 100644 --- a/dist/lib/syntax/syntax.js +++ b/dist/lib/syntax/syntax.js @@ -418,7 +418,9 @@ function isColor(token) { } let isLegacySyntax = false; if (token.typ == EnumToken.FunctionTokenType && token.chi.length > 0 && colorsFunc.includes(token.val)) { + // @ts-ignore if (token.val == 'light-dark') { + // @ts-ignore const children = token.chi.filter((t) => [EnumToken.IdenTokenType, EnumToken.NumberTokenType, EnumToken.LiteralTokenType, EnumToken.ColorTokenType, EnumToken.FunctionTokenType, EnumToken.PercentageTokenType].includes(t.typ)); if (children.length != 2) { return false; @@ -427,7 +429,9 @@ function isColor(token) { return true; } } + // @ts-ignore if (token.val == 'color') { + // @ts-ignore const children = token.chi.filter((t) => [EnumToken.IdenTokenType, EnumToken.NumberTokenType, EnumToken.LiteralTokenType, EnumToken.ColorTokenType, EnumToken.FunctionTokenType, EnumToken.PercentageTokenType].includes(t.typ)); const isRelative = children[0].typ == EnumToken.IdenTokenType && children[0].val == 'from'; if (children.length < 4 || children.length > 8) { @@ -476,73 +480,79 @@ function isColor(token) { } return true; } - else if (token.val == 'color-mix') { - const children = token.chi.reduce((acc, t) => { - if (t.typ == EnumToken.CommaTokenType) { - acc.push([]); - } - else { - if (![EnumToken.WhitespaceTokenType, EnumToken.CommentTokenType].includes(t.typ)) { - acc[acc.length - 1].push(t); + else { // @ts-ignore + if (token.val == 'color-mix') { + // @ts-ignore + const children = token.chi.reduce((acc, t) => { + if (t.typ == EnumToken.CommaTokenType) { + acc.push([]); } - } - return acc; - }, [[]]); - if (children.length == 3) { - if (children[0].length > 3 || - children[0][0].typ != EnumToken.IdenTokenType || - children[0][0].val != 'in' || - !isColorspace(children[0][1]) || - (children[0].length == 3 && !isHueInterpolationMethod(children[0][2])) || - children[1].length > 2 || - children[1][0].typ != EnumToken.ColorTokenType || - children[2].length > 2 || - children[2][0].typ != EnumToken.ColorTokenType) { - return false; - } - if (children[1].length == 2) { - if (!(children[1][1].typ == EnumToken.PercentageTokenType || (children[1][1].typ == EnumToken.NumberTokenType && children[1][1].val == '0'))) { - return false; + else { + if (![EnumToken.WhitespaceTokenType, EnumToken.CommentTokenType].includes(t.typ)) { + acc[acc.length - 1].push(t); + } } - } - if (children[2].length == 2) { - if (!(children[2][1].typ == EnumToken.PercentageTokenType || (children[2][1].typ == EnumToken.NumberTokenType && children[2][1].val == '0'))) { + return acc; + }, [[]]); + if (children.length == 3) { + if (children[0].length > 3 || + children[0][0].typ != EnumToken.IdenTokenType || + children[0][0].val != 'in' || + !isColorspace(children[0][1]) || + (children[0].length == 3 && !isHueInterpolationMethod(children[0][2])) || + children[1].length > 2 || + children[1][0].typ != 
EnumToken.ColorTokenType || + children[2].length > 2 || + children[2][0].typ != EnumToken.ColorTokenType) { return false; } + if (children[1].length == 2) { + if (!(children[1][1].typ == EnumToken.PercentageTokenType || (children[1][1].typ == EnumToken.NumberTokenType && children[1][1].val == '0'))) { + return false; + } + } + if (children[2].length == 2) { + if (!(children[2][1].typ == EnumToken.PercentageTokenType || (children[2][1].typ == EnumToken.NumberTokenType && children[2][1].val == '0'))) { + return false; + } + } + return true; } - return true; - } - return false; - } - else { - const keywords = ['from', 'none']; - if (['rgb', 'hsl', 'hwb', 'lab', 'lch', 'oklab', 'oklch'].includes(token.val)) { - keywords.push('alpha', ...token.val.slice(-3).split('')); + return false; } - // @ts-ignore - for (const v of token.chi) { - if (v.typ == EnumToken.CommaTokenType) { - isLegacySyntax = true; + else { + const keywords = ['from', 'none']; + // @ts-ignore + if (['rgb', 'hsl', 'hwb', 'lab', 'lch', 'oklab', 'oklch'].includes(token.val)) { + // @ts-ignore + keywords.push('alpha', ...token.val.slice(-3).split('')); } - if (v.typ == EnumToken.IdenTokenType) { - if (!(keywords.includes(v.val) || v.val.toLowerCase() in COLORS_NAMES)) { - return false; + // @ts-ignore + for (const v of token.chi) { + if (v.typ == EnumToken.CommaTokenType) { + isLegacySyntax = true; } - if (keywords.includes(v.val)) { - if (isLegacySyntax) { + if (v.typ == EnumToken.IdenTokenType) { + if (!(keywords.includes(v.val) || v.val.toLowerCase() in COLORS_NAMES)) { return false; } - if (v.val == 'from' && ['rgba', 'hsla'].includes(token.val)) { - return false; + if (keywords.includes(v.val)) { + if (isLegacySyntax) { + return false; + } + // @ts-ignore + if (v.val == 'from' && ['rgba', 'hsla'].includes(token.val)) { + return false; + } } + continue; + } + if (v.typ == EnumToken.FunctionTokenType && (mathFuncs.includes(v.val) || v.val == 'var' || colorsFunc.includes(v.val))) { + continue; + } + if (![EnumToken.ColorTokenType, EnumToken.IdenTokenType, EnumToken.NumberTokenType, EnumToken.AngleTokenType, EnumToken.PercentageTokenType, EnumToken.CommaTokenType, EnumToken.WhitespaceTokenType, EnumToken.LiteralTokenType].includes(v.typ)) { + return false; } - continue; - } - if (v.typ == EnumToken.FunctionTokenType && (mathFuncs.includes(v.val) || v.val == 'var' || colorsFunc.includes(v.val))) { - continue; - } - if (![EnumToken.ColorTokenType, EnumToken.IdenTokenType, EnumToken.NumberTokenType, EnumToken.AngleTokenType, EnumToken.PercentageTokenType, EnumToken.CommaTokenType, EnumToken.WhitespaceTokenType, EnumToken.LiteralTokenType].includes(v.typ)) { - return false; } } } diff --git a/dist/lib/validation/at-rules/container.js b/dist/lib/validation/at-rules/container.js index 48d06d97..1c129d1b 100644 --- a/dist/lib/validation/at-rules/container.js +++ b/dist/lib/validation/at-rules/container.js @@ -95,10 +95,10 @@ function validateAtRuleContainerQueryList(tokens, atRule) { break; } token = queries[0]; - if (token.typ == EnumToken.MediaFeatureNotTokenType) { + if (token?.typ == EnumToken.MediaFeatureNotTokenType) { token = token.val; } - if (token.typ != EnumToken.ParensTokenType && (token.typ != EnumToken.FunctionTokenType || !['scroll-state', 'style'].includes(token.val))) { + if (token?.typ != EnumToken.ParensTokenType && (token?.typ != EnumToken.FunctionTokenType || !['scroll-state', 'style'].includes(token.val))) { return { valid: ValidationLevel.Drop, matches: [], @@ -108,7 +108,7 @@ function 
validateAtRuleContainerQueryList(tokens, atRule) { tokens }; } - if (token.typ == EnumToken.ParensTokenType) { + if (token?.typ == EnumToken.ParensTokenType) { result = validateContainerSizeFeature(token.chi, atRule); } else if (token.val == 'scroll-state') { @@ -126,7 +126,7 @@ function validateAtRuleContainerQueryList(tokens, atRule) { break; } token = queries[0]; - if (token.typ != EnumToken.MediaFeatureAndTokenType && token.typ != EnumToken.MediaFeatureOrTokenType) { + if (token?.typ != EnumToken.MediaFeatureAndTokenType && token?.typ != EnumToken.MediaFeatureOrTokenType) { return { valid: ValidationLevel.Drop, matches: [], @@ -137,9 +137,9 @@ function validateAtRuleContainerQueryList(tokens, atRule) { }; } if (tokenType == null) { - tokenType = token.typ; + tokenType = token?.typ; } - if (tokenType != token.typ) { + if (tokenType == null || tokenType != token?.typ) { return { valid: ValidationLevel.Drop, matches: [], diff --git a/dist/lib/validation/at-rules/document.js b/dist/lib/validation/at-rules/document.js index 4cd2b0bc..01374602 100644 --- a/dist/lib/validation/at-rules/document.js +++ b/dist/lib/validation/at-rules/document.js @@ -59,7 +59,7 @@ function validateAtRuleDocument(atRule, options, root) { } if (t[0].typ == EnumToken.UrlFunctionTokenType) { result = validateURL(t[0]); - if (result.valid == ValidationLevel.Drop) { + if (result?.valid == ValidationLevel.Drop) { return result; } continue; diff --git a/dist/lib/validation/at-rules/media.js b/dist/lib/validation/at-rules/media.js index 84cdb84b..32e6f8fe 100644 --- a/dist/lib/validation/at-rules/media.js +++ b/dist/lib/validation/at-rules/media.js @@ -241,7 +241,6 @@ function validateMediaCondition(token, atRule) { if (chi[0].typ == EnumToken.MediaQueryConditionTokenType) { return chi[0].l.typ == EnumToken.IdenTokenType; } - // console.error(chi[0].parent); return false; } function validateMediaFeature(token) { diff --git a/src/@types/ast.d.ts b/src/@types/ast.d.ts index 6fe5208b..a71d91c4 100644 --- a/src/@types/ast.d.ts +++ b/src/@types/ast.d.ts @@ -1,4 +1,4 @@ -import {EnumToken} from "../lib"; +import {EnumToken} from "../lib/index.ts"; import {Token} from "./token.d.ts"; export declare interface Position { @@ -49,7 +49,7 @@ export declare interface AstInvalidRule extends BaseToken { typ: EnumToken.InvalidRuleTokenType; sel: string; - chi: Array; + chi: Array; } export declare interface AstInvalidAtRule extends BaseToken { diff --git a/src/@types/index.d.ts b/src/@types/index.d.ts index fd001d61..04ab3f3c 100644 --- a/src/@types/index.d.ts +++ b/src/@types/index.d.ts @@ -2,7 +2,7 @@ import {VisitorNodeMap} from "./visitor.d.ts"; import {AstAtRule, AstDeclaration, AstRule, AstRuleStyleSheet, Position} from "./ast.d.ts"; import {SourceMap} from "../lib/renderer/sourcemap/index.ts"; import {PropertyListOptions} from "./parse.d.ts"; -import {EnumToken} from "../lib"; +import {EnumToken} from "../lib/index.ts"; export * from './ast.d.ts'; export * from './token.d.ts'; diff --git a/src/@types/shorthand.d.ts b/src/@types/shorthand.d.ts index aeac6add..09422e0a 100644 --- a/src/@types/shorthand.d.ts +++ b/src/@types/shorthand.d.ts @@ -1,4 +1,4 @@ -import {EnumToken} from "../lib"; +import {EnumToken} from "../lib/index.ts"; export interface PropertyType { diff --git a/src/@types/token.d.ts b/src/@types/token.d.ts index 0e686128..5e3d5d2c 100644 --- a/src/@types/token.d.ts +++ b/src/@types/token.d.ts @@ -1,5 +1,5 @@ import type {AstDeclaration, BaseToken} from "./ast.d.ts"; -import {EnumToken} from "../lib"; +import 
{EnumToken} from "../lib/index.ts"; export declare interface LiteralToken extends BaseToken { diff --git a/src/@types/visitor.d.ts b/src/@types/visitor.d.ts index bbf3d586..211de110 100644 --- a/src/@types/visitor.d.ts +++ b/src/@types/visitor.d.ts @@ -1,6 +1,6 @@ -import {AstRule, AstDeclaration, AstAtRule} from "./ast.d.ts"; +import {AstAtRule, AstDeclaration, AstRule} from "./ast.d.ts"; import {Token} from "./token.d.ts"; -import {EnumToken} from "../lib"; +import {EnumToken} from "../lib/index.ts"; /** * Declaration visitor handler diff --git a/src/lib/ast/expand.ts b/src/lib/ast/expand.ts index 7ee0ddd4..015afde4 100644 --- a/src/lib/ast/expand.ts +++ b/src/lib/ast/expand.ts @@ -2,7 +2,15 @@ import {combinators, splitRule} from "./minify.ts"; import {parseString} from "../parser/index.ts"; import {walkValues} from "./walk.ts"; import {renderToken} from "../renderer/index.ts"; -import type {AstAtRule, AstNode, AstRule, AstRuleStyleSheet, Token} from "../../@types/index.d.ts"; +import type { + AstAtRule, + AstNode, + AstRule, + AstRuleStyleSheet, + IdentToken, + LiteralToken, + Token +} from "../../@types/index.d.ts"; import {EnumToken} from "./types.ts"; /** @@ -311,7 +319,7 @@ export function replaceCompound(input: string, replace: string): string { if (t.value.typ == EnumToken.LiteralTokenType) { - if (t.value.val == '&') { + if ((t.value as LiteralToken).val == '&') { if (tokens.length == 2) { @@ -323,11 +331,11 @@ export function replaceCompound(input: string, replace: string): string { if (tokens[1].typ == EnumToken.IdenTokenType) { - t.value.val = (replacement as Token[]).length == 1 || (!replace.includes(' ') && replace.charAt(0).match(/[:.]/)) ? tokens[1].val + replace : replaceCompoundLiteral(tokens[1].val + '&', replace); + (t.value as LiteralToken).val = (replacement as Token[]).length == 1 || (!replace.includes(' ') && replace.charAt(0).match(/[:.]/)) ? (tokens[1] as IdentToken).val + replace : replaceCompoundLiteral((tokens[1] as IdentToken).val + '&', replace); tokens.splice(1, 1); } else { - t.value.val = replaceCompoundLiteral(t.value.val, replace); + (t.value as LiteralToken).val = replaceCompoundLiteral((t.value as LiteralToken).val, replace); } continue; @@ -335,15 +343,15 @@ export function replaceCompound(input: string, replace: string): string { const rule: string[][] = splitRule(replace); - t.value.val = rule.length > 1 ? ':is(' + replace + ')' : replace; - } else if (t.value.val.length > 1 && t.value.val.charAt(0) == '&') { + (t.value as LiteralToken).val = rule.length > 1 ? 
':is(' + replace + ')' : replace; + } else if ((t.value as LiteralToken).val.length > 1 && (t.value as LiteralToken).val.charAt(0) == '&') { - t.value.val = replaceCompoundLiteral(t.value.val, replace); + (t.value as LiteralToken).val = replaceCompoundLiteral((t.value as LiteralToken).val, replace); } } } - return tokens.reduce((acc, curr) => acc + renderToken(curr), ''); + return tokens.reduce((acc: string, curr: Token) => acc + renderToken(curr), ''); } function replaceCompoundLiteral(selector: string, replace: string) { diff --git a/src/lib/ast/features/calc.ts b/src/lib/ast/features/calc.ts index 1caf04cd..10f39c8c 100644 --- a/src/lib/ast/features/calc.ts +++ b/src/lib/ast/features/calc.ts @@ -11,7 +11,7 @@ import type { Token, WalkerOption } from "../../../@types/index.d.ts"; -import {EnumToken} from "../types"; +import {EnumToken} from "../types.ts"; import {WalkerValueEvent, walkValues} from "../walk.ts"; import {evaluate} from "../math/index.ts"; import {renderToken} from "../../renderer/index.ts"; diff --git a/src/lib/ast/features/inlinecssvariables.ts b/src/lib/ast/features/inlinecssvariables.ts index 8a844736..8defec1e 100644 --- a/src/lib/ast/features/inlinecssvariables.ts +++ b/src/lib/ast/features/inlinecssvariables.ts @@ -12,7 +12,7 @@ import type { ParserOptions, Token, VariableScopeInfo -} from "../../../@types/index"; +} from "../../../@types/index.d.ts"; import {EnumToken} from "../types.ts"; import {walkValues} from "../walk.ts"; import {renderToken} from "../../renderer/index.ts"; diff --git a/src/lib/ast/features/prefix.ts b/src/lib/ast/features/prefix.ts index c01c2fe2..79b146f5 100644 --- a/src/lib/ast/features/prefix.ts +++ b/src/lib/ast/features/prefix.ts @@ -1,5 +1,12 @@ -import {EnumToken} from "../types"; -import type {AstAtRule, AstDeclaration, AstRule, MinifyFeatureOptions, Token} from "../../../@types"; +import {EnumToken} from "../types.ts"; +import type { + AstAtRule, + AstDeclaration, + AstRule, + IdentToken, + MinifyFeatureOptions, + Token +} from "../../../@types/index.d.ts"; import { getSyntaxConfig, ValidationAmpersandToken, @@ -10,8 +17,8 @@ import { ValidationPropertyToken, ValidationToken, ValidationTokenEnum -} from '../../validation' -import {walkValues} from "../walk"; +} from '../../validation/index.ts' +import {walkValues} from "../walk.ts"; const config = getSyntaxConfig(); @@ -62,13 +69,13 @@ export class ComputePrefixFeature { for (const {value} of walkValues((node).val)) { - if (value.typ == EnumToken.IdenTokenType && value.val.charAt(0) == '-' && value.val.charAt(1) != '-') { + if (value.typ == EnumToken.IdenTokenType && (value as IdentToken).val.charAt(0) == '-' && (value as IdentToken).val.charAt(1) != '-') { // @ts-ignore - const values: ValidationToken[] = config.declarations[(node).nam].ast.slice() as ValidationToken[]; - const match = value.val.match(/^-(.*?)-(.*)$/); + const values: ValidationToken[] = config.declarations[(node).nam].ast?.slice?.() as ValidationToken[]; + const match = (value as IdentToken).val.match(/^-(.*?)-(.*)$/); - if (match != null) { + if (values != null && match != null) { const val = matchToken({...value, val: match[2]} as Token, values); @@ -102,7 +109,7 @@ function matchToken(token: Token, matches: ValidationToken[]): null | Token { case ValidationTokenEnum.Keyword: - if (token.typ == EnumToken.IdenTokenType && token.val == (matches[i] as ValidationKeywordToken).val) { + if (token.typ == EnumToken.IdenTokenType && (token as IdentToken).val == (matches[i] as ValidationKeywordToken).val) { return token; } 
@@ -113,7 +120,7 @@ function matchToken(token: Token, matches: ValidationToken[]): null | Token { if (['ident', 'custom-ident'].includes((matches[i] as ValidationPropertyToken).val)) { - if (token.typ == EnumToken.IdenTokenType && token.val == (matches[i] as ValidationPropertyToken).val) { + if (token.typ == EnumToken.IdenTokenType && (token as IdentToken).val == (matches[i] as ValidationPropertyToken).val) { return token; } diff --git a/src/lib/ast/features/transform.ts b/src/lib/ast/features/transform.ts index ad1edd4e..94d11189 100644 --- a/src/lib/ast/features/transform.ts +++ b/src/lib/ast/features/transform.ts @@ -7,10 +7,10 @@ import type { MinifyFeatureOptions, Token } from "../../../@types/index.d.ts"; -import {EnumToken} from "../types"; -import {consumeWhitespace} from "../../validation/utils"; +import {EnumToken} from "../types.ts"; +import {consumeWhitespace} from "../../validation/utils/index.ts"; import {compute} from "../transform/compute.ts"; -import {filterValues, renderToken} from "../../renderer"; +import {filterValues, renderToken} from "../../renderer/index.ts"; import {eqMatrix} from "../transform/minify.ts"; export class TransformCssFeature { @@ -46,8 +46,7 @@ export class TransformCssFeature { node = ast.chi[i] as AstNode | AstDeclaration; if ( - node.typ != EnumToken.DeclarationNodeType || - (!node.nam.startsWith('--') && !node.nam.match(/^(-[a-z]+-)?transform$/))) { + node.typ != EnumToken.DeclarationNodeType || !(node as AstDeclaration).nam.match(/^(-[a-z]+-)?transform$/)) { continue; } @@ -63,7 +62,7 @@ export class TransformCssFeature { return; } - let r : Token[][] = [filterValues((node as AstDeclaration).val.slice())]; + let r: Token[][] = [filterValues((node as AstDeclaration).val.slice())]; if (eqMatrix(matrix as FunctionToken, cumulative)) { @@ -75,19 +74,11 @@ export class TransformCssFeature { r.push(minified); } - // console.error(JSON.stringify({ - // matrix: renderToken(matrix), - // cumulative: cumulative.reduce((acc, curr) => acc + renderToken(curr), ''), - // minified: minified.reduce((acc, curr) => acc + renderToken(curr), ''), - // r: r[0].reduce((acc, curr) => acc + renderToken(curr), ''), - // all: r.map(r => r.reduce((acc, curr) => acc + renderToken(curr), '')) - // }, null, 1)); + const l: number = renderToken(matrix).length; - const l = renderToken(matrix).length; + (node as AstDeclaration).val = r.reduce((acc: Token[], curr: Token[]): Token[] => { - (node as AstDeclaration).val = r.reduce((acc, curr) => { - - if (curr.reduce((acc, t) => acc + renderToken(t), '').length < l) { + if (curr.reduce((acc: string, t: Token) => acc + renderToken(t), '').length < l) { return curr; } diff --git a/src/lib/ast/math/expression.ts b/src/lib/ast/math/expression.ts index d8185e4d..08d6d5ab 100644 --- a/src/lib/ast/math/expression.ts +++ b/src/lib/ast/math/expression.ts @@ -594,7 +594,7 @@ function inlineExpression(token: Token): Token[] { const result: Token[] = []; - if (token.typ == EnumToken.ParensTokenType && token.chi.length == 1) { + if (token.typ == EnumToken.ParensTokenType && (token as ParensToken).chi.length == 1) { result.push((token as ParensToken).chi[0]); } else if (token.typ == EnumToken.BinaryExpressionTokenType) { diff --git a/src/lib/ast/math/math.ts b/src/lib/ast/math/math.ts index 953df7d3..74f47bd3 100644 --- a/src/lib/ast/math/math.ts +++ b/src/lib/ast/math/math.ts @@ -1,6 +1,6 @@ import type {FractionToken} from "../../../@types/index.d.ts"; -import {EnumToken} from "../types"; -import {reduceNumber} from "../../renderer"; +import 
{EnumToken} from "../types.ts"; +import {reduceNumber} from "../../renderer/index.ts"; export function gcd (x: number, y: number): number { @@ -130,6 +130,7 @@ export function compute(a: number | FractionToken, b: number | FractionToken, op } + // @ts-ignore const a2: [number, number] = simplify(l2 as number, r2 as number); if (a2[1] == 1) { diff --git a/src/lib/ast/minify.ts b/src/lib/ast/minify.ts index 4f6e9dc9..64cffa0b 100644 --- a/src/lib/ast/minify.ts +++ b/src/lib/ast/minify.ts @@ -171,21 +171,21 @@ export function minify(ast: AstNode, options: ParserOptions | MinifyFeatureOptio let k: number; - for (k = 0; k < node.chi.length; k++) { + for (k = 0; k < (node as AstKeyFrameRule).chi.length; k++) { - if (node.chi[k].typ == EnumToken.DeclarationNodeType) { + if ((node as AstKeyFrameRule).chi[k].typ == EnumToken.DeclarationNodeType) { - let l: number = (node.chi[k] as AstDeclaration).val.length; + let l: number = ((node as AstKeyFrameRule).chi[k] as AstDeclaration).val.length; while (l--) { - if ((node.chi[k] as AstDeclaration).val[l].typ == EnumToken.ImportantTokenType) { + if (((node as AstKeyFrameRule).chi[k] as AstDeclaration).val[l].typ == EnumToken.ImportantTokenType) { - node.chi.splice(k--, 1); + (node as AstKeyFrameRule).chi.splice(k--, 1); break; } - if ([EnumToken.WhitespaceTokenType, EnumToken.CommentTokenType].includes((node.chi[k] as AstDeclaration).val[l].typ)) { + if ([EnumToken.WhitespaceTokenType, EnumToken.CommentTokenType].includes(((node as AstKeyFrameRule).chi[k] as AstDeclaration).val[l].typ)) { continue; } @@ -1167,9 +1167,9 @@ function fixSelector(node: AstRule) { while (i--) { - if (attr.value.chi[i].typ == EnumToken.LiteralTokenType && (attr.value.chi[i]).val == '&') { + if ((attr.value as PseudoClassFunctionToken).chi[i].typ == EnumToken.LiteralTokenType && ((attr.value as PseudoClassFunctionToken).chi[i]).val == '&') { - attr.value.chi.splice(i, 1); + (attr.value as PseudoClassFunctionToken).chi.splice(i, 1); } } } diff --git a/src/lib/ast/transform/compute.ts b/src/lib/ast/transform/compute.ts index c69b2300..ae52361f 100644 --- a/src/lib/ast/transform/compute.ts +++ b/src/lib/ast/transform/compute.ts @@ -3,9 +3,9 @@ import {identity, Matrix, multiply} from "./utils.ts"; import {EnumToken} from "../types.ts"; import {length2Px} from "./convert.ts"; import {transformFunctions} from "../../syntax/index.ts"; -import {stripCommaToken} from "../../validation/utils"; +import {stripCommaToken} from "../../validation/utils/index.ts"; import {translate, translate3d, translateX, translateY, translateZ} from "./translate.ts"; -import {getAngle, getNumber} from "../../renderer/color"; +import {getAngle, getNumber} from "../../renderer/color/index.ts"; import {rotate, rotate3D} from "./rotate.ts"; import {scale, scale3d, scaleX, scaleY, scaleZ} from "./scale.ts"; import {minify} from "./minify.ts"; @@ -107,7 +107,7 @@ export function computeMatrix(transformList: Token[], matrixVar: Matrix): Matrix const valCount: number = (transformList[i] as FunctionToken).val == 'translate3d' || (transformList[i] as FunctionToken).val == 'translate' ? 
3 : 1; - if (children.length == 1 && children[0].typ == EnumToken.IdenTokenType && children[0].val == 'none') { + if (children.length == 1 && children[0].typ == EnumToken.IdenTokenType && (children[0] as IdentToken).val == 'none') { values.fill(0, 0, valCount); @@ -248,7 +248,7 @@ export function computeMatrix(transformList: Token[], matrixVar: Matrix): Matrix return null; } - values.push(getNumber(child)); + values.push(getNumber(child as NumberToken)); } if (values.length == 0) { @@ -355,9 +355,9 @@ export function computeMatrix(transformList: Token[], matrixVar: Matrix): Matrix continue; } - if (child.typ == EnumToken.IdenTokenType && child.val == 'none') { + if (child.typ == EnumToken.IdenTokenType && (child as IdentToken).val == 'none') { - values.push(child); + values.push(child as IdentToken); continue; } @@ -382,7 +382,7 @@ export function computeMatrix(transformList: Token[], matrixVar: Matrix): Matrix break; case 'matrix3d': - // return null; + // return null; case 'matrix': { diff --git a/src/lib/ast/transform/convert.ts b/src/lib/ast/transform/convert.ts index c3e4273d..42f8ad95 100644 --- a/src/lib/ast/transform/convert.ts +++ b/src/lib/ast/transform/convert.ts @@ -9,27 +9,27 @@ export function length2Px(value: LengthToken | NumberToken): number | null { return +value.val; } - switch (value.unit) { + switch ((value as LengthToken).unit) { case 'cm': // @ts-ignore - return value.val * 37.8; + return (value as LengthToken).val * 37.8; case 'mm': // @ts-ignore - return value.val * 3.78; + return (value as LengthToken).val * 3.78; case 'Q': // @ts-ignore - return value.val * 37.8 / 40; + return (value as LengthToken).val * 37.8 / 40; case 'in': // @ts-ignore - return value.val / 96; + return (value as LengthToken).val / 96; case 'pc': // @ts-ignore - return value.val / 16; + return (value as LengthToken).val / 16; case 'pt': // @ts-ignore - return value.val * 4 / 3; + return (value as LengthToken).val * 4 / 3; case 'px': - return +value.val; + return +(value as LengthToken).val; } return null; diff --git a/src/lib/ast/transform/matrix.ts b/src/lib/ast/transform/matrix.ts index d8b94f85..e908c10a 100644 --- a/src/lib/ast/transform/matrix.ts +++ b/src/lib/ast/transform/matrix.ts @@ -1,9 +1,10 @@ -import {identity, is2DMatrix, Matrix, round, toZero} from "./utils.ts"; +import {identity, is2DMatrix, Matrix, toZero} from "./utils.ts"; import {EnumToken} from "../types.ts"; import type {FunctionToken, IdentToken, Token} from "../../../@types/index.d.ts"; import {reduceNumber} from "../../renderer/render.ts"; import {eq} from "../../parser/utils/eq.ts"; -import {getNumber} from "../../renderer/color"; +import {getNumber} from "../../renderer/color/index.ts"; +import {NumberToken} from "../../validation/index.ts"; export function parseMatrix(mat: FunctionToken | IdentToken): Matrix | null { @@ -12,7 +13,7 @@ export function parseMatrix(mat: FunctionToken | IdentToken): Matrix | null { return mat.val == 'none' ? 
identity() : null; } - const children = mat.chi.filter(t => t.typ == EnumToken.NumberTokenType || t.typ == EnumToken.IdenTokenType); + const children = (mat as FunctionToken).chi.filter((t: Token) => t.typ == EnumToken.NumberTokenType || t.typ == EnumToken.IdenTokenType); const values: number[] = []; @@ -23,7 +24,8 @@ export function parseMatrix(mat: FunctionToken | IdentToken): Matrix | null { return null; } - values.push(getNumber(child)); + // @ts-ignore + values.push(getNumber(child as NumberToken)); } // @ts-ignore @@ -37,7 +39,6 @@ export function matrix(values: [number, number, number, number, number, number] if (values.length === 6) { - // matrix(scaleX(), skewY(), skewX(), scaleY(), translateX(), translateY()) matrix[0][0] = values[0]; matrix[0][1] = values[1]; matrix[1][0] = values[2]; @@ -64,7 +65,7 @@ export function matrix(values: [number, number, number, number, number, number] matrix[3][3] = values[15]; } else { - throw new RangeError('expecting 6 or 16 values'); + return null; } return matrix; @@ -72,6 +73,9 @@ export function matrix(values: [number, number, number, number, number, number] export function serialize(matrix: Matrix): Token { + matrix = matrix.map(t => toZero(t.slice())) as Matrix; + + // @ts-ignore if (eq(matrix, identity())) { return { @@ -86,14 +90,14 @@ export function serialize(matrix: Matrix): Token { return { typ: EnumToken.FunctionTokenType, val: 'matrix', - chi: toZero([ + chi: [ matrix[0][0], matrix[0][1], matrix[1][0], matrix[1][1], matrix[3][0], matrix[3][1] - ]).reduce((acc, t) => { + ].reduce((acc, t) => { if (acc.length > 0) { @@ -102,7 +106,7 @@ export function serialize(matrix: Matrix): Token { acc.push({ typ: EnumToken.NumberTokenType, - val: reduceNumber(t.toPrecision(6)) + val: reduceNumber(t) }) return acc @@ -113,7 +117,7 @@ export function serialize(matrix: Matrix): Token { return { typ: EnumToken.FunctionTokenType, val: 'matrix3d', - chi: matrix.flat().reduce((acc, curr: number) => { + chi: matrix.flat().reduce((acc: Token[], curr: number) => { if (acc.length > 0) { @@ -122,7 +126,7 @@ export function serialize(matrix: Matrix): Token { acc.push({ typ: EnumToken.NumberTokenType, - val: reduceNumber(round(curr)) + val: reduceNumber(curr) }) return acc; diff --git a/src/lib/ast/transform/minify.ts b/src/lib/ast/transform/minify.ts index 88c275d8..e3b21828 100644 --- a/src/lib/ast/transform/minify.ts +++ b/src/lib/ast/transform/minify.ts @@ -1,14 +1,13 @@ import {decompose, epsilon, identity, Matrix, multiply, round, toZero} from "./utils.ts"; import {EnumToken} from "../types.ts"; -import {FunctionToken, Token} from "../../../@types"; -import {eq} from "../../parser/utils/eq.ts"; +import type {FunctionToken, Token} from "../../../@types/index.d.ts"; import {computeMatrix} from "./compute.ts"; import {parseMatrix} from "./matrix.ts"; // translate → rotate → skew → scale export function minify(matrix: Matrix): Token[] | null { - const decomposed = /* is2DMatrix(matrix) ? decompose2(matrix) : */ decompose(matrix); + const decomposed = decompose(matrix); if (decomposed == null) { @@ -314,7 +313,7 @@ export function minify(matrix: Matrix): Token[] | null { } // identity - return result.length == 0 || eq(result, identity()) ? [ + return result.length == 0 || (result.length == 1 && eqMatrix(identity(), result)) ? 
[ { typ: EnumToken.IdenTokenType, val: 'none' @@ -322,22 +321,18 @@ export function minify(matrix: Matrix): Token[] | null { ] : result; } -export function eqMatrix(a: FunctionToken, b: Token[]): boolean { +export function eqMatrix(a: FunctionToken | Matrix, b: Token[]): boolean { let mat: Matrix = identity(); let tmp: Matrix = identity(); // @ts-ignore - const data = parseMatrix(a) as Matrix; - - // console.error({data}); + const data = Array.isArray(a) ? a : parseMatrix(a) as Matrix; for (const transform of b) { tmp = computeMatrix([transform], identity()) as Matrix; - // console.error({transform: renderToken(transform), tmp}); - if (tmp == null) { return false; @@ -346,8 +341,6 @@ export function eqMatrix(a: FunctionToken, b: Token[]): boolean { mat = multiply(mat, tmp); } - // console.error({mat}); - if (mat == null) { return false; @@ -365,5 +358,4 @@ export function eqMatrix(a: FunctionToken, b: Token[]): boolean { } return true; - } diff --git a/src/lib/ast/transform/perspective.ts b/src/lib/ast/transform/perspective.ts index 31f22f8f..12960aeb 100644 --- a/src/lib/ast/transform/perspective.ts +++ b/src/lib/ast/transform/perspective.ts @@ -1,5 +1,5 @@ import {identity, Matrix, multiply} from "./utils.ts"; -import {IdentToken} from "../../../@types"; +import type {IdentToken} from "../../../@types/index.d.ts"; export function perspective(x: number | IdentToken, from: Matrix): Matrix { diff --git a/src/lib/ast/transform/translate.ts b/src/lib/ast/transform/translate.ts index 2df494e2..850e1e8f 100644 --- a/src/lib/ast/transform/translate.ts +++ b/src/lib/ast/transform/translate.ts @@ -11,7 +11,7 @@ export function translateX(x: number, from: Matrix): Matrix { export function translateY(y: number, from: Matrix): Matrix { - const matrix: Matrix = identity() + const matrix: Matrix = identity(); matrix[3][1] = y; return multiply(from, matrix) as Matrix; diff --git a/src/lib/ast/transform/utils.ts b/src/lib/ast/transform/utils.ts index cb49d7dc..a406d979 100644 --- a/src/lib/ast/transform/utils.ts +++ b/src/lib/ast/transform/utils.ts @@ -170,7 +170,7 @@ export function round(number: number): number { // translate → rotate → skew → scale export function decompose(original: Matrix): DecomposedMatrix3D | null { - const matrix = original.flat(); + const matrix: number[] = original.flat(); // Normalize last row if (matrix[15] === 0) { @@ -346,7 +346,7 @@ export function toZero(v: [number, number] | [number, number, number] | [number, for (let i = 0; i < v.length; i++) { - if (Math.abs(v[i]) <= 1e-5) { + if (Math.abs(v[i]) <= epsilon) { v[i] = 0; } else { diff --git a/src/lib/parser/declaration/list.ts b/src/lib/parser/declaration/list.ts index 71db3511..4dba70c5 100644 --- a/src/lib/parser/declaration/list.ts +++ b/src/lib/parser/declaration/list.ts @@ -6,7 +6,7 @@ import type { ShorthandMapType, ShorthandPropertyType, Token -} from "../../../@types/index"; +} from "../../../@types/index.d.ts"; import {PropertySet} from "./set.ts"; import {getConfig} from "../utils/index.ts"; import {PropertyMap} from "./map.ts"; diff --git a/src/lib/parser/declaration/map.ts b/src/lib/parser/declaration/map.ts index 272cf212..b7c7595a 100644 --- a/src/lib/parser/declaration/map.ts +++ b/src/lib/parser/declaration/map.ts @@ -615,7 +615,7 @@ export class PropertyMap { for (let v of values) { if (![EnumToken.WhitespaceTokenType, EnumToken.CommentTokenType, EnumToken.IdenTokenType].includes(v.typ) - || (v.typ == EnumToken.IdenTokenType && !this.config.properties[curr[0]].default.includes(v.val))) { + || (v.typ == 
EnumToken.IdenTokenType && !this.config.properties[curr[0]].default.includes((v as IdentToken).val))) { doFilterDefault = false; break; @@ -631,7 +631,7 @@ export class PropertyMap { return false; } - return !doFilterDefault || !(val.typ == EnumToken.IdenTokenType && props.default.includes(val.val)); + return !doFilterDefault || !(val.typ == EnumToken.IdenTokenType && props.default.includes((val as IdentToken).val)); }); if (filtered.length > 0 || !(this.requiredCount == requiredCount && this.config.properties[curr[0]].required)) { diff --git a/src/lib/parser/declaration/set.ts b/src/lib/parser/declaration/set.ts index 23b469d5..d3d0f17b 100644 --- a/src/lib/parser/declaration/set.ts +++ b/src/lib/parser/declaration/set.ts @@ -1,6 +1,7 @@ import type { AstDeclaration, DimensionToken, + IdentToken, LiteralToken, NumberToken, ShorthandPropertyType, @@ -93,7 +94,7 @@ export class PropertySet { if (token.typ != EnumToken.WhitespaceTokenType && token.typ != EnumToken.CommentTokenType) { - if (token.typ == EnumToken.IdenTokenType&& this.config.keywords.includes(token.val)) { + if (token.typ == EnumToken.IdenTokenType&& this.config.keywords.includes((token as IdentToken).val)) { if (tokens.length == 0) { diff --git a/src/lib/parser/parse.ts b/src/lib/parser/parse.ts index c58007bd..37f73383 100644 --- a/src/lib/parser/parse.ts +++ b/src/lib/parser/parse.ts @@ -94,6 +94,7 @@ import type { PseudoElementToken, SemiColonToken, StartMatchToken, + StringToken, SubsequentCombinatorToken, TimelineFunctionToken, TimingFunctionToken, @@ -107,8 +108,8 @@ import type { import {deprecatedSystemColors, systemColors} from "../renderer/color/utils/index.ts"; import {validateAtRule, validateSelector} from "../validation/index.ts"; import type {ValidationResult} from "../../@types/validation.d.ts"; -import {validateAtRuleKeyframes} from "../validation/at-rules"; -import {validateKeyframeSelector} from "../validation/syntaxes"; +import {validateAtRuleKeyframes} from "../validation/at-rules/index.ts"; +import {validateKeyframeSelector} from "../validation/syntaxes/index.ts"; export const urlTokenMatcher: RegExp = /^(["']?)[a-zA-Z0-9_/.-][a-zA-Z0-9_/:.#?-]+(\1)$/; const trimWhiteSpace: EnumToken[] = [EnumToken.CommentTokenType, EnumToken.GtTokenType, EnumToken.GteTokenType, EnumToken.LtTokenType, EnumToken.LteTokenType, EnumToken.ColumnCombinatorTokenType]; @@ -156,7 +157,7 @@ export async function doParse(iterator: string, options: ParserOptions = {}): Pr removeCharset: true, removeEmpty: true, removeDuplicateDeclarations: true, - computeTransform: false, + computeTransform: true, computeShorthand: true, computeCalcExpression: true, inlineCssVariables: false, @@ -323,11 +324,12 @@ export async function doParse(iterator: string, options: ParserOptions = {}): Pr if (context != null && context.typ == EnumToken.InvalidRuleTokenType) { + // @ts-ignore const index: number = context.chi.findIndex((node: AstNode): boolean => node == context); if (index > -1) { - context.chi.splice(index, 1); + (context as AstInvalidRule).chi.splice(index, 1); } } } @@ -858,7 +860,6 @@ async function parseNode(results: TokenizeResult[], context: AstRuleList | AstIn const node: AstInvalidRule = { typ: EnumToken.InvalidRuleTokenType, - // @ts-ignore sel: tokens.reduce((acc: string, curr: Token) => acc + renderToken(curr, {minify: false}), ''), chi: [] } @@ -901,10 +902,10 @@ async function parseNode(results: TokenizeResult[], context: AstRuleList | AstIn if (ruleType == EnumToken.KeyFrameRuleNodeType) { - if (curr.typ == 
EnumToken.IdenTokenType && curr.val == 'from') { + if (curr.typ == EnumToken.IdenTokenType && (curr as IdentToken).val == 'from') { Object.assign(curr, {typ: EnumToken.PercentageTokenType, val: '0'}) - } else if (curr.typ == EnumToken.PercentageTokenType && curr.val == '100') { + } else if (curr.typ == EnumToken.PercentageTokenType && (curr as PercentageToken).val == '100') { Object.assign(curr, {typ: EnumToken.IdenTokenType, val: 'to'}) } @@ -1139,7 +1140,7 @@ function parseAtRulePrelude(tokens: Token[], atRule: AtRuleToken): Token[] { if (parent == null && mediaTypes.some((t: string) => { - if (value.val.localeCompare(t, 'en', {sensitivity: 'base'}) == 0) { + if ((value as IdentToken).val.localeCompare(t, 'en', {sensitivity: 'base'}) == 0) { // @ts-ignore value.typ = EnumToken.MediaFeatureTokenType; @@ -1152,14 +1153,14 @@ function parseAtRulePrelude(tokens: Token[], atRule: AtRuleToken): Token[] { continue; } - if (value.typ == EnumToken.IdenTokenType && 'and'.localeCompare(value.val, 'en', {sensitivity: 'base'}) == 0) { + if (value.typ == EnumToken.IdenTokenType && 'and'.localeCompare((value as IdentToken).val, 'en', {sensitivity: 'base'}) == 0) { // @ts-ignore value.typ = EnumToken.MediaFeatureAndTokenType; continue; } - if (value.typ == EnumToken.IdenTokenType && 'or'.localeCompare(value.val, 'en', {sensitivity: 'base'}) == 0) { + if (value.typ == EnumToken.IdenTokenType && 'or'.localeCompare((value as IdentToken).val, 'en', {sensitivity: 'base'}) == 0) { // @ts-ignore value.typ = EnumToken.MediaFeatureOrTokenType; @@ -1167,7 +1168,7 @@ function parseAtRulePrelude(tokens: Token[], atRule: AtRuleToken): Token[] { } if (value.typ == EnumToken.IdenTokenType && - ['not', 'only'].some((t: string): boolean => t.localeCompare(value.val, 'en', {sensitivity: 'base'}) == 0)) { + ['not', 'only'].some((t: string): boolean => t.localeCompare((value as IdentToken).val, 'en', {sensitivity: 'base'}) == 0)) { // @ts-ignore const array: Token[] = parent?.chi ?? tokens as Token[]; @@ -1190,7 +1191,7 @@ function parseAtRulePrelude(tokens: Token[], atRule: AtRuleToken): Token[] { } Object.assign(array[startIndex], { - typ: value.val.toLowerCase() == 'not' ? EnumToken.MediaFeatureNotTokenType : EnumToken.MediaFeatureOnlyTokenType, + typ: (value as IdentToken).val.toLowerCase() == 'not' ? 
EnumToken.MediaFeatureNotTokenType : EnumToken.MediaFeatureOnlyTokenType, val: array[index] }); @@ -1201,16 +1202,13 @@ function parseAtRulePrelude(tokens: Token[], atRule: AtRuleToken): Token[] { if (value.typ == EnumToken.ParensTokenType || (value.typ == EnumToken.FunctionTokenType && ['media', 'supports', 'style', 'scroll-state'].includes((value).val))) { - // @todo parse range and declarations - // parseDeclaration(parent.chi); - let i: number; let nameIndex: number = -1; let valueIndex: number = -1; - const dashedIdent: boolean = value.typ == EnumToken.FunctionTokenType && value.val == 'style'; + const dashedIdent: boolean = value.typ == EnumToken.FunctionTokenType && (value as FunctionToken).val == 'style'; - for (let i = 0; i < value.chi.length; i++) { + for (let i = 0; i < (value as FunctionToken | ParensToken).chi.length; i++) { if ((value as FunctionToken | ParensToken).chi[i].typ == EnumToken.CommentTokenType || (value as FunctionToken | ParensToken).chi[i].typ == EnumToken.WhitespaceTokenType) { @@ -1743,16 +1741,16 @@ export function parseTokens(tokens: Token[], options: ParseTokenOptions = {}): T if (t.typ == EnumToken.PseudoClassFuncTokenType) { - if (t.val.slice(1) in webkitPseudoAliasMap) { + if ((t as PseudoClassFunctionToken).val.slice(1) in webkitPseudoAliasMap) { - t.val = ':' + webkitPseudoAliasMap[t.val.slice(1)]; + (t as PseudoClassFunctionToken).val = ':' + webkitPseudoAliasMap[(t as PseudoClassFunctionToken).val.slice(1)]; } } else if (t.typ == EnumToken.PseudoClassTokenType) { - if (t.val.slice(1) in webkitPseudoAliasMap) { + if ((t as PseudoClassToken).val.slice(1) in webkitPseudoAliasMap) { - (t as PseudoClassToken).val = ':' + webkitPseudoAliasMap[t.val.slice(1)]; + (t as PseudoClassToken).val = ':' + webkitPseudoAliasMap[(t as PseudoClassToken).val.slice(1)]; } } @@ -1837,11 +1835,11 @@ export function parseTokens(tokens: Token[], options: ParseTokenOptions = {}): T val = (attr.chi)[m]; if (val.typ == EnumToken.StringTokenType) { - const slice = val.val.slice(1, -1); + const slice = (val as StringToken).val.slice(1, -1); if ((slice.charAt(0) != '-' || (slice.charAt(0) == '-' && isIdentStart(slice.charCodeAt(1)))) && isIdent(slice)) { Object.assign(val, {typ: EnumToken.IdenTokenType, val: slice}); } - } else if (val.typ == EnumToken.LiteralTokenType && val.val == '|') { + } else if (val.typ == EnumToken.LiteralTokenType && (val as LiteralToken).val == '|') { let upper: number = m; let lower: number = m; @@ -1910,7 +1908,7 @@ export function parseTokens(tokens: Token[], options: ParseTokenOptions = {}): T val = (attr.chi)[lower]; if (val.typ == EnumToken.StringTokenType) { - const slice: string = val.val.slice(1, -1); + const slice: string = (val as StringToken).val.slice(1, -1); if ((slice.charAt(0) != '-' || (slice.charAt(0) == '-' && isIdentStart(slice.charCodeAt(1)))) && isIdent(slice)) { Object.assign(val, {typ: EnumToken.IdenTokenType, val: slice}); } @@ -1919,7 +1917,7 @@ export function parseTokens(tokens: Token[], options: ParseTokenOptions = {}): T val = (attr.chi)[upper]; if (val.typ == EnumToken.StringTokenType) { - const slice: string = val.val.slice(1, -1); + const slice: string = (val as StringToken).val.slice(1, -1); if ((slice.charAt(0) != '-' || (slice.charAt(0) == '-' && isIdentStart(slice.charCodeAt(1)))) && isIdent(slice)) { Object.assign(val, {typ: EnumToken.IdenTokenType, val: slice}); } @@ -1927,45 +1925,45 @@ export function parseTokens(tokens: Token[], options: ParseTokenOptions = {}): T // @ts-ignore const typ = ((t.chi)[m]).typ; + 
EnumToken.EndMatchTokenType | EnumToken.IncludeMatchTokenType>(((t as AttrStartToken).chi)[m]).typ; // @ts-ignore - (t.chi)[m] = { + ((t as AttrStartToken).chi)[m] = { typ: EnumToken.MatchExpressionTokenType, op: { // @ts-ignore typ: typ == EnumToken.DelimTokenType ? EnumToken.EqualMatchTokenType : typ }, - l: (t.chi)[lower], - r: (t.chi)[upper] + l: ((t as AttrStartToken).chi)[lower], + r: ((t as AttrStartToken).chi)[upper] }; - (t.chi).splice(upper, 1); - (t.chi).splice(lower, 1); + ((t as AttrStartToken).chi).splice(upper, 1); + ((t as AttrStartToken).chi).splice(lower, 1); upper = m; m--; - while (upper < (t.chi).length && (t.chi)[upper].typ == EnumToken.WhitespaceTokenType) { + while (upper < ((t as AttrStartToken).chi as Token[]).length && ((t as AttrStartToken).chi)[upper].typ == EnumToken.WhitespaceTokenType) { upper++; } - if (upper < (t.chi).length && - (t.chi)[upper].typ == EnumToken.Iden && - ['i', 's'].includes(((t.chi)[upper]).val.toLowerCase())) { + if (upper < ((t as AttrStartToken).chi as Token[]).length && + ((t as AttrStartToken).chi)[upper].typ == EnumToken.IdenTokenType && + ['i', 's'].includes((((t as AttrStartToken).chi)[upper]).val.toLowerCase())) { - ((t.chi)[m]).attr = <'i' | 's'>((t.chi)[upper]).val; - (t.chi).splice(upper, 1); + (((t as AttrStartToken).chi)[m]).attr = <'i' | 's'>(((t as AttrStartToken).chi)[upper]).val; + ((t as AttrStartToken).chi).splice(upper, 1); } } } - m = (t.chi).length; + m = ((t as AttrStartToken).chi).length; - while ((t.chi).at(-1)?.typ == EnumToken.WhitespaceTokenType) { + while (((t as AttrStartToken).chi).at(-1)?.typ == EnumToken.WhitespaceTokenType) { - (t.chi).pop(); + ((t as AttrStartToken).chi).pop(); } continue; diff --git a/src/lib/parser/tokenize.ts b/src/lib/parser/tokenize.ts index d2624efd..4c4f935f 100644 --- a/src/lib/parser/tokenize.ts +++ b/src/lib/parser/tokenize.ts @@ -1,6 +1,6 @@ import type {Position, TokenizeResult} from "../../@types/index.d.ts"; -import {EnumToken} from "../ast"; -import {isDigit, isNewLine, isNonPrintable, isWhiteSpace} from "../syntax"; +import {EnumToken} from "../ast/index.ts"; +import {isDigit, isNewLine, isNonPrintable, isWhiteSpace} from "../syntax/index.ts"; declare type InputStream = string; diff --git a/src/lib/parser/utils/config.ts b/src/lib/parser/utils/config.ts index a2f7cf46..c9af1afb 100644 --- a/src/lib/parser/utils/config.ts +++ b/src/lib/parser/utils/config.ts @@ -1,5 +1,5 @@ import config from '../../../config.json' with {type: 'json'}; -import type {PropertiesConfig} from "../../../@types"; +import type {PropertiesConfig} from "../../../@types/index.d.ts"; Object.freeze(config); diff --git a/src/lib/parser/utils/type.ts b/src/lib/parser/utils/type.ts index c57b0e97..a0c3ac2a 100644 --- a/src/lib/parser/utils/type.ts +++ b/src/lib/parser/utils/type.ts @@ -1,7 +1,7 @@ -import {EnumToken} from "../../ast"; +import {EnumToken} from "../../ast/index.ts"; import type {FunctionToken, IdentToken, NumberToken, PropertyMapType, Token} from "../../../@types/index.d.ts"; -import {mathFuncs} from "../../syntax"; +import {mathFuncs} from "../../syntax/index.ts"; export function matchType(val: Token, properties: PropertyMapType): boolean { diff --git a/src/lib/renderer/color/a98rgb.ts b/src/lib/renderer/color/a98rgb.ts index 3226244b..15cd3351 100644 --- a/src/lib/renderer/color/a98rgb.ts +++ b/src/lib/renderer/color/a98rgb.ts @@ -1,6 +1,6 @@ -import {xyz2srgb} from "./srgb"; -import {multiplyMatrices} from "./utils"; -import {srgb2xyz} from "./xyz"; +import {xyz2srgb} from "./srgb.ts"; 
+import {multiplyMatrices} from "./utils/index.ts"; +import {srgb2xyz} from "./xyz.ts"; export function a98rgb2srgbvalues(r: number, g: number, b: number, a: number | null = null): number[] { diff --git a/src/lib/renderer/color/color.ts b/src/lib/renderer/color/color.ts index 78765964..8e417cd0 100644 --- a/src/lib/renderer/color/color.ts +++ b/src/lib/renderer/color/color.ts @@ -8,15 +8,15 @@ import type { PercentageToken, Token } from "../../../@types/index.d.ts"; -import {EnumToken} from "../../ast"; -import {hex2rgb, hsl2rgb, hwb2rgb, lab2rgb, lch2rgb, oklab2rgb, oklch2rgb, srgb2rgb} from "./rgb"; -import {hex2hsl, hwb2hsl, lab2hsl, lch2hsl, oklab2hsl, oklch2hsl, rgb2hsl, srgb2hsl} from "./hsl"; -import {hsl2hwb, lab2hwb, lch2hwb, oklab2hwb, oklch2hwb, rgb2hwb} from "./hwb"; -import {hex2lab, hsl2lab, hwb2lab, lch2lab, oklab2lab, oklch2lab, rgb2lab, srgb2lab} from "./lab"; -import {hex2lch, hsl2lch, hwb2lch, lab2lch, oklab2lch, oklch2lch, rgb2lch, srgb2lch} from "./lch"; -import {hex2oklab, hsl2oklab, hwb2oklab, lab2oklab, lch2oklab, oklch2oklab, rgb2oklab, srgb2oklab} from "./oklab"; -import {hex2oklch, hsl2oklch, hwb2oklch, lab2oklch, lch2oklch, oklab2oklch, rgb2oklch, srgb2oklch,} from "./oklch"; -import {colorFuncColorSpace, getComponents} from "./utils"; +import {EnumToken} from "../../ast/index.ts"; +import {hex2rgb, hsl2rgb, hwb2rgb, lab2rgb, lch2rgb, oklab2rgb, oklch2rgb, srgb2rgb} from "./rgb.ts"; +import {hex2hsl, hwb2hsl, lab2hsl, lch2hsl, oklab2hsl, oklch2hsl, rgb2hsl, srgb2hsl} from "./hsl.ts"; +import {hsl2hwb, lab2hwb, lch2hwb, oklab2hwb, oklch2hwb, rgb2hwb} from "./hwb.ts"; +import {hex2lab, hsl2lab, hwb2lab, lch2lab, oklab2lab, oklch2lab, rgb2lab, srgb2lab} from "./lab.ts"; +import {hex2lch, hsl2lch, hwb2lch, lab2lch, oklab2lch, oklch2lch, rgb2lch, srgb2lch} from "./lch.ts"; +import {hex2oklab, hsl2oklab, hwb2oklab, lab2oklab, lch2oklab, oklch2oklab, rgb2oklab, srgb2oklab} from "./oklab.ts"; +import {hex2oklch, hsl2oklch, hwb2oklch, lab2oklch, lch2oklch, oklab2oklch, rgb2oklch,} from "./oklch.ts"; +import {colorFuncColorSpace, getComponents} from "./utils/index.ts"; import { hex2srgb, hsl2srgb, @@ -28,13 +28,13 @@ import { rgb2srgb, srgb2lsrgbvalues, xyz2srgb -} from "./srgb"; -import {prophotorgb2srgbvalues, srgb2prophotorgbvalues} from "./prophotorgb"; -import {a98rgb2srgbvalues, srgb2a98values} from "./a98rgb"; -import {rec20202srgb, srgb2rec2020values} from "./rec2020"; -import {srgb2xyz} from "./xyz"; -import {p32srgbvalues, srgb2p3values} from "./p3"; -import {XYZ_D65_to_D50, xyzd502srgb} from "./xyzd50"; +} from "./srgb.ts"; +import {prophotorgb2srgbvalues, srgb2prophotorgbvalues} from "./prophotorgb.ts"; +import {a98rgb2srgbvalues, srgb2a98values} from "./a98rgb.ts"; +import {rec20202srgb, srgb2rec2020values} from "./rec2020.ts"; +import {srgb2xyz} from "./xyz.ts"; +import {p32srgbvalues, srgb2p3values} from "./p3.ts"; +import {XYZ_D65_to_D50, xyzd502srgb} from "./xyzd50.ts"; export function convert(token: ColorToken, to: ColorKind | ColorSpace): ColorToken | null { @@ -57,12 +57,21 @@ export function convert(token: ColorToken, to: ColorKind | ColorSpace): ColorTok if (to == 'hsl') { + let t: number[] | null; + switch (token.kin) { case 'rgb': case 'rgba': - values.push(...rgb2hsl(token)); + t = rgb2hsl(token); + + if (t == null) { + + return null; + } + + values.push(...t); break; case 'hex': case 'lit': @@ -76,12 +85,26 @@ export function convert(token: ColorToken, to: ColorKind | ColorSpace): ColorTok case 'oklab': - values.push(...oklab2hsl(token)); + t = 
oklab2hsl(token); + + if (t == null) { + + return null; + } + + values.push(...t); break; case 'oklch': - values.push(...oklch2hsl(token)); + t = oklch2hsl(token); + + if (t == null) { + + return null; + } + + values.push(...t); break; case 'lab': @@ -151,7 +174,7 @@ export function convert(token: ColorToken, to: ColorKind | ColorSpace): ColorTok return values2hwbtoken(values); } } else if (to == 'rgb') { - + let t : number[] | null; switch (token.kin) { case 'hex': @@ -161,31 +184,73 @@ export function convert(token: ColorToken, to: ColorKind | ColorSpace): ColorTok break case 'hsl': - values.push(...hsl2rgb(token)); + t = hsl2rgb(token); + + if (t == null) { + + return null; + } + + values.push(...t); break case 'hwb': - values.push(...hwb2rgb(token)); + t = hwb2rgb(token); + + if (t == null) { + + return null; + } + + values.push(...t); break; case 'oklab': - values.push(...oklab2rgb(token)); + t =oklab2rgb(token); + + if (t == null) { + + return null; + } + + values.push(...t); break; case 'oklch': - values.push(...oklch2rgb(token)); + t = oklch2rgb(token); + + if (t == null) { + + return null; + } + + values.push(...t); break; case 'lab': - values.push(...lab2rgb(token)); + t = lab2rgb(token); + + if (t == null) { + + return null; + } + + values.push(...t); break; case 'lch': - values.push(...lch2rgb(token)); + t = lch2rgb(token); + + if (t == null) { + + return null; + } + + values.push(...t); break; case 'color': @@ -444,6 +509,8 @@ export function convert(token: ColorToken, to: ColorKind | ColorSpace): ColorTok else if (colorFuncColorSpace.includes(to)) { + let t : number[] | null; + switch (token.kin) { case 'hex': @@ -454,38 +521,95 @@ export function convert(token: ColorToken, to: ColorKind | ColorSpace): ColorTok case 'rgb': case 'rgba': - values.push(...rgb2srgb(token)); + t = rgb2srgb(token); + + if (t == null) { + + return null; + } + + values.push(...t); break; case 'hsl': case 'hsla': - values.push(...hsl2srgb(token)); + t = hsl2srgb(token); + + if (t == null) { + + return null; + } + + values.push(...t); break; case 'hwb': - values.push(...hwb2srgb(token)); + + t = hwb2srgb(token); + + if (t == null) { + + return null; + } + + values.push(...t); break; case 'lab': - values.push(...lab2srgb(token)); + t = lab2srgb(token); + + if (t == null) { + + return null; + } + + values.push(...t); break; case 'oklab': - values.push(...oklab2srgb(token)); + + t = oklab2srgb(token); + + if (t == null) { + + return null; + } + + values.push(...t); break; case 'lch': - values.push(...lch2srgb(token)); + + t = lch2srgb(token); + + if (t == null) { + + return null; + } + + values.push(...t); break; case 'oklch': - // @ts-ignore - values.push(...srgb2oklch(...color2srgbvalues(token))); + + t = color2srgbvalues(token); + + if (t == null) { + + return null; + } + + values.push(...t); break; case 'color': - const val: number[] = color2srgbvalues(token); + const val: number[] | null = color2srgbvalues(token); + + if (val == null) { + + return null; + } switch (to) { @@ -561,9 +685,14 @@ export function minmax(value: number, min: number, max: number): number { return value; } -export function color2srgbvalues(token: ColorToken): number[] { +export function color2srgbvalues(token: ColorToken): number[] | null { + + const components: Token[] = getComponents(token) as Token[]; - const components: Token[] = getComponents(token); + if (components == null) { + + return null; + } const colorSpace: IdentToken = components.shift(); let values: number[] = components.map((val: Token) => getNumber(val)); @@ 
-718,21 +847,21 @@ export function clamp(token: ColorToken): ColorToken { if (token.typ == EnumToken.NumberTokenType) { - token.val = String(minmax(+token.val, 0, 255)); + (token as NumberToken).val = String(minmax(+(token as NumberToken).val, 0, 255)); } else if (token.typ == EnumToken.PercentageTokenType) { - token.val = String(minmax(+token.val, 0, 100)); + (token as PercentageToken).val = String(minmax(+(token as PercentageToken).val, 0, 100)); } } else { if (token.typ == EnumToken.NumberTokenType) { - token.val = String(minmax(+token.val, 0, 1)); + (token as NumberToken).val = String(minmax(+(token as NumberToken).val, 0, 1)); } else if (token.typ == EnumToken.PercentageTokenType) { - token.val = String(minmax(+token.val, 0, 100)); + (token as PercentageToken).val = String(minmax(+(token as PercentageToken).val, 0, 100)); } } }); @@ -768,7 +897,7 @@ export function getAngle(token: NumberToken | AngleToken | IdentToken): number { if (token.typ == EnumToken.AngleTokenType) { - switch (token.unit) { + switch ((token as AngleToken).unit) { case 'deg': diff --git a/src/lib/renderer/color/colormix.ts b/src/lib/renderer/color/colormix.ts index c6649d1f..8f4737e9 100644 --- a/src/lib/renderer/color/colormix.ts +++ b/src/lib/renderer/color/colormix.ts @@ -1,22 +1,22 @@ -import type {ColorToken, IdentToken, PercentageToken, Token} from "../../../@types"; -import {EnumToken} from "../../ast"; -import {getNumber} from "./color"; -import {srgb2lsrgbvalues, srgbvalues} from "./srgb"; -import {srgb2lch, xyz2lchvalues} from "./lch"; -import {srgb2rgb} from "./rgb"; -import {srgb2hsl} from "./hsl"; -import {srgb2hwb} from "./hwb"; -import {srgb2lab} from "./lab"; -import {srgb2p3values} from "./p3"; -import {getComponents} from "./utils"; -import {srgb2oklch} from "./oklch"; -import {srgb2oklab} from "./oklab"; -import {srgb2a98values} from "./a98rgb"; -import {srgb2prophotorgbvalues} from "./prophotorgb"; -import {srgb2xyz} from "./xyz"; -import {XYZ_D65_to_D50, xyzd502lch} from "./xyzd50"; -import {srgb2rec2020values} from "./rec2020"; -import {isPolarColorspace, isRectangularOrthogonalColorspace} from "../../syntax"; +import type {ColorToken, IdentToken, PercentageToken, Token} from "../../../@types/index.d.ts"; +import {EnumToken} from "../../ast/index.ts"; +import {getNumber} from "./color.ts"; +import {srgb2lsrgbvalues, srgbvalues} from "./srgb.ts"; +import {srgb2lch, xyz2lchvalues} from "./lch.ts"; +import {srgb2rgb} from "./rgb.ts"; +import {srgb2hsl} from "./hsl.ts"; +import {srgb2hwb} from "./hwb.ts"; +import {srgb2lab} from "./lab.ts"; +import {srgb2p3values} from "./p3.ts"; +import {getComponents} from "./utils/index.ts"; +import {srgb2oklch} from "./oklch.ts"; +import {srgb2oklab} from "./oklab.ts"; +import {srgb2a98values} from "./a98rgb.ts"; +import {srgb2prophotorgbvalues} from "./prophotorgb.ts"; +import {srgb2xyz} from "./xyz.ts"; +import {XYZ_D65_to_D50, xyzd502lch} from "./xyzd50.ts"; +import {srgb2rec2020values} from "./rec2020.ts"; +import {isPolarColorspace, isRectangularOrthogonalColorspace} from "../../syntax/index.ts"; function interpolateHue(interpolationMethod: IdentToken, h1: number, h2: number): number[] { @@ -146,15 +146,20 @@ export function colorMix(colorSpace: IdentToken, hueInterpolationMethod: IdentTo return null; } - const components1: Token[] = getComponents(color1); - const components2: Token[] = getComponents(color2); + const components1: Token[] | null = getComponents(color1); + const components2: Token[] | null = getComponents(color2); - if ((components1[3] != 
null && components1[3].typ == EnumToken.IdenTokenType && components1[3].val == 'none') && values2.length == 4) { + if (components1 == null || components2 == null) { + + return null; + } + + if ((components1[3] != null && components1[3].typ == EnumToken.IdenTokenType && (components1[3] as IdentToken).val == 'none') && values2.length == 4) { values1[3] = values2[3]; } - if ((components2[3] != null && components2[3].typ == EnumToken.IdenTokenType && components2[3].val == 'none') && values1.length == 4) { + if ((components2[3] != null && components2[3].typ == EnumToken.IdenTokenType && (components2[3] as IdentToken).val == 'none') && values1.length == 4) { values2[3] = values1[3]; } @@ -307,7 +312,7 @@ export function colorMix(colorSpace: IdentToken, hueInterpolationMethod: IdentTo // powerless if (lchSpaces.includes(color1.kin) || lchSpaces.includes(colorSpace.val)) { - if ((components1[2].typ == EnumToken.IdenTokenType && components1[2].val == 'none') || values1[2] == 0) { + if ((components1[2].typ == EnumToken.IdenTokenType &&( components1[2] as IdentToken).val == 'none') || values1[2] == 0) { values1[2] = values2[2]; } @@ -316,7 +321,7 @@ export function colorMix(colorSpace: IdentToken, hueInterpolationMethod: IdentTo // powerless if (lchSpaces.includes(color1.kin) || lchSpaces.includes(colorSpace.val)) { - if ((components2[2].typ == EnumToken.IdenTokenType && components2[2].val == 'none') || values2[2] == 0) { + if ((components2[2].typ == EnumToken.IdenTokenType && (components2[2] as IdentToken).val == 'none') || values2[2] == 0) { values2[2] = values1[2]; } diff --git a/src/lib/renderer/color/hex.ts b/src/lib/renderer/color/hex.ts index 68099c48..d0ddf568 100644 --- a/src/lib/renderer/color/hex.ts +++ b/src/lib/renderer/color/hex.ts @@ -1,8 +1,8 @@ import type {ColorToken, IdentToken, NumberToken, PercentageToken} from "../../../@types/index.d.ts"; -import {EnumToken} from "../../ast"; -import {getNumber, minmax} from "./color"; -import {cmyk2rgb, hsl2rgb, hwb2rgb, lab2rgb, lch2rgb, oklab2rgb, oklch2rgb} from "./rgb"; -import {getComponents, NAMES_COLORS} from "./utils"; +import {EnumToken} from "../../ast/index.ts"; +import {getNumber, minmax} from "./color.ts"; +import {cmyk2rgb, hsl2rgb, hwb2rgb, lab2rgb, lch2rgb, oklab2rgb, oklch2rgb} from "./rgb.ts"; +import {getComponents, NAMES_COLORS} from "./utils/index.ts"; function toHexString(acc: string, value: number): string { @@ -91,37 +91,50 @@ export function rgb2hex(token: ColorToken) { export function hsl2hex(token: ColorToken) { - return `${hsl2rgb(token).reduce(toHexString, '#')}`; + const t = hsl2rgb(token); + return t == null ? null : `${t.reduce(toHexString, '#')}`; } -export function hwb2hex(token: ColorToken): string { +export function hwb2hex(token: ColorToken): string | null{ - return `${hwb2rgb(token).reduce(toHexString, '#')}`; + const t = hwb2rgb(token); + return t == null ? null : `${t.reduce(toHexString, '#')}`; } -export function cmyk2hex(token: ColorToken): string { +export function cmyk2hex(token: ColorToken): string | null { - return `#${cmyk2rgb(token).reduce(toHexString, '')}`; + const t = cmyk2rgb(token); + return t == null ? null : `#${t.reduce(toHexString, '')}`; } -export function oklab2hex(token: ColorToken): string { +export function oklab2hex(token: ColorToken): string | null { - return `${oklab2rgb(token).reduce(toHexString, '#')}`; + const t = oklab2rgb(token); + return t == null ? 
null : `${t.reduce(toHexString, '#')}`; } -export function oklch2hex(token: ColorToken): string { +export function oklch2hex(token: ColorToken): string | null { - return `${oklch2rgb(token).reduce(toHexString, '#')}`; + const value = oklch2rgb(token); + + if (value == null) { + + return null; + } + + return `${value.reduce(toHexString, '#')}`; } -export function lab2hex(token: ColorToken): string { +export function lab2hex(token: ColorToken): string | null{ - return `${lab2rgb(token).reduce(toHexString, '#')}`; + const t = lab2rgb(token); + return t == null ? null : `${t.reduce(toHexString, '#')}`; } -export function lch2hex(token: ColorToken): string { +export function lch2hex(token: ColorToken): string | null { - return `${lch2rgb(token).reduce(toHexString, '#')}`; + const t = lch2rgb(token); + return t == null ? null : `${t.reduce(toHexString, '#')}`; } export function srgb2hexvalues(r: number, g: number, b: number, alpha?: number | null): string { diff --git a/src/lib/renderer/color/hsl.ts b/src/lib/renderer/color/hsl.ts index 968f4562..bc13c8b7 100644 --- a/src/lib/renderer/color/hsl.ts +++ b/src/lib/renderer/color/hsl.ts @@ -1,20 +1,25 @@ -import {hwb2hsv} from "./hsv"; +import {hwb2hsv} from "./hsv.ts"; import type {ColorToken, IdentToken, NumberToken, PercentageToken, Token} from "../../../@types/index.d.ts"; -import {getNumber} from "./color"; -import {hex2rgb, lab2rgb, lch2rgb, oklab2rgb, oklch2rgb} from "./rgb"; -import {getComponents} from "./utils"; -import {EnumToken} from "../../ast"; -import {hslvalues} from './srgb'; +import {getNumber} from "./color.ts"; +import {hex2rgb, lab2rgb, lch2rgb, oklab2rgb, oklch2rgb} from "./rgb.ts"; +import {getComponents} from "./utils/index.ts"; +import {EnumToken} from "../../ast/index.ts"; +import {hslvalues} from './srgb.ts'; -export function hex2hsl(token: ColorToken): number[] { +export function hex2hsl(token: ColorToken): number[] { // @ts-ignore return rgb2hslvalues(...hex2rgb(token)); } -export function rgb2hsl(token: ColorToken): number[] { +export function rgb2hsl(token: ColorToken): number[] | null { - const chi: Token[] = getComponents(token); + const chi: Token[] | null = getComponents(token); + + if (chi == null) { + + return null; + } // @ts-ignore let t: NumberToken | PercentageToken | IdentToken = chi[0]; @@ -43,7 +48,7 @@ export function rgb2hsl(token: ColorToken): number[] { a = getNumber(t) / 255; } - const values: number[] = [r, g, b]; + const values: number[] = [r, g, b]; if (a != null && a != 1) { @@ -98,16 +103,18 @@ export function lch2hsl(token: ColorToken): number[] { return rgb2hslvalues(...lch2rgb(token)); } -export function oklab2hsl(token: ColorToken): number[] { +export function oklab2hsl(token: ColorToken): number[] | null { + const t: number[] | null = oklab2rgb(token); // @ts-ignore - return rgb2hslvalues(...oklab2rgb(token)); + return t == null ? null : rgb2hslvalues(...t); } -export function oklch2hsl(token: ColorToken): number[] { +export function oklch2hsl(token: ColorToken): number[] | null{ + const t: number[] | null = oklch2rgb(token); // @ts-ignore - return rgb2hslvalues(...oklch2rgb(token)); + return t == null ? 
null : rgb2hslvalues(...t); } export function rgb2hslvalues(r: number, g: number, b: number, a: number | null = null): number[] { diff --git a/src/lib/renderer/color/hwb.ts b/src/lib/renderer/color/hwb.ts index 28d64a3a..75452d54 100644 --- a/src/lib/renderer/color/hwb.ts +++ b/src/lib/renderer/color/hwb.ts @@ -1,16 +1,16 @@ -import {hsl2hsv} from "./hsv"; +import {hsl2hsv} from "./hsv.ts"; import type {AngleToken, ColorToken, IdentToken, NumberToken, PercentageToken, Token} from "../../../@types/index.d.ts"; -import {getComponents} from "./utils"; -import {getAngle, getNumber} from "./color"; -import {EnumToken} from "../../ast"; -import {lab2srgb, lch2srgb, oklab2srgb, oklch2srgb} from "./srgb"; +import {getComponents} from "./utils/index.ts"; +import {getAngle, getNumber} from "./color.ts"; +import {EnumToken} from "../../ast/index.ts"; +import {lab2srgb, lch2srgb, oklab2srgb, oklch2srgb} from "./srgb.ts"; export function rgb2hwb(token: ColorToken): number[] { // @ts-ignore return srgb2hwb(...getComponents(token).map((t: Token, index: number): number => { - if (index == 3 && t.typ == EnumToken.IdenTokenType && t.val == 'none') { + if (index == 3 && t.typ == EnumToken.IdenTokenType && (t as IdentToken).val == 'none') { return 1; } @@ -23,7 +23,7 @@ export function hsl2hwb(token: ColorToken): number[] { // @ts-ignore return hsl2hwbvalues(...getComponents(token).map((t: Token, index: number) => { - if (index == 3 && (t.typ == EnumToken.IdenTokenType && t.val == 'none')) { + if (index == 3 && (t.typ == EnumToken.IdenTokenType && (t as IdentToken).val == 'none')) { return 1; } diff --git a/src/lib/renderer/color/lab.ts b/src/lib/renderer/color/lab.ts index 5db33cc3..79c167aa 100644 --- a/src/lib/renderer/color/lab.ts +++ b/src/lib/renderer/color/lab.ts @@ -1,12 +1,12 @@ -import {D50, e, getComponents, k} from "./utils"; -import {srgb2xyz} from "./xyz"; +import {D50, e, getComponents, k} from "./utils/index.ts"; +import {srgb2xyz} from "./xyz.ts"; import type {ColorToken, NumberToken, PercentageToken, Token} from "../../../@types/index.d.ts"; -import {hex2srgb, hsl2srgb, hwb2srgb, oklch2srgb, rgb2srgb} from "./srgb"; -import {getLCHComponents} from "./lch"; -import {getOKLABComponents, OKLab_to_XYZ} from "./oklab"; -import {getNumber} from "./color"; -import {EnumToken} from "../../ast"; -import {xyzd502srgb} from "./xyzd50"; +import {hex2srgb, hsl2srgb, hwb2srgb, oklch2srgb, rgb2srgb} from "./srgb.ts"; +import {getLCHComponents} from "./lch.ts"; +import {getOKLABComponents, OKLab_to_XYZ} from "./oklab.ts"; +import {getNumber} from "./color.ts"; +import {EnumToken} from "../../ast/index.ts"; +import {xyzd502srgb} from "./xyzd50.ts"; // L: 0% = 0.0, 100% = 100.0 // for a and b: -100% = -125, 100% = 125 @@ -108,9 +108,14 @@ export function lch2labvalues(l: number, c: number, h: number, a: number | null return result; } -export function getLABComponents(token: ColorToken) { +export function getLABComponents(token: ColorToken): number[] | null { - const components: Token[] = getComponents(token); + const components: Token[] | null = getComponents(token); + + if (components == null) { + + return null; + } for (let i = 0; i < components.length; i++) { diff --git a/src/lib/renderer/color/lch.ts b/src/lib/renderer/color/lch.ts index 275c1d88..a1cd5515 100644 --- a/src/lib/renderer/color/lch.ts +++ b/src/lib/renderer/color/lch.ts @@ -1,8 +1,8 @@ import type {ColorToken, NumberToken, PercentageToken, Token} from "../../../@types/index.d.ts"; -import {getComponents} from "./utils"; -import {getAngle, 
getNumber} from "./color"; -import {EnumToken} from "../../ast"; -import {getLABComponents, hex2lab, hsl2lab, hwb2lab, oklab2lab, oklch2lab, rgb2lab, srgb2lab, xyz2lab} from "./lab"; +import {getComponents} from "./utils/index.ts"; +import {getAngle, getNumber} from "./color.ts"; +import {EnumToken} from "../../ast/index.ts"; +import {getLABComponents, hex2lab, hsl2lab, hwb2lab, oklab2lab, oklch2lab, rgb2lab, srgb2lab, xyz2lab} from "./lab.ts"; export function hex2lch(token: ColorToken): number[] { @@ -84,15 +84,20 @@ export function srgb2lchvalues(r: number, g: number, blue: number, alpha?: numbe return alpha == null || alpha == 1 ? [l, c, h] : [l, c, h, alpha]; } -export function getLCHComponents(token: ColorToken): number[] { +export function getLCHComponents(token: ColorToken): number[] | null { - const components: Token[] = getComponents(token); + const components: Token[] | null = getComponents(token); + + if (components == null) { + + return null; + } for (let i = 0; i < components.length; i++) { if (![EnumToken.NumberTokenType, EnumToken.PercentageTokenType, EnumToken.AngleTokenType, EnumToken.IdenTokenType].includes(components[i].typ)) { - return []; + return null; } } diff --git a/src/lib/renderer/color/oklab.ts b/src/lib/renderer/color/oklab.ts index 5119a650..11126c6a 100644 --- a/src/lib/renderer/color/oklab.ts +++ b/src/lib/renderer/color/oklab.ts @@ -1,10 +1,19 @@ -import {getComponents, multiplyMatrices} from "./utils"; -import {hex2srgb, hsl2srgb, hwb2srgb, lab2srgb, lch2srgb, lsrgb2srgbvalues, rgb2srgb, srgb2lsrgbvalues} from "./srgb"; +import {getComponents, multiplyMatrices} from "./utils/index.ts"; +import { + hex2srgb, + hsl2srgb, + hwb2srgb, + lab2srgb, + lch2srgb, + lsrgb2srgbvalues, + rgb2srgb, + srgb2lsrgbvalues +} from "./srgb.ts"; import type {ColorToken, NumberToken, PercentageToken, Token} from "../../../@types/index.d.ts"; -import {getNumber} from "./color"; -import {EnumToken} from "../../ast"; -import {getOKLCHComponents} from "./oklch"; -import {lch2labvalues} from "./lab"; +import {getNumber} from "./color.ts"; +import {EnumToken} from "../../ast/index.ts"; +import {getOKLCHComponents} from "./oklch.ts"; +import {lch2labvalues} from "./lab.ts"; export function hex2oklab(token: ColorToken) { @@ -72,15 +81,20 @@ export function srgb2oklab(r: number, g: number, blue: number, alpha: number | n return alpha == null ? 
[l, a, b] : [l, a, b, alpha]; } -export function getOKLABComponents(token: ColorToken): number[] { +export function getOKLABComponents(token: ColorToken): number[] | null{ - const components: Token[] = getComponents(token); + const components: Token[] | null = getComponents(token); + + if (components == null) { + + return null; + } for (let i = 0; i < components.length; i++) { if (![EnumToken.NumberTokenType, EnumToken.PercentageTokenType, EnumToken.AngleTokenType, EnumToken.IdenTokenType].includes(components[i].typ)) { - return []; + return null; } } diff --git a/src/lib/renderer/color/oklch.ts b/src/lib/renderer/color/oklch.ts index 77f289f1..56e383e9 100644 --- a/src/lib/renderer/color/oklch.ts +++ b/src/lib/renderer/color/oklch.ts @@ -1,8 +1,8 @@ import type {ColorToken, NumberToken, PercentageToken, Token} from "../../../@types/index.d.ts"; -import {getComponents} from "./utils"; -import {getAngle, getNumber} from "./color"; -import {EnumToken} from "../../ast"; -import {lab2lchvalues} from "./lch"; +import {getComponents} from "./utils/index.ts"; +import {getAngle, getNumber} from "./color.ts"; +import {EnumToken} from "../../ast/index.ts"; +import {lab2lchvalues} from "./lch.ts"; import { getOKLABComponents, hex2oklab, @@ -12,7 +12,7 @@ import { lch2oklab, rgb2oklab, srgb2oklab -} from "./oklab"; +} from "./oklab.ts"; export function hex2oklch(token: ColorToken): number[] { @@ -62,9 +62,14 @@ export function srgb2oklch(r: number, g: number, blue: number, alpha: number | n return lab2lchvalues(...srgb2oklab(r, g, blue, alpha)); } -export function getOKLCHComponents(token: ColorToken): number[] { +export function getOKLCHComponents(token: ColorToken): number[] | null { - const components: Token[] = getComponents(token); + const components: Token[] | null = getComponents(token); + + if (components == null) { + + return null; + } for (let i = 0; i < components.length; i++) { diff --git a/src/lib/renderer/color/p3.ts b/src/lib/renderer/color/p3.ts index 1ab2ae54..80be39ed 100644 --- a/src/lib/renderer/color/p3.ts +++ b/src/lib/renderer/color/p3.ts @@ -1,6 +1,6 @@ -import {lsrgb2srgbvalues, srgb2lsrgbvalues, xyz2srgb} from "./srgb"; -import {multiplyMatrices} from "./utils"; -import {srgb2xyz} from "./xyz"; +import {lsrgb2srgbvalues, srgb2lsrgbvalues, xyz2srgb} from "./srgb.ts"; +import {multiplyMatrices} from "./utils/index.ts"; +import {srgb2xyz} from "./xyz.ts"; export function p32srgbvalues(r: number, g: number, b: number, alpha?: number) { diff --git a/src/lib/renderer/color/prophotorgb.ts b/src/lib/renderer/color/prophotorgb.ts index f77ac9b8..baae4e9f 100644 --- a/src/lib/renderer/color/prophotorgb.ts +++ b/src/lib/renderer/color/prophotorgb.ts @@ -1,5 +1,5 @@ -import {srgb2xyz} from "./xyz"; -import {XYZ_D65_to_D50, xyzd502srgb} from "./xyzd50"; +import {srgb2xyz} from "./xyz.ts"; +import {XYZ_D65_to_D50, xyzd502srgb} from "./xyzd50.ts"; export function prophotorgb2srgbvalues(r: number, g: number, b: number, a: number | null = null): number[] { diff --git a/src/lib/renderer/color/rec2020.ts b/src/lib/renderer/color/rec2020.ts index 44de0ed1..cfdb7739 100644 --- a/src/lib/renderer/color/rec2020.ts +++ b/src/lib/renderer/color/rec2020.ts @@ -1,6 +1,6 @@ -import {xyz2srgb} from "./srgb"; -import {multiplyMatrices} from "./utils"; -import {srgb2xyz} from "./xyz"; +import {xyz2srgb} from "./srgb.ts"; +import {multiplyMatrices} from "./utils/index.ts"; +import {srgb2xyz} from "./xyz.ts"; export function rec20202srgb(r: number, g: number, b: number, a?: number): number[] { diff --git 
a/src/lib/renderer/color/relativecolor.ts b/src/lib/renderer/color/relativecolor.ts index cd80e9b4..5be1dcdb 100644 --- a/src/lib/renderer/color/relativecolor.ts +++ b/src/lib/renderer/color/relativecolor.ts @@ -1,4 +1,5 @@ import type { + AngleToken, BinaryExpressionToken, ColorToken, FunctionToken, @@ -7,12 +8,12 @@ import type { PercentageToken, Token } from "../../../@types/index.d.ts"; -import {convert, getNumber} from "./color"; -import {EnumToken, walkValues} from "../../ast"; -import {reduceNumber} from "../render"; -import {evaluate, evaluateFunc} from "../../ast/math"; -import {colorRange} from "./utils"; -import {mathFuncs} from "../../syntax"; +import {convert, getNumber} from "./color.ts"; +import {EnumToken, walkValues} from "../../ast/index.ts"; +import {reduceNumber} from "../render.ts"; +import {evaluate, evaluateFunc} from "../../ast/math/index.ts"; +import {colorRange} from "./utils/index.ts"; +import {mathFuncs} from "../../syntax/index.ts"; type RGBKeyType = 'r' | 'g' | 'b' | 'alpha'; type HSLKeyType = 'h' | 's' | 'l' | 'alpha'; @@ -120,7 +121,7 @@ function computeComponentValue(expr: Record, converte // @ts-ignore for (const k of walkValues([object.h])) { - if (k.value.typ == EnumToken.AngleTokenType && k.value.unit == 'deg') { + if (k.value.typ == EnumToken.AngleTokenType && (k.value as AngleToken).unit == 'deg') { // @ts-ignore k.value.typ = EnumToken.NumberTokenType; @@ -151,16 +152,20 @@ function computeComponentValue(expr: Record, converte } else if ([EnumToken.NumberTokenType, EnumToken.PercentageTokenType, EnumToken.AngleTokenType, EnumToken.LengthTokenType].includes(exp.typ)) { // expr[key] = exp; + // @ts-ignore } else if (exp.typ == EnumToken.IdenTokenType && exp.val in values) { + // @ts-ignore if (typeof values[exp.val] == 'number') { expr[key] = { typ: EnumToken.NumberTokenType, + // @ts-ignore val: reduceNumber(values[exp.val]) }; } else { + // @ts-ignore expr[key] = values[exp.val]; } } else if (exp.typ == EnumToken.FunctionTokenType && mathFuncs.includes((exp as FunctionToken).val)) { @@ -169,6 +174,7 @@ function computeComponentValue(expr: Record, converte if (parent == null) { + // @ts-ignore parent = exp; } diff --git a/src/lib/renderer/color/rgb.ts b/src/lib/renderer/color/rgb.ts index c1092d7e..8a501f9d 100644 --- a/src/lib/renderer/color/rgb.ts +++ b/src/lib/renderer/color/rgb.ts @@ -1,8 +1,8 @@ import type {ColorToken} from "../../../@types/index.d.ts"; -import {minmax} from "./color"; -import {COLORS_NAMES} from "./utils"; -import {expandHexValue} from "./hex"; -import {cmyk2srgb, hsl2srgbvalues, hslvalues, hwb2srgb, lab2srgb, lch2srgb, oklab2srgb, oklch2srgb} from "./srgb"; +import {minmax} from "./color.ts"; +import {COLORS_NAMES} from "./utils/index.ts"; +import {expandHexValue} from "./hex.ts"; +import {cmyk2srgb, hsl2srgbvalues, hslvalues, hwb2srgb, lab2srgb, lch2srgb, oklab2srgb, oklch2srgb} from "./srgb.ts"; export function srgb2rgb(value: number): number { @@ -22,40 +22,45 @@ export function hex2rgb(token: ColorToken): number[] { return rgb; } -export function hwb2rgb(token: ColorToken): number[] { +export function hwb2rgb(token: ColorToken): number[] | null { - return hwb2srgb(token).map(srgb2rgb); + return hwb2srgb(token)?.map?.(srgb2rgb) ?? null; } -export function hsl2rgb(token: ColorToken): number[] { +export function hsl2rgb(token: ColorToken): number[] | null { - let {h, s, l, a} = hslvalues(token); + let {h, s, l, a} = hslvalues(token) ?? 
{}; + + if (h == null || s == null || l == null) { + + return null; + } return hsl2srgbvalues(h, s, l, a).map((t: number) => minmax(Math.round(t * 255), 0, 255)); } -export function cmyk2rgb(token: ColorToken): number[] { +export function cmyk2rgb(token: ColorToken): number[] | null { - return cmyk2srgb(token).map(srgb2rgb); + return cmyk2srgb(token)?.map?.(srgb2rgb) ?? null; } -export function oklab2rgb(token: ColorToken): number[] { +export function oklab2rgb(token: ColorToken): number[] | null { - return oklab2srgb(token).map(srgb2rgb); + return oklab2srgb(token)?.map?.(srgb2rgb) ?? null; } -export function oklch2rgb(token: ColorToken): number[] { +export function oklch2rgb(token: ColorToken): number[] | null { - return oklch2srgb(token).map(srgb2rgb); + return oklch2srgb(token)?.map?.(srgb2rgb) ?? null; } -export function lab2rgb(token: ColorToken): number[] { +export function lab2rgb(token: ColorToken): number[] | null { - return lab2srgb(token).map(srgb2rgb); + return lab2srgb(token)?.map?.(srgb2rgb) ?? null; } -export function lch2rgb(token: ColorToken): number[] { +export function lch2rgb(token: ColorToken): number[] | null { - return lch2srgb(token).map(srgb2rgb); + return lch2srgb(token)?.map?.(srgb2rgb) ?? null; } \ No newline at end of file diff --git a/src/lib/renderer/color/srgb.ts b/src/lib/renderer/color/srgb.ts index 0b00f81f..ae235b0d 100644 --- a/src/lib/renderer/color/srgb.ts +++ b/src/lib/renderer/color/srgb.ts @@ -58,9 +58,9 @@ export function srgbvalues(token: ColorToken): number[] | null { return null; } -export function rgb2srgb(token: ColorToken): number[] { +export function rgb2srgb(token: ColorToken): number[] | null { - return getComponents(token).map((t: Token, index: number): number => index == 3 ? ((t.typ == EnumToken.IdenTokenType && (t as IdentToken).val == 'none') ? 1 : getNumber(t)) : (t.typ == EnumToken.PercentageTokenType ? 255 : 1) * getNumber(t) / 255); + return getComponents(token)?.map?.((t: Token, index: number): number => index == 3 ? ((t.typ == EnumToken.IdenTokenType && (t as IdentToken).val == 'none') ? 1 : getNumber(t)) : (t.typ == EnumToken.PercentageTokenType ? 255 : 1) * getNumber(t) / 255) ?? null; } export function hex2srgb(token: ColorToken): number[] { @@ -82,9 +82,14 @@ export function xyz2srgb(x: number, y: number, z: number): number[] { return lsrgb2srgbvalues(...XYZ_to_lin_sRGB(x, y, z)); } -export function hwb2srgb(token: ColorToken): number[] { +export function hwb2srgb(token: ColorToken): number[] | null { - const {h: hue, s: white, l: black, a: alpha} = hslvalues(token); + const {h: hue, s: white, l: black, a: alpha} = hslvalues(token) ?? {}; + + if (hue == null || white == null || black == null) { + + return []; + } const rgb: number[] = hsl2srgbvalues(hue, 1, .5); @@ -102,17 +107,27 @@ export function hwb2srgb(token: ColorToken): number[] { return rgb; } -export function hsl2srgb(token: ColorToken): number[] { +export function hsl2srgb(token: ColorToken): number[] | null{ + + let {h, s, l, a} = hslvalues(token) ?? 
{}; - let {h, s, l, a} = hslvalues(token); + if (h == null || s == null || l == null) { + + return null; + } return hsl2srgbvalues(h, s, l, a); } -export function cmyk2srgb(token: ColorToken): number[] { +export function cmyk2srgb(token: ColorToken): number[] | null { - const components: Token[] = getComponents(token); + const components: Token[] | null= getComponents(token); + + if (components == null) { + + return null; + } // @ts-ignore let t: NumberToken | PercentageToken = components[0]; @@ -157,9 +172,14 @@ export function cmyk2srgb(token: ColorToken): number[] { return rgb; } -export function oklab2srgb(token: ColorToken): number[] { +export function oklab2srgb(token: ColorToken): number[] | null{ - const [l, a, b, alpha] = getOKLABComponents(token); + const [l, a, b, alpha] = getOKLABComponents(token) ?? []; + + if (l == null || a == null || b == null) { + + return null; + } const rgb: number[] = OKLab_to_sRGB(l, a, b); @@ -173,7 +193,7 @@ export function oklab2srgb(token: ColorToken): number[] { export function oklch2srgb(token: ColorToken): number[] | null { - const [l, c, h, alpha] = getOKLCHComponents(token) ?? {}; + const [l, c, h, alpha] = getOKLCHComponents(token) ?? []; if (l == null || c == null || h == null) { @@ -191,9 +211,14 @@ export function oklch2srgb(token: ColorToken): number[] | null { return rgb; } -export function hslvalues(token: ColorToken): { h: number, s: number, l: number, a?: number | null } { +export function hslvalues(token: ColorToken): { h: number, s: number, l: number, a?: number | null } | null { + + const components: Token[] | null = getComponents(token); - const components: Token[] = getComponents(token); + if (components == null) { + + return null; + } let t: PercentageToken | NumberToken; @@ -286,9 +311,15 @@ export function hsl2srgbvalues(h: number, s: number, l: number, a: number | null return values; } -export function lab2srgb(token: ColorToken): number[] { +export function lab2srgb(token: ColorToken): number[] | null{ const [l, a, b, alpha] = getLABComponents(token); + + if (l == null || a == null || b == null) { + + return null; + } + const rgb: number[] = Lab_to_sRGB(l, a, b); if (alpha != null && alpha != 1) { @@ -299,11 +330,16 @@ export function lab2srgb(token: ColorToken): number[] { return rgb; } -export function lch2srgb(token: ColorToken): number[] { +export function lch2srgb(token: ColorToken): number[] | null { // @ts-ignore const [l, a, b, alpha] = lch2labvalues(...getLCHComponents(token)); + if (l == null || a == null || b == null) { + + return null; + } + // https://www.w3.org/TR/css-color-4/#lab-to-lch const rgb: number[] = Lab_to_sRGB(l, a, b); diff --git a/src/lib/renderer/color/utils/components.ts b/src/lib/renderer/color/utils/components.ts index c593fb60..b8d5a2f3 100644 --- a/src/lib/renderer/color/utils/components.ts +++ b/src/lib/renderer/color/utils/components.ts @@ -3,7 +3,7 @@ import {EnumToken} from "../../../ast/index.ts"; import {COLORS_NAMES} from "./constants.ts"; import {expandHexValue} from "../hex.ts"; -export function getComponents(token: ColorToken): Token[] { +export function getComponents(token: ColorToken): Token[] | null { if (token.kin == 'hex' || token.kin == 'lit') { @@ -15,6 +15,23 @@ export function getComponents(token: ColorToken): Token[] { }); } - return (token.chi) - .filter((t: Token) => ![EnumToken.LiteralTokenType, EnumToken.CommentTokenType, EnumToken.CommaTokenType, EnumToken.WhitespaceTokenType].includes(t.typ)); + const result: Token[] = []; + + for (const child of (token.chi) as 
Token[]) { + + if ([ + EnumToken.LiteralTokenType, EnumToken.CommentTokenType, EnumToken.CommaTokenType, EnumToken.WhitespaceTokenType].includes(child.typ)) { + + continue; + } + + if (child.typ == EnumToken.ColorTokenType && (child as ColorToken).val == 'currentcolor') { + + return null; + } + + result.push(child); + } + + return result; } \ No newline at end of file diff --git a/src/lib/renderer/color/utils/constants.ts b/src/lib/renderer/color/utils/constants.ts index fa781530..c2f8f770 100644 --- a/src/lib/renderer/color/utils/constants.ts +++ b/src/lib/renderer/color/utils/constants.ts @@ -1,5 +1,5 @@ import type {ColorSpace, IdentToken} from "../../../../@types/index.d.ts"; -import {EnumToken} from "../../../ast"; +import {EnumToken} from "../../../ast/index.ts"; export const colorRange = { diff --git a/src/lib/renderer/color/xyz.ts b/src/lib/renderer/color/xyz.ts index c12dd028..9898961c 100644 --- a/src/lib/renderer/color/xyz.ts +++ b/src/lib/renderer/color/xyz.ts @@ -1,6 +1,6 @@ -import {multiplyMatrices} from "./utils"; -import {srgb2lsrgbvalues} from "./srgb"; -import {Lab_to_XYZ} from "./lab"; +import {multiplyMatrices} from "./utils/index.ts"; +import {srgb2lsrgbvalues} from "./srgb.ts"; +import {Lab_to_XYZ} from "./lab.ts"; export function lab2xyz(l: number, a: number, b: number, alpha?: number): number[] { diff --git a/src/lib/renderer/color/xyzd50.ts b/src/lib/renderer/color/xyzd50.ts index f85fd13e..a3e5d16f 100644 --- a/src/lib/renderer/color/xyzd50.ts +++ b/src/lib/renderer/color/xyzd50.ts @@ -1,8 +1,8 @@ -import {lsrgb2srgbvalues} from "./srgb"; -import {multiplyMatrices} from "./utils"; -import {xyz2lab} from "./lab"; -import {XYZ_D50_to_D65} from "./xyz"; -import {lab2lchvalues} from "./lch"; +import {lsrgb2srgbvalues} from "./srgb.ts"; +import {multiplyMatrices} from "./utils/index.ts"; +import {xyz2lab} from "./lab.ts"; +import {XYZ_D50_to_D65} from "./xyz.ts"; +import {lab2lchvalues} from "./lch.ts"; /* */ diff --git a/src/lib/renderer/render.ts b/src/lib/renderer/render.ts index a4f5ae36..fcb18e03 100644 --- a/src/lib/renderer/render.ts +++ b/src/lib/renderer/render.ts @@ -7,25 +7,43 @@ import type { AstRule, AstRuleList, AstRuleStyleSheet, + AtRuleToken, AttrToken, + BinaryExpressionToken, + ClassSelectorToken, ColorSpace, ColorToken, + CommentToken, + DashedIdentToken, ErrorDescription, FractionToken, FunctionToken, + HashToken, + IdentListToken, IdentToken, InvalidAttrToken, + InvalidClassSelectorToken, + LengthToken, + ListToken, + LiteralToken, Location, + MatchExpressionToken, MediaFeatureNotToken, MediaFeatureOnlyToken, MediaFeatureToken, + MediaQueryConditionToken, + NameSpaceAttributeToken, NumberToken, PercentageToken, Position, + PseudoElementToken, + PseudoPageToken, RenderOptions, RenderResult, - Token -} from "../../@types"; + StringToken, + Token, + UrlToken +} from "../../@types/index.d.ts"; import { clamp, cmyk2hex, @@ -44,11 +62,11 @@ import { RelativeColorTypes, rgb2hex, srgb2hexvalues -} from "./color"; -import {EnumToken, expand, funcLike} from "../ast"; -import {SourceMap} from "./sourcemap"; -import {colorFuncColorSpace, getComponents} from "./color/utils"; -import {isColor, isNewLine, mathFuncs, pseudoElements} from "../syntax"; +} from "./color/index.ts"; +import {EnumToken, expand, funcLike} from "../ast/index.ts"; +import {SourceMap} from "./sourcemap/index.ts"; +import {colorFuncColorSpace, getComponents} from "./color/utils/index.ts"; +import {isColor, isNewLine, mathFuncs, pseudoElements} from "../syntax/index.ts"; export const 
colorsFunc: string[] = ['rgb', 'rgba', 'hsl', 'hsla', 'hwb', 'device-cmyk', 'color-mix', 'color', 'oklab', 'lab', 'oklch', 'lch', 'light-dark']; @@ -164,12 +182,12 @@ export function doRender(data: AstNode, options: RenderOptions = {}): RenderResu if (curr.typ == EnumToken.CommentTokenType && options.removeComments) { - if (!options.preserveLicense || !curr.val.startsWith('/*!')) { + if (!options.preserveLicense || !(curr as AstComment).val.startsWith('/*!')) { return acc; } - return acc + curr.val; + return acc + (curr as AstComment).val; } return acc + renderToken(curr, options, cache, reducer, errors); @@ -401,41 +419,46 @@ export function renderToken(token: Token, options: RenderOptions = {}, cache: { if (curr.typ == EnumToken.CommentTokenType && options.removeComments) { - if (!options.preserveLicense || !curr.val.startsWith('/*!')) { + if (!options.preserveLicense || !(curr as AstComment).val.startsWith('/*!')) { return acc; } - return acc + curr.val; + return acc + (curr as AstComment).val; } return acc + renderToken(curr, options, cache, reducer, errors); } } - if (token.typ == EnumToken.FunctionTokenType && colorsFunc.includes(token.val)) { + if (token.typ == EnumToken.FunctionTokenType && colorsFunc.includes((token as FunctionToken).val)) { if (isColor(token)) { // @ts-ignore token.typ = EnumToken.ColorTokenType; - if (token.chi[0].typ == EnumToken.IdenTokenType && token.chi[0].val == 'from') { + // @ts-ignore + if ((token as ColorToken)!.chi[0]!.typ == EnumToken.IdenTokenType && ((token as ColorToken)!.chi[0] as IdentToken).val == 'from') { // @ts-ignore (token).cal = 'rel'; - } else if (token.val == 'color-mix' && token.chi[0].typ == EnumToken.IdenTokenType && token.chi[0].val == 'in') { + } else { // @ts-ignore + if ((token as ColorToken).val == 'color-mix' && (token as ColorToken).chi[0].typ == EnumToken.IdenTokenType && ((token as ColorToken).chi[0] as IdentToken).val == 'in') { - // @ts-ignore - (token).cal = 'mix'; - } else { + // @ts-ignore + (token).cal = 'mix'; + } else { - if (token.val == 'color') { // @ts-ignore - token.cal = 'col'; - } + if ((token as ColorToken).val == 'color') { + // @ts-ignore + token.cal = 'col'; + } - token.chi = token.chi.filter((t: Token) => ![EnumToken.WhitespaceTokenType, EnumToken.CommaTokenType, EnumToken.CommentTokenType].includes(t.typ)); + // @ts-ignore + (token as ColorToken).chi = (token as ColorToken).chi!.filter((t: Token) => ![EnumToken.WhitespaceTokenType, EnumToken.CommaTokenType, EnumToken.CommentTokenType].includes(t.typ)); + } } } } @@ -444,50 +467,50 @@ export function renderToken(token: Token, options: RenderOptions = {}, cache: { case EnumToken.ListToken: - return token.chi.reduce((acc: string, curr: Token) => acc + renderToken(curr, options, cache), ''); + return (token as ListToken).chi.reduce((acc: string, curr: Token) => acc + renderToken(curr, options, cache), ''); case EnumToken.BinaryExpressionTokenType: - if ([EnumToken.Mul, EnumToken.Div].includes(token.op)) { + if ([EnumToken.Mul, EnumToken.Div].includes((token as BinaryExpressionToken).op)) { let result: string = ''; if ( - token.l.typ == EnumToken.BinaryExpressionTokenType && - [EnumToken.Add, EnumToken.Sub].includes(token.l.op) + (token as BinaryExpressionToken).l.typ == EnumToken.BinaryExpressionTokenType && + [EnumToken.Add, EnumToken.Sub].includes(((token as BinaryExpressionToken).l as BinaryExpressionToken).op) ) { - result = '(' + renderToken(token.l, options, cache) + ')'; + result = '(' + renderToken((token as BinaryExpressionToken).l, options, cache) + 
')'; } else { - result = renderToken(token.l, options, cache); + result = renderToken((token as BinaryExpressionToken).l, options, cache); } - result += token.op == EnumToken.Mul ? '*' : '/'; + result += (token as BinaryExpressionToken).op == EnumToken.Mul ? '*' : '/'; if ( - token.r.typ == EnumToken.BinaryExpressionTokenType && - [EnumToken.Add, EnumToken.Sub].includes(token.r.op) + (token as BinaryExpressionToken).r.typ == EnumToken.BinaryExpressionTokenType && + [EnumToken.Add, EnumToken.Sub].includes(((token as BinaryExpressionToken).r as BinaryExpressionToken).op) ) { - result += '(' + renderToken(token.r, options, cache) + ')'; + result += '(' + renderToken((token as BinaryExpressionToken).r, options, cache) + ')'; } else { - result += renderToken(token.r, options, cache); + result += renderToken((token as BinaryExpressionToken).r, options, cache); } return result; } - return renderToken(token.l, options, cache) + (token.op == EnumToken.Add ? ' + ' : (token.op == EnumToken.Sub ? ' - ' : (token.op == EnumToken.Mul ? '*' : '/'))) + renderToken(token.r, options, cache); + return renderToken((token as BinaryExpressionToken).l, options, cache) + ((token as BinaryExpressionToken).op == EnumToken.Add ? ' + ' : ((token as BinaryExpressionToken).op == EnumToken.Sub ? ' - ' : ((token as BinaryExpressionToken).op == EnumToken.Mul ? '*' : '/'))) + renderToken((token as BinaryExpressionToken).r, options, cache); case EnumToken.FractionTokenType: - const fraction: string = renderToken(token.l) + '/' + renderToken(token.r); + const fraction: string = renderToken((token as FractionToken).l) + '/' + renderToken((token as FractionToken).r); - if (+token.r.val != 0) { + if (+(token as FractionToken).r.val != 0) { - const value: string = reduceNumber(+token.l.val / +token.r.val); + const value: string = reduceNumber(+(token as FractionToken).l.val / +(token as FractionToken).r.val); if (value.length <= fraction.length) { @@ -513,16 +536,16 @@ export function renderToken(token: Token, options: RenderOptions = {}, cache: { case EnumToken.ColorTokenType: - if (token.kin == 'light-dark') { + if ((token as ColorToken).kin == 'light-dark') { - return token.val + '(' + (token.chi as Token[]).reduce((acc: string, curr: Token) => acc + renderToken(curr, options, cache), '') + ')'; + return (token as ColorToken).val + '(' + ((token as ColorToken).chi as Token[]).reduce((acc: string, curr: Token) => acc + renderToken(curr, options, cache), '') + ')'; } if (options.convertColor) { - if (token.cal == 'mix' && token.val == 'color-mix') { + if ((token as ColorToken).cal == 'mix' && (token as ColorToken).val == 'color-mix') { - const children: Token[][] = (token.chi).reduce((acc: Token[][], t: Token) => { + const children: Token[][] = ((token as ColorToken).chi).reduce((acc: Token[][], t: Token) => { if (t.typ == EnumToken.ColorTokenType) { @@ -543,15 +566,13 @@ export function renderToken(token: Token, options: RenderOptions = {}, cache: { if (value != null) { token = value; - } - - else { + } else { - token.chi = children.reduce((acc, curr, index) => { + (token as ColorToken).chi = children.reduce((acc, curr, index) => { if (acc.length > 0) { - acc.push({ typ: EnumToken.CommaTokenType }); + acc.push({typ: EnumToken.CommaTokenType}); } acc.push(...curr); @@ -560,52 +581,55 @@ export function renderToken(token: Token, options: RenderOptions = {}, cache: { } } - if (token.cal == 'rel' && ['rgb', 'hsl', 'hwb', 'lab', 'lch', 'oklab', 'oklch', 'color'].includes(token.val)) { + if ((token as ColorToken).cal == 'rel' && 
['rgb', 'hsl', 'hwb', 'lab', 'lch', 'oklab', 'oklch', 'color'].includes((token as ColorToken).val)) { - const chi: Token[] = getComponents(token); - const offset: number = token.val == 'color' ? 2 : 1; + const chi: Token[] | null = getComponents(token as ColorToken); + const offset: number = (token as ColorToken).val == 'color' ? 2 : 1; - // @ts-ignore - const color: ColorToken = chi[1]; - const components: Record = >parseRelativeColor(token.val == 'color' ? (chi[offset]).val : token.val, color, chi[offset + 1], chi[offset + 2], chi[offset + 3], chi[offset + 4]); + if (chi != null) { + + // @ts-ignore + const color: ColorToken = chi[1]; + const components: Record = >parseRelativeColor((token as ColorToken).val == 'color' ? (chi[offset]).val : (token as ColorToken).val, color, chi[offset + 1], chi[offset + 2], chi[offset + 3], chi[offset + 4]); - if (components != null) { + if (components != null) { - token.chi = [...(token.val == 'color' ? [chi[offset]] : []), ...Object.values(components)]; + (token as ColorToken).chi = [...((token as ColorToken).val == 'color' ? [chi[offset]] : []), ...Object.values(components)]; - delete token.cal; + delete (token as ColorToken).cal; + } } } - if (token.val == 'color') { + if ((token as ColorToken).val == 'color') { - if (((token.chi)[0]).typ == EnumToken.IdenTokenType && colorFuncColorSpace.includes(((token.chi)[0]).val.toLowerCase())) { + if ((((token as ColorToken).chi)[0]).typ == EnumToken.IdenTokenType && colorFuncColorSpace.includes((((token as ColorToken).chi)[0] as IdentToken).val.toLowerCase())) { // @ts-ignore - return reduceHexValue(srgb2hexvalues(...color2srgbvalues(token))); + return reduceHexValue(srgb2hexvalues(...color2srgbvalues(token as ColorToken))); } } - if (token.cal != null) { + if ((token as ColorToken).cal != null) { let slice: boolean = false; - if (token.cal == 'rel') { + if ((token as ColorToken).cal == 'rel') { - const last: Token = (token.chi).at(-1); + const last: Token = ((token as ColorToken).chi).at(-1); - if ((last.typ == EnumToken.NumberTokenType && last.val == '1') || (last.typ == EnumToken.IdenTokenType && last.val == 'none')) { + if ((last.typ == EnumToken.NumberTokenType && (last as NumberToken).val == '1') || (last.typ == EnumToken.IdenTokenType && (last as IdentToken).val == 'none')) { - const prev: Token = (token.chi).at(-2); + const prev: Token = ((token as ColorToken).chi).at(-2); - if (prev.typ == EnumToken.LiteralTokenType && prev.val == '/') { + if (prev.typ == EnumToken.LiteralTokenType && (prev as LiteralToken).val == '/') { slice = true; } } } - return clamp(token).val + '(' + (slice ? (token.chi).slice(0, -2) : token.chi).reduce((acc: string, curr: Token): string => { + return clamp(token as ColorToken).val + '(' + (slice ? 
((token as ColorToken).chi).slice(0, -2) : (token as ColorToken).chi).reduce((acc: string, curr: Token): string => { const val: string = renderToken(curr, options, cache); @@ -623,61 +647,62 @@ export function renderToken(token: Token, options: RenderOptions = {}, cache: { }, '') + ')'; } - if (token.kin == 'lit' && token.val.localeCompare('currentcolor', undefined, {sensitivity: 'base'}) == 0) { + if ((token as ColorToken).kin == 'lit' && (token as ColorToken).val.localeCompare('currentcolor', undefined, {sensitivity: 'base'}) == 0) { return 'currentcolor'; } - clamp(token); + clamp(token as ColorToken); - if (Array.isArray(token.chi) && token.chi.some((t: Token): boolean => t.typ == EnumToken.FunctionTokenType || (t.typ == EnumToken.ColorTokenType && Array.isArray(t.chi)))) { + if (Array.isArray((token as ColorToken).chi) && (token as ColorToken).chi!.some((t: Token): boolean => t.typ == EnumToken.FunctionTokenType || (t.typ == EnumToken.ColorTokenType && Array.isArray((t as ColorToken).chi)))) { - return (token.val.endsWith('a') ? token.val.slice(0, -1) : token.val) + '(' + token.chi.reduce((acc: string, curr: Token) => acc + (acc.length > 0 && !(acc.endsWith('/') || curr.typ == EnumToken.LiteralTokenType) ? ' ' : '') + renderToken(curr, options, cache), '') + ')'; + return ((token as ColorToken).val.endsWith('a') ? (token as ColorToken).val.slice(0, -1) : (token as ColorToken).val) + '(' + (token as ColorToken).chi!.reduce((acc: string, curr: Token) => acc + (acc.length > 0 && !(acc.endsWith('/') || curr.typ == EnumToken.LiteralTokenType) ? ' ' : '') + renderToken(curr, options, cache), '') + ')'; } - let value: string = token.kin == 'hex' ? token.val.toLowerCase() : (token.kin == 'lit' ? COLORS_NAMES[token.val.toLowerCase()] : ''); + let value: string | null = (token as ColorToken).kin == 'hex' ? (token as ColorToken).val.toLowerCase() : ((token as ColorToken).kin == 'lit' ? 
COLORS_NAMES[(token as ColorToken).val.toLowerCase()] : ''); - if (token.val == 'rgb' || token.val == 'rgba') { + if ((token as ColorToken).val == 'rgb' || (token as ColorToken).val == 'rgba') { - value = rgb2hex(token); - } else if (token.val == 'hsl' || token.val == 'hsla') { + value = rgb2hex(token as ColorToken); + } else if ((token as ColorToken).val == 'hsl' || (token as ColorToken).val == 'hsla') { - value = hsl2hex(token); + value = hsl2hex(token as ColorToken); - } else if (token.val == 'hwb') { + } else if ((token as ColorToken).val == 'hwb') { - value = hwb2hex(token); - } else if (token.val == 'device-cmyk') { + value = hwb2hex(token as ColorToken); + } else if ((token as ColorToken).val == 'device-cmyk') { - value = cmyk2hex(token); - } else if (token.val == 'oklab') { + value = cmyk2hex(token as ColorToken); + } else if ((token as ColorToken).val == 'oklab') { - value = oklab2hex(token); - } else if (token.val == 'oklch') { + value = oklab2hex(token as ColorToken); + ; + } else if ((token as ColorToken).val == 'oklch') { - value = oklch2hex(token); - } else if (token.val == 'lab') { + value = oklch2hex(token as ColorToken); + } else if ((token as ColorToken).val == 'lab') { - value = lab2hex(token); - } else if (token.val == 'lch') { + value = lab2hex(token as ColorToken); + } else if ((token as ColorToken).val == 'lch') { - value = lch2hex(token); + value = lch2hex(token as ColorToken); } - if (value !== '') { + if (value !== '' && value != null) { return reduceHexValue(value); } } - if (['hex', 'lit', 'sys', 'dpsys'].includes(token.kin)) { + if (['hex', 'lit', 'sys', 'dpsys'].includes((token as ColorToken).kin)) { - return token.val; + return (token as ColorToken).val; } - if (Array.isArray(token.chi)) { + if (Array.isArray((token as ColorToken).chi)) { - return (token.val.endsWith('a') ? token.val.slice(0, -1) : token.val) + '(' + token.chi.reduce((acc: string, curr: Token) => acc + (acc.length > 0 && !(acc.endsWith('/') || curr.typ == EnumToken.LiteralTokenType) ? ' ' : '') + renderToken(curr, options, cache), '') + ')'; + return ((token as ColorToken).val.endsWith('a') ? (token as ColorToken).val.slice(0, -1) : (token as ColorToken).val) + '(' + (token as ColorToken).chi!.reduce((acc: string, curr: Token) => acc + (acc.length > 0 && !(acc.endsWith('/') || curr.typ == EnumToken.LiteralTokenType) ? 
' ' : '') + renderToken(curr, options, cache), '') + ')'; } case EnumToken.ParensTokenType: @@ -691,38 +716,28 @@ export function renderToken(token: Token, options: RenderOptions = {}, cache: { if ( token.typ == EnumToken.FunctionTokenType && - mathFuncs.includes(token.val) && - token.chi.length == 1 && - ![EnumToken.BinaryExpressionTokenType, EnumToken.FractionTokenType, EnumToken.IdenTokenType].includes(token.chi[0].typ) && + mathFuncs.includes((token as FunctionToken).val) && + (token as FunctionToken).chi.length == 1 && + ![EnumToken.BinaryExpressionTokenType, EnumToken.FractionTokenType, EnumToken.IdenTokenType].includes((token as FunctionToken).chi[0].typ) && // @ts-ignore - ((token.chi[0]).val)?.typ != EnumToken.FractionTokenType) { + (((token as FunctionToken).chi[0] as NumberToken).val as FractionToken)?.typ != EnumToken.FractionTokenType) { - return token.chi.reduce((acc: string, curr: Token) => acc + renderToken(curr, options, cache, reducer), '') + return (token as FunctionToken).chi.reduce((acc: string, curr: Token) => acc + renderToken(curr, options, cache, reducer), '') } - // if (token.typ == EnumToken.FunctionTokenType && transformFunctions.includes(token.val)) { - // - // const children = token.val.startsWith('matrix') ? null : stripCommaToken(token.chi.slice()) as Token[]; - // - // if (children != null) { - // - // return token.val + '(' + children.reduce((acc: string, curr: Token) => acc + (acc.length > 0 ? ' ' : '') + renderToken(curr, options, cache, reducer), '') + ')'; - // } - // } - // @ts-ignore - return (/* options.minify && 'Pseudo-class-func' == token.typ && token.val.slice(0, 2) == '::' ? token.val.slice(1) :*/ token.val ?? '') + '(' + token.chi.reduce(reducer, '') + ')'; + return (/* options.minify && 'Pseudo-class-func' == token.typ && token.val.slice(0, 2) == '::' ? token.val.slice(1) :*/ (token as FunctionToken).val ?? '') + '(' + (token as FunctionToken).chi.reduce(reducer, '') + ')'; case EnumToken.MatchExpressionTokenType: - return renderToken(token.l, options, cache, reducer, errors) + - renderToken(token.op, options, cache, reducer, errors) + - renderToken(token.r, options, cache, reducer, errors) + - (token.attr ? ' ' + token.attr : ''); + return renderToken((token as MatchExpressionToken).l as Token, options, cache, reducer, errors) + + renderToken((token as MatchExpressionToken).op, options, cache, reducer, errors) + + renderToken((token as MatchExpressionToken).r, options, cache, reducer, errors) + + ((token as MatchExpressionToken).attr ? ' ' + (token as MatchExpressionToken).attr : ''); case EnumToken.NameSpaceAttributeTokenType: - return (token.l == null ? '' : renderToken(token.l, options, cache, reducer, errors)) + '|' + - renderToken(token.r, options, cache, reducer, errors); + return ((token as NameSpaceAttributeToken).l == null ? 
'' : renderToken((token as NameSpaceAttributeToken).l as Token, options, cache, reducer, errors)) + '|' + + renderToken((token as NameSpaceAttributeToken).r, options, cache, reducer, errors); case EnumToken.BlockStartTokenType: return '{'; @@ -805,7 +820,7 @@ export function renderToken(token: Token, options: RenderOptions = {}, cache: { case EnumToken.AttrTokenType: case EnumToken.IdenListTokenType: - return '[' + (token).chi.reduce(reducer, '') + ']'; + return '[' + ((token as AttrToken | IdentListToken)).chi.reduce(reducer, '') + ']'; case EnumToken.TimeTokenType: case EnumToken.AngleTokenType: @@ -814,8 +829,8 @@ export function renderToken(token: Token, options: RenderOptions = {}, cache: { case EnumToken.FrequencyTokenType: case EnumToken.ResolutionTokenType: - let val: string = (token.val).typ == EnumToken.FractionTokenType ? renderToken(token.val, options, cache) : reduceNumber(token.val); - let unit: string = token.unit; + let val: string = ((token as LengthToken).val).typ == EnumToken.FractionTokenType ? renderToken((token as LengthToken).val, options, cache) : reduceNumber((token as LengthToken).val); + let unit: string = (token as LengthToken).unit; if (token.typ == EnumToken.AngleTokenType && !val.includes('/')) { @@ -826,7 +841,7 @@ export function renderToken(token: Token, options: RenderOptions = {}, cache: { for (const u of ['turn', 'deg', 'rad', 'grad']) { - if (token.unit == u) { + if ((token as AngleToken).unit == u) { continue; } @@ -934,16 +949,16 @@ export function renderToken(token: Token, options: RenderOptions = {}, cache: { const uni: string = token.typ == EnumToken.PercentageTokenType ? '%' : 'fr'; - const perc: string = (token.val).typ == EnumToken.FractionTokenType ? renderToken(token.val, options, cache) : reduceNumber(token.val); + const perc: string = ((token as PercentageToken).val).typ == EnumToken.FractionTokenType ? renderToken((token as PercentageToken).val, options, cache) : reduceNumber((token as PercentageToken).val); return options.minify && perc == '0' ? '0' : (perc.includes('/') ? perc.replace('/', uni + '/') : perc + uni); case EnumToken.NumberTokenType: - return (token.val).typ == EnumToken.FractionTokenType ? renderToken(token.val, options, cache) : reduceNumber(token.val); + return ((token as NumberToken).val).typ == EnumToken.FractionTokenType ? 
renderToken((token as NumberToken).val, options, cache) : reduceNumber((token as NumberToken).val); case EnumToken.CommentTokenType: - if (options.removeComments && (!options.preserveLicense || !token.val.startsWith('/*!'))) { + if (options.removeComments && (!options.preserveLicense || !(token as CommentToken).val.startsWith('/*!'))) { return ''; } @@ -952,9 +967,9 @@ export function renderToken(token: Token, options: RenderOptions = {}, cache: { case EnumToken.PseudoElementTokenType: // https://www.w3.org/TR/selectors-4/#single-colon-pseudos - if (token.typ == EnumToken.PseudoElementTokenType && pseudoElements.includes(token.val.slice(1))) { + if (token.typ == EnumToken.PseudoElementTokenType && pseudoElements.includes((token as PseudoElementToken).val.slice(1))) { - return token.val.slice(1); + return (token as PseudoElementToken).val.slice(1); } case EnumToken.UrlTokenTokenType: @@ -967,7 +982,11 @@ export function renderToken(token: Token, options: RenderOptions = {}, cache: { // do not modify original token token = {...token}; - Object.defineProperty(token, 'original', {enumerable: false, writable: false, value: token.val}) + Object.defineProperty(token, 'original', { + enumerable: false, + writable: false, + value: (token as UrlToken).val + }) } // @ts-ignore @@ -1000,7 +1019,7 @@ export function renderToken(token: Token, options: RenderOptions = {}, cache: { case EnumToken.PseudoPageTokenType: case EnumToken.ClassSelectorTokenType: - return /* options.minify && 'Pseudo-class' == token.typ && '::' == token.val.slice(0, 2) ? token.val.slice(1) : */token.val; + return /* options.minify && 'Pseudo-class' == token.typ && '::' == token.val.slice(0, 2) ? token.val.slice(1) : */(token as ClassSelectorToken | StringToken | LiteralToken | AtRuleToken | HashToken | DashedIdentToken | PseudoPageToken | IdentToken).val; case EnumToken.NestingSelectorTokenType: @@ -1012,7 +1031,7 @@ export function renderToken(token: Token, options: RenderOptions = {}, cache: { case EnumToken.InvalidClassSelectorTokenType: - return token.val; + return (token as InvalidClassSelectorToken).val; case EnumToken.DeclarationNodeType: @@ -1020,7 +1039,7 @@ export function renderToken(token: Token, options: RenderOptions = {}, cache: { case EnumToken.MediaQueryConditionTokenType: - return renderToken(token.l, options, cache, reducer, errors) + renderToken(token.op, options, cache, reducer, errors) + token.r.reduce((acc: string, curr: Token): string => acc + renderToken(curr, options, cache), ''); + return renderToken((token as MediaQueryConditionToken).l, options, cache, reducer, errors) + renderToken((token as MediaQueryConditionToken).op, options, cache, reducer, errors) + (token as MediaQueryConditionToken).r.reduce((acc: string, curr: Token): string => acc + renderToken(curr, options, cache), ''); case EnumToken.MediaFeatureTokenType: @@ -1057,8 +1076,7 @@ export function filterValues(values: Token[]): Token[] { if (values[i].typ == EnumToken.ImportantTokenType && values[i - 1]?.typ == EnumToken.WhitespaceTokenType) { values.splice(i - 1, 1); - } - else if (funcLike.includes(values[i].typ) && !['var', 'calc'].includes((values[i] as FunctionToken).val) && values[i + 1]?.typ == EnumToken.WhitespaceTokenType) { + } else if (funcLike.includes(values[i].typ) && !['var', 'calc'].includes((values[i] as FunctionToken).val) && values[i + 1]?.typ == EnumToken.WhitespaceTokenType) { values.splice(i + 1, 1); } diff --git a/src/lib/syntax/syntax.ts b/src/lib/syntax/syntax.ts index 86b43338..6bec3fc2 100644 --- 
a/src/lib/syntax/syntax.ts +++ b/src/lib/syntax/syntax.ts @@ -1,10 +1,11 @@ // https://www.w3.org/TR/CSS21/syndata.html#syntax // https://www.w3.org/TR/2021/CRD-css-syntax-3-20211224/#typedef-ident-token -import {colorsFunc} from "../renderer"; -import {COLORS_NAMES} from "../renderer/color"; +import {colorsFunc} from "../renderer/index.ts"; +import {COLORS_NAMES} from "../renderer/color/index.ts"; import type { AngleToken, + ColorToken, DimensionToken, FunctionToken, IdentToken, @@ -12,8 +13,8 @@ import type { NumberToken, PercentageToken, Token -} from "../../@types"; -import {EnumToken} from "../ast"; +} from "../../@types/index.d.ts"; +import {EnumToken} from "../ast/index.ts"; // '\\' const REVERSE_SOLIDUS = 0x5c; @@ -417,7 +418,7 @@ export function isColorspace(token: Token): boolean { return false; } - return ['srgb', 'srgb-linear', 'lab', 'oklab', 'lch', 'oklch', 'xyz', 'xyz-d50', 'xyz-d65', 'display-p3', 'a98-rgb', 'prophoto-rgb', 'rec2020', 'rgb', 'hsl', 'hwb'].includes(token.val.toLowerCase()); + return ['srgb', 'srgb-linear', 'lab', 'oklab', 'lch', 'oklch', 'xyz', 'xyz-d50', 'xyz-d65', 'display-p3', 'a98-rgb', 'prophoto-rgb', 'rec2020', 'rgb', 'hsl', 'hwb'].includes((token as IdentToken).val.toLowerCase()); } export function isRectangularOrthogonalColorspace(token: Token): boolean { @@ -427,7 +428,7 @@ export function isRectangularOrthogonalColorspace(token: Token): boolean { return false; } - return ['srgb', 'srgb-linear', 'display-p3', 'a98-rgb', 'prophoto-rgb', 'rec2020', 'lab', 'oklab', 'xyz', 'xyz-d50', 'xyz-d65'].includes(token.val.toLowerCase()); + return ['srgb', 'srgb-linear', 'display-p3', 'a98-rgb', 'prophoto-rgb', 'rec2020', 'lab', 'oklab', 'xyz', 'xyz-d50', 'xyz-d65'].includes((token as IdentToken).val.toLowerCase()); } export function isPolarColorspace(token: Token): boolean { @@ -437,7 +438,7 @@ export function isPolarColorspace(token: Token): boolean { return false; } - return ['hsl', 'hwb', 'lch', 'oklch'].includes(token.val); + return ['hsl', 'hwb', 'lch', 'oklch'].includes((token as IdentToken).val); } export function isHueInterpolationMethod(token: Token): boolean { @@ -447,7 +448,7 @@ export function isHueInterpolationMethod(token: Token): boolean { return false; } - return ['shorter', 'longer', 'increasing', 'decreasing'].includes(token.val); + return ['shorter', 'longer', 'increasing', 'decreasing'].includes((token as IdentToken).val); } export function isColor(token: Token): boolean { @@ -459,16 +460,18 @@ export function isColor(token: Token): boolean { if (token.typ == EnumToken.IdenTokenType) { // named color - return token.val.toLowerCase() in COLORS_NAMES; + return (token as IdentToken).val.toLowerCase() in COLORS_NAMES; } let isLegacySyntax: boolean = false; - if (token.typ == EnumToken.FunctionTokenType && token.chi.length > 0 && colorsFunc.includes(token.val)) { + if (token.typ == EnumToken.FunctionTokenType && (token as FunctionToken).chi.length > 0 && colorsFunc.includes((token as FunctionToken).val)) { - if (token.val == 'light-dark') { + // @ts-ignore + if ((token as ColorToken).val == 'light-dark') { - const children: Token[] = (token.chi).filter((t: Token) => [EnumToken.IdenTokenType, EnumToken.NumberTokenType, EnumToken.LiteralTokenType, EnumToken.ColorTokenType, EnumToken.FunctionTokenType, EnumToken.PercentageTokenType].includes(t.typ)); + // @ts-ignore + const children: Token[] = ((token as ColorToken).chi).filter((t: Token) => [EnumToken.IdenTokenType, EnumToken.NumberTokenType, EnumToken.LiteralTokenType, EnumToken.ColorTokenType, 
EnumToken.FunctionTokenType, EnumToken.PercentageTokenType].includes(t.typ)); if (children.length != 2) { @@ -481,11 +484,13 @@ export function isColor(token: Token): boolean { } } - if (token.val == 'color') { + // @ts-ignore + if ((token as ColorToken).val == 'color') { - const children: Token[] = (token.chi).filter((t: Token) => [EnumToken.IdenTokenType, EnumToken.NumberTokenType, EnumToken.LiteralTokenType, EnumToken.ColorTokenType, EnumToken.FunctionTokenType, EnumToken.PercentageTokenType].includes(t.typ)); + // @ts-ignore + const children: Token[] = ((token as ColorToken).chi).filter((t: Token) => [EnumToken.IdenTokenType, EnumToken.NumberTokenType, EnumToken.LiteralTokenType, EnumToken.ColorTokenType, EnumToken.FunctionTokenType, EnumToken.PercentageTokenType].includes(t.typ)); - const isRelative: boolean = children[0].typ == EnumToken.IdenTokenType && children[0].val == 'from'; + const isRelative: boolean = children[0].typ == EnumToken.IdenTokenType && (children[0] as IdentToken).val == 'from'; if (children.length < 4 || children.length > 8) { return false; @@ -552,109 +557,115 @@ export function isColor(token: Token): boolean { } return true; - } else if (token.val == 'color-mix') { + } else { // @ts-ignore + if ((token as ColorToken).val == 'color-mix') { - const children: Token[][] = (token.chi).reduce((acc: Token[][], t: Token) => { + // @ts-ignore + const children: Token[][] = ((token as ColorToken).chi).reduce((acc: Token[][], t: Token) => { - if (t.typ == EnumToken.CommaTokenType) { + if (t.typ == EnumToken.CommaTokenType) { - acc.push([]); - } else { + acc.push([]); + } else { - if (![EnumToken.WhitespaceTokenType, EnumToken.CommentTokenType].includes(t.typ)) { + if (![EnumToken.WhitespaceTokenType, EnumToken.CommentTokenType].includes(t.typ)) { - acc[acc.length - 1].push(t); - } - } + acc[acc.length - 1].push(t); + } + } - return acc; - }, [[]]); + return acc; + }, [[]]); - if (children.length == 3) { + if (children.length == 3) { - if (children[0].length > 3 || - children[0][0].typ != EnumToken.IdenTokenType || - children[0][0].val != 'in' || - !isColorspace(children[0][1]) || - (children[0].length == 3 && !isHueInterpolationMethod(children[0][2])) || - children[1].length > 2 || - children[1][0].typ != EnumToken.ColorTokenType || - children[2].length > 2 || - children[2][0].typ != EnumToken.ColorTokenType) { + if (children[0].length > 3 || + children[0][0].typ != EnumToken.IdenTokenType || + (children[0][0] as IdentToken).val != 'in' || + !isColorspace(children[0][1]) || + (children[0].length == 3 && !isHueInterpolationMethod(children[0][2])) || + children[1].length > 2 || + children[1][0].typ != EnumToken.ColorTokenType || + children[2].length > 2 || + children[2][0].typ != EnumToken.ColorTokenType) { - return false; - } + return false; + } - if (children[1].length == 2) { + if (children[1].length == 2) { - if (!(children[1][1].typ == EnumToken.PercentageTokenType || (children[1][1].typ == EnumToken.NumberTokenType && children[1][1].val == '0'))) { + if (!(children[1][1].typ == EnumToken.PercentageTokenType || (children[1][1].typ == EnumToken.NumberTokenType && (children[1][1] as NumberToken).val == '0'))) { - return false; - } - } + return false; + } + } - if (children[2].length == 2) { + if (children[2].length == 2) { - if (!(children[2][1].typ == EnumToken.PercentageTokenType || (children[2][1].typ == EnumToken.NumberTokenType && children[2][1].val == '0'))) { + if (!(children[2][1].typ == EnumToken.PercentageTokenType || (children[2][1].typ == 
EnumToken.NumberTokenType && (children[2][1] as NumberToken).val == '0'))) { - return false; - } - } + return false; + } + } - return true; - } + return true; + } - return false; - } else { + return false; + } else { - const keywords: string[] = ['from', 'none']; + const keywords: string[] = ['from', 'none']; - if (['rgb', 'hsl', 'hwb', 'lab', 'lch', 'oklab', 'oklch'].includes(token.val)) { + // @ts-ignore + if (['rgb', 'hsl', 'hwb', 'lab', 'lch', 'oklab', 'oklch'].includes((token as ColorToken).val)) { - keywords.push('alpha', ...token.val.slice(-3).split('')); - } + // @ts-ignore + keywords.push('alpha', ...(token as ColorToken).val.slice(-3).split('')); + } - // @ts-ignore - for (const v of token.chi) { + // @ts-ignore + for (const v of token.chi) { - if (v.typ == EnumToken.CommaTokenType) { + if (v.typ == EnumToken.CommaTokenType) { - isLegacySyntax = true; - } + isLegacySyntax = true; + } - if (v.typ == EnumToken.IdenTokenType) { + if (v.typ == EnumToken.IdenTokenType) { - if (!(keywords.includes(v.val) || v.val.toLowerCase() in COLORS_NAMES)) { + if (!(keywords.includes(v.val) || v.val.toLowerCase() in COLORS_NAMES)) { - return false; - } + return false; + } - if (keywords.includes(v.val)) { + if (keywords.includes(v.val)) { - if (isLegacySyntax) { + if (isLegacySyntax) { - return false; - } + return false; + } - if (v.val == 'from' && ['rgba', 'hsla'].includes(token.val)) { + // @ts-ignore + if (v.val == 'from' && ['rgba', 'hsla'].includes((token as ColorToken).val)) { - return false; - } - } + return false; + } + } - continue; - } + continue; + } - if (v.typ == EnumToken.FunctionTokenType && (mathFuncs.includes(v.val) || v.val == 'var' || colorsFunc.includes(v.val))) { + if (v.typ == EnumToken.FunctionTokenType && (mathFuncs.includes(v.val) || v.val == 'var' || colorsFunc.includes(v.val))) { - continue; - } + continue; + } - if (![EnumToken.ColorTokenType, EnumToken.IdenTokenType, EnumToken.NumberTokenType, EnumToken.AngleTokenType, EnumToken.PercentageTokenType, EnumToken.CommaTokenType, EnumToken.WhitespaceTokenType, EnumToken.LiteralTokenType].includes(v.typ)) { + if (![EnumToken.ColorTokenType, EnumToken.IdenTokenType, EnumToken.NumberTokenType, EnumToken.AngleTokenType, EnumToken.PercentageTokenType, EnumToken.CommaTokenType, EnumToken.WhitespaceTokenType, EnumToken.LiteralTokenType].includes(v.typ)) { - return false; - } - } + return false; + } + } + } } return true; diff --git a/src/lib/validation/at-rules/container.ts b/src/lib/validation/at-rules/container.ts index 00f63804..593308a1 100644 --- a/src/lib/validation/at-rules/container.ts +++ b/src/lib/validation/at-rules/container.ts @@ -1,7 +1,16 @@ -import type {AstAtRule, AstNode, MediaFeatureNotToken, Token, ValidationOptions} from "../../../@types"; -import type {ValidationSyntaxResult} from "../../../@types/validation"; -import {EnumToken, ValidationLevel} from "../../ast"; -import {consumeWhitespace, splitTokenList} from "../utils"; +import type { + AstAtRule, + AstNode, + FunctionToken, + MediaFeatureNotToken, + MediaQueryConditionToken, + ParensToken, + Token, + ValidationOptions +} from "../../../@types/index.d.ts"; +import type {ValidationSyntaxResult} from "../../../@types/validation.d.ts"; +import {EnumToken, ValidationLevel} from "../../ast/index.ts"; +import {consumeWhitespace, splitTokenList} from "../utils/index.ts"; const validateContainerScrollStateFeature = validateContainerSizeFeature; @@ -118,12 +127,12 @@ function validateAtRuleContainerQueryList(tokens: Token[], atRule: AstAtRule): V token = 
queries[0]; - if (token.typ == EnumToken.MediaFeatureNotTokenType) { + if (token?.typ == EnumToken.MediaFeatureNotTokenType) { - token = token.val; + token = (token as MediaFeatureNotToken).val; } - if (token.typ != EnumToken.ParensTokenType && (token.typ != EnumToken.FunctionTokenType || !['scroll-state', 'style'].includes(token.val))) { + if (token?.typ != EnumToken.ParensTokenType && (token?.typ != EnumToken.FunctionTokenType || !['scroll-state', 'style'].includes((token as FunctionToken).val))) { return { valid: ValidationLevel.Drop, @@ -135,15 +144,15 @@ function validateAtRuleContainerQueryList(tokens: Token[], atRule: AstAtRule): V } } - if (token.typ == EnumToken.ParensTokenType) { + if (token?.typ == EnumToken.ParensTokenType) { - result = validateContainerSizeFeature(token.chi, atRule); - } else if (token.val == 'scroll-state') { + result = validateContainerSizeFeature((token as ParensToken).chi, atRule); + } else if ((token as FunctionToken).val == 'scroll-state') { - result = validateContainerScrollStateFeature(token.chi, atRule); + result = validateContainerScrollStateFeature((token as FunctionToken).chi, atRule); } else { - result = validateContainerStyleFeature(token.chi, atRule); + result = validateContainerStyleFeature((token as FunctionToken).chi, atRule); } if (result.valid == ValidationLevel.Drop) { @@ -161,7 +170,7 @@ function validateAtRuleContainerQueryList(tokens: Token[], atRule: AstAtRule): V token = queries[0]; - if (token.typ != EnumToken.MediaFeatureAndTokenType && token.typ != EnumToken.MediaFeatureOrTokenType) { + if (token?.typ != EnumToken.MediaFeatureAndTokenType && token?.typ != EnumToken.MediaFeatureOrTokenType) { return { valid: ValidationLevel.Drop, @@ -175,10 +184,10 @@ function validateAtRuleContainerQueryList(tokens: Token[], atRule: AstAtRule): V if (tokenType == null) { - tokenType = token.typ; + tokenType = token?.typ; } - if (tokenType != token.typ) { + if (tokenType == null ||tokenType != token?.typ) { return { valid: ValidationLevel.Drop, @@ -227,11 +236,11 @@ function validateContainerStyleFeature(tokens: Token[], atRule: AstAtRule): Vali if (tokens[0].typ == EnumToken.ParensTokenType) { - return validateContainerStyleFeature(tokens[0].chi, atRule); + return validateContainerStyleFeature((tokens[0] as ParensToken).chi, atRule); } if ([EnumToken.DashedIdenTokenType, EnumToken.IdenTokenType].includes(tokens[0].typ) || - ( tokens[0].typ == EnumToken.MediaQueryConditionTokenType &&tokens[0].op.typ == EnumToken.ColonTokenType)) { + (tokens[0].typ == EnumToken.MediaQueryConditionTokenType && (tokens[0] as MediaQueryConditionToken).op.typ == EnumToken.ColonTokenType)) { return { valid: ValidationLevel.Valid, @@ -282,7 +291,7 @@ function validateContainerSizeFeature(tokens: Token[], atRule: AstAtRule): Valid if (token.typ == EnumToken.ParensTokenType) { - return validateAtRuleContainerQueryStyleInParams(token.chi, atRule); + return validateAtRuleContainerQueryStyleInParams((token as ParensToken).chi, atRule); } if (![EnumToken.DashedIdenTokenType, EnumToken.MediaQueryConditionTokenType].includes(tokens[0].typ)) { @@ -337,7 +346,7 @@ function validateAtRuleContainerQueryStyleInParams(tokens: Token[], atRule: AstA if (token.typ == EnumToken.MediaFeatureNotTokenType) { - token = token.val; + token = (token as MediaFeatureNotToken).val; } if (tokens[0].typ != EnumToken.ParensTokenType) { @@ -352,7 +361,7 @@ function validateAtRuleContainerQueryStyleInParams(tokens: Token[], atRule: AstA } } - const slices = tokens[0].chi.slice(); + const slices = 
(tokens[0] as ParensToken).chi.slice(); consumeWhitespace(slices); diff --git a/src/lib/validation/at-rules/counter-style.ts b/src/lib/validation/at-rules/counter-style.ts index a168f03e..b928976f 100644 --- a/src/lib/validation/at-rules/counter-style.ts +++ b/src/lib/validation/at-rules/counter-style.ts @@ -1,6 +1,6 @@ -import type {AstAtRule, AstNode, Token, ValidationOptions} from "../../../@types"; +import type {AstAtRule, AstNode, Token, ValidationOptions} from "../../../@types/index.d.ts"; import type {ValidationSyntaxResult} from "../../../@types/validation.d.ts"; -import {EnumToken, ValidationLevel} from "../../ast"; +import {EnumToken, ValidationLevel} from "../../ast/index.ts"; export function validateAtRuleCounterStyle(atRule: AstAtRule, options: ValidationOptions, root?: AstNode): ValidationSyntaxResult { diff --git a/src/lib/validation/at-rules/custom-media.ts b/src/lib/validation/at-rules/custom-media.ts index bb9bffcf..0699f78b 100644 --- a/src/lib/validation/at-rules/custom-media.ts +++ b/src/lib/validation/at-rules/custom-media.ts @@ -1,8 +1,8 @@ -import type {AstAtRule, AstNode, Token, ValidationOptions} from "../../../@types"; -import type {ValidationSyntaxResult} from "../../../@types/validation"; -import {EnumToken, ValidationLevel} from "../../ast"; -import {consumeWhitespace} from "../utils"; -import {validateAtRuleMediaQueryList} from "./media"; +import type {AstAtRule, AstNode, Token, ValidationOptions} from "../../../@types/index.d.ts"; +import type {ValidationSyntaxResult} from "../../../@types/validation.d.ts"; +import {EnumToken, ValidationLevel} from "../../ast/index.ts"; +import {consumeWhitespace} from "../utils/index.ts"; +import {validateAtRuleMediaQueryList} from "./media.ts"; export function validateAtRuleCustomMedia(atRule: AstAtRule, options: ValidationOptions, root?: AstNode): ValidationSyntaxResult { diff --git a/src/lib/validation/at-rules/document.ts b/src/lib/validation/at-rules/document.ts index 6ab49bd8..2ccae4ad 100644 --- a/src/lib/validation/at-rules/document.ts +++ b/src/lib/validation/at-rules/document.ts @@ -1,8 +1,8 @@ -import type {AstAtRule, AstNode, FunctionToken, Token, ValidationOptions} from "../../../@types"; +import type {AstAtRule, AstNode, FunctionToken, Token, ValidationOptions} from "../../../@types/index.d.ts"; import type {ValidationSyntaxResult} from "../../../@types/validation.d.ts"; -import {EnumToken, ValidationLevel} from "../../ast"; -import {consumeWhitespace, splitTokenList} from "../utils"; -import {validateURL} from "../syntaxes/url"; +import {EnumToken, ValidationLevel} from "../../ast/index.ts"; +import {consumeWhitespace, splitTokenList} from "../utils/index.ts"; +import {validateURL} from "../syntaxes/url.ts"; export function validateAtRuleDocument(atRule: AstAtRule, options: ValidationOptions, root?: AstNode): ValidationSyntaxResult { @@ -70,9 +70,9 @@ export function validateAtRuleDocument(atRule: AstAtRule, options: ValidationOpt result = validateURL(t[0]); - if (result.valid == ValidationLevel.Drop) { + if (result?.valid == ValidationLevel.Drop) { - return result; + return result as ValidationSyntaxResult; } continue; diff --git a/src/lib/validation/at-rules/else.ts b/src/lib/validation/at-rules/else.ts index 8c15e8fb..4a5991f5 100644 --- a/src/lib/validation/at-rules/else.ts +++ b/src/lib/validation/at-rules/else.ts @@ -1,4 +1,4 @@ -import {validateAtRuleWhen} from "./when"; +import {validateAtRuleWhen} from "./when.ts"; export const validateAtRuleElse = validateAtRuleWhen; \ No newline at end of file diff 
--git a/src/lib/validation/at-rules/font-feature-values.ts b/src/lib/validation/at-rules/font-feature-values.ts index 9ccdce23..476fdbf6 100644 --- a/src/lib/validation/at-rules/font-feature-values.ts +++ b/src/lib/validation/at-rules/font-feature-values.ts @@ -1,7 +1,7 @@ -import type {AstAtRule, AstNode, ValidationOptions} from "../../../@types"; +import type {AstAtRule, AstNode, ValidationOptions} from "../../../@types/index.d.ts"; import type {ValidationSyntaxResult} from "../../../@types/validation.d.ts"; -import {ValidationLevel} from "../../ast"; -import {validateFamilyName} from "../syntaxes"; +import {ValidationLevel} from "../../ast/index.ts"; +import {validateFamilyName} from "../syntaxes/index.ts"; export function validateAtRuleFontFeatureValues(atRule: AstAtRule, options: ValidationOptions, root?: AstNode): ValidationSyntaxResult { diff --git a/src/lib/validation/at-rules/import.ts b/src/lib/validation/at-rules/import.ts index 1e217055..e2dd0b2a 100644 --- a/src/lib/validation/at-rules/import.ts +++ b/src/lib/validation/at-rules/import.ts @@ -1,10 +1,17 @@ -import type {AstAtRule, AstNode, FunctionToken, Token, ValidationOptions} from "../../../@types"; +import type { + AstAtRule, + AstNode, + FunctionToken, + FunctionURLToken, + Token, + ValidationOptions +} from "../../../@types/index.d.ts"; import type {ValidationSyntaxResult} from "../../../@types/validation.d.ts"; -import {EnumToken, ValidationLevel} from "../../ast"; -import {validateAtRuleMediaQueryList} from "./media"; -import {consumeWhitespace} from "../utils"; -import {validateLayerName} from "../syntaxes"; -import {validateAtRuleSupportsConditions} from "./supports"; +import {EnumToken, ValidationLevel} from "../../ast/index.ts"; +import {validateAtRuleMediaQueryList} from "./media.ts"; +import {consumeWhitespace} from "../utils/index.ts"; +import {validateLayerName} from "../syntaxes/index.ts"; +import {validateAtRuleSupportsConditions} from "./supports.ts"; export function validateAtRuleImport(atRule: AstAtRule, options: ValidationOptions, root?: AstNode): ValidationSyntaxResult { @@ -56,7 +63,7 @@ export function validateAtRuleImport(atRule: AstAtRule, options: ValidationOptio consumeWhitespace(tokens); } else if (tokens[0].typ == EnumToken.UrlFunctionTokenType) { - const slice = tokens[0].chi.filter((t: Token): boolean => t.typ != EnumToken.CommentTokenType && t.typ != EnumToken.WhitespaceTokenType); + const slice = (tokens[0] as FunctionURLToken).chi.filter((t: Token): boolean => t.typ != EnumToken.CommentTokenType && t.typ != EnumToken.WhitespaceTokenType); if (slice.length != 1 || ![EnumToken.StringTokenType, EnumToken.UrlTokenTokenType].includes(slice[0].typ)) { diff --git a/src/lib/validation/at-rules/index.ts b/src/lib/validation/at-rules/index.ts index 2637115c..d9ba889a 100644 --- a/src/lib/validation/at-rules/index.ts +++ b/src/lib/validation/at-rules/index.ts @@ -1,14 +1,14 @@ -export * from './media'; -export * from './counter-style'; -export * from './page'; -export * from './page-margin-box'; -export * from './supports'; -export * from './import'; -export * from './layer'; -export * from './font-feature-values'; -export * from './namespace'; -export * from './document'; -export * from './keyframes'; -export * from './when'; -export * from './else'; -export * from './container'; \ No newline at end of file +export * from './media.ts'; +export * from './counter-style.ts'; +export * from './page.ts'; +export * from './page-margin-box.ts'; +export * from './supports.ts'; +export * from './import.ts'; 
+export * from './layer.ts'; +export * from './font-feature-values.ts'; +export * from './namespace.ts'; +export * from './document.ts'; +export * from './keyframes.ts'; +export * from './when.ts'; +export * from './else.ts'; +export * from './container.ts'; \ No newline at end of file diff --git a/src/lib/validation/at-rules/keyframes.ts b/src/lib/validation/at-rules/keyframes.ts index 7cdba221..d47d5101 100644 --- a/src/lib/validation/at-rules/keyframes.ts +++ b/src/lib/validation/at-rules/keyframes.ts @@ -1,7 +1,7 @@ -import type {AstKeyframAtRule, AstNode, Token, ValidationOptions} from "../../../@types"; +import type {AstKeyframAtRule, AstNode, Token, ValidationOptions} from "../../../@types/index.d.ts"; import type {ValidationSyntaxResult} from "../../../@types/validation.d.ts"; -import {EnumToken, ValidationLevel} from "../../ast"; -import {consumeWhitespace} from "../utils"; +import {EnumToken, ValidationLevel} from "../../ast/index.ts"; +import {consumeWhitespace} from "../utils/index.ts"; export function validateAtRuleKeyframes(atRule: AstKeyframAtRule, options: ValidationOptions, root?: AstNode): ValidationSyntaxResult { diff --git a/src/lib/validation/at-rules/layer.ts b/src/lib/validation/at-rules/layer.ts index efa67c48..c00d399c 100644 --- a/src/lib/validation/at-rules/layer.ts +++ b/src/lib/validation/at-rules/layer.ts @@ -1,7 +1,7 @@ -import type {AstAtRule, AstNode, ValidationOptions} from "../../../@types"; +import type {AstAtRule, AstNode, ValidationOptions} from "../../../@types/index.d.ts"; import type {ValidationSyntaxResult} from "../../../@types/validation.d.ts"; -import {ValidationLevel} from "../../ast"; -import {validateLayerName} from "../syntaxes"; +import {ValidationLevel} from "../../ast/index.ts"; +import {validateLayerName} from "../syntaxes/index.ts"; export function validateAtRuleLayer(atRule: AstAtRule, options: ValidationOptions, root?: AstNode): ValidationSyntaxResult { diff --git a/src/lib/validation/at-rules/media.ts b/src/lib/validation/at-rules/media.ts index cca892c1..dcb34580 100644 --- a/src/lib/validation/at-rules/media.ts +++ b/src/lib/validation/at-rules/media.ts @@ -2,14 +2,17 @@ import type { AstAtRule, AstNode, FunctionToken, + MediaFeatureNotToken, + MediaFeatureOnlyToken, MediaFeatureToken, + MediaQueryConditionToken, ParensToken, Token, ValidationOptions } from "../../../@types/index.d.ts"; import type {ValidationSyntaxResult} from "../../../@types/validation.d.ts"; -import {EnumToken, ValidationLevel} from "../../ast"; -import {consumeWhitespace, splitTokenList} from "../utils"; +import {EnumToken, ValidationLevel} from "../../ast/index.ts"; +import {consumeWhitespace, splitTokenList} from "../utils/index.ts"; export function validateAtRuleMedia(atRule: AstAtRule, options: ValidationOptions, root?: AstNode): ValidationSyntaxResult { @@ -116,7 +119,7 @@ export function validateAtRuleMediaQueryList(tokenList: Token[], atRule: AstAtRu if (tokens[0].typ == EnumToken.ParensTokenType) { - result = validateAtRuleMediaQueryList(tokens[0].chi, atRule); + result = validateAtRuleMediaQueryList((tokens[0] as ParensToken).chi, atRule); } else { result = { @@ -280,7 +283,7 @@ function validateCustomMediaCondition(token: Token, atRule: AstAtRule): boolean if (token.typ == EnumToken.MediaFeatureNotTokenType) { - return validateMediaCondition(token.val, atRule); + return validateMediaCondition((token as MediaFeatureNotToken).val, atRule); } if (token.typ != EnumToken.ParensTokenType) { @@ -288,7 +291,7 @@ function validateCustomMediaCondition(token: 
Token, atRule: AstAtRule): boolean return false; } - const chi: Token[] = token.chi.filter((t: Token): boolean => t.typ != EnumToken.CommentTokenType && t.typ != EnumToken.WhitespaceTokenType); + const chi: Token[] = (token as ParensToken).chi.filter((t: Token): boolean => t.typ != EnumToken.CommentTokenType && t.typ != EnumToken.WhitespaceTokenType); if (chi.length != 1) { @@ -302,10 +305,10 @@ export function validateMediaCondition(token: Token, atRule: AstAtRule): boolean if (token.typ == EnumToken.MediaFeatureNotTokenType) { - return validateMediaCondition(token.val, atRule); + return validateMediaCondition((token as MediaFeatureNotToken).val, atRule); } - if (token.typ != EnumToken.ParensTokenType && !(['when', 'else', 'import'].includes(atRule.nam) && token.typ == EnumToken.FunctionTokenType && ['media', 'supports', 'selector'].includes(token.val))) { + if (token.typ != EnumToken.ParensTokenType && !(['when', 'else', 'import'].includes(atRule.nam) && token.typ == EnumToken.FunctionTokenType && ['media', 'supports', 'selector'].includes((token as FunctionToken).val))) { return false; } @@ -324,16 +327,14 @@ export function validateMediaCondition(token: Token, atRule: AstAtRule): boolean if (chi[0].typ == EnumToken.MediaFeatureNotTokenType) { - return validateMediaCondition(chi[0].val, atRule); + return validateMediaCondition((chi[0] as MediaFeatureNotToken).val, atRule); } if (chi[0].typ == EnumToken.MediaQueryConditionTokenType) { - return chi[0].l.typ == EnumToken.IdenTokenType; + return (chi[0] as MediaQueryConditionToken).l.typ == EnumToken.IdenTokenType; } - // console.error(chi[0].parent); - return false; } @@ -343,7 +344,7 @@ export function validateMediaFeature(token: Token): boolean { if (token.typ == EnumToken.MediaFeatureOnlyTokenType || token.typ == EnumToken.MediaFeatureNotTokenType) { - val = token.val + val = (token as MediaFeatureOnlyToken | MediaFeatureNotToken).val } return val.typ == EnumToken.MediaFeatureTokenType; diff --git a/src/lib/validation/at-rules/namespace.ts b/src/lib/validation/at-rules/namespace.ts index 5f2cf0b8..04f49dc6 100644 --- a/src/lib/validation/at-rules/namespace.ts +++ b/src/lib/validation/at-rules/namespace.ts @@ -1,8 +1,8 @@ -import type {AstAtRule, AstNode, Token, ValidationOptions} from "../../../@types"; +import type {AstAtRule, AstNode, Token, ValidationOptions} from "../../../@types/index.d.ts"; import type {ValidationSyntaxResult} from "../../../@types/validation.d.ts"; -import {EnumToken, ValidationLevel} from "../../ast"; -import {consumeWhitespace} from "../utils"; -import {validateURL} from "../syntaxes/url"; +import {EnumToken, ValidationLevel} from "../../ast/index.ts"; +import {consumeWhitespace} from "../utils/index.ts"; +import {validateURL} from "../syntaxes/url.ts"; export function validateAtRuleNamespace(atRule: AstAtRule, options: ValidationOptions, root?: AstNode): ValidationSyntaxResult { diff --git a/src/lib/validation/at-rules/page-margin-box.ts b/src/lib/validation/at-rules/page-margin-box.ts index 7f9a2561..d53d9e49 100644 --- a/src/lib/validation/at-rules/page-margin-box.ts +++ b/src/lib/validation/at-rules/page-margin-box.ts @@ -1,6 +1,6 @@ -import type {AstAtRule, AstNode, ValidationOptions} from "../../../@types"; +import type {AstAtRule, AstNode, ValidationOptions} from "../../../@types/index.d.ts"; import type {ValidationSyntaxResult} from "../../../@types/validation.d.ts"; -import {EnumToken, ValidationLevel} from "../../ast"; +import {EnumToken, ValidationLevel} from "../../ast/index.ts"; export function 
validateAtRulePageMarginBox(atRule: AstAtRule, options: ValidationOptions, root?: AstNode): ValidationSyntaxResult { diff --git a/src/lib/validation/at-rules/page.ts b/src/lib/validation/at-rules/page.ts index a5468a17..d190bca5 100644 --- a/src/lib/validation/at-rules/page.ts +++ b/src/lib/validation/at-rules/page.ts @@ -1,7 +1,7 @@ -import type {AstAtRule, AstNode, ValidationOptions} from "../../../@types"; +import type {AstAtRule, AstNode, ValidationOptions} from "../../../@types/index.d.ts"; import type {ValidationSyntaxResult} from "../../../@types/validation.d.ts"; -import {EnumToken, ValidationLevel} from "../../ast"; -import {splitTokenList} from "../utils"; +import {EnumToken, ValidationLevel} from "../../ast/index.ts"; +import {splitTokenList} from "../utils/index.ts"; export function validateAtRulePage(atRule: AstAtRule, options: ValidationOptions, root?: AstNode): ValidationSyntaxResult { diff --git a/src/lib/validation/at-rules/supports.ts b/src/lib/validation/at-rules/supports.ts index a03121da..0f86493b 100644 --- a/src/lib/validation/at-rules/supports.ts +++ b/src/lib/validation/at-rules/supports.ts @@ -1,10 +1,20 @@ -import type {AstAtRule, AstNode, IdentToken, MediaQueryConditionToken, Token, ValidationOptions} from "../../../@types"; +import type { + AstAtRule, + AstNode, + FunctionToken, + IdentToken, + MediaFeatureNotToken, + MediaQueryConditionToken, + ParensToken, + Token, + ValidationOptions +} from "../../../@types/index.d.ts"; import type {ValidationSyntaxResult} from "../../../@types/validation.d.ts"; -import {EnumToken, ValidationLevel} from "../../ast"; -import {consumeWhitespace, splitTokenList} from "../utils"; -import {colorFontTech, fontFeaturesTech, fontFormat} from "../../syntax"; -import {validateComplexSelector} from "../syntaxes/complex-selector"; -import {parseSelector} from "../../parser"; +import {EnumToken, ValidationLevel} from "../../ast/index.ts"; +import {consumeWhitespace, splitTokenList} from "../utils/index.ts"; +import {colorFontTech, fontFeaturesTech, fontFormat} from "../../syntax/index.ts"; +import {validateComplexSelector} from "../syntaxes/complex-selector.ts"; +import {parseSelector} from "../../parser/index.ts"; export function validateAtRuleSupports(atRule: AstAtRule, options: ValidationOptions, root?: AstNode): ValidationSyntaxResult { @@ -100,7 +110,7 @@ export function validateAtRuleSupportsConditions(atRule: AstAtRule, tokenList: T if (tokens[0].typ == EnumToken.ParensTokenType) { - result = validateAtRuleSupportsConditions(atRule, tokens[0].chi); + result = validateAtRuleSupportsConditions(atRule, (tokens[0] as ParensToken).chi); if (/* result == null || */ result.valid == ValidationLevel.Valid) { @@ -213,10 +223,10 @@ export function validateSupportCondition(atRule: AstAtRule, token: Token): Valid if (token.typ == EnumToken.MediaFeatureNotTokenType) { - return validateSupportCondition(atRule, token.val); + return validateSupportCondition(atRule, (token as MediaFeatureNotToken).val); } - if (token.typ != EnumToken.ParensTokenType && !(['when', 'else'].includes(atRule.nam) && token.typ == EnumToken.FunctionTokenType && ['supports', 'font-format', 'font-tech'].includes(token.val))) { + if (token.typ != EnumToken.ParensTokenType && !(['when', 'else'].includes(atRule.nam) && token.typ == EnumToken.FunctionTokenType && ['supports', 'font-format', 'font-tech'].includes((token as FunctionToken).val))) { // @ts-ignore return { @@ -229,10 +239,10 @@ export function validateSupportCondition(atRule: AstAtRule, token: Token): Valid }; } - 
const chi: Token[] = token.chi.filter((t: Token): boolean => t.typ != EnumToken.CommentTokenType && t.typ != EnumToken.WhitespaceTokenType); + const chi: Token[] = (token as FunctionToken).chi.filter((t: Token): boolean => t.typ != EnumToken.CommentTokenType && t.typ != EnumToken.WhitespaceTokenType); if (chi.length != 1) { - return validateAtRuleSupportsConditions(atRule, token.chi); + return validateAtRuleSupportsConditions(atRule, (token as FunctionToken).chi); } if (chi[0].typ == EnumToken.IdenTokenType) { @@ -250,7 +260,7 @@ export function validateSupportCondition(atRule: AstAtRule, token: Token): Valid if (chi[0].typ == EnumToken.MediaFeatureNotTokenType) { - return validateSupportCondition(atRule, chi[0].val); + return validateSupportCondition(atRule, (chi[0] as MediaFeatureNotToken).val); } if (chi[0].typ == EnumToken.MediaQueryConditionTokenType) { @@ -290,14 +300,14 @@ function validateSupportFeature(token: Token): ValidationSyntaxResult { if (token.typ == EnumToken.FunctionTokenType) { - if (token.val.localeCompare('selector', undefined, {sensitivity: 'base'}) == 0) { + if ((token as FunctionToken).val.localeCompare('selector', undefined, {sensitivity: 'base'}) == 0) { - return validateComplexSelector(parseSelector(token.chi)); + return validateComplexSelector(parseSelector((token as FunctionToken).chi)); } - if (token.val.localeCompare('font-tech', undefined, {sensitivity: 'base'}) == 0) { + if ((token as FunctionToken).val.localeCompare('font-tech', undefined, {sensitivity: 'base'}) == 0) { - const chi: Token[] = token.chi.filter((t) => ![EnumToken.WhitespaceTokenType, EnumToken.CommentTokenType].includes(t.typ)); + const chi: Token[] = (token as FunctionToken).chi.filter((t: Token) => ![EnumToken.WhitespaceTokenType, EnumToken.CommentTokenType].includes(t.typ)); // @ts-ignore return chi.length == 1 && chi[0].typ == EnumToken.IdenTokenType && colorFontTech.concat(fontFeaturesTech).some((t) => t.localeCompare((chi[0] as IdentToken).val, undefined, {sensitivity: 'base'}) == 0) ? @@ -319,9 +329,9 @@ function validateSupportFeature(token: Token): ValidationSyntaxResult { }; } - if (token.val.localeCompare('font-format', undefined, {sensitivity: 'base'}) == 0) { + if ((token as FunctionToken).val.localeCompare('font-format', undefined, {sensitivity: 'base'}) == 0) { - const chi: Token[] = token.chi.filter((t) => ![EnumToken.WhitespaceTokenType, EnumToken.CommentTokenType].includes(t.typ)); + const chi: Token[] = (token as FunctionToken).chi.filter((t: Token): boolean => ![EnumToken.WhitespaceTokenType, EnumToken.CommentTokenType].includes(t.typ)); // @ts-ignore return chi.length == 1 && chi[0].typ == EnumToken.IdenTokenType && fontFormat.some((t) => t.localeCompare((chi[0] as IdentToken).val, undefined, {sensitivity: 'base'}) == 0) ? 
diff --git a/src/lib/validation/at-rules/when.ts b/src/lib/validation/at-rules/when.ts index 24e8f441..d1438486 100644 --- a/src/lib/validation/at-rules/when.ts +++ b/src/lib/validation/at-rules/when.ts @@ -1,9 +1,9 @@ -import type {AstAtRule, AstNode, FunctionToken, Token, ValidationOptions} from "../../../@types"; -import type {ValidationSyntaxResult} from "../../../@types/validation"; -import {EnumToken, ValidationLevel} from "../../ast"; -import {consumeWhitespace, splitTokenList} from "../utils"; -import {validateMediaCondition, validateMediaFeature} from "./media"; -import {validateSupportCondition} from "./supports"; +import type {AstAtRule, AstNode, FunctionToken, Token, ValidationOptions} from "../../../@types/index.d.ts"; +import type {ValidationSyntaxResult} from "../../../@types/validation.d.ts"; +import {EnumToken, ValidationLevel} from "../../ast/index.ts"; +import {consumeWhitespace, splitTokenList} from "../utils/index.ts"; +import {validateMediaCondition, validateMediaFeature} from "./media.ts"; +import {validateSupportCondition} from "./supports.ts"; export function validateAtRuleWhen(atRule: AstAtRule, options: ValidationOptions, root?: AstNode): ValidationSyntaxResult { @@ -90,11 +90,11 @@ export function validateAtRuleWhenQueryList(tokenList: Token[], atRule: AstAtRul break; } - const chi: Token[] = split[0].chi.slice() as Token[]; + const chi: Token[] = (split[0] as FunctionToken).chi.slice() as Token[]; consumeWhitespace(chi); - if (split[0].val == 'media') { + if ((split[0] as FunctionToken).val == 'media') { // result = valida if (chi.length != 1 || !(validateMediaFeature(chi[0]) || validateMediaCondition(split[0], atRule))) { @@ -111,7 +111,7 @@ export function validateAtRuleWhenQueryList(tokenList: Token[], atRule: AstAtRul break; } - } else if (['supports', 'font-tech', 'font-format'].includes(split[0].val)) { + } else if (['supports', 'font-tech', 'font-format'].includes((split[0] as FunctionToken).val)) { // result = valida if (!validateSupportCondition(atRule, split[0])) { diff --git a/src/lib/validation/atrule.ts b/src/lib/validation/atrule.ts index f9a3aafd..94165501 100644 --- a/src/lib/validation/atrule.ts +++ b/src/lib/validation/atrule.ts @@ -1,8 +1,8 @@ -import type {AstAtRule, AstNode, Token, ValidationOptions} from "../../@types"; -import type {ValidationConfiguration, ValidationResult} from "../../@types/validation"; -import {EnumToken, ValidationLevel} from "../ast"; -import {getParsedSyntax, getSyntaxConfig} from "./config"; -import {ValidationSyntaxGroupEnum, ValidationToken} from "./parser"; +import type {AstAtRule, AstNode, Token, ValidationOptions} from "../../@types/index.d.ts"; +import type {ValidationConfiguration, ValidationResult} from "../../@types/validation.d.ts"; +import {EnumToken, ValidationLevel} from "../ast/index.ts"; +import {getParsedSyntax, getSyntaxConfig} from "./config.ts"; +import {ValidationSyntaxGroupEnum, ValidationToken} from "./parser/index.ts"; import { validateAtRuleContainer, validateAtRuleCounterStyle, @@ -17,8 +17,8 @@ import { validateAtRulePageMarginBox, validateAtRuleSupports, validateAtRuleWhen -} from "./at-rules"; -import {validateAtRuleCustomMedia} from "./at-rules/custom-media"; +} from "./at-rules/index.ts"; +import {validateAtRuleCustomMedia} from "./at-rules/custom-media.ts"; export function validateAtRule(atRule: AstAtRule, options: ValidationOptions, root?: AstNode): ValidationResult { diff --git a/src/lib/validation/config.ts b/src/lib/validation/config.ts index 9100334b..e2d4a9f2 100644 --- 
a/src/lib/validation/config.ts +++ b/src/lib/validation/config.ts @@ -1,6 +1,6 @@ import config from './config.json' with {type: 'json'}; -import type {ValidationConfiguration, ValidationSyntaxNode} from "../../@types/validation"; -import {parseSyntax, ValidationSyntaxGroupEnum, ValidationToken} from "./parser"; +import type {ValidationConfiguration, ValidationSyntaxNode} from "../../@types/validation.d.ts"; +import {parseSyntax, ValidationSyntaxGroupEnum, ValidationToken} from "./parser/index.ts"; const parsedSyntaxes = new Map(); diff --git a/src/lib/validation/declaration.ts b/src/lib/validation/declaration.ts index a9328b9c..ead8cd23 100644 --- a/src/lib/validation/declaration.ts +++ b/src/lib/validation/declaration.ts @@ -1,5 +1,5 @@ import type {AstAtRule, AstDeclaration, AstNode, ValidationOptions} from "../../@types/index.d.ts"; -import type {ValidationConfiguration, ValidationResult} from "../../@types/validation"; +import type {ValidationConfiguration, ValidationResult} from "../../@types/validation.d.ts"; import {EnumToken, ValidationLevel} from "../ast/index.ts"; import {getParsedSyntax, getSyntaxConfig} from "./config.ts"; import {ParsedSyntax, ValidationSyntaxGroupEnum, ValidationToken} from "./parser/index.ts"; diff --git a/src/lib/validation/selector.ts b/src/lib/validation/selector.ts index 52d84ca1..3269f8e0 100644 --- a/src/lib/validation/selector.ts +++ b/src/lib/validation/selector.ts @@ -14,7 +14,7 @@ export function validateSelector(selector: Token[], options: ValidationOptions, // @ts-ignore if (root.typ == EnumToken.AtRuleNodeType && root.nam.match(/^(-[a-z]+-)?keyframes$/)) { - return validateKeyframeBlockList(selector, root, options); + return validateKeyframeBlockList(selector, root as AstAtRule, options); } let isNested: number = root.typ == EnumToken.RuleNodeType ? 
1 : 0; diff --git a/src/lib/validation/syntax.ts b/src/lib/validation/syntax.ts index afd4f611..9471f1b4 100644 --- a/src/lib/validation/syntax.ts +++ b/src/lib/validation/syntax.ts @@ -21,7 +21,7 @@ import { ValidationSyntaxGroupEnum, ValidationToken, ValidationTokenEnum -} from "./parser"; +} from "./parser/index.ts"; import type { AstAtRule, AstDeclaration, @@ -33,16 +33,17 @@ import type { IdentToken, LiteralToken, MatchExpressionToken, + NameSpaceAttributeToken, NumberToken, PseudoClassFunctionToken, PseudoClassToken, StringToken, Token, ValidationOptions -} from "../../@types"; -import {EnumToken, funcLike, ValidationLevel} from "../ast"; -import {getParsedSyntax, getSyntaxConfig} from "./config"; -import type {ValidationConfiguration, ValidationSyntaxResult} from "../../@types/validation"; +} from "../../@types/index.d.ts"; +import {EnumToken, funcLike, ValidationLevel} from "../ast/index.ts"; +import {getParsedSyntax, getSyntaxConfig} from "./config.ts"; +import type {ValidationConfiguration, ValidationSyntaxResult} from "../../@types/validation.d.ts"; import {isLength} from "../syntax/index.ts"; import {validateSelector} from "./selector.ts"; import {validateImage} from "./syntaxes/index.ts"; @@ -1160,9 +1161,9 @@ function doValidateSyntax(syntax: ValidationToken, token: Token | AstNode, token valid = (token.typ == EnumToken.UniversalSelectorTokenType) || token.typ == EnumToken.IdenTokenType || (token.typ == EnumToken.NameSpaceAttributeTokenType && ( - token.l == null || token.l.typ == EnumToken.IdenTokenType || - (token.l.typ == EnumToken.LiteralTokenType && token.l.val == '*')) && - token.r.typ == EnumToken.IdenTokenType + (token as NameSpaceAttributeToken).l == null || ((token as NameSpaceAttributeToken).l as IdentToken).typ == EnumToken.IdenTokenType || + (((token as NameSpaceAttributeToken).l as LiteralToken).typ == EnumToken.LiteralTokenType && ((token as NameSpaceAttributeToken).l as LiteralToken).val == '*')) && + (token as NameSpaceAttributeToken).r.typ == EnumToken.IdenTokenType ); result = { @@ -1177,8 +1178,8 @@ function doValidateSyntax(syntax: ValidationToken, token: Token | AstNode, token } else if ('wq-name' == (syntax as ValidationPropertyToken).val) { valid = token.typ == EnumToken.IdenTokenType || (token.typ == EnumToken.NameSpaceAttributeTokenType && - (token.l == null || token.l.typ == EnumToken.IdenTokenType || (token.l.typ == EnumToken.LiteralTokenType && token.l.val == '*')) && - token.r.typ == EnumToken.IdenTokenType); + ((token as NameSpaceAttributeToken).l == null || ((token as NameSpaceAttributeToken).l as IdentToken).typ == EnumToken.IdenTokenType || (((token as NameSpaceAttributeToken).l as LiteralToken).typ == EnumToken.LiteralTokenType && ((token as NameSpaceAttributeToken).l as LiteralToken).val == '*')) && + (token as NameSpaceAttributeToken).r.typ == EnumToken.IdenTokenType); result = { valid: valid ? ValidationLevel.Valid : ValidationLevel.Drop, @@ -1332,7 +1333,7 @@ function doValidateSyntax(syntax: ValidationToken, token: Token | AstNode, token } else if ('angle' == (syntax as ValidationPropertyToken).val) { - valid = token.typ == EnumToken.AngleTokenType || (token.typ == EnumToken.NumberTokenType && token.val == '0'); + valid = token.typ == EnumToken.AngleTokenType || (token.typ == EnumToken.NumberTokenType && (token as NumberToken).val == '0'); result = { valid: valid ? 
ValidationLevel.Valid : ValidationLevel.Drop, @@ -1385,7 +1386,7 @@ function doValidateSyntax(syntax: ValidationToken, token: Token | AstNode, token } else if (['integer', 'number'].includes((syntax as ValidationPropertyToken).val)) { // valid = token.typ == EnumToken.NumberTokenType; - valid = token.typ == EnumToken.NumberTokenType && ('integer' != (syntax as ValidationPropertyToken).val || Number.isInteger(+token.val)); + valid = token.typ == EnumToken.NumberTokenType && ('integer' != (syntax as ValidationPropertyToken).val || Number.isInteger(+(token as NumberToken).val)); if (valid && 'range' in syntax) { @@ -1406,7 +1407,7 @@ function doValidateSyntax(syntax: ValidationToken, token: Token | AstNode, token } else if ('length' == (syntax as ValidationPropertyToken).val) { - valid = isLength(token as DimensionToken) || (token.typ == EnumToken.NumberTokenType && token.val == '0'); + valid = isLength(token as DimensionToken) || (token.typ == EnumToken.NumberTokenType && (token as NumberToken).val == '0'); // @ts-ignore result = { @@ -1420,7 +1421,7 @@ function doValidateSyntax(syntax: ValidationToken, token: Token | AstNode, token } else if ('percentage' == (syntax as ValidationPropertyToken).val) { - valid = token.typ == EnumToken.PercentageTokenType || (token.typ == EnumToken.NumberTokenType && token.val == '0'); + valid = token.typ == EnumToken.PercentageTokenType || (token.typ == EnumToken.NumberTokenType && (token as NumberToken).val == '0'); result = { valid: valid ? ValidationLevel.Valid : ValidationLevel.Drop, @@ -1571,7 +1572,7 @@ function doValidateSyntax(syntax: ValidationToken, token: Token | AstNode, token if (token.typ == EnumToken.PseudoClassTokenType) { - let val: string = token.val; + let val: string = (token as PseudoClassToken).val; if (val == ':before' || val == ':after') { @@ -1582,7 +1583,7 @@ function doValidateSyntax(syntax: ValidationToken, token: Token | AstNode, token if (!valid && val.match(/^:?:-/) != null) { - const match: RegExpMatchArray = token.val.match(/^(:?:)(-[^-]+-)(.*)$/) as RegExpMatchArray; + const match: RegExpMatchArray = (token as PseudoClassToken).val.match(/^(:?:)(-[^-]+-)(.*)$/) as RegExpMatchArray; if (match != null) { @@ -1601,13 +1602,13 @@ function doValidateSyntax(syntax: ValidationToken, token: Token | AstNode, token } else if (token.typ == EnumToken.PseudoClassFuncTokenType) { - let key: string = token.val in config.selectors ? token.val : token.val + '()'; + let key: string = (token as PseudoClassFunctionToken).val in config.selectors ? 
(token as PseudoClassFunctionToken).val : (token as PseudoClassFunctionToken).val + '()'; valid = key in config.selectors; - if (!valid && token.val.match(/^:?:-/)) { + if (!valid && (token as PseudoClassFunctionToken).val.match(/^:?:-/)) { - const match: RegExpMatchArray = token.val.match(/^(:?:)(-[^-]+-)(.*)$/) as RegExpMatchArray; + const match: RegExpMatchArray = (token as PseudoClassFunctionToken).val.match(/^(:?:)(-[^-]+-)(.*)$/) as RegExpMatchArray; if (match != null) { @@ -1627,7 +1628,7 @@ function doValidateSyntax(syntax: ValidationToken, token: Token | AstNode, token valid = false; } else { - result = validateSyntax((s[0] as ValidationPseudoClassFunctionToken).chi, token.chi, root as AstNode, options, { + result = validateSyntax((s[0] as ValidationPseudoClassFunctionToken).chi, (token as PseudoClassFunctionToken).chi, root as AstNode, options, { ...context, tokens: null, level: context.level + 1 @@ -1717,8 +1718,8 @@ function doValidateSyntax(syntax: ValidationToken, token: Token | AstNode, token } else if ('wq-name' == (syntax as ValidationPropertyToken).val) { valid = token.typ == EnumToken.IdenTokenType || (token.typ == EnumToken.NameSpaceAttributeTokenType && - (token.l == null || token.l.typ == EnumToken.IdenTokenType || (token.l.typ == EnumToken.LiteralTokenType && token.l.val == '*')) && - token.r.typ == EnumToken.IdenTokenType); + ((token as NameSpaceAttributeToken).l == null || ((token as NameSpaceAttributeToken).l as Token).typ == EnumToken.IdenTokenType || (((token as NameSpaceAttributeToken).l as Token).typ == EnumToken.LiteralTokenType && ((token as NameSpaceAttributeToken).l as LiteralToken).val == '*')) && + (token as NameSpaceAttributeToken).r.typ == EnumToken.IdenTokenType); result = { valid: valid ? ValidationLevel.Valid : ValidationLevel.Drop, @@ -2062,7 +2063,7 @@ function doValidateSyntax(syntax: ValidationToken, token: Token | AstNode, token case ValidationTokenEnum.DeclarationDefinitionToken: - if (token.typ != EnumToken.DeclarationNodeType || token.nam != (syntax as ValidationDeclarationDefinitionToken).nam) { + if (token.typ != EnumToken.DeclarationNodeType || (token as AstDeclaration).nam != (syntax as ValidationDeclarationDefinitionToken).nam) { return { valid: ValidationLevel.Drop, @@ -2074,7 +2075,7 @@ function doValidateSyntax(syntax: ValidationToken, token: Token | AstNode, token } } - return validateSyntax([(syntax as ValidationDeclarationDefinitionToken).val], token.val, root as AstNode, options, context); + return validateSyntax([(syntax as ValidationDeclarationDefinitionToken).val], (token as AstDeclaration).val, root as AstNode, options, context); default: diff --git a/src/lib/validation/syntaxes/bg-layer.ts b/src/lib/validation/syntaxes/bg-layer.ts index ddf5161d..a112c233 100644 --- a/src/lib/validation/syntaxes/bg-layer.ts +++ b/src/lib/validation/syntaxes/bg-layer.ts @@ -1,7 +1,7 @@ // [ ? 
]* -import type {AstAtRule, AstRule, Token} from "../../../@types"; -import type {ValidationSelectorOptions, ValidationSyntaxResult} from "../../../@types/validation"; -import {ValidationLevel} from "../../ast"; +import type {AstAtRule, AstRule, Token} from "../../../@types/index.d.ts"; +import type {ValidationSelectorOptions, ValidationSyntaxResult} from "../../../@types/validation.d.ts"; +import {ValidationLevel} from "../../ast/index.ts"; export function validateBGLayers(tokens: Token[], root?: AstAtRule | AstRule, options?: ValidationSelectorOptions): ValidationSyntaxResult { diff --git a/src/lib/validation/syntaxes/complex-selector.ts b/src/lib/validation/syntaxes/complex-selector.ts index 7240029a..38e3e782 100644 --- a/src/lib/validation/syntaxes/complex-selector.ts +++ b/src/lib/validation/syntaxes/complex-selector.ts @@ -1,4 +1,4 @@ -import type {AstAtRule, AstRule, Token} from "../../../@types"; +import type {AstAtRule, AstRule, Token} from "../../../@types/index.d.ts"; import type {ValidationSelectorOptions, ValidationSyntaxResult} from "../../../@types/validation.d.ts"; import {consumeWhitespace, splitTokenList} from "../utils/index.ts"; import {EnumToken, ValidationLevel} from "../../ast/index.ts"; diff --git a/src/lib/validation/syntaxes/family-name.ts b/src/lib/validation/syntaxes/family-name.ts index ed24f4ab..54c7983b 100644 --- a/src/lib/validation/syntaxes/family-name.ts +++ b/src/lib/validation/syntaxes/family-name.ts @@ -1,4 +1,4 @@ -import type {AstAtRule, Token} from "../../../@types"; +import type {AstAtRule, Token} from "../../../@types/index.d.ts"; import type {ValidationSyntaxResult} from "../../../@types/validation.d.ts"; import {EnumToken, ValidationLevel} from "../../ast/index.ts"; import {consumeWhitespace} from "../utils/index.ts"; diff --git a/src/lib/validation/syntaxes/keyframe-block-list.ts b/src/lib/validation/syntaxes/keyframe-block-list.ts index 805236a6..2e3a8acd 100644 --- a/src/lib/validation/syntaxes/keyframe-block-list.ts +++ b/src/lib/validation/syntaxes/keyframe-block-list.ts @@ -1,4 +1,4 @@ -import type {AstAtRule, Token, ValidationOptions} from "../../../@types"; +import type {AstAtRule, Token, ValidationOptions} from "../../../@types/index.d.ts"; import type {ValidationSyntaxResult} from "../../../@types/validation.d.ts"; import {EnumToken, ValidationLevel} from "../../ast/index.ts"; import {validateKeyframeSelector} from "./keyframe-selector.ts"; @@ -14,7 +14,7 @@ export function validateKeyframeBlockList(tokens: Token[], atRule: AstAtRule, op if (tokens[++i].typ == EnumToken.CommaTokenType) { - result = validateKeyframeSelector(tokens.slice(j, i), atRule, options) as ValidationSyntaxResult; + result = validateKeyframeSelector(tokens.slice(j, i), options) as ValidationSyntaxResult; if (result.valid == ValidationLevel.Drop) { @@ -26,5 +26,5 @@ export function validateKeyframeBlockList(tokens: Token[], atRule: AstAtRule, op } } - return validateKeyframeSelector(i == j ? tokens.slice(i) : tokens.slice(j, i + 1), atRule, options); + return validateKeyframeSelector(i == j ? 
tokens.slice(i) : tokens.slice(j, i + 1), options); } \ No newline at end of file diff --git a/src/lib/validation/syntaxes/keyframe-selector.ts b/src/lib/validation/syntaxes/keyframe-selector.ts index 5491167c..16054806 100644 --- a/src/lib/validation/syntaxes/keyframe-selector.ts +++ b/src/lib/validation/syntaxes/keyframe-selector.ts @@ -1,4 +1,4 @@ -import type {Token, ValidationOptions} from "../../../@types"; +import type {IdentToken, Token, ValidationOptions} from "../../../@types/index.d.ts"; import type {ValidationSyntaxResult} from "../../../@types/validation.d.ts"; import {consumeWhitespace, splitTokenList} from "../utils/index.ts"; import {EnumToken, ValidationLevel} from "../../ast/index.ts"; @@ -34,7 +34,7 @@ export function validateKeyframeSelector(tokens: Token[], options: ValidationOpt } } - if (t[0].typ != EnumToken.PercentageTokenType && !(t[0].typ == EnumToken.IdenTokenType && ['from', 'to', 'cover', 'contain', 'entry', 'exit', 'entry-crossing', 'exit-crossing'].includes(t[0].val))) { + if (t[0].typ != EnumToken.PercentageTokenType && !(t[0].typ == EnumToken.IdenTokenType && ['from', 'to', 'cover', 'contain', 'entry', 'exit', 'entry-crossing', 'exit-crossing'].includes((t[0] as IdentToken).val))) { return { valid: ValidationLevel.Drop, diff --git a/src/lib/validation/syntaxes/layer-name.ts b/src/lib/validation/syntaxes/layer-name.ts index 60297d50..957ddd9a 100644 --- a/src/lib/validation/syntaxes/layer-name.ts +++ b/src/lib/validation/syntaxes/layer-name.ts @@ -1,5 +1,5 @@ -import type {Token} from "../../../@types"; -import {EnumToken, ValidationLevel} from "../../ast"; +import type {Token} from "../../../@types/index.d.ts"; +import {EnumToken, ValidationLevel} from "../../ast/index.ts"; import type {ValidationSyntaxResult} from "../../../@types/validation.d.ts"; export function validateLayerName(tokens: Token[]): ValidationSyntaxResult { diff --git a/src/lib/validation/syntaxes/relative-selector-list.ts b/src/lib/validation/syntaxes/relative-selector-list.ts index 27a4750e..151dbc8b 100644 --- a/src/lib/validation/syntaxes/relative-selector-list.ts +++ b/src/lib/validation/syntaxes/relative-selector-list.ts @@ -1,4 +1,4 @@ -import type {AstAtRule, AstRule, Token} from "../../../@types"; +import type {AstAtRule, AstRule, Token} from "../../../@types/index.d.ts"; import type {ValidationSelectorOptions, ValidationSyntaxResult} from "../../../@types/validation.d.ts"; import {ValidationLevel} from "../../ast/index.ts"; import {validateRelativeSelector} from "./relative-selector.ts"; diff --git a/src/lib/validation/syntaxes/url.ts b/src/lib/validation/syntaxes/url.ts index 54059949..57b04dd3 100644 --- a/src/lib/validation/syntaxes/url.ts +++ b/src/lib/validation/syntaxes/url.ts @@ -1,4 +1,4 @@ -import type {Token} from "../../../@types"; +import type {FunctionURLToken, Token} from "../../../@types/index.d.ts"; import type {ValidationSyntaxResult} from "../../../@types/validation.d.ts"; import {EnumToken, ValidationLevel} from "../../ast/index.ts"; import {consumeWhitespace} from "../utils/index.ts"; @@ -34,7 +34,7 @@ export function validateURL(token: Token): ValidationSyntaxResult { } } - const children = token.chi.slice() as Token[]; + const children = (token as FunctionURLToken).chi.slice() as Token[]; consumeWhitespace(children); diff --git a/src/lib/validation/utils/list.ts b/src/lib/validation/utils/list.ts index ea7d98da..c6ef556b 100644 --- a/src/lib/validation/utils/list.ts +++ b/src/lib/validation/utils/list.ts @@ -1,5 +1,5 @@ -import {EnumToken} from "../../ast"; 
-import type {Token} from "../../../@types"; +import {EnumToken} from "../../ast/index.ts"; +import type {Token} from "../../../@types/index.d.ts"; export function stripCommaToken(tokenList: Token[]): Token[] | null { diff --git a/src/lib/validation/utils/whitespace.ts b/src/lib/validation/utils/whitespace.ts index 81033723..e6c7ceba 100644 --- a/src/lib/validation/utils/whitespace.ts +++ b/src/lib/validation/utils/whitespace.ts @@ -1,4 +1,4 @@ -import {EnumToken} from "../../ast"; +import {EnumToken} from "../../ast/index.ts"; import type {Token} from "../../../@types/token.d.ts"; export function consumeWhitespace(tokens: Token[]): boolean { diff --git a/src/web/load.ts b/src/web/load.ts index 56939696..3333be9f 100644 --- a/src/web/load.ts +++ b/src/web/load.ts @@ -1,4 +1,4 @@ -import {matchUrl, resolve} from "../lib/fs"; +import {matchUrl, resolve} from "../lib/fs/index.ts"; function parseResponse(response: Response) { diff --git a/test/allFiles.js b/test/allFiles.js index 43b567db..d0e51176 100644 --- a/test/allFiles.js +++ b/test/allFiles.js @@ -6,7 +6,10 @@ const baseDir = import.meta.dirname + '/files/css/'; for (const file of await readdir(baseDir)) { let message = `--> file ${file}: `; - const result = await load(baseDir + file, import.meta.dirname).then(css => transform(css, {src: baseDir + file, minify: true, sourcemap: true, nestingRules: true, resolveImport: true})); + const result = await load(baseDir + file, import.meta.dirname).then(css => transform(css, { + src: baseDir + file, minify: true, sourcemap: true, + removePrefix: true, + nestingRules: true, resolveImport: true})); message += `ratio ${(100 * (1 - result.stats.bytesOut / result.stats.bytesIn)).toFixed(2)}%`; diff --git a/test/specs/code/transform.js b/test/specs/code/transform.js index ecaad403..374ec0bb 100644 --- a/test/specs/code/transform.js +++ b/test/specs/code/transform.js @@ -445,7 +445,7 @@ export function run(describe, expect, transform, parse, render, dirname, readFil }`)); }); - it('matrix #25', function () { + it('matrix #26', function () { const nesting1 = ` .now { @@ -474,6 +474,22 @@ export function run(describe, expect, transform, parse, render, dirname, readFil computeTransform: true }).then((result) => expect(result.code).equals(`.now { transform: matrix3d(-.6,1.34788,0,0,-2.34788,-.6,0,0,0,0,1,0,0,0,10,1) +}`)); + }); + + it('matrix #27', function () { + const nesting1 = ` + + .now { + + transform: rotate3d(0,0,1,-10deg) +} +`; + return transform(nesting1, { + beautify: true, + computeTransform: true + }).then((result) => expect(result.code).equals(`.now { + transform: rotate3d(0,0,1,-10deg) }`)); }); }); From dff0df72a5b19ba23b6bf656a9adf4d3e3f064c3 Mon Sep 17 00:00:00 2001 From: Thierry Bela Nanga Date: Thu, 24 Apr 2025 02:42:14 -0400 Subject: [PATCH 2/5] providing type hint #75 --- src/@types/shorthand.d.ts | 2 +- src/lib/parser/utils/type.ts | 2 +- src/lib/renderer/color/colormix.ts | 2 +- src/lib/validation/at-rules/supports.ts | 10 +++------- src/lib/validation/selector.ts | 5 ++++- src/lib/validation/syntaxes/keyframe-selector.ts | 4 ++-- 6 files changed, 12 insertions(+), 13 deletions(-) diff --git a/src/@types/shorthand.d.ts b/src/@types/shorthand.d.ts index 09422e0a..5b4cf2a1 100644 --- a/src/@types/shorthand.d.ts +++ b/src/@types/shorthand.d.ts @@ -56,7 +56,7 @@ export interface ShorthandMapType { default: string[]; mapping?: Record; multiple?: boolean; - separator?: { typ: keyof EnumToken ; val?: string }; + separator?: { typ: keyof EnumToken; val?: string }; set?: Record 
properties: { [property: string]: PropertyMapType; diff --git a/src/lib/parser/utils/type.ts b/src/lib/parser/utils/type.ts index a0c3ac2a..0b3e0575 100644 --- a/src/lib/parser/utils/type.ts +++ b/src/lib/parser/utils/type.ts @@ -27,7 +27,7 @@ export function matchType(val: Token, properties: PropertyMapType): boolean { if (mathFuncs.includes((val as FunctionToken).val)) { - return (val as FunctionToken).chi.every(((t: Token): boolean => [EnumToken.Add,EnumToken.Mul,EnumToken.Div,EnumToken.Sub,EnumToken.LiteralTokenType, EnumToken.CommaTokenType, EnumToken.WhitespaceTokenType, EnumToken.DimensionTokenType, EnumToken.NumberTokenType, EnumToken.LengthTokenType, EnumToken.AngleTokenType, EnumToken.PercentageTokenType, EnumToken.ResolutionTokenType, EnumToken.TimeTokenType, EnumToken.BinaryExpressionTokenType].includes(t.typ) || matchType(t, properties))); + return (val as FunctionToken).chi.every(((t: Token): boolean => [EnumToken.Add, EnumToken.Mul, EnumToken.Div, EnumToken.Sub, EnumToken.LiteralTokenType, EnumToken.CommaTokenType, EnumToken.WhitespaceTokenType, EnumToken.DimensionTokenType, EnumToken.NumberTokenType, EnumToken.LengthTokenType, EnumToken.AngleTokenType, EnumToken.PercentageTokenType, EnumToken.ResolutionTokenType, EnumToken.TimeTokenType, EnumToken.BinaryExpressionTokenType].includes(t.typ) || matchType(t, properties))); } // match type defined like function 'symbols()', 'url()', 'attr()' etc. diff --git a/src/lib/renderer/color/colormix.ts b/src/lib/renderer/color/colormix.ts index 8f4737e9..7a778320 100644 --- a/src/lib/renderer/color/colormix.ts +++ b/src/lib/renderer/color/colormix.ts @@ -312,7 +312,7 @@ export function colorMix(colorSpace: IdentToken, hueInterpolationMethod: IdentTo // powerless if (lchSpaces.includes(color1.kin) || lchSpaces.includes(colorSpace.val)) { - if ((components1[2].typ == EnumToken.IdenTokenType &&( components1[2] as IdentToken).val == 'none') || values1[2] == 0) { + if ((components1[2].typ == EnumToken.IdenTokenType && (components1[2] as IdentToken).val == 'none') || values1[2] == 0) { values1[2] = values2[2]; } diff --git a/src/lib/validation/at-rules/supports.ts b/src/lib/validation/at-rules/supports.ts index 0f86493b..35cb196f 100644 --- a/src/lib/validation/at-rules/supports.ts +++ b/src/lib/validation/at-rules/supports.ts @@ -117,15 +117,11 @@ export function validateAtRuleSupportsConditions(atRule: AstAtRule, tokenList: T previousToken = tokens[0]; tokens.shift(); // continue; - } - - else { + } else { return result; } - } - - else { + } else { return result; } @@ -219,7 +215,7 @@ export function validateAtRuleSupportsConditions(atRule: AstAtRule, tokenList: T } } -export function validateSupportCondition(atRule: AstAtRule, token: Token): ValidationSyntaxResult{ +export function validateSupportCondition(atRule: AstAtRule, token: Token): ValidationSyntaxResult { if (token.typ == EnumToken.MediaFeatureNotTokenType) { diff --git a/src/lib/validation/selector.ts b/src/lib/validation/selector.ts index 3269f8e0..9617d78a 100644 --- a/src/lib/validation/selector.ts +++ b/src/lib/validation/selector.ts @@ -39,5 +39,8 @@ export function validateSelector(selector: Token[], options: ValidationOptions, const nestedSelector: boolean = isNested > 0; // @ts-ignore - return nestedSelector ? validateRelativeSelectorList(selector, root, {...(options ?? {}), nestedSelector}) : validateSelectorList(selector, root, {...(options ?? {}), nestedSelector}); + return nestedSelector ? validateRelativeSelectorList(selector, root, { + ...(options ?? 
{}), + nestedSelector + }) : validateSelectorList(selector, root as AstRule, {...(options ?? {}), nestedSelector}); } diff --git a/src/lib/validation/syntaxes/keyframe-selector.ts b/src/lib/validation/syntaxes/keyframe-selector.ts index 16054806..e357b928 100644 --- a/src/lib/validation/syntaxes/keyframe-selector.ts +++ b/src/lib/validation/syntaxes/keyframe-selector.ts @@ -22,7 +22,7 @@ export function validateKeyframeSelector(tokens: Token[], options: ValidationOpt for (const t of splitTokenList(tokens)) { - if (t.length!= 1) { + if (t.length != 1) { return { valid: ValidationLevel.Drop, @@ -34,7 +34,7 @@ export function validateKeyframeSelector(tokens: Token[], options: ValidationOpt } } - if (t[0].typ != EnumToken.PercentageTokenType && !(t[0].typ == EnumToken.IdenTokenType && ['from', 'to', 'cover', 'contain', 'entry', 'exit', 'entry-crossing', 'exit-crossing'].includes((t[0] as IdentToken).val))) { + if (t[0].typ != EnumToken.PercentageTokenType && !(t[0].typ == EnumToken.IdenTokenType && ['from', 'to', 'cover', 'contain', 'entry', 'exit', 'entry-crossing', 'exit-crossing'].includes((t[0] as IdentToken).val))) { return { valid: ValidationLevel.Drop, From 4be719c90f3858203a5663f4d0eedb1709be51be Mon Sep 17 00:00:00 2001 From: Thierry Bela Nanga Date: Sat, 26 Apr 2025 09:18:13 -0400 Subject: [PATCH 3/5] fix current color parsing #78 --- CHANGELOG.md | 1 + dist/index-umd-web.js | 139 ++++++++++-------- dist/index.cjs | 139 ++++++++++-------- dist/index.d.ts | 14 +- dist/lib/ast/features/calc.js | 12 +- dist/lib/ast/math/expression.js | 27 ++-- dist/lib/ast/walk.js | 40 ++--- dist/lib/parser/parse.js | 17 ++- .../color/{colormix.js => color-mix.js} | 0 dist/lib/renderer/color/relativecolor.js | 35 ++--- dist/lib/renderer/color/srgb.js | 2 +- dist/lib/renderer/render.js | 11 +- dist/lib/validation/selector.js | 5 +- jsr.json | 2 +- package.json | 2 +- src/@types/walker.d.ts | 12 +- src/lib/ast/features/calc.ts | 14 +- src/lib/ast/math/expression.ts | 28 ++-- src/lib/ast/walk.ts | 42 +++--- src/lib/parser/parse.ts | 23 +++ .../color/{colormix.ts => color-mix.ts} | 0 src/lib/renderer/color/index.ts | 2 +- src/lib/renderer/color/relativecolor.ts | 46 ++---- src/lib/renderer/color/srgb.ts | 2 +- src/lib/renderer/render.ts | 11 +- test/specs/code/color.js | 47 ++++++ 26 files changed, 411 insertions(+), 262 deletions(-) rename dist/lib/renderer/color/{colormix.js => color-mix.js} (100%) rename src/lib/renderer/color/{colormix.ts => color-mix.ts} (100%) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0a2ba14d..afa1aaf9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,7 @@ # v1.0.0 +- [x] current color parse error when used in color functions - [x] minification : CSS transform module level 2 - [x] translate - [x] scale diff --git a/dist/index-umd-web.js b/dist/index-umd-web.js index f56bf798..0bf0a9ac 100644 --- a/dist/index-umd-web.js +++ b/dist/index-umd-web.js @@ -1153,7 +1153,7 @@ return values; } function lab2srgb(token) { - const [l, a, b, alpha] = getLABComponents(token); + const [l, a, b, alpha] = getLABComponents(token) ?? 
[]; if (l == null || a == null || b == null) { return null; } @@ -2759,14 +2759,19 @@ return tokens; } if (nodes.length <= 1) { - // @ts-ignore - if (nodes.length == 1 && nodes[0].typ == exports.EnumToken.IdenTokenType && typeof Math[nodes[0].val.toUpperCase()] == 'number') { - return [{ - ...nodes[0], - // @ts-ignore - val: '' + Math[nodes[0].val.toUpperCase()], - typ: exports.EnumToken.NumberTokenType - }]; + if (nodes.length == 1) { + if (nodes[0].typ == exports.EnumToken.BinaryExpressionTokenType) { + return inlineExpression(nodes[0]); + } + // @ts-ignore + if (nodes[0].typ == exports.EnumToken.IdenTokenType && typeof Math[nodes[0].val.toUpperCase()] == 'number') { + return [{ + ...nodes[0], + // @ts-ignore + val: '' + Math[nodes[0].val.toUpperCase()], + typ: exports.EnumToken.NumberTokenType + }]; + } } return nodes; } @@ -3147,6 +3152,10 @@ * @param token */ function evaluateExpression(token) { + // if (token.typ == EnumToken.ParensTokenType) { + // + // return evaluateExpression(buildExpression((token as ParensToken).chi)); + // } if (token.typ != exports.EnumToken.BinaryExpressionTokenType) { return token; } @@ -3322,23 +3331,19 @@ } else if ([exports.EnumToken.NumberTokenType, exports.EnumToken.PercentageTokenType, exports.EnumToken.AngleTokenType, exports.EnumToken.LengthTokenType].includes(exp.typ)) ; else if (exp.typ == exports.EnumToken.IdenTokenType && exp.val in values) { - // @ts-ignore if (typeof values[exp.val] == 'number') { expr[key] = { typ: exports.EnumToken.NumberTokenType, - // @ts-ignore val: reduceNumber(values[exp.val]) }; } else { - // @ts-ignore expr[key] = values[exp.val]; } } else if (exp.typ == exports.EnumToken.FunctionTokenType && mathFuncs.includes(exp.val)) { for (let { value, parent } of walkValues(exp.chi, exp)) { if (parent == null) { - // @ts-ignore parent = exp; } if (value.typ == exports.EnumToken.PercentageTokenType) { @@ -3370,30 +3375,19 @@ return expr; } function replaceValue(parent, value, newValue) { - if (parent.typ == exports.EnumToken.BinaryExpressionTokenType) { - if (parent.l == value) { - parent.l = newValue; - } - else { - parent.r = newValue; - } - } - else { - for (let i = 0; i < parent.chi.length; i++) { - if (parent.chi[i] == value) { - parent.chi.splice(i, 1, newValue); - break; - } - if (parent.chi[i].typ == exports.EnumToken.BinaryExpressionTokenType) { - if (parent.chi[i].l == value) { - parent.chi[i].l = newValue; - break; + for (const { value: val, parent: pr } of walkValues([parent])) { + if (val.typ == value.typ && val.val == value.val) { + if (pr.typ == exports.EnumToken.BinaryExpressionTokenType) { + if (pr.l == val) { + pr.l = newValue; } - else if (parent.chi[i].r == value) { - parent.chi[i].r = newValue; - break; + else { + pr.r = newValue; } } + else { + pr.chi.splice(pr.chi.indexOf(val), 1, newValue); + } } } } @@ -3822,7 +3816,7 @@ if (value != null) { token = value; } - else { + else if (!token.chi.some(t => t.typ == exports.EnumToken.CommaTokenType)) { token.chi = children.reduce((acc, curr, index) => { if (acc.length > 0) { acc.push({ typ: exports.EnumToken.CommaTokenType }); @@ -3847,8 +3841,11 @@ } if (token.val == 'color') { if (token.chi[0].typ == exports.EnumToken.IdenTokenType && colorFuncColorSpace.includes(token.chi[0].val.toLowerCase())) { - // @ts-ignore - return reduceHexValue(srgb2hexvalues(...color2srgbvalues(token))); + const values = color2srgbvalues(token); + if (Array.isArray(values) && values.every(t => !Number.isNaN(t))) { + // @ts-ignore + return reduceHexValue(srgb2hexvalues(...values)); + } 
} } if (token.cal != null) { @@ -12650,7 +12647,10 @@ } const nestedSelector = isNested > 0; // @ts-ignore - return nestedSelector ? validateRelativeSelectorList(selector, root, { ...(options ?? {}), nestedSelector }) : validateSelectorList(selector, root, { ...(options ?? {}), nestedSelector }); + return nestedSelector ? validateRelativeSelectorList(selector, root, { + ...(options ?? {}), + nestedSelector + }) : validateSelectorList(selector, root, { ...(options ?? {}), nestedSelector }); } function validateAtRuleMedia(atRule, options, root) { @@ -15227,6 +15227,21 @@ }); return null; } + for (const { value: token } of walkValues(value, null, { + fn: (node) => node.typ == exports.EnumToken.FunctionTokenType && node.val == 'calc' ? WalkerOptionEnum.IgnoreChildren : null, + type: exports.EnumToken.FunctionTokenType + })) { + if (token.typ == exports.EnumToken.FunctionTokenType && token.val == 'calc') { + for (const { value: node, parent } of walkValues(token.chi, token)) { + // fix expressions starting with '/' or '*' such as '/4' in (1 + 1)/4 + if (node.typ == exports.EnumToken.LiteralTokenType && node.val.length > 0) { + if (node.val[0] == '/' || node.val[0] == '*') { + parent.chi.splice(parent.chi.indexOf(node), 1, { typ: node.val[0] == '/' ? exports.EnumToken.Div : exports.EnumToken.Mul }, ...parseString(node.val.slice(1))); + } + } + } + } + } const node = { typ: exports.EnumToken.DeclarationNodeType, // @ts-ignore @@ -16097,6 +16112,13 @@ return true; } + var WalkerOptionEnum; + (function (WalkerOptionEnum) { + WalkerOptionEnum[WalkerOptionEnum["Ignore"] = 0] = "Ignore"; + WalkerOptionEnum[WalkerOptionEnum["Stop"] = 1] = "Stop"; + WalkerOptionEnum[WalkerOptionEnum["Children"] = 2] = "Children"; + WalkerOptionEnum[WalkerOptionEnum["IgnoreChildren"] = 3] = "IgnoreChildren"; + })(WalkerOptionEnum || (WalkerOptionEnum = {})); var WalkerValueEvent; (function (WalkerValueEvent) { WalkerValueEvent[WalkerValueEvent["Enter"] = 0] = "Enter"; @@ -16115,10 +16137,10 @@ let option = null; if (filter != null) { option = filter(node); - if (option === 'ignore') { + if (option === WalkerOptionEnum.Ignore) { continue; } - if (option === 'stop') { + if (option === WalkerOptionEnum.Stop) { break; } } @@ -16127,7 +16149,7 @@ // @ts-ignore yield { node, parent: map.get(node), root }; } - if (option !== 'ignore-children' && 'chi' in node) { + if (option !== WalkerOptionEnum.IgnoreChildren && 'chi' in node) { parents.unshift(...node.chi); for (const child of node.chi.slice()) { map.set(child, node); @@ -16159,19 +16181,20 @@ event: WalkerValueEvent.Enter }; } + const eventType = filter.event ?? WalkerValueEvent.Enter; while (stack.length > 0) { let value = reverse ? stack.pop() : stack.shift(); let option = null; - if (filter.fn != null && filter.event == WalkerValueEvent.Enter) { + if (filter.fn != null && eventType == WalkerValueEvent.Enter) { const isValid = filter.type == null || value.typ == filter.type || (Array.isArray(filter.type) && filter.type.includes(value.typ)) || (typeof filter.type == 'function' && filter.type(value)); if (isValid) { - option = filter.fn(value, map.get(value) ?? root, WalkerValueEvent.Enter); - if (option === 'ignore') { + option = filter.fn(value, map.get(value) ?? 
root); + if (option === WalkerOptionEnum.Ignore) { continue; } - if (option === 'stop') { + if (option === WalkerOptionEnum.Stop) { break; } // @ts-ignore @@ -16180,8 +16203,7 @@ } } } - // @ts-ignore - if (filter.event == WalkerValueEvent.Enter && option !== 'children') { + if (eventType == WalkerValueEvent.Enter && option !== WalkerOptionEnum.Children) { yield { value, parent: map.get(value) ?? root, @@ -16191,7 +16213,7 @@ root: root ?? null }; } - if (option !== 'ignore-children' && 'chi' in value) { + if (option !== WalkerOptionEnum.IgnoreChildren && 'chi' in value) { const sliced = value.chi.slice(); for (const child of sliced) { map.set(child, value); @@ -16204,24 +16226,23 @@ } } else if (value.typ == exports.EnumToken.BinaryExpressionTokenType) { - map.set(value.l, map.get(value) ?? root); - map.set(value.r, map.get(value) ?? root); + map.set(value.l, value); + map.set(value.r, value); stack.unshift(value.l, value.r); } - if (filter.event == WalkerValueEvent.Leave && filter.fn != null) { + if (eventType == WalkerValueEvent.Leave && filter.fn != null) { const isValid = filter.type == null || value.typ == filter.type || (Array.isArray(filter.type) && filter.type.includes(value.typ)) || (typeof filter.type == 'function' && filter.type(value)); if (isValid) { - option = filter.fn(value, map.get(value), WalkerValueEvent.Leave); + option = filter.fn(value, map.get(value)); // @ts-ignore if (option != null && 'typ' in option) { map.set(option, map.get(value) ?? root); } } } - // @ts-ignore - if (filter.event == WalkerValueEvent.Leave && option !== 'children') { + if (eventType == WalkerValueEvent.Leave && option !== WalkerOptionEnum.Children) { yield { value, parent: map.get(value) ?? root, @@ -17675,7 +17696,6 @@ if (!('chi' in ast)) { return; } - // @ts-ignore for (const node of ast.chi) { if (node.typ != exports.EnumToken.DeclarationNodeType) { continue; @@ -17683,15 +17703,18 @@ const set = new Set; for (const { value, parent } of walkValues(node.val, node, { event: WalkerValueEvent.Enter, - fn(node, parent, event) { + // @ts-ignore + fn(node, parent) { if (parent != null && + // @ts-ignore parent.typ == exports.EnumToken.DeclarationNodeType && + // @ts-ignore parent.val.length == 1 && node.typ == exports.EnumToken.FunctionTokenType && mathFuncs.includes(node.val) && node.chi.length == 1 && node.chi[0].typ == exports.EnumToken.IdenTokenType) { - return 'ignore'; + return WalkerOptionEnum.Ignore; } if ((node.typ == exports.EnumToken.FunctionTokenType && node.val == 'var') || (!mathFuncs.includes(parent.val) && [exports.EnumToken.ColorTokenType, exports.EnumToken.DeclarationNodeType, exports.EnumToken.RuleNodeType, exports.EnumToken.AtRuleNodeType, exports.EnumToken.StyleSheetNodeType].includes(parent?.typ))) { return null; @@ -17713,7 +17736,7 @@ // @ts-ignore node[key] = values; } - return 'ignore'; + return WalkerOptionEnum.Ignore; } return null; } diff --git a/dist/index.cjs b/dist/index.cjs index e7f65475..63f292eb 100644 --- a/dist/index.cjs +++ b/dist/index.cjs @@ -1152,7 +1152,7 @@ function hsl2srgbvalues(h, s, l, a = null) { return values; } function lab2srgb(token) { - const [l, a, b, alpha] = getLABComponents(token); + const [l, a, b, alpha] = getLABComponents(token) ?? 
[]; if (l == null || a == null || b == null) { return null; } @@ -2758,14 +2758,19 @@ function evaluate(tokens) { return tokens; } if (nodes.length <= 1) { - // @ts-ignore - if (nodes.length == 1 && nodes[0].typ == exports.EnumToken.IdenTokenType && typeof Math[nodes[0].val.toUpperCase()] == 'number') { - return [{ - ...nodes[0], - // @ts-ignore - val: '' + Math[nodes[0].val.toUpperCase()], - typ: exports.EnumToken.NumberTokenType - }]; + if (nodes.length == 1) { + if (nodes[0].typ == exports.EnumToken.BinaryExpressionTokenType) { + return inlineExpression(nodes[0]); + } + // @ts-ignore + if (nodes[0].typ == exports.EnumToken.IdenTokenType && typeof Math[nodes[0].val.toUpperCase()] == 'number') { + return [{ + ...nodes[0], + // @ts-ignore + val: '' + Math[nodes[0].val.toUpperCase()], + typ: exports.EnumToken.NumberTokenType + }]; + } } return nodes; } @@ -3146,6 +3151,10 @@ function inlineExpression(token) { * @param token */ function evaluateExpression(token) { + // if (token.typ == EnumToken.ParensTokenType) { + // + // return evaluateExpression(buildExpression((token as ParensToken).chi)); + // } if (token.typ != exports.EnumToken.BinaryExpressionTokenType) { return token; } @@ -3321,23 +3330,19 @@ function computeComponentValue(expr, converted, values) { } else if ([exports.EnumToken.NumberTokenType, exports.EnumToken.PercentageTokenType, exports.EnumToken.AngleTokenType, exports.EnumToken.LengthTokenType].includes(exp.typ)) ; else if (exp.typ == exports.EnumToken.IdenTokenType && exp.val in values) { - // @ts-ignore if (typeof values[exp.val] == 'number') { expr[key] = { typ: exports.EnumToken.NumberTokenType, - // @ts-ignore val: reduceNumber(values[exp.val]) }; } else { - // @ts-ignore expr[key] = values[exp.val]; } } else if (exp.typ == exports.EnumToken.FunctionTokenType && mathFuncs.includes(exp.val)) { for (let { value, parent } of walkValues(exp.chi, exp)) { if (parent == null) { - // @ts-ignore parent = exp; } if (value.typ == exports.EnumToken.PercentageTokenType) { @@ -3369,30 +3374,19 @@ function computeComponentValue(expr, converted, values) { return expr; } function replaceValue(parent, value, newValue) { - if (parent.typ == exports.EnumToken.BinaryExpressionTokenType) { - if (parent.l == value) { - parent.l = newValue; - } - else { - parent.r = newValue; - } - } - else { - for (let i = 0; i < parent.chi.length; i++) { - if (parent.chi[i] == value) { - parent.chi.splice(i, 1, newValue); - break; - } - if (parent.chi[i].typ == exports.EnumToken.BinaryExpressionTokenType) { - if (parent.chi[i].l == value) { - parent.chi[i].l = newValue; - break; + for (const { value: val, parent: pr } of walkValues([parent])) { + if (val.typ == value.typ && val.val == value.val) { + if (pr.typ == exports.EnumToken.BinaryExpressionTokenType) { + if (pr.l == val) { + pr.l = newValue; } - else if (parent.chi[i].r == value) { - parent.chi[i].r = newValue; - break; + else { + pr.r = newValue; } } + else { + pr.chi.splice(pr.chi.indexOf(val), 1, newValue); + } } } } @@ -3821,7 +3815,7 @@ function renderToken(token, options = {}, cache = Object.create(null), reducer, if (value != null) { token = value; } - else { + else if (!token.chi.some(t => t.typ == exports.EnumToken.CommaTokenType)) { token.chi = children.reduce((acc, curr, index) => { if (acc.length > 0) { acc.push({ typ: exports.EnumToken.CommaTokenType }); @@ -3846,8 +3840,11 @@ function renderToken(token, options = {}, cache = Object.create(null), reducer, } if (token.val == 'color') { if (token.chi[0].typ == 
exports.EnumToken.IdenTokenType && colorFuncColorSpace.includes(token.chi[0].val.toLowerCase())) { - // @ts-ignore - return reduceHexValue(srgb2hexvalues(...color2srgbvalues(token))); + const values = color2srgbvalues(token); + if (Array.isArray(values) && values.every(t => !Number.isNaN(t))) { + // @ts-ignore + return reduceHexValue(srgb2hexvalues(...values)); + } } } if (token.cal != null) { @@ -12749,7 +12746,10 @@ function validateSelector(selector, options, root) { } const nestedSelector = isNested > 0; // @ts-ignore - return nestedSelector ? validateRelativeSelectorList(selector, root, { ...(options ?? {}), nestedSelector }) : validateSelectorList(selector, root, { ...(options ?? {}), nestedSelector }); + return nestedSelector ? validateRelativeSelectorList(selector, root, { + ...(options ?? {}), + nestedSelector + }) : validateSelectorList(selector, root, { ...(options ?? {}), nestedSelector }); } function validateAtRuleMedia(atRule, options, root) { @@ -15326,6 +15326,21 @@ async function parseNode(results, context, stats, options, errors, src, map, raw }); return null; } + for (const { value: token } of walkValues(value, null, { + fn: (node) => node.typ == exports.EnumToken.FunctionTokenType && node.val == 'calc' ? WalkerOptionEnum.IgnoreChildren : null, + type: exports.EnumToken.FunctionTokenType + })) { + if (token.typ == exports.EnumToken.FunctionTokenType && token.val == 'calc') { + for (const { value: node, parent } of walkValues(token.chi, token)) { + // fix expressions starting with '/' or '*' such as '/4' in (1 + 1)/4 + if (node.typ == exports.EnumToken.LiteralTokenType && node.val.length > 0) { + if (node.val[0] == '/' || node.val[0] == '*') { + parent.chi.splice(parent.chi.indexOf(node), 1, { typ: node.val[0] == '/' ? exports.EnumToken.Div : exports.EnumToken.Mul }, ...parseString(node.val.slice(1))); + } + } + } + } + } const node = { typ: exports.EnumToken.DeclarationNodeType, // @ts-ignore @@ -16196,6 +16211,13 @@ function eq(a, b) { return true; } +var WalkerOptionEnum; +(function (WalkerOptionEnum) { + WalkerOptionEnum[WalkerOptionEnum["Ignore"] = 0] = "Ignore"; + WalkerOptionEnum[WalkerOptionEnum["Stop"] = 1] = "Stop"; + WalkerOptionEnum[WalkerOptionEnum["Children"] = 2] = "Children"; + WalkerOptionEnum[WalkerOptionEnum["IgnoreChildren"] = 3] = "IgnoreChildren"; +})(WalkerOptionEnum || (WalkerOptionEnum = {})); var WalkerValueEvent; (function (WalkerValueEvent) { WalkerValueEvent[WalkerValueEvent["Enter"] = 0] = "Enter"; @@ -16214,10 +16236,10 @@ function* walk(node, filter) { let option = null; if (filter != null) { option = filter(node); - if (option === 'ignore') { + if (option === WalkerOptionEnum.Ignore) { continue; } - if (option === 'stop') { + if (option === WalkerOptionEnum.Stop) { break; } } @@ -16226,7 +16248,7 @@ function* walk(node, filter) { // @ts-ignore yield { node, parent: map.get(node), root }; } - if (option !== 'ignore-children' && 'chi' in node) { + if (option !== WalkerOptionEnum.IgnoreChildren && 'chi' in node) { parents.unshift(...node.chi); for (const child of node.chi.slice()) { map.set(child, node); @@ -16258,19 +16280,20 @@ function* walkValues(values, root = null, filter, reverse) { event: WalkerValueEvent.Enter }; } + const eventType = filter.event ?? WalkerValueEvent.Enter; while (stack.length > 0) { let value = reverse ? 
stack.pop() : stack.shift(); let option = null; - if (filter.fn != null && filter.event == WalkerValueEvent.Enter) { + if (filter.fn != null && eventType == WalkerValueEvent.Enter) { const isValid = filter.type == null || value.typ == filter.type || (Array.isArray(filter.type) && filter.type.includes(value.typ)) || (typeof filter.type == 'function' && filter.type(value)); if (isValid) { - option = filter.fn(value, map.get(value) ?? root, WalkerValueEvent.Enter); - if (option === 'ignore') { + option = filter.fn(value, map.get(value) ?? root); + if (option === WalkerOptionEnum.Ignore) { continue; } - if (option === 'stop') { + if (option === WalkerOptionEnum.Stop) { break; } // @ts-ignore @@ -16279,8 +16302,7 @@ function* walkValues(values, root = null, filter, reverse) { } } } - // @ts-ignore - if (filter.event == WalkerValueEvent.Enter && option !== 'children') { + if (eventType == WalkerValueEvent.Enter && option !== WalkerOptionEnum.Children) { yield { value, parent: map.get(value) ?? root, @@ -16290,7 +16312,7 @@ function* walkValues(values, root = null, filter, reverse) { root: root ?? null }; } - if (option !== 'ignore-children' && 'chi' in value) { + if (option !== WalkerOptionEnum.IgnoreChildren && 'chi' in value) { const sliced = value.chi.slice(); for (const child of sliced) { map.set(child, value); @@ -16303,24 +16325,23 @@ function* walkValues(values, root = null, filter, reverse) { } } else if (value.typ == exports.EnumToken.BinaryExpressionTokenType) { - map.set(value.l, map.get(value) ?? root); - map.set(value.r, map.get(value) ?? root); + map.set(value.l, value); + map.set(value.r, value); stack.unshift(value.l, value.r); } - if (filter.event == WalkerValueEvent.Leave && filter.fn != null) { + if (eventType == WalkerValueEvent.Leave && filter.fn != null) { const isValid = filter.type == null || value.typ == filter.type || (Array.isArray(filter.type) && filter.type.includes(value.typ)) || (typeof filter.type == 'function' && filter.type(value)); if (isValid) { - option = filter.fn(value, map.get(value), WalkerValueEvent.Leave); + option = filter.fn(value, map.get(value)); // @ts-ignore if (option != null && 'typ' in option) { map.set(option, map.get(value) ?? root); } } } - // @ts-ignore - if (filter.event == WalkerValueEvent.Leave && option !== 'children') { + if (eventType == WalkerValueEvent.Leave && option !== WalkerOptionEnum.Children) { yield { value, parent: map.get(value) ?? 
root, @@ -17774,7 +17795,6 @@ class ComputeCalcExpressionFeature { if (!('chi' in ast)) { return; } - // @ts-ignore for (const node of ast.chi) { if (node.typ != exports.EnumToken.DeclarationNodeType) { continue; @@ -17782,15 +17802,18 @@ class ComputeCalcExpressionFeature { const set = new Set; for (const { value, parent } of walkValues(node.val, node, { event: WalkerValueEvent.Enter, - fn(node, parent, event) { + // @ts-ignore + fn(node, parent) { if (parent != null && + // @ts-ignore parent.typ == exports.EnumToken.DeclarationNodeType && + // @ts-ignore parent.val.length == 1 && node.typ == exports.EnumToken.FunctionTokenType && mathFuncs.includes(node.val) && node.chi.length == 1 && node.chi[0].typ == exports.EnumToken.IdenTokenType) { - return 'ignore'; + return WalkerOptionEnum.Ignore; } if ((node.typ == exports.EnumToken.FunctionTokenType && node.val == 'var') || (!mathFuncs.includes(parent.val) && [exports.EnumToken.ColorTokenType, exports.EnumToken.DeclarationNodeType, exports.EnumToken.RuleNodeType, exports.EnumToken.AtRuleNodeType, exports.EnumToken.StyleSheetNodeType].includes(parent?.typ))) { return null; @@ -17812,7 +17835,7 @@ class ComputeCalcExpressionFeature { // @ts-ignore node[key] = values; } - return 'ignore'; + return WalkerOptionEnum.Ignore; } return null; } diff --git a/dist/index.d.ts b/dist/index.d.ts index 9448bfa3..704c86ee 100644 --- a/dist/index.d.ts +++ b/dist/index.d.ts @@ -153,7 +153,7 @@ declare function walk(node: AstNode, filter?: WalkerFilter): Generator boolean); }, reverse?: boolean): Generator; @@ -1006,13 +1006,13 @@ export declare interface PropertyListOptions { computeShorthand?: boolean; } -export declare type WalkerOption = 'ignore' | 'stop' | 'children' | 'ignore-children' | Token | null; +export declare type WalkerOption = WalkerOptionEnum | Token | null; /** * returned value: - * - 'ignore': ignore this node and its children - * - 'stop': stop walking the tree - * - 'children': walk the children and ignore the node itself - * - 'ignore-children': walk the node and ignore children + * - WalkerOptionEnum.Ignore: ignore this node and its children + * - WalkerOptionEnum.Stop: stop walking the tree + * - WalkerOptionEnum.Children: walk the children and ignore the node itself + * - WalkerOptionEnum.IgnoreChildren: walk the node and ignore children */ export declare type WalkerFilter = (node: AstNode) => WalkerOption; @@ -1023,7 +1023,7 @@ export declare type WalkerFilter = (node: AstNode) => WalkerOption; * - 'children': walk the children and ignore the node itself * - 'ignore-children': walk the node and ignore children */ -export declare type WalkerValueFilter = (node: AstNode | Token, parent: FunctionToken | ParensToken | BinaryExpressionToken, event?: WalkerValueEvent) => WalkerOption | null; +export declare type WalkerValueFilter = (node: AstNode | Token, parent?: FunctionToken | ParensToken | BinaryExpressionToken, event?: WalkerValueEvent) => WalkerOption | null; export declare interface WalkResult { node: AstNode; diff --git a/dist/lib/ast/features/calc.js b/dist/lib/ast/features/calc.js index 803a38e0..2c64ebfe 100644 --- a/dist/lib/ast/features/calc.js +++ b/dist/lib/ast/features/calc.js @@ -1,5 +1,5 @@ import { EnumToken } from '../types.js'; -import { walkValues, WalkerValueEvent } from '../walk.js'; +import { walkValues, WalkerValueEvent, WalkerOptionEnum } from '../walk.js'; import { evaluate } from '../math/expression.js'; import { renderToken } from '../../renderer/render.js'; import { mathFuncs } from '../../syntax/syntax.js'; @@ 
-18,7 +18,6 @@ class ComputeCalcExpressionFeature { if (!('chi' in ast)) { return; } - // @ts-ignore for (const node of ast.chi) { if (node.typ != EnumToken.DeclarationNodeType) { continue; @@ -26,15 +25,18 @@ class ComputeCalcExpressionFeature { const set = new Set; for (const { value, parent } of walkValues(node.val, node, { event: WalkerValueEvent.Enter, - fn(node, parent, event) { + // @ts-ignore + fn(node, parent) { if (parent != null && + // @ts-ignore parent.typ == EnumToken.DeclarationNodeType && + // @ts-ignore parent.val.length == 1 && node.typ == EnumToken.FunctionTokenType && mathFuncs.includes(node.val) && node.chi.length == 1 && node.chi[0].typ == EnumToken.IdenTokenType) { - return 'ignore'; + return WalkerOptionEnum.Ignore; } if ((node.typ == EnumToken.FunctionTokenType && node.val == 'var') || (!mathFuncs.includes(parent.val) && [EnumToken.ColorTokenType, EnumToken.DeclarationNodeType, EnumToken.RuleNodeType, EnumToken.AtRuleNodeType, EnumToken.StyleSheetNodeType].includes(parent?.typ))) { return null; @@ -56,7 +58,7 @@ class ComputeCalcExpressionFeature { // @ts-ignore node[key] = values; } - return 'ignore'; + return WalkerOptionEnum.Ignore; } return null; } diff --git a/dist/lib/ast/math/expression.js b/dist/lib/ast/math/expression.js index a3dd0a36..a1f37d89 100644 --- a/dist/lib/ast/math/expression.js +++ b/dist/lib/ast/math/expression.js @@ -39,14 +39,19 @@ function evaluate(tokens) { return tokens; } if (nodes.length <= 1) { - // @ts-ignore - if (nodes.length == 1 && nodes[0].typ == EnumToken.IdenTokenType && typeof Math[nodes[0].val.toUpperCase()] == 'number') { - return [{ - ...nodes[0], - // @ts-ignore - val: '' + Math[nodes[0].val.toUpperCase()], - typ: EnumToken.NumberTokenType - }]; + if (nodes.length == 1) { + if (nodes[0].typ == EnumToken.BinaryExpressionTokenType) { + return inlineExpression(nodes[0]); + } + // @ts-ignore + if (nodes[0].typ == EnumToken.IdenTokenType && typeof Math[nodes[0].val.toUpperCase()] == 'number') { + return [{ + ...nodes[0], + // @ts-ignore + val: '' + Math[nodes[0].val.toUpperCase()], + typ: EnumToken.NumberTokenType + }]; + } } return nodes; } @@ -427,6 +432,10 @@ function inlineExpression(token) { * @param token */ function evaluateExpression(token) { + // if (token.typ == EnumToken.ParensTokenType) { + // + // return evaluateExpression(buildExpression((token as ParensToken).chi)); + // } if (token.typ != EnumToken.BinaryExpressionTokenType) { return token; } @@ -513,4 +522,4 @@ function factor(tokens, ops) { return tokens; } -export { evaluate, evaluateFunc }; +export { evaluate, evaluateFunc, inlineExpression }; diff --git a/dist/lib/ast/walk.js b/dist/lib/ast/walk.js index fa352b44..376a6ac7 100644 --- a/dist/lib/ast/walk.js +++ b/dist/lib/ast/walk.js @@ -1,5 +1,12 @@ import { EnumToken } from './types.js'; +var WalkerOptionEnum; +(function (WalkerOptionEnum) { + WalkerOptionEnum[WalkerOptionEnum["Ignore"] = 0] = "Ignore"; + WalkerOptionEnum[WalkerOptionEnum["Stop"] = 1] = "Stop"; + WalkerOptionEnum[WalkerOptionEnum["Children"] = 2] = "Children"; + WalkerOptionEnum[WalkerOptionEnum["IgnoreChildren"] = 3] = "IgnoreChildren"; +})(WalkerOptionEnum || (WalkerOptionEnum = {})); var WalkerValueEvent; (function (WalkerValueEvent) { WalkerValueEvent[WalkerValueEvent["Enter"] = 0] = "Enter"; @@ -18,10 +25,10 @@ function* walk(node, filter) { let option = null; if (filter != null) { option = filter(node); - if (option === 'ignore') { + if (option === WalkerOptionEnum.Ignore) { continue; } - if (option === 'stop') { + if (option === 
WalkerOptionEnum.Stop) { break; } } @@ -30,7 +37,7 @@ function* walk(node, filter) { // @ts-ignore yield { node, parent: map.get(node), root }; } - if (option !== 'ignore-children' && 'chi' in node) { + if (option !== WalkerOptionEnum.IgnoreChildren && 'chi' in node) { parents.unshift(...node.chi); for (const child of node.chi.slice()) { map.set(child, node); @@ -62,19 +69,20 @@ function* walkValues(values, root = null, filter, reverse) { event: WalkerValueEvent.Enter }; } + const eventType = filter.event ?? WalkerValueEvent.Enter; while (stack.length > 0) { let value = reverse ? stack.pop() : stack.shift(); let option = null; - if (filter.fn != null && filter.event == WalkerValueEvent.Enter) { + if (filter.fn != null && eventType == WalkerValueEvent.Enter) { const isValid = filter.type == null || value.typ == filter.type || (Array.isArray(filter.type) && filter.type.includes(value.typ)) || (typeof filter.type == 'function' && filter.type(value)); if (isValid) { - option = filter.fn(value, map.get(value) ?? root, WalkerValueEvent.Enter); - if (option === 'ignore') { + option = filter.fn(value, map.get(value) ?? root); + if (option === WalkerOptionEnum.Ignore) { continue; } - if (option === 'stop') { + if (option === WalkerOptionEnum.Stop) { break; } // @ts-ignore @@ -83,8 +91,7 @@ function* walkValues(values, root = null, filter, reverse) { } } } - // @ts-ignore - if (filter.event == WalkerValueEvent.Enter && option !== 'children') { + if (eventType == WalkerValueEvent.Enter && option !== WalkerOptionEnum.Children) { yield { value, parent: map.get(value) ?? root, @@ -94,7 +101,7 @@ function* walkValues(values, root = null, filter, reverse) { root: root ?? null }; } - if (option !== 'ignore-children' && 'chi' in value) { + if (option !== WalkerOptionEnum.IgnoreChildren && 'chi' in value) { const sliced = value.chi.slice(); for (const child of sliced) { map.set(child, value); @@ -107,24 +114,23 @@ function* walkValues(values, root = null, filter, reverse) { } } else if (value.typ == EnumToken.BinaryExpressionTokenType) { - map.set(value.l, map.get(value) ?? root); - map.set(value.r, map.get(value) ?? root); + map.set(value.l, value); + map.set(value.r, value); stack.unshift(value.l, value.r); } - if (filter.event == WalkerValueEvent.Leave && filter.fn != null) { + if (eventType == WalkerValueEvent.Leave && filter.fn != null) { const isValid = filter.type == null || value.typ == filter.type || (Array.isArray(filter.type) && filter.type.includes(value.typ)) || (typeof filter.type == 'function' && filter.type(value)); if (isValid) { - option = filter.fn(value, map.get(value), WalkerValueEvent.Leave); + option = filter.fn(value, map.get(value)); // @ts-ignore if (option != null && 'typ' in option) { map.set(option, map.get(value) ?? root); } } } - // @ts-ignore - if (filter.event == WalkerValueEvent.Leave && option !== 'children') { + if (eventType == WalkerValueEvent.Leave && option !== WalkerOptionEnum.Children) { yield { value, parent: map.get(value) ?? 
root, @@ -138,4 +144,4 @@ function* walkValues(values, root = null, filter, reverse) { } } -export { WalkerValueEvent, walk, walkValues }; +export { WalkerOptionEnum, WalkerValueEvent, walk, walkValues }; diff --git a/dist/lib/parser/parse.js b/dist/lib/parser/parse.js index 365a4010..8aa1e29a 100644 --- a/dist/lib/parser/parse.js +++ b/dist/lib/parser/parse.js @@ -2,7 +2,7 @@ import { webkitPseudoAliasMap, isIdentStart, isIdent, mathFuncs, isColor, isHexC import './utils/config.js'; import { EnumToken, funcLike, ValidationLevel } from '../ast/types.js'; import { minify, definedPropertySettings, combinators } from '../ast/minify.js'; -import { walkValues, walk } from '../ast/walk.js'; +import { walkValues, walk, WalkerOptionEnum } from '../ast/walk.js'; import { expand } from '../ast/expand.js'; import { parseDeclarationNode } from './utils/declaration.js'; import { renderToken } from '../renderer/render.js'; @@ -711,6 +711,21 @@ async function parseNode(results, context, stats, options, errors, src, map, raw }); return null; } + for (const { value: token } of walkValues(value, null, { + fn: (node) => node.typ == EnumToken.FunctionTokenType && node.val == 'calc' ? WalkerOptionEnum.IgnoreChildren : null, + type: EnumToken.FunctionTokenType + })) { + if (token.typ == EnumToken.FunctionTokenType && token.val == 'calc') { + for (const { value: node, parent } of walkValues(token.chi, token)) { + // fix expressions starting with '/' or '*' such as '/4' in (1 + 1)/4 + if (node.typ == EnumToken.LiteralTokenType && node.val.length > 0) { + if (node.val[0] == '/' || node.val[0] == '*') { + parent.chi.splice(parent.chi.indexOf(node), 1, { typ: node.val[0] == '/' ? EnumToken.Div : EnumToken.Mul }, ...parseString(node.val.slice(1))); + } + } + } + } + } const node = { typ: EnumToken.DeclarationNodeType, // @ts-ignore diff --git a/dist/lib/renderer/color/colormix.js b/dist/lib/renderer/color/color-mix.js similarity index 100% rename from dist/lib/renderer/color/colormix.js rename to dist/lib/renderer/color/color-mix.js diff --git a/dist/lib/renderer/color/relativecolor.js b/dist/lib/renderer/color/relativecolor.js index dfe52c54..4f45a839 100644 --- a/dist/lib/renderer/color/relativecolor.js +++ b/dist/lib/renderer/color/relativecolor.js @@ -98,23 +98,19 @@ function computeComponentValue(expr, converted, values) { } else if ([EnumToken.NumberTokenType, EnumToken.PercentageTokenType, EnumToken.AngleTokenType, EnumToken.LengthTokenType].includes(exp.typ)) ; else if (exp.typ == EnumToken.IdenTokenType && exp.val in values) { - // @ts-ignore if (typeof values[exp.val] == 'number') { expr[key] = { typ: EnumToken.NumberTokenType, - // @ts-ignore val: reduceNumber(values[exp.val]) }; } else { - // @ts-ignore expr[key] = values[exp.val]; } } else if (exp.typ == EnumToken.FunctionTokenType && mathFuncs.includes(exp.val)) { for (let { value, parent } of walkValues(exp.chi, exp)) { if (parent == null) { - // @ts-ignore parent = exp; } if (value.typ == EnumToken.PercentageTokenType) { @@ -146,30 +142,19 @@ function computeComponentValue(expr, converted, values) { return expr; } function replaceValue(parent, value, newValue) { - if (parent.typ == EnumToken.BinaryExpressionTokenType) { - if (parent.l == value) { - parent.l = newValue; - } - else { - parent.r = newValue; - } - } - else { - for (let i = 0; i < parent.chi.length; i++) { - if (parent.chi[i] == value) { - parent.chi.splice(i, 1, newValue); - break; - } - if (parent.chi[i].typ == EnumToken.BinaryExpressionTokenType) { - if (parent.chi[i].l == value) { - 
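// Illustrative usage sketch, not part of the patch: a walkValues() filter written
// against the new WalkerOptionEnum members instead of the old 'ignore' / 'stop'
// string literals. Import paths are assumptions based on the dist modules shown in
// the hunks above (dist/lib/ast/walk.js and dist/lib/ast/types.js).
import { walkValues, WalkerOptionEnum, WalkerValueEvent } from './dist/lib/ast/walk.js';
import { EnumToken } from './dist/lib/ast/types.js';

function collectFunctionTokens(tokens) {
    const found = [];
    for (const { value } of walkValues(tokens, null, {
        event: WalkerValueEvent.Enter,
        // skip var() and everything nested inside it
        fn: (node) => node.typ == EnumToken.FunctionTokenType && node.val == 'var'
            ? WalkerOptionEnum.Ignore
            : null
    })) {
        if (value.typ == EnumToken.FunctionTokenType) {
            found.push(value);
        }
    }
    return found;
}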
parent.chi[i].l = newValue; - break; + for (const { value: val, parent: pr } of walkValues([parent])) { + if (val.typ == value.typ && val.val == value.val) { + if (pr.typ == EnumToken.BinaryExpressionTokenType) { + if (pr.l == val) { + pr.l = newValue; } - else if (parent.chi[i].r == value) { - parent.chi[i].r = newValue; - break; + else { + pr.r = newValue; } } + else { + pr.chi.splice(pr.chi.indexOf(val), 1, newValue); + } } } } diff --git a/dist/lib/renderer/color/srgb.js b/dist/lib/renderer/color/srgb.js index 4f7dedde..3fdbcd56 100644 --- a/dist/lib/renderer/color/srgb.js +++ b/dist/lib/renderer/color/srgb.js @@ -217,7 +217,7 @@ function hsl2srgbvalues(h, s, l, a = null) { return values; } function lab2srgb(token) { - const [l, a, b, alpha] = getLABComponents(token); + const [l, a, b, alpha] = getLABComponents(token) ?? []; if (l == null || a == null || b == null) { return null; } diff --git a/dist/lib/renderer/render.js b/dist/lib/renderer/render.js index 32599e33..8ff46997 100644 --- a/dist/lib/renderer/render.js +++ b/dist/lib/renderer/render.js @@ -6,7 +6,7 @@ import { EnumToken, funcLike } from '../ast/types.js'; import '../ast/minify.js'; import '../ast/walk.js'; import { expand } from '../ast/expand.js'; -import { colorMix } from './color/colormix.js'; +import { colorMix } from './color/color-mix.js'; import { parseRelativeColor } from './color/relativecolor.js'; import { SourceMap } from './sourcemap/sourcemap.js'; import { isColor, pseudoElements, mathFuncs, isNewLine } from '../syntax/syntax.js'; @@ -344,7 +344,7 @@ function renderToken(token, options = {}, cache = Object.create(null), reducer, if (value != null) { token = value; } - else { + else if (!token.chi.some(t => t.typ == EnumToken.CommaTokenType)) { token.chi = children.reduce((acc, curr, index) => { if (acc.length > 0) { acc.push({ typ: EnumToken.CommaTokenType }); @@ -369,8 +369,11 @@ function renderToken(token, options = {}, cache = Object.create(null), reducer, } if (token.val == 'color') { if (token.chi[0].typ == EnumToken.IdenTokenType && colorFuncColorSpace.includes(token.chi[0].val.toLowerCase())) { - // @ts-ignore - return reduceHexValue(srgb2hexvalues(...color2srgbvalues(token))); + const values = color2srgbvalues(token); + if (Array.isArray(values) && values.every(t => !Number.isNaN(t))) { + // @ts-ignore + return reduceHexValue(srgb2hexvalues(...values)); + } } } if (token.cal != null) { diff --git a/dist/lib/validation/selector.js b/dist/lib/validation/selector.js index 6f0c26c0..f79a8ba5 100644 --- a/dist/lib/validation/selector.js +++ b/dist/lib/validation/selector.js @@ -35,7 +35,10 @@ function validateSelector(selector, options, root) { } const nestedSelector = isNested > 0; // @ts-ignore - return nestedSelector ? validateRelativeSelectorList(selector, root, { ...(options ?? {}), nestedSelector }) : validateSelectorList(selector, root, { ...(options ?? {}), nestedSelector }); + return nestedSelector ? validateRelativeSelectorList(selector, root, { + ...(options ?? {}), + nestedSelector + }) : validateSelectorList(selector, root, { ...(options ?? 
{}), nestedSelector }); } export { validateSelector }; diff --git a/jsr.json b/jsr.json index 95bc4e18..2db47925 100644 --- a/jsr.json +++ b/jsr.json @@ -1,6 +1,6 @@ { "name": "@tbela99/css-parser", - "version": "1.0.0-alpha6", + "version": "1.0.0-alpha7", "publish": { "include": [ "src", diff --git a/package.json b/package.json index 9a08d1bf..67d67244 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "@tbela99/css-parser", "description": "CSS parser for node and the browser", - "version": "v1.0.0-alpha6", + "version": "v1.0.0-alpha7", "exports": { ".": "./dist/node/index.js", "./node": "./dist/node/index.js", diff --git a/src/@types/walker.d.ts b/src/@types/walker.d.ts index ce431538..48100159 100644 --- a/src/@types/walker.d.ts +++ b/src/@types/walker.d.ts @@ -2,13 +2,13 @@ import {AstNode, AstRuleList} from "./ast.d.ts"; import {BinaryExpressionToken, FunctionToken, ParensToken, Token} from "./token.d.ts"; import {WalkerValueEvent} from '../lib/ast/walk.ts'; -export declare type WalkerOption = 'ignore' | 'stop' | 'children' | 'ignore-children' | Token | null; +export declare type WalkerOption = WalkerOptionEnum | Token | null; /** * returned value: - * - 'ignore': ignore this node and its children - * - 'stop': stop walking the tree - * - 'children': walk the children and ignore the node itself - * - 'ignore-children': walk the node and ignore children + * - WalkerOptionEnum.Ignore: ignore this node and its children + * - WalkerOptionEnum.Stop: stop walking the tree + * - WalkerOptionEnum.Children: walk the children and ignore the node itself + * - WalkerOptionEnum.IgnoreChildren: walk the node and ignore children */ export declare type WalkerFilter = (node: AstNode) => WalkerOption; @@ -19,7 +19,7 @@ export declare type WalkerFilter = (node: AstNode) => WalkerOption; * - 'children': walk the children and ignore the node itself * - 'ignore-children': walk the node and ignore children */ -export declare type WalkerValueFilter = (node: AstNode | Token, parent: FunctionToken | ParensToken | BinaryExpressionToken, event?: WalkerValueEvent) => WalkerOption | null; +export declare type WalkerValueFilter = (node: AstNode | Token, parent?: FunctionToken | ParensToken | BinaryExpressionToken, event?: WalkerValueEvent) => WalkerOption | null; export declare interface WalkResult { node: AstNode; diff --git a/src/lib/ast/features/calc.ts b/src/lib/ast/features/calc.ts index 10f39c8c..0c90b90b 100644 --- a/src/lib/ast/features/calc.ts +++ b/src/lib/ast/features/calc.ts @@ -12,7 +12,7 @@ import type { WalkerOption } from "../../../@types/index.d.ts"; import {EnumToken} from "../types.ts"; -import {WalkerValueEvent, walkValues} from "../walk.ts"; +import {WalkerOptionEnum, WalkerValueEvent, walkValues} from "../walk.ts"; import {evaluate} from "../math/index.ts"; import {renderToken} from "../../renderer/index.ts"; import {mathFuncs} from "../../syntax/index.ts"; @@ -39,8 +39,7 @@ export class ComputeCalcExpressionFeature { return; } - // @ts-ignore - for (const node of ast.chi) { + for (const node of ast.chi! 
as Token[]) { if (node.typ != EnumToken.DeclarationNodeType) { @@ -52,17 +51,20 @@ export class ComputeCalcExpressionFeature { for (const {value, parent} of walkValues((node).val, node, { event: WalkerValueEvent.Enter, - fn(node: AstNode | Token, parent: AstNode | FunctionToken | ParensToken | BinaryExpressionToken, event?: WalkerValueEvent): WalkerOption | null { + // @ts-ignore + fn(node: AstNode | Token, parent: FunctionToken | ParensToken | BinaryExpressionToken): WalkerOption | null { if (parent != null && + // @ts-ignore (parent as AstDeclaration).typ == EnumToken.DeclarationNodeType && + // @ts-ignore (parent as AstDeclaration).val.length == 1 && node.typ == EnumToken.FunctionTokenType && mathFuncs.includes((node as FunctionToken).val) && (node as FunctionToken).chi.length == 1 && (node as FunctionToken).chi[0].typ == EnumToken.IdenTokenType) { - return 'ignore' + return WalkerOptionEnum.Ignore } if ((node.typ == EnumToken.FunctionTokenType && (node as FunctionToken).val == 'var') || (!mathFuncs.includes((parent as FunctionToken).val) && [EnumToken.ColorTokenType, EnumToken.DeclarationNodeType, EnumToken.RuleNodeType, EnumToken.AtRuleNodeType, EnumToken.StyleSheetNodeType].includes(parent?.typ))) { @@ -94,7 +96,7 @@ export class ComputeCalcExpressionFeature { node[key] = values; } - return 'ignore'; + return WalkerOptionEnum.Ignore; } return null; diff --git a/src/lib/ast/math/expression.ts b/src/lib/ast/math/expression.ts index 08d6d5ab..d24c6d62 100644 --- a/src/lib/ast/math/expression.ts +++ b/src/lib/ast/math/expression.ts @@ -77,15 +77,23 @@ export function evaluate(tokens: Token[]): Token[] { if (nodes.length <= 1) { - // @ts-ignore - if (nodes.length == 1 && nodes[0].typ == EnumToken.IdenTokenType && typeof Math[(nodes[0]).val.toUpperCase()] == 'number') { + if (nodes.length == 1) { - return [{ - ...nodes[0], - // @ts-ignore - val: ('' + Math[(nodes[0]).val.toUpperCase()] as number), - typ: EnumToken.NumberTokenType - }]; + if (nodes[0].typ == EnumToken.BinaryExpressionTokenType) { + + return inlineExpression(nodes[0]); + } + + // @ts-ignore + if (nodes[0].typ == EnumToken.IdenTokenType && typeof Math[(nodes[0]).val.toUpperCase()] == 'number') { + + return [{ + ...nodes[0], + // @ts-ignore + val: ('' + Math[(nodes[0]).val.toUpperCase()] as number), + typ: EnumToken.NumberTokenType + }]; + } } return nodes; @@ -256,7 +264,7 @@ function doEvaluate(l: Token, r: Token, op: EnumToken.Add | EnumToken.Sub | Enum // @ts-ignore const val: number | FractionToken = compute(v1, v2, op); - // typ = typeof val == 'number' ? EnumToken.NumberTokenType : EnumToken.FractionTokenType; + const token = { ...(l.typ == EnumToken.NumberTokenType ? 
r : l), typ, @@ -590,7 +598,7 @@ export function evaluateFunc(token: FunctionToken): Token[] { * convert BinaryExpression into an array * @param token */ -function inlineExpression(token: Token): Token[] { +export function inlineExpression(token: Token): Token[] { const result: Token[] = []; diff --git a/src/lib/ast/walk.ts b/src/lib/ast/walk.ts index 4993c49b..e43a35a2 100644 --- a/src/lib/ast/walk.ts +++ b/src/lib/ast/walk.ts @@ -13,6 +13,14 @@ import type { } from "../../@types/index.d.ts"; import {EnumToken} from "./types.ts"; +export enum WalkerOptionEnum { + + Ignore, + Stop, + Children, + IgnoreChildren +} + export enum WalkerValueEvent { Enter, Leave @@ -38,12 +46,12 @@ export function* walk(node: AstNode, filter?: WalkerFilter): Generatormap.get(node), root}; } - if (option !== 'ignore-children' && 'chi' in node) { + if (option !== WalkerOptionEnum.IgnoreChildren && 'chi' in node) { parents.unshift(...(node).chi); @@ -76,7 +84,7 @@ export function* walk(node: AstNode, filter?: WalkerFilter): Generator boolean) }, reverse?: boolean): Generator { @@ -102,12 +110,14 @@ export function* walkValues(values: Token[], root: AstNode | Token | null = null } } + const eventType = filter.event ?? WalkerValueEvent.Enter; + while (stack.length > 0) { let value: Token = reverse ? stack.pop() : stack.shift(); let option: WalkerOption = null; - if (filter.fn != null && filter.event == WalkerValueEvent.Enter) { + if (filter.fn != null && eventType == WalkerValueEvent.Enter) { const isValid: boolean = filter.type == null || value.typ == filter.type || (Array.isArray(filter.type) && filter.type.includes(value.typ)) || @@ -115,14 +125,14 @@ export function* walkValues(values: Token[], root: AstNode | Token | null = null if (isValid) { - option = filter.fn(value, map.get(value) ?? root, WalkerValueEvent.Enter); + option = filter.fn(value, map.get(value) ?? root); - if (option === 'ignore') { + if (option === WalkerOptionEnum.Ignore) { continue; } - if (option === 'stop') { + if (option === WalkerOptionEnum.Stop) { break; } @@ -135,8 +145,7 @@ export function* walkValues(values: Token[], root: AstNode | Token | null = null } } - // @ts-ignore - if (filter.event == WalkerValueEvent.Enter && option !== 'children') { + if (eventType == WalkerValueEvent.Enter && option !== WalkerOptionEnum.Children) { yield { value, @@ -148,7 +157,7 @@ export function* walkValues(values: Token[], root: AstNode | Token | null = null }; } - if (option !== 'ignore-children' && 'chi' in value) { + if (option !== WalkerOptionEnum.IgnoreChildren && 'chi' in value) { const sliced = (value).chi.slice(); @@ -167,13 +176,13 @@ export function* walkValues(values: Token[], root: AstNode | Token | null = null } else if (value.typ == EnumToken.BinaryExpressionTokenType) { - map.set( (value as BinaryExpressionToken).l, map.get(value) ?? root as FunctionToken | ParensToken); - map.set( (value as BinaryExpressionToken).r, map.get(value) ?? 
root as FunctionToken | ParensToken); + map.set( (value as BinaryExpressionToken).l, value); + map.set( (value as BinaryExpressionToken).r, value); stack.unshift( (value as BinaryExpressionToken).l, (value as BinaryExpressionToken).r); } - if (filter.event == WalkerValueEvent.Leave && filter.fn != null) { + if (eventType == WalkerValueEvent.Leave && filter.fn != null) { const isValid: boolean = filter.type == null || value.typ == filter.type || (Array.isArray(filter.type) && filter.type.includes(value.typ)) || @@ -181,7 +190,7 @@ export function* walkValues(values: Token[], root: AstNode | Token | null = null if (isValid) { - option = filter.fn(value, map.get(value), WalkerValueEvent.Leave); + option = filter.fn(value, map.get(value)); // @ts-ignore if (option != null && 'typ' in option) { @@ -191,8 +200,7 @@ export function* walkValues(values: Token[], root: AstNode | Token | null = null } } - // @ts-ignore - if (filter.event == WalkerValueEvent.Leave && option !== 'children') { + if (eventType == WalkerValueEvent.Leave && option !== WalkerOptionEnum.Children) { yield { value, diff --git a/src/lib/parser/parse.ts b/src/lib/parser/parse.ts index 37f73383..e49e11de 100644 --- a/src/lib/parser/parse.ts +++ b/src/lib/parser/parse.ts @@ -29,6 +29,7 @@ import { minify, ValidationLevel, walk, + WalkerOptionEnum, walkValues } from "../ast/index.ts"; import {tokenize} from "./tokenize.ts"; @@ -1072,6 +1073,28 @@ async function parseNode(results: TokenizeResult[], context: AstRuleList | AstIn return null; } + for (const {value: token} of walkValues(value as Token[], null, { + + fn: (node: AstNode | Token) => node.typ == EnumToken.FunctionTokenType && (node as FunctionToken).val == 'calc' ? WalkerOptionEnum.IgnoreChildren : null, + type: EnumToken.FunctionTokenType + })) { + + if (token.typ == EnumToken.FunctionTokenType && (token as FunctionToken).val == 'calc') { + + for (const {value: node, parent} of walkValues((token as FunctionToken).chi, token)) { + + // fix expressions starting with '/' or '*' such as '/4' in (1 + 1)/4 + if (node.typ == EnumToken.LiteralTokenType && (node as LiteralToken).val.length > 0) { + + if ((node as LiteralToken).val[0] == '/' || (node as LiteralToken).val[0] == '*') { + + (parent as FunctionToken).chi.splice((parent as FunctionToken).chi.indexOf(node), 1,{typ: (node as LiteralToken).val[0] == '/' ? 
EnumToken.Div : EnumToken.Mul}, ...parseString((node as LiteralToken).val.slice(1))); + } + } + } + } + } + const node: AstDeclaration = { typ: EnumToken.DeclarationNodeType, // @ts-ignore diff --git a/src/lib/renderer/color/colormix.ts b/src/lib/renderer/color/color-mix.ts similarity index 100% rename from src/lib/renderer/color/colormix.ts rename to src/lib/renderer/color/color-mix.ts diff --git a/src/lib/renderer/color/index.ts b/src/lib/renderer/color/index.ts index 9f3e7736..12434ced 100644 --- a/src/lib/renderer/color/index.ts +++ b/src/lib/renderer/color/index.ts @@ -3,7 +3,7 @@ export * from './rgb.ts'; export * from './hex.ts'; export * from './hwb.ts'; export * from './hsl.ts'; -export * from './colormix.ts'; +export * from './color-mix.ts'; export * from './oklab.ts'; export * from './oklch.ts'; export * from './srgb.ts'; diff --git a/src/lib/renderer/color/relativecolor.ts b/src/lib/renderer/color/relativecolor.ts index 5be1dcdb..86e9e31c 100644 --- a/src/lib/renderer/color/relativecolor.ts +++ b/src/lib/renderer/color/relativecolor.ts @@ -1,5 +1,4 @@ import type { - AngleToken, BinaryExpressionToken, ColorToken, FunctionToken, @@ -121,7 +120,7 @@ function computeComponentValue(expr: Record, converte // @ts-ignore for (const k of walkValues([object.h])) { - if (k.value.typ == EnumToken.AngleTokenType && (k.value as AngleToken).unit == 'deg') { + if (k.value.typ == EnumToken.AngleTokenType && k.value.unit == 'deg') { // @ts-ignore k.value.typ = EnumToken.NumberTokenType; @@ -149,32 +148,28 @@ function computeComponentValue(expr: Record, converte expr[key] = values[key]; } } + } else if ([EnumToken.NumberTokenType, EnumToken.PercentageTokenType, EnumToken.AngleTokenType, EnumToken.LengthTokenType].includes(exp.typ)) { - // expr[key] = exp; - // @ts-ignore } else if (exp.typ == EnumToken.IdenTokenType && exp.val in values) { - // @ts-ignore if (typeof values[exp.val] == 'number') { expr[key] = { typ: EnumToken.NumberTokenType, - // @ts-ignore val: reduceNumber(values[exp.val]) }; } else { - // @ts-ignore expr[key] = values[exp.val]; } + } else if (exp.typ == EnumToken.FunctionTokenType && mathFuncs.includes((exp as FunctionToken).val)) { for (let {value, parent} of walkValues((exp as FunctionToken).chi, exp)) { if (parent == null) { - // @ts-ignore parent = exp; } @@ -215,37 +210,28 @@ function computeComponentValue(expr: Record, converte } function replaceValue(parent: FunctionToken | ParensToken | BinaryExpressionToken, value: Token, newValue: Token) { - if (parent.typ == EnumToken.BinaryExpressionTokenType) { - - if ((parent as BinaryExpressionToken).l == value) { - (parent as BinaryExpressionToken).l = newValue; - } else { + for (const {value: val, parent: pr} of walkValues([parent])) { - (parent as BinaryExpressionToken).r = newValue; - } - } else { + if (val.typ == value.typ && (val as IdentToken).val == (value as IdentToken).val) { - for (let i = 0; i < (parent as FunctionToken | ParensToken).chi.length; i++) { + if (pr!.typ == EnumToken.BinaryExpressionTokenType) { - if ((parent as FunctionToken | ParensToken).chi[i] == value) { + if ((pr as BinaryExpressionToken).l == val) { - (parent as FunctionToken | ParensToken).chi.splice(i, 1, newValue); - break; - } + (pr as BinaryExpressionToken).l = newValue + } - if ((parent as FunctionToken | ParensToken).chi[i].typ == EnumToken.BinaryExpressionTokenType) { + else { - if (((parent as FunctionToken | ParensToken).chi[i] as BinaryExpressionToken).l == value) { + (pr as BinaryExpressionToken).r = newValue; + } + } - ((parent as 
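// Illustrative sketch, not part of the patch: the parse-time fix above splices a
// literal that starts with '/' or '*' (for example '/4' in calc((1 + 1)/4)) into an
// operator token followed by the re-parsed remainder, so the math feature can later
// evaluate the expression. EnumToken and parseString are the names used in the hunk;
// this standalone helper itself is hypothetical.
function splitLeadingOperator(parent, literal) {
    const op = literal.val[0] == '/' ? EnumToken.Div : EnumToken.Mul;
    parent.chi.splice(parent.chi.indexOf(literal), 1, { typ: op }, ...parseString(literal.val.slice(1)));
}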
FunctionToken | ParensToken).chi[i] as BinaryExpressionToken).l = newValue; - break; - } else if (((parent as FunctionToken | ParensToken).chi[i] as BinaryExpressionToken).r == value) { + else { - ((parent as FunctionToken | ParensToken).chi[i] as BinaryExpressionToken).r = newValue; - break - } + (pr as FunctionToken | ParensToken).chi.splice((pr as FunctionToken | ParensToken).chi.indexOf(val), 1, newValue); } } } -} +} \ No newline at end of file diff --git a/src/lib/renderer/color/srgb.ts b/src/lib/renderer/color/srgb.ts index ae235b0d..4a1badaa 100644 --- a/src/lib/renderer/color/srgb.ts +++ b/src/lib/renderer/color/srgb.ts @@ -313,7 +313,7 @@ export function hsl2srgbvalues(h: number, s: number, l: number, a: number | null export function lab2srgb(token: ColorToken): number[] | null{ - const [l, a, b, alpha] = getLABComponents(token); + const [l, a, b, alpha] = getLABComponents(token) ?? []; if (l == null || a == null || b == null) { diff --git a/src/lib/renderer/render.ts b/src/lib/renderer/render.ts index fcb18e03..1fa6312f 100644 --- a/src/lib/renderer/render.ts +++ b/src/lib/renderer/render.ts @@ -566,7 +566,7 @@ export function renderToken(token: Token, options: RenderOptions = {}, cache: { if (value != null) { token = value; - } else { + } else if (!(token as ColorToken).chi!.some(t => t.typ == EnumToken.CommaTokenType)) { (token as ColorToken).chi = children.reduce((acc, curr, index) => { @@ -605,8 +605,13 @@ export function renderToken(token: Token, options: RenderOptions = {}, cache: { if ((((token as ColorToken).chi)[0]).typ == EnumToken.IdenTokenType && colorFuncColorSpace.includes((((token as ColorToken).chi)[0] as IdentToken).val.toLowerCase())) { - // @ts-ignore - return reduceHexValue(srgb2hexvalues(...color2srgbvalues(token as ColorToken))); + const values = color2srgbvalues(token as ColorToken) as number[]; + + if (Array.isArray(values) && values.every(t => !Number.isNaN(t))) { + + // @ts-ignore + return reduceHexValue(srgb2hexvalues(...values)); + } } } diff --git a/test/specs/code/color.js b/test/specs/code/color.js index 1ecf0ffa..ba8fa72f 100644 --- a/test/specs/code/color.js +++ b/test/specs/code/color.js @@ -1294,6 +1294,53 @@ color: lch(from slateblue calc(l * sin(pi / 4)) c h); color: color-mix(in oklch,currentcolor 35%,#0000); background-color: color-mix(in oklab,oklch(var(--btn-color,var(--b2))/var(--tw-bg-opacity,1)) 90%,#000); transform: rotateY(180deg) +}`)); + }); + + it('current color #131', function () { + return transform(` + + .now { + color: color(from green srgb r g calc((r + g + b)/4) / 0.5); +} +`, {beautify: true}).then(result => expect(result.code).equals(`.now { + color: #00802080 +}`)); + }); + + it('current color #132', function () { + return transform(` + + .from { + --top: transform: translate3d(0,0,0); + transform: translate(0, 0); + background-color:color(from green srgb r g calc((r + g + b)/4) / 0.5); /* #7fb77f */ +} + .to { + --top: transform: translate3d(0,0,0); + transform: scaleX(.5)scaleY(1)scaleZ(1.7)rotate3d(1,1,1,67deg); + color: color-mix(in lab, oklch(from currentColor l c calc(h/2) ) 80%, #f00 50%); + border-color: color-mix(in lab, oklab(from currentColor l a calc(b/2) ) 80%, #f00 50%); + background-color:color(from currentcolor srgb r calc((g + b)/2) b / 0.5); + outline-color:color(from currentColor a98-rgb r calc((g + b)/2) b / 0.8); + accent-color: color-mix(in lab, oklab(from currentColor l a calc(b/2) ) 80%, #f00 50%); + accent-color: color-mix(in lab, lab(from currentColor l a calc(b/2) ) 80%, #f00 50%); +} + +`, 
{beautify: true}).then(result => expect(result.code).equals(`.from,.to { + --top: transform: translate3d(0,0,0) +} +.to { + transform: scale3d(.5,1,1.7)rotate3d(1,1,1,67deg); + color: color-mix(in lab,oklch(from currentcolor l c calc(h/2)) 80%,red 50%); + border-color: color-mix(in lab,oklab(from currentcolor l a calc(b/2)) 80%,red 50%); + background-color: color(from currentcolor srgb r calc((g + b)/2) b/.5); + outline-color: color(from currentcolor a98-rgb r calc((g + b)/2) b/.8); + accent-color: color-mix(in lab,lab(from currentcolor l a calc(b/2)) 80%,red 50%) +} +.from { + transform: none; + background-color: #00802080 }`)); }); } \ No newline at end of file From 21036c72d66725507d8040883d0eaf88e338b626 Mon Sep 17 00:00:00 2001 From: Thierry Bela Nanga Date: Sat, 26 Apr 2025 09:23:39 -0400 Subject: [PATCH 4/5] remove comments #78 --- dist/index-umd-web.js | 5 ----- dist/index.cjs | 5 ----- dist/lib/ast/math/expression.js | 5 ----- 3 files changed, 15 deletions(-) diff --git a/dist/index-umd-web.js b/dist/index-umd-web.js index 0bf0a9ac..dc54a54d 100644 --- a/dist/index-umd-web.js +++ b/dist/index-umd-web.js @@ -2892,7 +2892,6 @@ } // @ts-ignore const val = compute$1(v1, v2, op); - // typ = typeof val == 'number' ? EnumToken.NumberTokenType : EnumToken.FractionTokenType; const token = { ...(l.typ == exports.EnumToken.NumberTokenType ? r : l), typ, @@ -3152,10 +3151,6 @@ * @param token */ function evaluateExpression(token) { - // if (token.typ == EnumToken.ParensTokenType) { - // - // return evaluateExpression(buildExpression((token as ParensToken).chi)); - // } if (token.typ != exports.EnumToken.BinaryExpressionTokenType) { return token; } diff --git a/dist/index.cjs b/dist/index.cjs index 63f292eb..658ddd96 100644 --- a/dist/index.cjs +++ b/dist/index.cjs @@ -2891,7 +2891,6 @@ function doEvaluate(l, r, op) { } // @ts-ignore const val = compute$1(v1, v2, op); - // typ = typeof val == 'number' ? EnumToken.NumberTokenType : EnumToken.FractionTokenType; const token = { ...(l.typ == exports.EnumToken.NumberTokenType ? r : l), typ, @@ -3151,10 +3150,6 @@ function inlineExpression(token) { * @param token */ function evaluateExpression(token) { - // if (token.typ == EnumToken.ParensTokenType) { - // - // return evaluateExpression(buildExpression((token as ParensToken).chi)); - // } if (token.typ != exports.EnumToken.BinaryExpressionTokenType) { return token; } diff --git a/dist/lib/ast/math/expression.js b/dist/lib/ast/math/expression.js index a1f37d89..520dface 100644 --- a/dist/lib/ast/math/expression.js +++ b/dist/lib/ast/math/expression.js @@ -172,7 +172,6 @@ function doEvaluate(l, r, op) { } // @ts-ignore const val = compute(v1, v2, op); - // typ = typeof val == 'number' ? EnumToken.NumberTokenType : EnumToken.FractionTokenType; const token = { ...(l.typ == EnumToken.NumberTokenType ? 
r : l), typ, @@ -432,10 +431,6 @@ function inlineExpression(token) { * @param token */ function evaluateExpression(token) { - // if (token.typ == EnumToken.ParensTokenType) { - // - // return evaluateExpression(buildExpression((token as ParensToken).chi)); - // } if (token.typ != EnumToken.BinaryExpressionTokenType) { return token; } From 2240f2153d8fdddf3c3be1ccdc5689217bb4b9fc Mon Sep 17 00:00:00 2001 From: Thierry Bela Nanga Date: Sat, 26 Apr 2025 09:43:59 -0400 Subject: [PATCH 5/5] update type import #78 --- src/@types/walker.d.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/@types/walker.d.ts b/src/@types/walker.d.ts index 48100159..01ea7057 100644 --- a/src/@types/walker.d.ts +++ b/src/@types/walker.d.ts @@ -1,6 +1,6 @@ import {AstNode, AstRuleList} from "./ast.d.ts"; import {BinaryExpressionToken, FunctionToken, ParensToken, Token} from "./token.d.ts"; -import {WalkerValueEvent} from '../lib/ast/walk.ts'; +import {WalkerOptionEnum, WalkerValueEvent} from '../lib/ast/walk.ts'; export declare type WalkerOption = WalkerOptionEnum | Token | null; /**
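// Illustrative sketch, not part of the patch series: the shape of a WalkerValueFilter
// after this change — the parent argument is now optional and the filter returns a
// WalkerOptionEnum member, a replacement Token, or null instead of a string literal.
// WalkerValueFilter, WalkerOptionEnum and EnumToken are assumed to be imported from
// the modules shown above (src/@types/walker.d.ts and src/lib/ast/walk.ts).
const skipBinaryChildren: WalkerValueFilter = (node, parent) => {

    if (parent != null && parent.typ == EnumToken.BinaryExpressionTokenType) {

        return WalkerOptionEnum.IgnoreChildren;
    }

    return null;
};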